1. Packages
  2. Scaleway
  3. API Docs
  4. InferenceDeployment
Scaleway v1.26.0 published on Friday, Mar 28, 2025 by pulumiverse

scaleway.InferenceDeployment

Explore with Pulumi AI

Deprecated: scaleway.index/inferencedeployment.InferenceDeployment has been deprecated in favor of scaleway.inference/deployment.Deployment

Creates and manages Scaleway Managed Inference deployments. For more information, see the API documentation.

Example Usage

Basic

import * as pulumi from "@pulumi/pulumi";
import * as scaleway from "@pulumiverse/scaleway";

const deployment = new scaleway.inference.Deployment("deployment", {
    name: "tf-inference-deployment",
    nodeType: "L4",
    modelName: "meta/llama-3.1-8b-instruct:fp8",
    publicEndpoint: {
        isEnabled: true,
    },
    acceptEula: true,
});
Copy
import pulumi
import pulumiverse_scaleway as scaleway

deployment = scaleway.inference.Deployment("deployment",
    name="tf-inference-deployment",
    node_type="L4",
    model_name="meta/llama-3.1-8b-instruct:fp8",
    public_endpoint={
        "is_enabled": True,
    },
    accept_eula=True)
Copy
package main

import (
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
	"github.com/pulumiverse/pulumi-scaleway/sdk/go/scaleway/inference"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := inference.NewDeployment(ctx, "deployment", &inference.DeploymentArgs{
			Name:      pulumi.String("tf-inference-deployment"),
			NodeType:  pulumi.String("L4"),
			ModelName: pulumi.String("meta/llama-3.1-8b-instruct:fp8"),
			PublicEndpoint: &inference.DeploymentPublicEndpointArgs{
				IsEnabled: pulumi.Bool(true),
			},
			AcceptEula: pulumi.Bool(true),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
Copy
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Scaleway = Pulumiverse.Scaleway;

return await Deployment.RunAsync(() => 
{
    var deployment = new Scaleway.Inference.Deployment("deployment", new()
    {
        Name = "tf-inference-deployment",
        NodeType = "L4",
        ModelName = "meta/llama-3.1-8b-instruct:fp8",
        PublicEndpoint = new Scaleway.Inference.Inputs.DeploymentPublicEndpointArgs
        {
            IsEnabled = true,
        },
        AcceptEula = true,
    });

});
Copy
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.scaleway.inference.Deployment;
import com.pulumi.scaleway.inference.DeploymentArgs;
import com.pulumi.scaleway.inference.inputs.DeploymentPublicEndpointArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var deployment = new Deployment("deployment", DeploymentArgs.builder()
            .name("tf-inference-deployment")
            .nodeType("L4")
            .modelName("meta/llama-3.1-8b-instruct:fp8")
            .publicEndpoint(DeploymentPublicEndpointArgs.builder()
                .isEnabled(true)
                .build())
            .acceptEula(true)
            .build());

    }
}
Copy
resources:
  deployment:
    type: scaleway:inference:Deployment
    properties:
      name: tf-inference-deployment
      nodeType: L4
      modelName: meta/llama-3.1-8b-instruct:fp8
      publicEndpoint:
        isEnabled: true
      acceptEula: true
Copy

Create InferenceDeployment Resource

Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

Constructor syntax

new InferenceDeployment(name: string, args: InferenceDeploymentArgs, opts?: CustomResourceOptions);
@overload
def InferenceDeployment(resource_name: str,
                        args: InferenceDeploymentArgs,
                        opts: Optional[ResourceOptions] = None)

@overload
def InferenceDeployment(resource_name: str,
                        opts: Optional[ResourceOptions] = None,
                        accept_eula: Optional[bool] = None,
                        max_size: Optional[int] = None,
                        min_size: Optional[int] = None,
                        model_name: Optional[str] = None,
                        name: Optional[str] = None,
                        node_type: Optional[str] = None,
                        private_endpoint: Optional[InferenceDeploymentPrivateEndpointArgs] = None,
                        project_id: Optional[str] = None,
                        public_endpoint: Optional[InferenceDeploymentPublicEndpointArgs] = None,
                        region: Optional[str] = None,
                        tags: Optional[Sequence[str]] = None)
func NewInferenceDeployment(ctx *Context, name string, args InferenceDeploymentArgs, opts ...ResourceOption) (*InferenceDeployment, error)
public InferenceDeployment(string name, InferenceDeploymentArgs args, CustomResourceOptions? opts = null)
public InferenceDeployment(String name, InferenceDeploymentArgs args)
public InferenceDeployment(String name, InferenceDeploymentArgs args, CustomResourceOptions options)
type: scaleway:InferenceDeployment
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

Parameters

name This property is required. string
The unique name of the resource.
args This property is required. InferenceDeploymentArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name This property is required. str
The unique name of the resource.
args This property is required. InferenceDeploymentArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name This property is required. string
The unique name of the resource.
args This property is required. InferenceDeploymentArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name This property is required. string
The unique name of the resource.
args This property is required. InferenceDeploymentArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name This property is required. String
The unique name of the resource.
args This property is required. InferenceDeploymentArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

InferenceDeployment Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

The InferenceDeployment resource accepts the following input properties:

ModelName This property is required. string
The model name to use for the deployment. Model names can be found in Console or using Scaleway's CLI (scw inference model list)
NodeType This property is required. string
The node type to use for the deployment. Node types can be found using Scaleway's CLI (scw inference node-type list)
AcceptEula bool
Some models (e.g. Meta Llama) require end-user license agreements. Set to true to accept.
MaxSize int
The maximum size of the pool.
MinSize int
The minimum size of the pool.
Name string
The deployment name.
PrivateEndpoint Pulumiverse.Scaleway.Inputs.InferenceDeploymentPrivateEndpoint
Configuration of the deployment's private endpoint.
ProjectId Changes to this property will trigger replacement. string
(Defaults to provider project_id) The ID of the project the deployment is associated with.
PublicEndpoint Pulumiverse.Scaleway.Inputs.InferenceDeploymentPublicEndpoint
Configuration of the deployment's public endpoint.
Region Changes to this property will trigger replacement. string
(Defaults to provider region) The region in which the deployment is created.
Tags List<string>
The tags associated with the deployment.
ModelName This property is required. string
The model name to use for the deployment. Model names can be found in Console or using Scaleway's CLI (scw inference model list)
NodeType This property is required. string
The node type to use for the deployment. Node types can be found using Scaleway's CLI (scw inference node-type list)
AcceptEula bool
Some models (e.g. Meta Llama) require end-user license agreements. Set to true to accept.
MaxSize int
The maximum size of the pool.
MinSize int
The minimum size of the pool.
Name string
The deployment name.
PrivateEndpoint InferenceDeploymentPrivateEndpointArgs
Configuration of the deployment's private endpoint.
ProjectId Changes to this property will trigger replacement. string
(Defaults to provider project_id) The ID of the project the deployment is associated with.
PublicEndpoint InferenceDeploymentPublicEndpointArgs
Configuration of the deployment's public endpoint.
Region Changes to this property will trigger replacement. string
(Defaults to provider region) The region in which the deployment is created.
Tags []string
The tags associated with the deployment.
modelName This property is required. String
The model name to use for the deployment. Model names can be found in Console or using Scaleway's CLI (scw inference model list)
nodeType This property is required. String
The node type to use for the deployment. Node types can be found using Scaleway's CLI (scw inference node-type list)
acceptEula Boolean
Some models (e.g. Meta Llama) require end-user license agreements. Set to true to accept.
maxSize Integer
The maximum size of the pool.
minSize Integer
The minimum size of the pool.
name String
The deployment name.
privateEndpoint InferenceDeploymentPrivateEndpoint
Configuration of the deployment's private endpoint.
projectId Changes to this property will trigger replacement. String
(Defaults to provider project_id) The ID of the project the deployment is associated with.
publicEndpoint InferenceDeploymentPublicEndpoint
Configuration of the deployment's public endpoint.
region Changes to this property will trigger replacement. String
(Defaults to provider region) The region in which the deployment is created.
tags List<String>
The tags associated with the deployment.
modelName This property is required. string
The model name to use for the deployment. Model names can be found in Console or using Scaleway's CLI (scw inference model list)
nodeType This property is required. string
The node type to use for the deployment. Node types can be found using Scaleway's CLI (scw inference node-type list)
acceptEula boolean
Some models (e.g. Meta Llama) require end-user license agreements. Set to true to accept.
maxSize number
The maximum size of the pool.
minSize number
The minimum size of the pool.
name string
The deployment name.
privateEndpoint InferenceDeploymentPrivateEndpoint
Configuration of the deployment's private endpoint.
projectId Changes to this property will trigger replacement. string
(Defaults to provider project_id) The ID of the project the deployment is associated with.
publicEndpoint InferenceDeploymentPublicEndpoint
Configuration of the deployment's public endpoint.
region Changes to this property will trigger replacement. string
(Defaults to provider region) The region in which the deployment is created.
tags string[]
The tags associated with the deployment.
model_name This property is required. str
The model name to use for the deployment. Model names can be found in Console or using Scaleway's CLI (scw inference model list)
node_type This property is required. str
The node type to use for the deployment. Node types can be found using Scaleway's CLI (scw inference node-type list)
accept_eula bool
Some models (e.g. Meta Llama) require end-user license agreements. Set to true to accept.
max_size int
The maximum size of the pool.
min_size int
The minimum size of the pool.
name str
The deployment name.
private_endpoint InferenceDeploymentPrivateEndpointArgs
Configuration of the deployment's private endpoint.
project_id Changes to this property will trigger replacement. str
(Defaults to provider project_id) The ID of the project the deployment is associated with.
public_endpoint InferenceDeploymentPublicEndpointArgs
Configuration of the deployment's public endpoint.
region Changes to this property will trigger replacement. str
(Defaults to provider region) The region in which the deployment is created.
tags Sequence[str]
The tags associated with the deployment.
modelName This property is required. String
The model name to use for the deployment. Model names can be found in Console or using Scaleway's CLI (scw inference model list)
nodeType This property is required. String
The node type to use for the deployment. Node types can be found using Scaleway's CLI (scw inference node-type list)
acceptEula Boolean
Some models (e.g. Meta Llama) require end-user license agreements. Set to true to accept.
maxSize Number
The maximum size of the pool.
minSize Number
The minimum size of the pool.
name String
The deployment name.
privateEndpoint Property Map
Configuration of the deployment's private endpoint.
projectId Changes to this property will trigger replacement. String
(Defaults to provider project_id) The ID of the project the deployment is associated with.
publicEndpoint Property Map
Configuration of the deployment's public endpoint.
region Changes to this property will trigger replacement. String
(Defaults to provider region) The region in which the deployment is created.
tags List<String>
The tags associated with the deployment.

Outputs

All input properties are implicitly available as output properties. Additionally, the InferenceDeployment resource produces the following output properties:

CreatedAt string
The date and time of the creation of the deployment.
Id string
The provider-assigned unique ID for this managed resource.
ModelId string
The model id used for the deployment.
Size int
The size of the pool.
Status string
The status of the deployment.
UpdatedAt string
The date and time of the last update of the deployment.
CreatedAt string
The date and time of the creation of the deployment.
Id string
The provider-assigned unique ID for this managed resource.
ModelId string
The model id used for the deployment.
Size int
The size of the pool.
Status string
The status of the deployment.
UpdatedAt string
The date and time of the last update of the deployment.
createdAt String
The date and time of the creation of the deployment.
id String
The provider-assigned unique ID for this managed resource.
modelId String
The model id used for the deployment.
size Integer
The size of the pool.
status String
The status of the deployment.
updatedAt String
The date and time of the last update of the deployment.
createdAt string
The date and time of the creation of the deployment.
id string
The provider-assigned unique ID for this managed resource.
modelId string
The model id used for the deployment.
size number
The size of the pool.
status string
The status of the deployment.
updatedAt string
The date and time of the last update of the deployment.
created_at str
The date and time of the creation of the deployment.
id str
The provider-assigned unique ID for this managed resource.
model_id str
The model id used for the deployment.
size int
The size of the pool.
status str
The status of the deployment.
updated_at str
The date and time of the last update of the deployment.
createdAt String
The date and time of the creation of the deployment.
id String
The provider-assigned unique ID for this managed resource.
modelId String
The model id used for the deployment.
size Number
The size of the pool.
status String
The status of the deployment.
updatedAt String
The date and time of the last update of the deployment.

Look up Existing InferenceDeployment Resource

Get an existing InferenceDeployment resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: InferenceDeploymentState, opts?: CustomResourceOptions): InferenceDeployment
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        accept_eula: Optional[bool] = None,
        created_at: Optional[str] = None,
        max_size: Optional[int] = None,
        min_size: Optional[int] = None,
        model_id: Optional[str] = None,
        model_name: Optional[str] = None,
        name: Optional[str] = None,
        node_type: Optional[str] = None,
        private_endpoint: Optional[InferenceDeploymentPrivateEndpointArgs] = None,
        project_id: Optional[str] = None,
        public_endpoint: Optional[InferenceDeploymentPublicEndpointArgs] = None,
        region: Optional[str] = None,
        size: Optional[int] = None,
        status: Optional[str] = None,
        tags: Optional[Sequence[str]] = None,
        updated_at: Optional[str] = None) -> InferenceDeployment
func GetInferenceDeployment(ctx *Context, name string, id IDInput, state *InferenceDeploymentState, opts ...ResourceOption) (*InferenceDeployment, error)
public static InferenceDeployment Get(string name, Input<string> id, InferenceDeploymentState? state, CustomResourceOptions? opts = null)
public static InferenceDeployment get(String name, Output<String> id, InferenceDeploymentState state, CustomResourceOptions options)
resources:
  _:
    type: scaleway:InferenceDeployment
    get:
      id: ${id}
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
AcceptEula bool
Some models (e.g. Meta Llama) require end-user license agreements. Set to true to accept.
CreatedAt string
The date and time of the creation of the deployment.
MaxSize int
The maximum size of the pool.
MinSize int
The minimum size of the pool.
ModelId string
The model id used for the deployment.
ModelName string
The model name to use for the deployment. Model names can be found in Console or using Scaleway's CLI (scw inference model list)
Name string
The deployment name.
NodeType string
The node type to use for the deployment. Node types can be found using Scaleway's CLI (scw inference node-type list)
PrivateEndpoint Pulumiverse.Scaleway.Inputs.InferenceDeploymentPrivateEndpoint
Configuration of the deployment's private endpoint.
ProjectId Changes to this property will trigger replacement. string
(Defaults to provider project_id) The ID of the project the deployment is associated with.
PublicEndpoint Pulumiverse.Scaleway.Inputs.InferenceDeploymentPublicEndpoint
Configuration of the deployment's public endpoint.
Region Changes to this property will trigger replacement. string
(Defaults to provider region) The region in which the deployment is created.
Size int
The size of the pool.
Status string
The status of the deployment.
Tags List<string>
The tags associated with the deployment.
UpdatedAt string
The date and time of the last update of the deployment.
AcceptEula bool
Some models (e.g. Meta Llama) require end-user license agreements. Set to true to accept.
CreatedAt string
The date and time of the creation of the deployment.
MaxSize int
The maximum size of the pool.
MinSize int
The minimum size of the pool.
ModelId string
The model id used for the deployment.
ModelName string
The model name to use for the deployment. Model names can be found in Console or using Scaleway's CLI (scw inference model list)
Name string
The deployment name.
NodeType string
The node type to use for the deployment. Node types can be found using Scaleway's CLI (scw inference node-type list)
PrivateEndpoint InferenceDeploymentPrivateEndpointArgs
Configuration of the deployment's private endpoint.
ProjectId Changes to this property will trigger replacement. string
(Defaults to provider project_id) The ID of the project the deployment is associated with.
PublicEndpoint InferenceDeploymentPublicEndpointArgs
Configuration of the deployment's public endpoint.
Region Changes to this property will trigger replacement. string
(Defaults to provider region) The region in which the deployment is created.
Size int
The size of the pool.
Status string
The status of the deployment.
Tags []string
The tags associated with the deployment.
UpdatedAt string
The date and time of the last update of the deployment.
acceptEula Boolean
Some models (e.g. Meta Llama) require end-user license agreements. Set to true to accept.
createdAt String
The date and time of the creation of the deployment.
maxSize Integer
The maximum size of the pool.
minSize Integer
The minimum size of the pool.
modelId String
The model id used for the deployment.
modelName String
The model name to use for the deployment. Model names can be found in Console or using Scaleway's CLI (scw inference model list)
name String
The deployment name.
nodeType String
The node type to use for the deployment. Node types can be found using Scaleway's CLI (scw inference node-type list)
privateEndpoint InferenceDeploymentPrivateEndpoint
Configuration of the deployment's private endpoint.
projectId Changes to this property will trigger replacement. String
(Defaults to provider project_id) The ID of the project the deployment is associated with.
publicEndpoint InferenceDeploymentPublicEndpoint
Configuration of the deployment's public endpoint.
region Changes to this property will trigger replacement. String
(Defaults to provider region) The region in which the deployment is created.
size Integer
The size of the pool.
status String
The status of the deployment.
tags List<String>
The tags associated with the deployment.
updatedAt String
The date and time of the last update of the deployment.
acceptEula boolean
Some models (e.g. Meta Llama) require end-user license agreements. Set to true to accept.
createdAt string
The date and time of the creation of the deployment.
maxSize number
The maximum size of the pool.
minSize number
The minimum size of the pool.
modelId string
The model id used for the deployment.
modelName string
The model name to use for the deployment. Model names can be found in Console or using Scaleway's CLI (scw inference model list)
name string
The deployment name.
nodeType string
The node type to use for the deployment. Node types can be found using Scaleway's CLI (scw inference node-type list)
privateEndpoint InferenceDeploymentPrivateEndpoint
Configuration of the deployment's private endpoint.
projectId Changes to this property will trigger replacement. string
(Defaults to provider project_id) The ID of the project the deployment is associated with.
publicEndpoint InferenceDeploymentPublicEndpoint
Configuration of the deployment's public endpoint.
region Changes to this property will trigger replacement. string
(Defaults to provider region) The region in which the deployment is created.
size number
The size of the pool.
status string
The status of the deployment.
tags string[]
The tags associated with the deployment.
updatedAt string
The date and time of the last update of the deployment.
accept_eula bool
Some models (e.g. Meta Llama) require end-user license agreements. Set to true to accept.
created_at str
The date and time of the creation of the deployment.
max_size int
The maximum size of the pool.
min_size int
The minimum size of the pool.
model_id str
The model id used for the deployment.
model_name str
The model name to use for the deployment. Model names can be found in Console or using Scaleway's CLI (scw inference model list)
name str
The deployment name.
node_type str
The node type to use for the deployment. Node types can be found using Scaleway's CLI (scw inference node-type list)
private_endpoint InferenceDeploymentPrivateEndpointArgs
Configuration of the deployment's private endpoint.
project_id Changes to this property will trigger replacement. str
(Defaults to provider project_id) The ID of the project the deployment is associated with.
public_endpoint InferenceDeploymentPublicEndpointArgs
Configuration of the deployment's public endpoint.
region Changes to this property will trigger replacement. str
(Defaults to provider region) The region in which the deployment is created.
size int
The size of the pool.
status str
The status of the deployment.
tags Sequence[str]
The tags associated with the deployment.
updated_at str
The date and time of the last update of the deployment.
acceptEula Boolean
Some models (e.g. Meta Llama) require end-user license agreements. Set to true to accept.
createdAt String
The date and time of the creation of the deployment.
maxSize Number
The maximum size of the pool.
minSize Number
The minimum size of the pool.
modelId String
The model id used for the deployment.
modelName String
The model name to use for the deployment. Model names can be found in Console or using Scaleway's CLI (scw inference model list)
name String
The deployment name.
nodeType String
The node type to use for the deployment. Node types can be found using Scaleway's CLI (scw inference node-type list)
privateEndpoint Property Map
Configuration of the deployment's private endpoint.
projectId Changes to this property will trigger replacement. String
project_id) The ID of the project the deployment is associated with.
publicEndpoint Property Map
Configuration of the deployment's public endpoint.
region Changes to this property will trigger replacement. String
region) The region in which the deployment is created.
size Number
The size of the pool.
status String
The status of the deployment.
tags List<String>
The tags associated with the deployment.
updatedAt String
The date and time of the last update of the deployment.

Supporting Types

InferenceDeploymentPrivateEndpoint
, InferenceDeploymentPrivateEndpointArgs

DisableAuth bool
Disable the authentication on the endpoint.
Id string
(Optional) The ID of the private endpoint.
PrivateNetworkId string
The ID of the private network to use.
Url string
(Optional) The URL of the endpoint.
DisableAuth bool
Disable the authentication on the endpoint.
Id string
(Optional) The ID of the private endpoint.
PrivateNetworkId string
The ID of the private network to use.
Url string
(Optional) The URL of the endpoint.
disableAuth Boolean
Disable the authentication on the endpoint.
id String
(Optional) The ID of the private endpoint.
privateNetworkId String
The ID of the private network to use.
url String
(Optional) The URL of the endpoint.
disableAuth boolean
Disable the authentication on the endpoint.
id string
(Optional) The ID of the private endpoint.
privateNetworkId string
The ID of the private network to use.
url string
(Optional) The URL of the endpoint.
disable_auth bool
Disable the authentication on the endpoint.
id str
(Optional) The ID of the private endpoint.
private_network_id str
The ID of the private network to use.
url str
(Optional) The URL of the endpoint.
disableAuth Boolean
Disable the authentication on the endpoint.
id String
(Optional) The ID of the private endpoint.
privateNetworkId String
The ID of the private network to use.
url String
(Optional) The URL of the endpoint.

InferenceDeploymentPublicEndpoint
, InferenceDeploymentPublicEndpointArgs

DisableAuth bool
Disable the authentication on the endpoint.
Id string
(Optional) The id of the public endpoint.
IsEnabled bool
Enable or disable public endpoint.
Url string
(Optional) The URL of the endpoint.
DisableAuth bool
Disable the authentication on the endpoint.
Id string
(Optional) The id of the public endpoint.
IsEnabled bool
Enable or disable public endpoint.
Url string
(Optional) The URL of the endpoint.
disableAuth Boolean
Disable the authentication on the endpoint.
id String
(Optional) The id of the public endpoint.
isEnabled Boolean
Enable or disable public endpoint.
url String
(Optional) The URL of the endpoint.
disableAuth boolean
Disable the authentication on the endpoint.
id string
(Optional) The id of the public endpoint.
isEnabled boolean
Enable or disable public endpoint.
url string
(Optional) The URL of the endpoint.
disable_auth bool
Disable the authentication on the endpoint.
id str
(Optional) The id of the public endpoint.
is_enabled bool
Enable or disable public endpoint.
url str
(Optional) The URL of the endpoint.
disableAuth Boolean
Disable the authentication on the endpoint.
id String
(Optional) The id of the public endpoint.
isEnabled Boolean
Enable or disable public endpoint.
url String
(Optional) The URL of the endpoint.

Import

Inference deployments can be imported using `{region}/{id}`, as shown below:

bash

$ pulumi import scaleway:index/inferenceDeployment:InferenceDeployment deployment fr-par/11111111-1111-1111-1111-111111111111
Copy

To learn more about importing existing cloud resources, see Importing resources.

Package Details

Repository
scaleway pulumiverse/pulumi-scaleway
License
Apache-2.0
Notes
This Pulumi package is based on the scaleway Terraform Provider.