1. Packages
  2. Konnect Provider
  3. API Docs
  4. GatewayPluginAiRequestTransformer
konnect 2.4.1 published on Thursday, Mar 13, 2025 by kong

konnect.GatewayPluginAiRequestTransformer

Explore with Pulumi AI

GatewayPluginAiRequestTransformer Resource

Example Usage

Coming soon!
Coming soon!
Coming soon!
Coming soon!
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.konnect.GatewayPluginAiRequestTransformer;
import com.pulumi.konnect.GatewayPluginAiRequestTransformerArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiRequestTransformerConfigArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiRequestTransformerConfigLlmArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiRequestTransformerConfigLlmAuthArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiRequestTransformerConfigLlmLoggingArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiRequestTransformerConfigLlmModelArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiRequestTransformerConfigLlmModelOptionsArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiRequestTransformerConfigLlmModelOptionsBedrockArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiRequestTransformerConfigLlmModelOptionsGeminiArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiRequestTransformerConfigLlmModelOptionsHuggingfaceArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiRequestTransformerConsumerGroupArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiRequestTransformerOrderingArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiRequestTransformerOrderingAfterArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiRequestTransformerOrderingBeforeArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiRequestTransformerRouteArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiRequestTransformerServiceArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    /**
     * Example stack: declares one konnect GatewayPluginAiRequestTransformer
     * resource with placeholder values for every supported input property.
     *
     * NOTE(review): the top-level config setters are camelCase on the Java
     * builders (httpProxyHost, maxRequestBodySize, transformationExtractPattern),
     * matching the constructor-reference example later on this page; the
     * ordering after/before lists use the accesses(...) setter.
     */
    public static void stack(Context ctx) {
        var myGatewaypluginairequesttransformer = new GatewayPluginAiRequestTransformer("myGatewaypluginairequesttransformer", GatewayPluginAiRequestTransformerArgs.builder()
            .config(GatewayPluginAiRequestTransformerConfigArgs.builder()
                .httpProxyHost("...my_http_proxy_host...")
                .httpProxyPort(19860)
                .httpTimeout(10)
                .httpsProxyHost("...my_https_proxy_host...")
                .httpsProxyPort(20590)
                .httpsVerify(false)
                .llm(GatewayPluginAiRequestTransformerConfigLlmArgs.builder()
                    .auth(GatewayPluginAiRequestTransformerConfigLlmAuthArgs.builder()
                        .allowOverride(false)
                        .awsAccessKeyId("...my_aws_access_key_id...")
                        .awsSecretAccessKey("...my_aws_secret_access_key...")
                        .azureClientId("...my_azure_client_id...")
                        .azureClientSecret("...my_azure_client_secret...")
                        .azureTenantId("...my_azure_tenant_id...")
                        .azureUseManagedIdentity(true)
                        .gcpServiceAccountJson("...my_gcp_service_account_json...")
                        .gcpUseServiceAccount(true)
                        .headerName("...my_header_name...")
                        .headerValue("...my_header_value...")
                        .paramLocation("query")
                        .paramName("...my_param_name...")
                        .paramValue("...my_param_value...")
                        .build())
                    .logging(GatewayPluginAiRequestTransformerConfigLlmLoggingArgs.builder()
                        .logPayloads(false)
                        .logStatistics(false)
                        .build())
                    .model(GatewayPluginAiRequestTransformerConfigLlmModelArgs.builder()
                        .name("...my_name...")
                        .options(GatewayPluginAiRequestTransformerConfigLlmModelOptionsArgs.builder()
                            .anthropicVersion("...my_anthropic_version...")
                            .azureApiVersion("...my_azure_api_version...")
                            .azureDeploymentId("...my_azure_deployment_id...")
                            .azureInstance("...my_azure_instance...")
                            .bedrock(GatewayPluginAiRequestTransformerConfigLlmModelOptionsBedrockArgs.builder()
                                .awsRegion("...my_aws_region...")
                                .build())
                            .gemini(GatewayPluginAiRequestTransformerConfigLlmModelOptionsGeminiArgs.builder()
                                .apiEndpoint("...my_api_endpoint...")
                                .locationId("...my_location_id...")
                                .projectId("...my_project_id...")
                                .build())
                            .huggingface(GatewayPluginAiRequestTransformerConfigLlmModelOptionsHuggingfaceArgs.builder()
                                .useCache(false)
                                .waitForModel(true)
                                .build())
                            .inputCost(6.37)
                            .llama2Format("ollama")
                            .maxTokens(5)
                            .mistralFormat("ollama")
                            .outputCost(8.25)
                            .temperature(0.7)
                            .topK(420)
                            .topP(0.54)
                            .upstreamPath("...my_upstream_path...")
                            .upstreamUrl("...my_upstream_url...")
                            .build())
                        .provider("mistral")
                        .build())
                    .routeType("preserve")
                    .build())
                .maxRequestBodySize(7)
                .prompt("...my_prompt...")
                .transformationExtractPattern("...my_transformation_extract_pattern...")
                .build())
            .consumerGroup(GatewayPluginAiRequestTransformerConsumerGroupArgs.builder()
                .id("...my_id...")
                .build())
            .controlPlaneId("9524ec7d-36d9-465d-a8c5-83a3c9390458")
            .enabled(true)
            .gatewayPluginAiRequestTransformerId("...my_id...")
            .instanceName("...my_instance_name...")
            .ordering(GatewayPluginAiRequestTransformerOrderingArgs.builder()
                .after(GatewayPluginAiRequestTransformerOrderingAfterArgs.builder()
                    .accesses("...")
                    .build())
                .before(GatewayPluginAiRequestTransformerOrderingBeforeArgs.builder()
                    .accesses("...")
                    .build())
                .build())
            .protocols("http")
            .route(GatewayPluginAiRequestTransformerRouteArgs.builder()
                .id("...my_id...")
                .build())
            .service(GatewayPluginAiRequestTransformerServiceArgs.builder()
                .id("...my_id...")
                .build())
            .tags("...")
            .build());

    }
}
Copy
# Reference example for konnect:GatewayPluginAiRequestTransformer.
# NOTE(review): the top-level config keys were snake_case while every nested
# key (allowOverride, logPayloads, ...) is camelCase; Pulumi YAML property
# names are camelCase, so the top-level keys are normalized to match.
resources:
  myGatewaypluginairequesttransformer:
    type: konnect:GatewayPluginAiRequestTransformer
    properties:
      config:
        httpProxyHost: '...my_http_proxy_host...'
        httpProxyPort: 19860
        httpTimeout: 10
        httpsProxyHost: '...my_https_proxy_host...'
        httpsProxyPort: 20590
        httpsVerify: false
        llm:
          auth:
            allowOverride: false
            awsAccessKeyId: '...my_aws_access_key_id...'
            awsSecretAccessKey: '...my_aws_secret_access_key...'
            azureClientId: '...my_azure_client_id...'
            azureClientSecret: '...my_azure_client_secret...'
            azureTenantId: '...my_azure_tenant_id...'
            azureUseManagedIdentity: true
            gcpServiceAccountJson: '...my_gcp_service_account_json...'
            gcpUseServiceAccount: true
            headerName: '...my_header_name...'
            headerValue: '...my_header_value...'
            paramLocation: query
            paramName: '...my_param_name...'
            paramValue: '...my_param_value...'
          logging:
            logPayloads: false
            logStatistics: false
          model:
            name: '...my_name...'
            options:
              anthropicVersion: '...my_anthropic_version...'
              azureApiVersion: '...my_azure_api_version...'
              azureDeploymentId: '...my_azure_deployment_id...'
              azureInstance: '...my_azure_instance...'
              bedrock:
                awsRegion: '...my_aws_region...'
              gemini:
                apiEndpoint: '...my_api_endpoint...'
                locationId: '...my_location_id...'
                projectId: '...my_project_id...'
              huggingface:
                useCache: false
                waitForModel: true
              inputCost: 6.37
              llama2Format: ollama
              maxTokens: 5
              mistralFormat: ollama
              outputCost: 8.25
              temperature: 0.7
              topK: 420
              topP: 0.54
              upstreamPath: '...my_upstream_path...'
              upstreamUrl: '...my_upstream_url...'
            provider: mistral
          routeType: preserve
        maxRequestBodySize: 7
        prompt: '...my_prompt...'
        transformationExtractPattern: '...my_transformation_extract_pattern...'
      consumerGroup:
        id: '...my_id...'
      controlPlaneId: 9524ec7d-36d9-465d-a8c5-83a3c9390458
      enabled: true
      gatewayPluginAiRequestTransformerId: '...my_id...'
      instanceName: '...my_instance_name...'
      ordering:
        after:
          access:
            - '...'
        before:
          access:
            - '...'
      protocols:
        - http
      route:
        id: '...my_id...'
      service:
        id: '...my_id...'
      tags:
        - '...'
Copy

Create GatewayPluginAiRequestTransformer Resource

Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

Constructor syntax

new GatewayPluginAiRequestTransformer(name: string, args: GatewayPluginAiRequestTransformerArgs, opts?: CustomResourceOptions);
@overload
def GatewayPluginAiRequestTransformer(resource_name: str,
                                      args: GatewayPluginAiRequestTransformerArgs,
                                      opts: Optional[ResourceOptions] = None)

@overload
def GatewayPluginAiRequestTransformer(resource_name: str,
                                      opts: Optional[ResourceOptions] = None,
                                      config: Optional[GatewayPluginAiRequestTransformerConfigArgs] = None,
                                      control_plane_id: Optional[str] = None,
                                      consumer_group: Optional[GatewayPluginAiRequestTransformerConsumerGroupArgs] = None,
                                      enabled: Optional[bool] = None,
                                      gateway_plugin_ai_request_transformer_id: Optional[str] = None,
                                      instance_name: Optional[str] = None,
                                      ordering: Optional[GatewayPluginAiRequestTransformerOrderingArgs] = None,
                                      protocols: Optional[Sequence[str]] = None,
                                      route: Optional[GatewayPluginAiRequestTransformerRouteArgs] = None,
                                      service: Optional[GatewayPluginAiRequestTransformerServiceArgs] = None,
                                      tags: Optional[Sequence[str]] = None)
func NewGatewayPluginAiRequestTransformer(ctx *Context, name string, args GatewayPluginAiRequestTransformerArgs, opts ...ResourceOption) (*GatewayPluginAiRequestTransformer, error)
public GatewayPluginAiRequestTransformer(string name, GatewayPluginAiRequestTransformerArgs args, CustomResourceOptions? opts = null)
public GatewayPluginAiRequestTransformer(String name, GatewayPluginAiRequestTransformerArgs args)
public GatewayPluginAiRequestTransformer(String name, GatewayPluginAiRequestTransformerArgs args, CustomResourceOptions options)
type: konnect:GatewayPluginAiRequestTransformer
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

Parameters

name This property is required. string
The unique name of the resource.
args This property is required. GatewayPluginAiRequestTransformerArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name This property is required. str
The unique name of the resource.
args This property is required. GatewayPluginAiRequestTransformerArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name This property is required. string
The unique name of the resource.
args This property is required. GatewayPluginAiRequestTransformerArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name This property is required. string
The unique name of the resource.
args This property is required. GatewayPluginAiRequestTransformerArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name This property is required. String
The unique name of the resource.
args This property is required. GatewayPluginAiRequestTransformerArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

Constructor example

The following reference example uses placeholder values for all input properties.

// C# constructor reference example: every input property of
// konnect.GatewayPluginAiRequestTransformer set to a placeholder value.
var gatewayPluginAiRequestTransformerResource = new Konnect.GatewayPluginAiRequestTransformer("gatewayPluginAiRequestTransformerResource", new()
{
    Config = new Konnect.Inputs.GatewayPluginAiRequestTransformerConfigArgs
    {
        HttpProxyHost = "string",
        HttpProxyPort = 0,
        HttpTimeout = 0,
        HttpsProxyHost = "string",
        HttpsProxyPort = 0,
        HttpsVerify = false,
        Llm = new Konnect.Inputs.GatewayPluginAiRequestTransformerConfigLlmArgs
        {
            Auth = new Konnect.Inputs.GatewayPluginAiRequestTransformerConfigLlmAuthArgs
            {
                AllowOverride = false,
                AwsAccessKeyId = "string",
                AwsSecretAccessKey = "string",
                AzureClientId = "string",
                AzureClientSecret = "string",
                AzureTenantId = "string",
                AzureUseManagedIdentity = false,
                GcpServiceAccountJson = "string",
                GcpUseServiceAccount = false,
                HeaderName = "string",
                HeaderValue = "string",
                ParamLocation = "string",
                ParamName = "string",
                ParamValue = "string",
            },
            Logging = new Konnect.Inputs.GatewayPluginAiRequestTransformerConfigLlmLoggingArgs
            {
                LogPayloads = false,
                LogStatistics = false,
            },
            Model = new Konnect.Inputs.GatewayPluginAiRequestTransformerConfigLlmModelArgs
            {
                Name = "string",
                Options = new Konnect.Inputs.GatewayPluginAiRequestTransformerConfigLlmModelOptionsArgs
                {
                    AnthropicVersion = "string",
                    AzureApiVersion = "string",
                    AzureDeploymentId = "string",
                    AzureInstance = "string",
                    Bedrock = new Konnect.Inputs.GatewayPluginAiRequestTransformerConfigLlmModelOptionsBedrockArgs
                    {
                        AwsRegion = "string",
                    },
                    Gemini = new Konnect.Inputs.GatewayPluginAiRequestTransformerConfigLlmModelOptionsGeminiArgs
                    {
                        ApiEndpoint = "string",
                        LocationId = "string",
                        ProjectId = "string",
                    },
                    Huggingface = new Konnect.Inputs.GatewayPluginAiRequestTransformerConfigLlmModelOptionsHuggingfaceArgs
                    {
                        UseCache = false,
                        WaitForModel = false,
                    },
                    InputCost = 0,
                    Llama2Format = "string",
                    MaxTokens = 0,
                    MistralFormat = "string",
                    OutputCost = 0,
                    Temperature = 0,
                    TopK = 0,
                    TopP = 0,
                    UpstreamPath = "string",
                    UpstreamUrl = "string",
                },
                Provider = "string",
            },
            RouteType = "string",
        },
        MaxRequestBodySize = 0,
        Prompt = "string",
        TransformationExtractPattern = "string",
    },
    ControlPlaneId = "string",
    ConsumerGroup = new Konnect.Inputs.GatewayPluginAiRequestTransformerConsumerGroupArgs
    {
        Id = "string",
    },
    Enabled = false,
    GatewayPluginAiRequestTransformerId = "string",
    InstanceName = "string",
    Ordering = new Konnect.Inputs.GatewayPluginAiRequestTransformerOrderingArgs
    {
        After = new Konnect.Inputs.GatewayPluginAiRequestTransformerOrderingAfterArgs
        {
            Accesses = new[]
            {
                "string",
            },
        },
        Before = new Konnect.Inputs.GatewayPluginAiRequestTransformerOrderingBeforeArgs
        {
            Accesses = new[]
            {
                "string",
            },
        },
    },
    Protocols = new[]
    {
        "string",
    },
    Route = new Konnect.Inputs.GatewayPluginAiRequestTransformerRouteArgs
    {
        Id = "string",
    },
    Service = new Konnect.Inputs.GatewayPluginAiRequestTransformerServiceArgs
    {
        Id = "string",
    },
    Tags = new[]
    {
        "string",
    },
});
Copy
// Go constructor reference example: every input property set to a placeholder.
// NOTE(review): the scraped page dropped the package qualifier on every struct
// literal ("&.TypeName{" is invalid Go); "&konnect." is restored below, and the
// fragment is re-indented conventionally. The returned err is presumably
// handled by the enclosing pulumi.Run function — confirm against the caller.
example, err := konnect.NewGatewayPluginAiRequestTransformer(ctx, "gatewayPluginAiRequestTransformerResource", &konnect.GatewayPluginAiRequestTransformerArgs{
	Config: &konnect.GatewayPluginAiRequestTransformerConfigArgs{
		HttpProxyHost:  pulumi.String("string"),
		HttpProxyPort:  pulumi.Float64(0),
		HttpTimeout:    pulumi.Float64(0),
		HttpsProxyHost: pulumi.String("string"),
		HttpsProxyPort: pulumi.Float64(0),
		HttpsVerify:    pulumi.Bool(false),
		Llm: &konnect.GatewayPluginAiRequestTransformerConfigLlmArgs{
			Auth: &konnect.GatewayPluginAiRequestTransformerConfigLlmAuthArgs{
				AllowOverride:           pulumi.Bool(false),
				AwsAccessKeyId:          pulumi.String("string"),
				AwsSecretAccessKey:      pulumi.String("string"),
				AzureClientId:           pulumi.String("string"),
				AzureClientSecret:       pulumi.String("string"),
				AzureTenantId:           pulumi.String("string"),
				AzureUseManagedIdentity: pulumi.Bool(false),
				GcpServiceAccountJson:   pulumi.String("string"),
				GcpUseServiceAccount:    pulumi.Bool(false),
				HeaderName:              pulumi.String("string"),
				HeaderValue:             pulumi.String("string"),
				ParamLocation:           pulumi.String("string"),
				ParamName:               pulumi.String("string"),
				ParamValue:              pulumi.String("string"),
			},
			Logging: &konnect.GatewayPluginAiRequestTransformerConfigLlmLoggingArgs{
				LogPayloads:   pulumi.Bool(false),
				LogStatistics: pulumi.Bool(false),
			},
			Model: &konnect.GatewayPluginAiRequestTransformerConfigLlmModelArgs{
				Name: pulumi.String("string"),
				Options: &konnect.GatewayPluginAiRequestTransformerConfigLlmModelOptionsArgs{
					AnthropicVersion:  pulumi.String("string"),
					AzureApiVersion:   pulumi.String("string"),
					AzureDeploymentId: pulumi.String("string"),
					AzureInstance:     pulumi.String("string"),
					Bedrock: &konnect.GatewayPluginAiRequestTransformerConfigLlmModelOptionsBedrockArgs{
						AwsRegion: pulumi.String("string"),
					},
					Gemini: &konnect.GatewayPluginAiRequestTransformerConfigLlmModelOptionsGeminiArgs{
						ApiEndpoint: pulumi.String("string"),
						LocationId:  pulumi.String("string"),
						ProjectId:   pulumi.String("string"),
					},
					Huggingface: &konnect.GatewayPluginAiRequestTransformerConfigLlmModelOptionsHuggingfaceArgs{
						UseCache:     pulumi.Bool(false),
						WaitForModel: pulumi.Bool(false),
					},
					InputCost:     pulumi.Float64(0),
					Llama2Format:  pulumi.String("string"),
					MaxTokens:     pulumi.Float64(0),
					MistralFormat: pulumi.String("string"),
					OutputCost:    pulumi.Float64(0),
					Temperature:   pulumi.Float64(0),
					TopK:          pulumi.Float64(0),
					TopP:          pulumi.Float64(0),
					UpstreamPath:  pulumi.String("string"),
					UpstreamUrl:   pulumi.String("string"),
				},
				Provider: pulumi.String("string"),
			},
			RouteType: pulumi.String("string"),
		},
		MaxRequestBodySize:           pulumi.Float64(0),
		Prompt:                       pulumi.String("string"),
		TransformationExtractPattern: pulumi.String("string"),
	},
	ControlPlaneId: pulumi.String("string"),
	ConsumerGroup: &konnect.GatewayPluginAiRequestTransformerConsumerGroupArgs{
		Id: pulumi.String("string"),
	},
	Enabled:                             pulumi.Bool(false),
	GatewayPluginAiRequestTransformerId: pulumi.String("string"),
	InstanceName:                        pulumi.String("string"),
	Ordering: &konnect.GatewayPluginAiRequestTransformerOrderingArgs{
		After: &konnect.GatewayPluginAiRequestTransformerOrderingAfterArgs{
			Accesses: pulumi.StringArray{
				pulumi.String("string"),
			},
		},
		Before: &konnect.GatewayPluginAiRequestTransformerOrderingBeforeArgs{
			Accesses: pulumi.StringArray{
				pulumi.String("string"),
			},
		},
	},
	Protocols: pulumi.StringArray{
		pulumi.String("string"),
	},
	Route: &konnect.GatewayPluginAiRequestTransformerRouteArgs{
		Id: pulumi.String("string"),
	},
	Service: &konnect.GatewayPluginAiRequestTransformerServiceArgs{
		Id: pulumi.String("string"),
	},
	Tags: pulumi.StringArray{
		pulumi.String("string"),
	},
})
Copy
// Java constructor reference example: every input property of
// GatewayPluginAiRequestTransformer set to a placeholder value via the
// generated camelCase builder setters.
var gatewayPluginAiRequestTransformerResource = new GatewayPluginAiRequestTransformer("gatewayPluginAiRequestTransformerResource", GatewayPluginAiRequestTransformerArgs.builder()
    .config(GatewayPluginAiRequestTransformerConfigArgs.builder()
        .httpProxyHost("string")
        .httpProxyPort(0)
        .httpTimeout(0)
        .httpsProxyHost("string")
        .httpsProxyPort(0)
        .httpsVerify(false)
        .llm(GatewayPluginAiRequestTransformerConfigLlmArgs.builder()
            .auth(GatewayPluginAiRequestTransformerConfigLlmAuthArgs.builder()
                .allowOverride(false)
                .awsAccessKeyId("string")
                .awsSecretAccessKey("string")
                .azureClientId("string")
                .azureClientSecret("string")
                .azureTenantId("string")
                .azureUseManagedIdentity(false)
                .gcpServiceAccountJson("string")
                .gcpUseServiceAccount(false)
                .headerName("string")
                .headerValue("string")
                .paramLocation("string")
                .paramName("string")
                .paramValue("string")
                .build())
            .logging(GatewayPluginAiRequestTransformerConfigLlmLoggingArgs.builder()
                .logPayloads(false)
                .logStatistics(false)
                .build())
            .model(GatewayPluginAiRequestTransformerConfigLlmModelArgs.builder()
                .name("string")
                .options(GatewayPluginAiRequestTransformerConfigLlmModelOptionsArgs.builder()
                    .anthropicVersion("string")
                    .azureApiVersion("string")
                    .azureDeploymentId("string")
                    .azureInstance("string")
                    .bedrock(GatewayPluginAiRequestTransformerConfigLlmModelOptionsBedrockArgs.builder()
                        .awsRegion("string")
                        .build())
                    .gemini(GatewayPluginAiRequestTransformerConfigLlmModelOptionsGeminiArgs.builder()
                        .apiEndpoint("string")
                        .locationId("string")
                        .projectId("string")
                        .build())
                    .huggingface(GatewayPluginAiRequestTransformerConfigLlmModelOptionsHuggingfaceArgs.builder()
                        .useCache(false)
                        .waitForModel(false)
                        .build())
                    .inputCost(0)
                    .llama2Format("string")
                    .maxTokens(0)
                    .mistralFormat("string")
                    .outputCost(0)
                    .temperature(0)
                    .topK(0)
                    .topP(0)
                    .upstreamPath("string")
                    .upstreamUrl("string")
                    .build())
                .provider("string")
                .build())
            .routeType("string")
            .build())
        .maxRequestBodySize(0)
        .prompt("string")
        .transformationExtractPattern("string")
        .build())
    .controlPlaneId("string")
    .consumerGroup(GatewayPluginAiRequestTransformerConsumerGroupArgs.builder()
        .id("string")
        .build())
    .enabled(false)
    .gatewayPluginAiRequestTransformerId("string")
    .instanceName("string")
    .ordering(GatewayPluginAiRequestTransformerOrderingArgs.builder()
        .after(GatewayPluginAiRequestTransformerOrderingAfterArgs.builder()
            .accesses("string")
            .build())
        .before(GatewayPluginAiRequestTransformerOrderingBeforeArgs.builder()
            .accesses("string")
            .build())
        .build())
    .protocols("string")
    .route(GatewayPluginAiRequestTransformerRouteArgs.builder()
        .id("string")
        .build())
    .service(GatewayPluginAiRequestTransformerServiceArgs.builder()
        .id("string")
        .build())
    .tags("string")
    .build());
Copy
# Python constructor reference example: every input property of
# konnect.GatewayPluginAiRequestTransformer set to a placeholder value
# using snake_case keys in plain dicts (TypedDict-style inputs).
gateway_plugin_ai_request_transformer_resource = konnect.GatewayPluginAiRequestTransformer("gatewayPluginAiRequestTransformerResource",
    config={
        "http_proxy_host": "string",
        "http_proxy_port": 0,
        "http_timeout": 0,
        "https_proxy_host": "string",
        "https_proxy_port": 0,
        "https_verify": False,
        "llm": {
            "auth": {
                "allow_override": False,
                "aws_access_key_id": "string",
                "aws_secret_access_key": "string",
                "azure_client_id": "string",
                "azure_client_secret": "string",
                "azure_tenant_id": "string",
                "azure_use_managed_identity": False,
                "gcp_service_account_json": "string",
                "gcp_use_service_account": False,
                "header_name": "string",
                "header_value": "string",
                "param_location": "string",
                "param_name": "string",
                "param_value": "string",
            },
            "logging": {
                "log_payloads": False,
                "log_statistics": False,
            },
            "model": {
                "name": "string",
                "options": {
                    "anthropic_version": "string",
                    "azure_api_version": "string",
                    "azure_deployment_id": "string",
                    "azure_instance": "string",
                    "bedrock": {
                        "aws_region": "string",
                    },
                    "gemini": {
                        "api_endpoint": "string",
                        "location_id": "string",
                        "project_id": "string",
                    },
                    "huggingface": {
                        "use_cache": False,
                        "wait_for_model": False,
                    },
                    "input_cost": 0,
                    "llama2_format": "string",
                    "max_tokens": 0,
                    "mistral_format": "string",
                    "output_cost": 0,
                    "temperature": 0,
                    "top_k": 0,
                    "top_p": 0,
                    "upstream_path": "string",
                    "upstream_url": "string",
                },
                "provider": "string",
            },
            "route_type": "string",
        },
        "max_request_body_size": 0,
        "prompt": "string",
        "transformation_extract_pattern": "string",
    },
    control_plane_id="string",
    consumer_group={
        "id": "string",
    },
    enabled=False,
    gateway_plugin_ai_request_transformer_id="string",
    instance_name="string",
    ordering={
        "after": {
            "accesses": ["string"],
        },
        "before": {
            "accesses": ["string"],
        },
    },
    protocols=["string"],
    route={
        "id": "string",
    },
    service={
        "id": "string",
    },
    tags=["string"])
Copy
const gatewayPluginAiRequestTransformerResource = new konnect.GatewayPluginAiRequestTransformer("gatewayPluginAiRequestTransformerResource", {
    config: {
        httpProxyHost: "string",
        httpProxyPort: 0,
        httpTimeout: 0,
        httpsProxyHost: "string",
        httpsProxyPort: 0,
        httpsVerify: false,
        llm: {
            auth: {
                allowOverride: false,
                awsAccessKeyId: "string",
                awsSecretAccessKey: "string",
                azureClientId: "string",
                azureClientSecret: "string",
                azureTenantId: "string",
                azureUseManagedIdentity: false,
                gcpServiceAccountJson: "string",
                gcpUseServiceAccount: false,
                headerName: "string",
                headerValue: "string",
                paramLocation: "string",
                paramName: "string",
                paramValue: "string",
            },
            logging: {
                logPayloads: false,
                logStatistics: false,
            },
            model: {
                name: "string",
                options: {
                    anthropicVersion: "string",
                    azureApiVersion: "string",
                    azureDeploymentId: "string",
                    azureInstance: "string",
                    bedrock: {
                        awsRegion: "string",
                    },
                    gemini: {
                        apiEndpoint: "string",
                        locationId: "string",
                        projectId: "string",
                    },
                    huggingface: {
                        useCache: false,
                        waitForModel: false,
                    },
                    inputCost: 0,
                    llama2Format: "string",
                    maxTokens: 0,
                    mistralFormat: "string",
                    outputCost: 0,
                    temperature: 0,
                    topK: 0,
                    topP: 0,
                    upstreamPath: "string",
                    upstreamUrl: "string",
                },
                provider: "string",
            },
            routeType: "string",
        },
        maxRequestBodySize: 0,
        prompt: "string",
        transformationExtractPattern: "string",
    },
    controlPlaneId: "string",
    consumerGroup: {
        id: "string",
    },
    enabled: false,
    gatewayPluginAiRequestTransformerId: "string",
    instanceName: "string",
    ordering: {
        after: {
            accesses: ["string"],
        },
        before: {
            accesses: ["string"],
        },
    },
    protocols: ["string"],
    route: {
        id: "string",
    },
    service: {
        id: "string",
    },
    tags: ["string"],
});
Copy
type: konnect:GatewayPluginAiRequestTransformer
properties:
    config:
        httpProxyHost: string
        httpProxyPort: 0
        httpTimeout: 0
        httpsProxyHost: string
        httpsProxyPort: 0
        httpsVerify: false
        llm:
            auth:
                allowOverride: false
                awsAccessKeyId: string
                awsSecretAccessKey: string
                azureClientId: string
                azureClientSecret: string
                azureTenantId: string
                azureUseManagedIdentity: false
                gcpServiceAccountJson: string
                gcpUseServiceAccount: false
                headerName: string
                headerValue: string
                paramLocation: string
                paramName: string
                paramValue: string
            logging:
                logPayloads: false
                logStatistics: false
            model:
                name: string
                options:
                    anthropicVersion: string
                    azureApiVersion: string
                    azureDeploymentId: string
                    azureInstance: string
                    bedrock:
                        awsRegion: string
                    gemini:
                        apiEndpoint: string
                        locationId: string
                        projectId: string
                    huggingface:
                        useCache: false
                        waitForModel: false
                    inputCost: 0
                    llama2Format: string
                    maxTokens: 0
                    mistralFormat: string
                    outputCost: 0
                    temperature: 0
                    topK: 0
                    topP: 0
                    upstreamPath: string
                    upstreamUrl: string
                provider: string
            routeType: string
        maxRequestBodySize: 0
        prompt: string
        transformationExtractPattern: string
    consumerGroup:
        id: string
    controlPlaneId: string
    enabled: false
    gatewayPluginAiRequestTransformerId: string
    instanceName: string
    ordering:
        after:
            accesses:
                - string
        before:
            accesses:
                - string
    protocols:
        - string
    route:
        id: string
    service:
        id: string
    tags:
        - string
Copy

GatewayPluginAiRequestTransformer Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

The GatewayPluginAiRequestTransformer resource accepts the following input properties:

Config This property is required. GatewayPluginAiRequestTransformerConfig
ControlPlaneId This property is required. string
The UUID of your control plane. This variable is available in the Konnect manager. Requires replacement if changed.
ConsumerGroup GatewayPluginAiRequestTransformerConsumerGroup
If set, the plugin will activate only for requests where the specified consumer group has been authenticated. (Note that some plugins cannot be restricted to consumer groups this way.) Leave unset for the plugin to activate regardless of the authenticated Consumer Groups.
Enabled bool
Whether the plugin is applied.
GatewayPluginAiRequestTransformerId string
The ID of this resource.
InstanceName string
Ordering GatewayPluginAiRequestTransformerOrdering
Protocols List<string>
A set of strings representing HTTP protocols.
Route GatewayPluginAiRequestTransformerRoute
If set, the plugin will only activate when receiving requests via the specified route. Leave unset for the plugin to activate regardless of the route being used.
Service GatewayPluginAiRequestTransformerService
If set, the plugin will only activate when receiving requests via one of the routes belonging to the specified Service. Leave unset for the plugin to activate regardless of the Service being matched.
Tags List<string>
An optional set of strings associated with the Plugin for grouping and filtering.
Config This property is required. GatewayPluginAiRequestTransformerConfigArgs
ControlPlaneId This property is required. string
The UUID of your control plane. This variable is available in the Konnect manager. Requires replacement if changed.
ConsumerGroup GatewayPluginAiRequestTransformerConsumerGroupArgs
If set, the plugin will activate only for requests where the specified consumer group has been authenticated. (Note that some plugins cannot be restricted to consumer groups this way.) Leave unset for the plugin to activate regardless of the authenticated Consumer Groups.
Enabled bool
Whether the plugin is applied.
GatewayPluginAiRequestTransformerId string
The ID of this resource.
InstanceName string
Ordering GatewayPluginAiRequestTransformerOrderingArgs
Protocols []string
A set of strings representing HTTP protocols.
Route GatewayPluginAiRequestTransformerRouteArgs
If set, the plugin will only activate when receiving requests via the specified route. Leave unset for the plugin to activate regardless of the route being used.
Service GatewayPluginAiRequestTransformerServiceArgs
If set, the plugin will only activate when receiving requests via one of the routes belonging to the specified Service. Leave unset for the plugin to activate regardless of the Service being matched.
Tags []string
An optional set of strings associated with the Plugin for grouping and filtering.
config This property is required. GatewayPluginAiRequestTransformerConfig
controlPlaneId This property is required. String
The UUID of your control plane. This variable is available in the Konnect manager. Requires replacement if changed.
consumerGroup GatewayPluginAiRequestTransformerConsumerGroup
If set, the plugin will activate only for requests where the specified consumer group has been authenticated. (Note that some plugins cannot be restricted to consumer groups this way.) Leave unset for the plugin to activate regardless of the authenticated Consumer Groups.
enabled Boolean
Whether the plugin is applied.
gatewayPluginAiRequestTransformerId String
The ID of this resource.
instanceName String
ordering GatewayPluginAiRequestTransformerOrdering
protocols List<String>
A set of strings representing HTTP protocols.
route GatewayPluginAiRequestTransformerRoute
If set, the plugin will only activate when receiving requests via the specified route. Leave unset for the plugin to activate regardless of the route being used.
service GatewayPluginAiRequestTransformerService
If set, the plugin will only activate when receiving requests via one of the routes belonging to the specified Service. Leave unset for the plugin to activate regardless of the Service being matched.
tags List<String>
An optional set of strings associated with the Plugin for grouping and filtering.
config This property is required. GatewayPluginAiRequestTransformerConfig
controlPlaneId This property is required. string
The UUID of your control plane. This variable is available in the Konnect manager. Requires replacement if changed.
consumerGroup GatewayPluginAiRequestTransformerConsumerGroup
If set, the plugin will activate only for requests where the specified consumer group has been authenticated. (Note that some plugins cannot be restricted to consumer groups this way.) Leave unset for the plugin to activate regardless of the authenticated Consumer Groups.
enabled boolean
Whether the plugin is applied.
gatewayPluginAiRequestTransformerId string
The ID of this resource.
instanceName string
ordering GatewayPluginAiRequestTransformerOrdering
protocols string[]
A set of strings representing HTTP protocols.
route GatewayPluginAiRequestTransformerRoute
If set, the plugin will only activate when receiving requests via the specified route. Leave unset for the plugin to activate regardless of the route being used.
service GatewayPluginAiRequestTransformerService
If set, the plugin will only activate when receiving requests via one of the routes belonging to the specified Service. Leave unset for the plugin to activate regardless of the Service being matched.
tags string[]
An optional set of strings associated with the Plugin for grouping and filtering.
config This property is required. GatewayPluginAiRequestTransformerConfigArgs
control_plane_id This property is required. str
The UUID of your control plane. This variable is available in the Konnect manager. Requires replacement if changed.
consumer_group GatewayPluginAiRequestTransformerConsumerGroupArgs
If set, the plugin will activate only for requests where the specified consumer group has been authenticated. (Note that some plugins cannot be restricted to consumer groups this way.) Leave unset for the plugin to activate regardless of the authenticated Consumer Groups.
enabled bool
Whether the plugin is applied.
gateway_plugin_ai_request_transformer_id str
The ID of this resource.
instance_name str
ordering GatewayPluginAiRequestTransformerOrderingArgs
protocols Sequence[str]
A set of strings representing HTTP protocols.
route GatewayPluginAiRequestTransformerRouteArgs
If set, the plugin will only activate when receiving requests via the specified route. Leave unset for the plugin to activate regardless of the route being used.
service GatewayPluginAiRequestTransformerServiceArgs
If set, the plugin will only activate when receiving requests via one of the routes belonging to the specified Service. Leave unset for the plugin to activate regardless of the Service being matched.
tags Sequence[str]
An optional set of strings associated with the Plugin for grouping and filtering.
config This property is required. Property Map
controlPlaneId This property is required. String
The UUID of your control plane. This variable is available in the Konnect manager. Requires replacement if changed.
consumerGroup Property Map
If set, the plugin will activate only for requests where the specified consumer group has been authenticated. (Note that some plugins cannot be restricted to consumer groups this way.) Leave unset for the plugin to activate regardless of the authenticated Consumer Groups.
enabled Boolean
Whether the plugin is applied.
gatewayPluginAiRequestTransformerId String
The ID of this resource.
instanceName String
ordering Property Map
protocols List<String>
A set of strings representing HTTP protocols.
route Property Map
If set, the plugin will only activate when receiving requests via the specified route. Leave unset for the plugin to activate regardless of the route being used.
service Property Map
If set, the plugin will only activate when receiving requests via one of the routes belonging to the specified Service. Leave unset for the plugin to activate regardless of the Service being matched.
tags List<String>
An optional set of strings associated with the Plugin for grouping and filtering.

Outputs

All input properties are implicitly available as output properties. Additionally, the GatewayPluginAiRequestTransformer resource produces the following output properties:

CreatedAt double
Unix epoch when the resource was created.
Id string
The provider-assigned unique ID for this managed resource.
UpdatedAt double
Unix epoch when the resource was last updated.
CreatedAt float64
Unix epoch when the resource was created.
Id string
The provider-assigned unique ID for this managed resource.
UpdatedAt float64
Unix epoch when the resource was last updated.
createdAt Double
Unix epoch when the resource was created.
id String
The provider-assigned unique ID for this managed resource.
updatedAt Double
Unix epoch when the resource was last updated.
createdAt number
Unix epoch when the resource was created.
id string
The provider-assigned unique ID for this managed resource.
updatedAt number
Unix epoch when the resource was last updated.
created_at float
Unix epoch when the resource was created.
id str
The provider-assigned unique ID for this managed resource.
updated_at float
Unix epoch when the resource was last updated.
createdAt Number
Unix epoch when the resource was created.
id String
The provider-assigned unique ID for this managed resource.
updatedAt Number
Unix epoch when the resource was last updated.

Look up Existing GatewayPluginAiRequestTransformer Resource

Get an existing GatewayPluginAiRequestTransformer resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: GatewayPluginAiRequestTransformerState, opts?: CustomResourceOptions): GatewayPluginAiRequestTransformer
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        config: Optional[GatewayPluginAiRequestTransformerConfigArgs] = None,
        consumer_group: Optional[GatewayPluginAiRequestTransformerConsumerGroupArgs] = None,
        control_plane_id: Optional[str] = None,
        created_at: Optional[float] = None,
        enabled: Optional[bool] = None,
        gateway_plugin_ai_request_transformer_id: Optional[str] = None,
        instance_name: Optional[str] = None,
        ordering: Optional[GatewayPluginAiRequestTransformerOrderingArgs] = None,
        protocols: Optional[Sequence[str]] = None,
        route: Optional[GatewayPluginAiRequestTransformerRouteArgs] = None,
        service: Optional[GatewayPluginAiRequestTransformerServiceArgs] = None,
        tags: Optional[Sequence[str]] = None,
        updated_at: Optional[float] = None) -> GatewayPluginAiRequestTransformer
func GetGatewayPluginAiRequestTransformer(ctx *Context, name string, id IDInput, state *GatewayPluginAiRequestTransformerState, opts ...ResourceOption) (*GatewayPluginAiRequestTransformer, error)
public static GatewayPluginAiRequestTransformer Get(string name, Input<string> id, GatewayPluginAiRequestTransformerState? state, CustomResourceOptions? opts = null)
public static GatewayPluginAiRequestTransformer get(String name, Output<String> id, GatewayPluginAiRequestTransformerState state, CustomResourceOptions options)
resources:  _:    type: konnect:GatewayPluginAiRequestTransformer    get:      id: ${id}
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
Config GatewayPluginAiRequestTransformerConfig
ConsumerGroup GatewayPluginAiRequestTransformerConsumerGroup
If set, the plugin will activate only for requests where the specified consumer group has been authenticated. (Note that some plugins cannot be restricted to consumer groups this way.) Leave unset for the plugin to activate regardless of the authenticated Consumer Groups.
ControlPlaneId string
The UUID of your control plane. This variable is available in the Konnect manager. Requires replacement if changed.
CreatedAt double
Unix epoch when the resource was created.
Enabled bool
Whether the plugin is applied.
GatewayPluginAiRequestTransformerId string
The ID of this resource.
InstanceName string
Ordering GatewayPluginAiRequestTransformerOrdering
Protocols List<string>
A set of strings representing HTTP protocols.
Route GatewayPluginAiRequestTransformerRoute
If set, the plugin will only activate when receiving requests via the specified route. Leave unset for the plugin to activate regardless of the route being used.
Service GatewayPluginAiRequestTransformerService
If set, the plugin will only activate when receiving requests via one of the routes belonging to the specified Service. Leave unset for the plugin to activate regardless of the Service being matched.
Tags List<string>
An optional set of strings associated with the Plugin for grouping and filtering.
UpdatedAt double
Unix epoch when the resource was last updated.
Config GatewayPluginAiRequestTransformerConfigArgs
ConsumerGroup GatewayPluginAiRequestTransformerConsumerGroupArgs
If set, the plugin will activate only for requests where the specified consumer group has been authenticated. (Note that some plugins cannot be restricted to consumer groups this way.) Leave unset for the plugin to activate regardless of the authenticated Consumer Groups.
ControlPlaneId string
The UUID of your control plane. This variable is available in the Konnect manager. Requires replacement if changed.
CreatedAt float64
Unix epoch when the resource was created.
Enabled bool
Whether the plugin is applied.
GatewayPluginAiRequestTransformerId string
The ID of this resource.
InstanceName string
Ordering GatewayPluginAiRequestTransformerOrderingArgs
Protocols []string
A set of strings representing HTTP protocols.
Route GatewayPluginAiRequestTransformerRouteArgs
If set, the plugin will only activate when receiving requests via the specified route. Leave unset for the plugin to activate regardless of the route being used.
Service GatewayPluginAiRequestTransformerServiceArgs
If set, the plugin will only activate when receiving requests via one of the routes belonging to the specified Service. Leave unset for the plugin to activate regardless of the Service being matched.
Tags []string
An optional set of strings associated with the Plugin for grouping and filtering.
UpdatedAt float64
Unix epoch when the resource was last updated.
config GatewayPluginAiRequestTransformerConfig
consumerGroup GatewayPluginAiRequestTransformerConsumerGroup
If set, the plugin will activate only for requests where the specified consumer group has been authenticated. (Note that some plugins cannot be restricted to consumer groups this way.) Leave unset for the plugin to activate regardless of the authenticated Consumer Groups.
controlPlaneId String
The UUID of your control plane. This variable is available in the Konnect manager. Requires replacement if changed.
createdAt Double
Unix epoch when the resource was created.
enabled Boolean
Whether the plugin is applied.
gatewayPluginAiRequestTransformerId String
The ID of this resource.
instanceName String
ordering GatewayPluginAiRequestTransformerOrdering
protocols List<String>
A set of strings representing HTTP protocols.
route GatewayPluginAiRequestTransformerRoute
If set, the plugin will only activate when receiving requests via the specified route. Leave unset for the plugin to activate regardless of the route being used.
service GatewayPluginAiRequestTransformerService
If set, the plugin will only activate when receiving requests via one of the routes belonging to the specified Service. Leave unset for the plugin to activate regardless of the Service being matched.
tags List<String>
An optional set of strings associated with the Plugin for grouping and filtering.
updatedAt Double
Unix epoch when the resource was last updated.
config GatewayPluginAiRequestTransformerConfig
consumerGroup GatewayPluginAiRequestTransformerConsumerGroup
If set, the plugin will activate only for requests where the specified consumer group has been authenticated. (Note that some plugins cannot be restricted to consumer groups this way.) Leave unset for the plugin to activate regardless of the authenticated Consumer Groups.
controlPlaneId string
The UUID of your control plane. This variable is available in the Konnect manager. Requires replacement if changed.
createdAt number
Unix epoch when the resource was created.
enabled boolean
Whether the plugin is applied.
gatewayPluginAiRequestTransformerId string
The ID of this resource.
instanceName string
ordering GatewayPluginAiRequestTransformerOrdering
protocols string[]
A set of strings representing HTTP protocols.
route GatewayPluginAiRequestTransformerRoute
If set, the plugin will only activate when receiving requests via the specified route. Leave unset for the plugin to activate regardless of the route being used.
service GatewayPluginAiRequestTransformerService
If set, the plugin will only activate when receiving requests via one of the routes belonging to the specified Service. Leave unset for the plugin to activate regardless of the Service being matched.
tags string[]
An optional set of strings associated with the Plugin for grouping and filtering.
updatedAt number
Unix epoch when the resource was last updated.
config GatewayPluginAiRequestTransformerConfigArgs
consumer_group GatewayPluginAiRequestTransformerConsumerGroupArgs
If set, the plugin will activate only for requests where the specified consumer group has been authenticated. (Note that some plugins cannot be restricted to consumer groups this way.) Leave unset for the plugin to activate regardless of the authenticated Consumer Groups.
control_plane_id str
The UUID of your control plane. This variable is available in the Konnect manager. Requires replacement if changed.
created_at float
Unix epoch when the resource was created.
enabled bool
Whether the plugin is applied.
gateway_plugin_ai_request_transformer_id str
The ID of this resource.
instance_name str
ordering GatewayPluginAiRequestTransformerOrderingArgs
protocols Sequence[str]
A set of strings representing HTTP protocols.
route GatewayPluginAiRequestTransformerRouteArgs
If set, the plugin will only activate when receiving requests via the specified route. Leave unset for the plugin to activate regardless of the route being used.
service GatewayPluginAiRequestTransformerServiceArgs
If set, the plugin will only activate when receiving requests via one of the routes belonging to the specified Service. Leave unset for the plugin to activate regardless of the Service being matched.
tags Sequence[str]
An optional set of strings associated with the Plugin for grouping and filtering.
updated_at float
Unix epoch when the resource was last updated.
config Property Map
consumerGroup Property Map
If set, the plugin will activate only for requests where the specified consumer group has been authenticated. (Note that some plugins cannot be restricted to consumer groups this way.) Leave unset for the plugin to activate regardless of the authenticated Consumer Groups.
controlPlaneId String
The UUID of your control plane. This variable is available in the Konnect manager. Requires replacement if changed.
createdAt Number
Unix epoch when the resource was created.
enabled Boolean
Whether the plugin is applied.
gatewayPluginAiRequestTransformerId String
The ID of this resource.
instanceName String
ordering Property Map
protocols List<String>
A set of strings representing HTTP protocols.
route Property Map
If set, the plugin will only activate when receiving requests via the specified route. Leave unset for the plugin to activate regardless of the route being used.
service Property Map
If set, the plugin will only activate when receiving requests via one of the routes belonging to the specified Service. Leave unset for the plugin to activate regardless of the Service being matched.
tags List<String>
An optional set of strings associated with the Plugin for grouping and filtering.
updatedAt Number
Unix epoch when the resource was last updated.

Supporting Types

GatewayPluginAiRequestTransformerConfig
, GatewayPluginAiRequestTransformerConfigArgs

HttpProxyHost string
A string representing a host name, such as example.com.
HttpProxyPort double
An integer representing a port number between 0 and 65535, inclusive.
HttpTimeout double
Timeout in milliseconds for the AI upstream service.
HttpsProxyHost string
A string representing a host name, such as example.com.
HttpsProxyPort double
An integer representing a port number between 0 and 65535, inclusive.
HttpsVerify bool
Verify the TLS certificate of the AI upstream service.
Llm GatewayPluginAiRequestTransformerConfigLlm
MaxRequestBodySize double
Maximum allowed size of the request body to be introspected.
Prompt string
Use this prompt to tune the LLM system/assistant message for the incoming proxy request (from the client), and what you are expecting in return.
TransformationExtractPattern string
Defines the regular expression that must match to indicate a successful AI transformation at the request phase. The first match will be set as the outgoing body. If the AI service's response doesn't match this pattern, it is marked as a failure.
HttpProxyHost string
A string representing a host name, such as example.com.
HttpProxyPort float64
An integer representing a port number between 0 and 65535, inclusive.
HttpTimeout float64
Timeout in milliseconds for the AI upstream service.
HttpsProxyHost string
A string representing a host name, such as example.com.
HttpsProxyPort float64
An integer representing a port number between 0 and 65535, inclusive.
HttpsVerify bool
Verify the TLS certificate of the AI upstream service.
Llm GatewayPluginAiRequestTransformerConfigLlm
MaxRequestBodySize float64
Maximum allowed size of the request body to be introspected.
Prompt string
Use this prompt to tune the LLM system/assistant message for the incoming proxy request (from the client), and what you are expecting in return.
TransformationExtractPattern string
Defines the regular expression that must match to indicate a successful AI transformation at the request phase. The first match will be set as the outgoing body. If the AI service's response doesn't match this pattern, it is marked as a failure.
httpProxyHost String
A string representing a host name, such as example.com.
httpProxyPort Double
An integer representing a port number between 0 and 65535, inclusive.
httpTimeout Double
Timeout in milliseconds for the AI upstream service.
httpsProxyHost String
A string representing a host name, such as example.com.
httpsProxyPort Double
An integer representing a port number between 0 and 65535, inclusive.
httpsVerify Boolean
Verify the TLS certificate of the AI upstream service.
llm GatewayPluginAiRequestTransformerConfigLlm
maxRequestBodySize Double
Maximum allowed size of the request body to be introspected.
prompt String
Use this prompt to tune the LLM system/assistant message for the incoming proxy request (from the client), and what you are expecting in return.
transformationExtractPattern String
Defines the regular expression that must match to indicate a successful AI transformation at the request phase. The first match will be set as the outgoing body. If the AI service's response doesn't match this pattern, it is marked as a failure.
httpProxyHost string
A string representing a host name, such as example.com.
httpProxyPort number
An integer representing a port number between 0 and 65535, inclusive.
httpTimeout number
Timeout in milliseconds for the AI upstream service.
httpsProxyHost string
A string representing a host name, such as example.com.
httpsProxyPort number
An integer representing a port number between 0 and 65535, inclusive.
httpsVerify boolean
Verify the TLS certificate of the AI upstream service.
llm GatewayPluginAiRequestTransformerConfigLlm
maxRequestBodySize number
Maximum allowed size of the request body to be introspected.
prompt string
Use this prompt to tune the LLM system/assistant message for the incoming proxy request (from the client), and what you are expecting in return.
transformationExtractPattern string
Defines the regular expression that must match to indicate a successful AI transformation at the request phase. The first match will be set as the outgoing body. If the AI service's response doesn't match this pattern, it is marked as a failure.
http_proxy_host str
A string representing a host name, such as example.com.
http_proxy_port float
An integer representing a port number between 0 and 65535, inclusive.
http_timeout float
Timeout in milliseconds for the AI upstream service.
https_proxy_host str
A string representing a host name, such as example.com.
https_proxy_port float
An integer representing a port number between 0 and 65535, inclusive.
https_verify bool
Verify the TLS certificate of the AI upstream service.
llm GatewayPluginAiRequestTransformerConfigLlm
max_request_body_size float
Maximum allowed size of the request body to be introspected.
prompt str
Use this prompt to tune the LLM system/assistant message for the incoming proxy request (from the client), and what you are expecting in return.
transformation_extract_pattern str
Defines the regular expression that must match to indicate a successful AI transformation at the request phase. The first match will be set as the outgoing body. If the AI service's response doesn't match this pattern, it is marked as a failure.
httpProxyHost String
A string representing a host name, such as example.com.
httpProxyPort Number
An integer representing a port number between 0 and 65535, inclusive.
httpTimeout Number
Timeout in milliseconds for the AI upstream service.
httpsProxyHost String
A string representing a host name, such as example.com.
httpsProxyPort Number
An integer representing a port number between 0 and 65535, inclusive.
httpsVerify Boolean
Verify the TLS certificate of the AI upstream service.
llm Property Map
maxRequestBodySize Number
Maximum allowed body size to be introspected.
prompt String
Use this prompt to tune the LLM system/assistant message for the incoming proxy request (from the client), and what you are expecting in return.
transformationExtractPattern String
Defines the regular expression that must match to indicate a successful AI transformation at the request phase. The first match will be set as the outgoing body. If the AI service's response doesn't match this pattern, it is marked as a failure.

GatewayPluginAiRequestTransformerConfigLlm
, GatewayPluginAiRequestTransformerConfigLlmArgs

Auth GatewayPluginAiRequestTransformerConfigLlmAuth
Logging GatewayPluginAiRequestTransformerConfigLlmLogging
Model GatewayPluginAiRequestTransformerConfigLlmModel
RouteType string
The model's operation implementation, for this provider. Set to preserve to pass through without transformation. must be one of ["llm/v1/chat", "llm/v1/completions", "preserve"]
Auth GatewayPluginAiRequestTransformerConfigLlmAuth
Logging GatewayPluginAiRequestTransformerConfigLlmLogging
Model GatewayPluginAiRequestTransformerConfigLlmModel
RouteType string
The model's operation implementation, for this provider. Set to preserve to pass through without transformation. must be one of ["llm/v1/chat", "llm/v1/completions", "preserve"]
auth GatewayPluginAiRequestTransformerConfigLlmAuth
logging GatewayPluginAiRequestTransformerConfigLlmLogging
model GatewayPluginAiRequestTransformerConfigLlmModel
routeType String
The model's operation implementation, for this provider. Set to preserve to pass through without transformation. must be one of ["llm/v1/chat", "llm/v1/completions", "preserve"]
auth GatewayPluginAiRequestTransformerConfigLlmAuth
logging GatewayPluginAiRequestTransformerConfigLlmLogging
model GatewayPluginAiRequestTransformerConfigLlmModel
routeType string
The model's operation implementation, for this provider. Set to preserve to pass through without transformation. must be one of ["llm/v1/chat", "llm/v1/completions", "preserve"]
auth GatewayPluginAiRequestTransformerConfigLlmAuth
logging GatewayPluginAiRequestTransformerConfigLlmLogging
model GatewayPluginAiRequestTransformerConfigLlmModel
route_type str
The model's operation implementation, for this provider. Set to preserve to pass through without transformation. must be one of ["llm/v1/chat", "llm/v1/completions", "preserve"]
auth Property Map
logging Property Map
model Property Map
routeType String
The model's operation implementation, for this provider. Set to preserve to pass through without transformation. must be one of ["llm/v1/chat", "llm/v1/completions", "preserve"]

GatewayPluginAiRequestTransformerConfigLlmAuth
, GatewayPluginAiRequestTransformerConfigLlmAuthArgs

AllowOverride bool
If enabled, the authorization header or parameter can be overridden in the request by the value configured in the plugin.
AwsAccessKeyId string
Set this if you are using an AWS provider (Bedrock) and you are authenticating using static IAM User credentials. Setting this will override the AWS_ACCESS_KEY_ID environment variable for this plugin instance.
AwsSecretAccessKey string
Set this if you are using an AWS provider (Bedrock) and you are authenticating using static IAM User credentials. Setting this will override the AWS_SECRET_ACCESS_KEY environment variable for this plugin instance.
AzureClientId string
If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the client ID.
AzureClientSecret string
If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the client secret.
AzureTenantId string
If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the tenant ID.
AzureUseManagedIdentity bool
Set true to use the Azure Cloud Managed Identity (or user-assigned identity) to authenticate with Azure-provider models.
GcpServiceAccountJson string
Set this field to the full JSON of the GCP service account to authenticate, if required. If null (and gcp_use_service_account is true), Kong will attempt to read from environment variable GCP_SERVICE_ACCOUNT.
GcpUseServiceAccount bool
Use service account auth for GCP-based providers and models.
HeaderName string
If AI model requires authentication via Authorization or API key header, specify its name here.
HeaderValue string
Specify the full auth header value for 'header_name', for example 'Bearer key' or just 'key'.
ParamLocation string
Specify whether the 'param_name' and 'param_value' options go in a query string, or the POST form/JSON body. must be one of ["body", "query"]
ParamName string
If AI model requires authentication via query parameter, specify its name here.
ParamValue string
Specify the full parameter value for 'param_name'.
AllowOverride bool
If enabled, the authorization header or parameter can be overridden in the request by the value configured in the plugin.
AwsAccessKeyId string
Set this if you are using an AWS provider (Bedrock) and you are authenticating using static IAM User credentials. Setting this will override the AWS_ACCESS_KEY_ID environment variable for this plugin instance.
AwsSecretAccessKey string
Set this if you are using an AWS provider (Bedrock) and you are authenticating using static IAM User credentials. Setting this will override the AWS_SECRET_ACCESS_KEY environment variable for this plugin instance.
AzureClientId string
If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the client ID.
AzureClientSecret string
If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the client secret.
AzureTenantId string
If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the tenant ID.
AzureUseManagedIdentity bool
Set true to use the Azure Cloud Managed Identity (or user-assigned identity) to authenticate with Azure-provider models.
GcpServiceAccountJson string
Set this field to the full JSON of the GCP service account to authenticate, if required. If null (and gcp_use_service_account is true), Kong will attempt to read from environment variable GCP_SERVICE_ACCOUNT.
GcpUseServiceAccount bool
Use service account auth for GCP-based providers and models.
HeaderName string
If AI model requires authentication via Authorization or API key header, specify its name here.
HeaderValue string
Specify the full auth header value for 'header_name', for example 'Bearer key' or just 'key'.
ParamLocation string
Specify whether the 'param_name' and 'param_value' options go in a query string, or the POST form/JSON body. must be one of ["body", "query"]
ParamName string
If AI model requires authentication via query parameter, specify its name here.
ParamValue string
Specify the full parameter value for 'param_name'.
allowOverride Boolean
If enabled, the authorization header or parameter can be overridden in the request by the value configured in the plugin.
awsAccessKeyId String
Set this if you are using an AWS provider (Bedrock) and you are authenticating using static IAM User credentials. Setting this will override the AWS_ACCESS_KEY_ID environment variable for this plugin instance.
awsSecretAccessKey String
Set this if you are using an AWS provider (Bedrock) and you are authenticating using static IAM User credentials. Setting this will override the AWS_SECRET_ACCESS_KEY environment variable for this plugin instance.
azureClientId String
If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the client ID.
azureClientSecret String
If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the client secret.
azureTenantId String
If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the tenant ID.
azureUseManagedIdentity Boolean
Set true to use the Azure Cloud Managed Identity (or user-assigned identity) to authenticate with Azure-provider models.
gcpServiceAccountJson String
Set this field to the full JSON of the GCP service account to authenticate, if required. If null (and gcp_use_service_account is true), Kong will attempt to read from environment variable GCP_SERVICE_ACCOUNT.
gcpUseServiceAccount Boolean
Use service account auth for GCP-based providers and models.
headerName String
If AI model requires authentication via Authorization or API key header, specify its name here.
headerValue String
Specify the full auth header value for 'header_name', for example 'Bearer key' or just 'key'.
paramLocation String
Specify whether the 'param_name' and 'param_value' options go in a query string, or the POST form/JSON body. must be one of ["body", "query"]
paramName String
If AI model requires authentication via query parameter, specify its name here.
paramValue String
Specify the full parameter value for 'param_name'.
allowOverride boolean
If enabled, the authorization header or parameter can be overridden in the request by the value configured in the plugin.
awsAccessKeyId string
Set this if you are using an AWS provider (Bedrock) and you are authenticating using static IAM User credentials. Setting this will override the AWS_ACCESS_KEY_ID environment variable for this plugin instance.
awsSecretAccessKey string
Set this if you are using an AWS provider (Bedrock) and you are authenticating using static IAM User credentials. Setting this will override the AWS_SECRET_ACCESS_KEY environment variable for this plugin instance.
azureClientId string
If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the client ID.
azureClientSecret string
If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the client secret.
azureTenantId string
If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the tenant ID.
azureUseManagedIdentity boolean
Set true to use the Azure Cloud Managed Identity (or user-assigned identity) to authenticate with Azure-provider models.
gcpServiceAccountJson string
Set this field to the full JSON of the GCP service account to authenticate, if required. If null (and gcp_use_service_account is true), Kong will attempt to read from environment variable GCP_SERVICE_ACCOUNT.
gcpUseServiceAccount boolean
Use service account auth for GCP-based providers and models.
headerName string
If AI model requires authentication via Authorization or API key header, specify its name here.
headerValue string
Specify the full auth header value for 'header_name', for example 'Bearer key' or just 'key'.
paramLocation string
Specify whether the 'param_name' and 'param_value' options go in a query string, or the POST form/JSON body. must be one of ["body", "query"]
paramName string
If AI model requires authentication via query parameter, specify its name here.
paramValue string
Specify the full parameter value for 'param_name'.
allow_override bool
If enabled, the authorization header or parameter can be overridden in the request by the value configured in the plugin.
aws_access_key_id str
Set this if you are using an AWS provider (Bedrock) and you are authenticating using static IAM User credentials. Setting this will override the AWS_ACCESS_KEY_ID environment variable for this plugin instance.
aws_secret_access_key str
Set this if you are using an AWS provider (Bedrock) and you are authenticating using static IAM User credentials. Setting this will override the AWS_SECRET_ACCESS_KEY environment variable for this plugin instance.
azure_client_id str
If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the client ID.
azure_client_secret str
If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the client secret.
azure_tenant_id str
If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the tenant ID.
azure_use_managed_identity bool
Set true to use the Azure Cloud Managed Identity (or user-assigned identity) to authenticate with Azure-provider models.
gcp_service_account_json str
Set this field to the full JSON of the GCP service account to authenticate, if required. If null (and gcp_use_service_account is true), Kong will attempt to read from environment variable GCP_SERVICE_ACCOUNT.
gcp_use_service_account bool
Use service account auth for GCP-based providers and models.
header_name str
If AI model requires authentication via Authorization or API key header, specify its name here.
header_value str
Specify the full auth header value for 'header_name', for example 'Bearer key' or just 'key'.
param_location str
Specify whether the 'param_name' and 'param_value' options go in a query string, or the POST form/JSON body. must be one of ["body", "query"]
param_name str
If AI model requires authentication via query parameter, specify its name here.
param_value str
Specify the full parameter value for 'param_name'.
allowOverride Boolean
If enabled, the authorization header or parameter can be overridden in the request by the value configured in the plugin.
awsAccessKeyId String
Set this if you are using an AWS provider (Bedrock) and you are authenticating using static IAM User credentials. Setting this will override the AWS_ACCESS_KEY_ID environment variable for this plugin instance.
awsSecretAccessKey String
Set this if you are using an AWS provider (Bedrock) and you are authenticating using static IAM User credentials. Setting this will override the AWS_SECRET_ACCESS_KEY environment variable for this plugin instance.
azureClientId String
If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the client ID.
azureClientSecret String
If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the client secret.
azureTenantId String
If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the tenant ID.
azureUseManagedIdentity Boolean
Set true to use the Azure Cloud Managed Identity (or user-assigned identity) to authenticate with Azure-provider models.
gcpServiceAccountJson String
Set this field to the full JSON of the GCP service account to authenticate, if required. If null (and gcp_use_service_account is true), Kong will attempt to read from environment variable GCP_SERVICE_ACCOUNT.
gcpUseServiceAccount Boolean
Use service account auth for GCP-based providers and models.
headerName String
If AI model requires authentication via Authorization or API key header, specify its name here.
headerValue String
Specify the full auth header value for 'header_name', for example 'Bearer key' or just 'key'.
paramLocation String
Specify whether the 'param_name' and 'param_value' options go in a query string, or the POST form/JSON body. must be one of ["body", "query"]
paramName String
If AI model requires authentication via query parameter, specify its name here.
paramValue String
Specify the full parameter value for 'param_name'.

GatewayPluginAiRequestTransformerConfigLlmLogging
, GatewayPluginAiRequestTransformerConfigLlmLoggingArgs

LogPayloads bool
If enabled, will log the request and response body into the Kong log plugin(s) output.
LogStatistics bool
If enabled and supported by the driver, will add model usage and token metrics into the Kong log plugin(s) output.
LogPayloads bool
If enabled, will log the request and response body into the Kong log plugin(s) output.
LogStatistics bool
If enabled and supported by the driver, will add model usage and token metrics into the Kong log plugin(s) output.
logPayloads Boolean
If enabled, will log the request and response body into the Kong log plugin(s) output.
logStatistics Boolean
If enabled and supported by the driver, will add model usage and token metrics into the Kong log plugin(s) output.
logPayloads boolean
If enabled, will log the request and response body into the Kong log plugin(s) output.
logStatistics boolean
If enabled and supported by the driver, will add model usage and token metrics into the Kong log plugin(s) output.
log_payloads bool
If enabled, will log the request and response body into the Kong log plugin(s) output.
log_statistics bool
If enabled and supported by the driver, will add model usage and token metrics into the Kong log plugin(s) output.
logPayloads Boolean
If enabled, will log the request and response body into the Kong log plugin(s) output.
logStatistics Boolean
If enabled and supported by the driver, will add model usage and token metrics into the Kong log plugin(s) output.

GatewayPluginAiRequestTransformerConfigLlmModel
, GatewayPluginAiRequestTransformerConfigLlmModelArgs

Name string
Model name to execute.
Options GatewayPluginAiRequestTransformerConfigLlmModelOptions
Key/value settings for the model
Provider string
AI provider request format - Kong translates requests to and from the specified backend compatible formats. must be one of ["anthropic", "azure", "bedrock", "cohere", "gemini", "huggingface", "llama2", "mistral", "openai"]
Name string
Model name to execute.
Options GatewayPluginAiRequestTransformerConfigLlmModelOptions
Key/value settings for the model
Provider string
AI provider request format - Kong translates requests to and from the specified backend compatible formats. must be one of ["anthropic", "azure", "bedrock", "cohere", "gemini", "huggingface", "llama2", "mistral", "openai"]
name String
Model name to execute.
options GatewayPluginAiRequestTransformerConfigLlmModelOptions
Key/value settings for the model
provider String
AI provider request format - Kong translates requests to and from the specified backend compatible formats. must be one of ["anthropic", "azure", "bedrock", "cohere", "gemini", "huggingface", "llama2", "mistral", "openai"]
name string
Model name to execute.
options GatewayPluginAiRequestTransformerConfigLlmModelOptions
Key/value settings for the model
provider string
AI provider request format - Kong translates requests to and from the specified backend compatible formats. must be one of ["anthropic", "azure", "bedrock", "cohere", "gemini", "huggingface", "llama2", "mistral", "openai"]
name str
Model name to execute.
options GatewayPluginAiRequestTransformerConfigLlmModelOptions
Key/value settings for the model
provider str
AI provider request format - Kong translates requests to and from the specified backend compatible formats. must be one of ["anthropic", "azure", "bedrock", "cohere", "gemini", "huggingface", "llama2", "mistral", "openai"]
name String
Model name to execute.
options Property Map
Key/value settings for the model
provider String
AI provider request format - Kong translates requests to and from the specified backend compatible formats. must be one of ["anthropic", "azure", "bedrock", "cohere", "gemini", "huggingface", "llama2", "mistral", "openai"]

GatewayPluginAiRequestTransformerConfigLlmModelOptions
, GatewayPluginAiRequestTransformerConfigLlmModelOptionsArgs

AnthropicVersion string
Defines the schema/API version, if using Anthropic provider.
AzureApiVersion string
'api-version' for Azure OpenAI instances.
AzureDeploymentId string
Deployment ID for Azure OpenAI instances.
AzureInstance string
Instance name for Azure OpenAI hosted models.
Bedrock GatewayPluginAiRequestTransformerConfigLlmModelOptionsBedrock
Gemini GatewayPluginAiRequestTransformerConfigLlmModelOptionsGemini
Huggingface GatewayPluginAiRequestTransformerConfigLlmModelOptionsHuggingface
InputCost double
Defines the cost per 1M tokens in your prompt.
Llama2Format string
If using llama2 provider, select the upstream message format. must be one of ["ollama", "openai", "raw"]
MaxTokens double
Defines the max_tokens, if using chat or completion models.
MistralFormat string
If using mistral provider, select the upstream message format. must be one of ["ollama", "openai"]
OutputCost double
Defines the cost per 1M tokens in the output of the AI.
Temperature double
Defines the matching temperature, if using chat or completion models.
TopK double
Defines the top-k most likely tokens, if supported.
TopP double
Defines the top-p probability mass, if supported.
UpstreamPath string
Manually specify or override the AI operation path, used when e.g. using the 'preserve' route_type.
UpstreamUrl string
Manually specify or override the full URL to the AI operation endpoints, when calling (self-)hosted models, or for running via a private endpoint.
AnthropicVersion string
Defines the schema/API version, if using Anthropic provider.
AzureApiVersion string
'api-version' for Azure OpenAI instances.
AzureDeploymentId string
Deployment ID for Azure OpenAI instances.
AzureInstance string
Instance name for Azure OpenAI hosted models.
Bedrock GatewayPluginAiRequestTransformerConfigLlmModelOptionsBedrock
Gemini GatewayPluginAiRequestTransformerConfigLlmModelOptionsGemini
Huggingface GatewayPluginAiRequestTransformerConfigLlmModelOptionsHuggingface
InputCost float64
Defines the cost per 1M tokens in your prompt.
Llama2Format string
If using llama2 provider, select the upstream message format. must be one of ["ollama", "openai", "raw"]
MaxTokens float64
Defines the max_tokens, if using chat or completion models.
MistralFormat string
If using mistral provider, select the upstream message format. must be one of ["ollama", "openai"]
OutputCost float64
Defines the cost per 1M tokens in the output of the AI.
Temperature float64
Defines the matching temperature, if using chat or completion models.
TopK float64
Defines the top-k most likely tokens, if supported.
TopP float64
Defines the top-p probability mass, if supported.
UpstreamPath string
Manually specify or override the AI operation path, used when e.g. using the 'preserve' route_type.
UpstreamUrl string
Manually specify or override the full URL to the AI operation endpoints, when calling (self-)hosted models, or for running via a private endpoint.
anthropicVersion String
Defines the schema/API version, if using Anthropic provider.
azureApiVersion String
'api-version' for Azure OpenAI instances.
azureDeploymentId String
Deployment ID for Azure OpenAI instances.
azureInstance String
Instance name for Azure OpenAI hosted models.
bedrock GatewayPluginAiRequestTransformerConfigLlmModelOptionsBedrock
gemini GatewayPluginAiRequestTransformerConfigLlmModelOptionsGemini
huggingface GatewayPluginAiRequestTransformerConfigLlmModelOptionsHuggingface
inputCost Double
Defines the cost per 1M tokens in your prompt.
llama2Format String
If using llama2 provider, select the upstream message format. must be one of ["ollama", "openai", "raw"]
maxTokens Double
Defines the max_tokens, if using chat or completion models.
mistralFormat String
If using mistral provider, select the upstream message format. must be one of ["ollama", "openai"]
outputCost Double
Defines the cost per 1M tokens in the output of the AI.
temperature Double
Defines the matching temperature, if using chat or completion models.
topK Double
Defines the top-k most likely tokens, if supported.
topP Double
Defines the top-p probability mass, if supported.
upstreamPath String
Manually specify or override the AI operation path, used when e.g. using the 'preserve' route_type.
upstreamUrl String
Manually specify or override the full URL to the AI operation endpoints, when calling (self-)hosted models, or for running via a private endpoint.
anthropicVersion string
Defines the schema/API version, if using Anthropic provider.
azureApiVersion string
'api-version' for Azure OpenAI instances.
azureDeploymentId string
Deployment ID for Azure OpenAI instances.
azureInstance string
Instance name for Azure OpenAI hosted models.
bedrock GatewayPluginAiRequestTransformerConfigLlmModelOptionsBedrock
gemini GatewayPluginAiRequestTransformerConfigLlmModelOptionsGemini
huggingface GatewayPluginAiRequestTransformerConfigLlmModelOptionsHuggingface
inputCost number
Defines the cost per 1M tokens in your prompt.
llama2Format string
If using llama2 provider, select the upstream message format. must be one of ["ollama", "openai", "raw"]
maxTokens number
Defines the max_tokens, if using chat or completion models.
mistralFormat string
If using mistral provider, select the upstream message format. must be one of ["ollama", "openai"]
outputCost number
Defines the cost per 1M tokens in the output of the AI.
temperature number
Defines the matching temperature, if using chat or completion models.
topK number
Defines the top-k most likely tokens, if supported.
topP number
Defines the top-p probability mass, if supported.
upstreamPath string
Manually specify or override the AI operation path, used when e.g. using the 'preserve' route_type.
upstreamUrl string
Manually specify or override the full URL to the AI operation endpoints, when calling (self-)hosted models, or for running via a private endpoint.
anthropic_version str
Defines the schema/API version, if using Anthropic provider.
azure_api_version str
'api-version' for Azure OpenAI instances.
azure_deployment_id str
Deployment ID for Azure OpenAI instances.
azure_instance str
Instance name for Azure OpenAI hosted models.
bedrock GatewayPluginAiRequestTransformerConfigLlmModelOptionsBedrock
gemini GatewayPluginAiRequestTransformerConfigLlmModelOptionsGemini
huggingface GatewayPluginAiRequestTransformerConfigLlmModelOptionsHuggingface
input_cost float
Defines the cost per 1M tokens in your prompt.
llama2_format str
If using llama2 provider, select the upstream message format. must be one of ["ollama", "openai", "raw"]
max_tokens float
Defines the max_tokens, if using chat or completion models.
mistral_format str
If using mistral provider, select the upstream message format. must be one of ["ollama", "openai"]
output_cost float
Defines the cost per 1M tokens in the output of the AI.
temperature float
Defines the matching temperature, if using chat or completion models.
top_k float
Defines the top-k most likely tokens, if supported.
top_p float
Defines the top-p probability mass, if supported.
upstream_path str
Manually specify or override the AI operation path, used when e.g. using the 'preserve' route_type.
upstream_url str
Manually specify or override the full URL to the AI operation endpoints, when calling (self-)hosted models, or for running via a private endpoint.
anthropicVersion String
Defines the schema/API version, if using Anthropic provider.
azureApiVersion String
'api-version' for Azure OpenAI instances.
azureDeploymentId String
Deployment ID for Azure OpenAI instances.
azureInstance String
Instance name for Azure OpenAI hosted models.
bedrock Property Map
gemini Property Map
huggingface Property Map
inputCost Number
Defines the cost per 1M tokens in your prompt.
llama2Format String
If using llama2 provider, select the upstream message format. must be one of ["ollama", "openai", "raw"]
maxTokens Number
Defines the max_tokens, if using chat or completion models.
mistralFormat String
If using mistral provider, select the upstream message format. must be one of ["ollama", "openai"]
outputCost Number
Defines the cost per 1M tokens in the output of the AI.
temperature Number
Defines the matching temperature, if using chat or completion models.
topK Number
Defines the top-k most likely tokens, if supported.
topP Number
Defines the top-p probability mass, if supported.
upstreamPath String
Manually specify or override the AI operation path, used when e.g. using the 'preserve' route_type.
upstreamUrl String
Manually specify or override the full URL to the AI operation endpoints, when calling (self-)hosted models, or for running via a private endpoint.

GatewayPluginAiRequestTransformerConfigLlmModelOptionsBedrock
, GatewayPluginAiRequestTransformerConfigLlmModelOptionsBedrockArgs

AwsRegion string
If using AWS providers (Bedrock) you can override the AWS_REGION environment variable by setting this option.
AwsRegion string
If using AWS providers (Bedrock) you can override the AWS_REGION environment variable by setting this option.
awsRegion String
If using AWS providers (Bedrock) you can override the AWS_REGION environment variable by setting this option.
awsRegion string
If using AWS providers (Bedrock) you can override the AWS_REGION environment variable by setting this option.
aws_region str
If using AWS providers (Bedrock) you can override the AWS_REGION environment variable by setting this option.
awsRegion String
If using AWS providers (Bedrock) you can override the AWS_REGION environment variable by setting this option.

GatewayPluginAiRequestTransformerConfigLlmModelOptionsGemini
, GatewayPluginAiRequestTransformerConfigLlmModelOptionsGeminiArgs

ApiEndpoint string
If running Gemini on Vertex, specify the regional API endpoint (hostname only).
LocationId string
If running Gemini on Vertex, specify the location ID.
ProjectId string
If running Gemini on Vertex, specify the project ID.
ApiEndpoint string
If running Gemini on Vertex, specify the regional API endpoint (hostname only).
LocationId string
If running Gemini on Vertex, specify the location ID.
ProjectId string
If running Gemini on Vertex, specify the project ID.
apiEndpoint String
If running Gemini on Vertex, specify the regional API endpoint (hostname only).
locationId String
If running Gemini on Vertex, specify the location ID.
projectId String
If running Gemini on Vertex, specify the project ID.
apiEndpoint string
If running Gemini on Vertex, specify the regional API endpoint (hostname only).
locationId string
If running Gemini on Vertex, specify the location ID.
projectId string
If running Gemini on Vertex, specify the project ID.
api_endpoint str
If running Gemini on Vertex, specify the regional API endpoint (hostname only).
location_id str
If running Gemini on Vertex, specify the location ID.
project_id str
If running Gemini on Vertex, specify the project ID.
apiEndpoint String
If running Gemini on Vertex, specify the regional API endpoint (hostname only).
locationId String
If running Gemini on Vertex, specify the location ID.
projectId String
If running Gemini on Vertex, specify the project ID.

GatewayPluginAiRequestTransformerConfigLlmModelOptionsHuggingface
, GatewayPluginAiRequestTransformerConfigLlmModelOptionsHuggingfaceArgs

UseCache bool
Use the cache layer on the inference API
WaitForModel bool
Wait for the model if it is not ready
UseCache bool
Use the cache layer on the inference API
WaitForModel bool
Wait for the model if it is not ready
useCache Boolean
Use the cache layer on the inference API
waitForModel Boolean
Wait for the model if it is not ready
useCache boolean
Use the cache layer on the inference API
waitForModel boolean
Wait for the model if it is not ready
use_cache bool
Use the cache layer on the inference API
wait_for_model bool
Wait for the model if it is not ready
useCache Boolean
Use the cache layer on the inference API
waitForModel Boolean
Wait for the model if it is not ready

GatewayPluginAiRequestTransformerConsumerGroup
, GatewayPluginAiRequestTransformerConsumerGroupArgs

Id string
Id string
id String
id string
id str
id String

GatewayPluginAiRequestTransformerOrdering
, GatewayPluginAiRequestTransformerOrderingArgs

GatewayPluginAiRequestTransformerOrderingAfter
, GatewayPluginAiRequestTransformerOrderingAfterArgs

Accesses List<string>
Accesses []string
accesses List<String>
accesses string[]
accesses Sequence[str]
accesses List<String>

GatewayPluginAiRequestTransformerOrderingBefore
, GatewayPluginAiRequestTransformerOrderingBeforeArgs

Accesses List<string>
Accesses []string
accesses List<String>
accesses string[]
accesses Sequence[str]
accesses List<String>

GatewayPluginAiRequestTransformerRoute
, GatewayPluginAiRequestTransformerRouteArgs

Id string
Id string
id String
id string
id str
id String

GatewayPluginAiRequestTransformerService
, GatewayPluginAiRequestTransformerServiceArgs

Id string
Id string
id String
id string
id str
id String

Import

$ pulumi import konnect:index/gatewayPluginAiRequestTransformer:GatewayPluginAiRequestTransformer my_konnect_gateway_plugin_ai_request_transformer "{ \"control_plane_id\": \"9524ec7d-36d9-465d-a8c5-83a3c9390458\", \"plugin_id\": \"3473c251-5b6c-4f45-b1ff-7ede735a366d\"}"
Copy

To learn more about importing existing cloud resources, see Importing resources.

Package Details

Repository
konnect kong/terraform-provider-konnect
License
Notes
This Pulumi package is based on the konnect Terraform Provider.