konnect.GatewayPluginAiProxy
Explore with Pulumi AI
GatewayPluginAiProxy Resource
Example Usage
Coming soon!
Coming soon!
Coming soon!
Coming soon!
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.konnect.GatewayPluginAiProxy;
import com.pulumi.konnect.GatewayPluginAiProxyArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiProxyConfigArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiProxyConfigAuthArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiProxyConfigLoggingArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiProxyConfigModelArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiProxyConfigModelOptionsArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiProxyConfigModelOptionsBedrockArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiProxyConfigModelOptionsGeminiArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiProxyConfigModelOptionsHuggingfaceArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiProxyConsumerArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiProxyConsumerGroupArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiProxyOrderingArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiProxyOrderingAfterArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiProxyOrderingBeforeArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiProxyRouteArgs;
import com.pulumi.konnect.inputs.GatewayPluginAiProxyServiceArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
// Example program: configures a Kong Konnect AI Proxy gateway plugin.
// NOTE: Pulumi Java builders use camelCase setter names generated from the
// schema (see the canonical constructor example below): maxRequestBodySize,
// modelNameHeader, responseStreaming, routeType, accesses — the snake_case
// variants previously used here do not exist on the generated builders.
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var myGatewaypluginaiproxy = new GatewayPluginAiProxy("myGatewaypluginaiproxy", GatewayPluginAiProxyArgs.builder()
            .config(GatewayPluginAiProxyConfigArgs.builder()
                .auth(GatewayPluginAiProxyConfigAuthArgs.builder()
                    .allowOverride(true)
                    .awsAccessKeyId("...my_aws_access_key_id...")
                    .awsSecretAccessKey("...my_aws_secret_access_key...")
                    .azureClientId("...my_azure_client_id...")
                    .azureClientSecret("...my_azure_client_secret...")
                    .azureTenantId("...my_azure_tenant_id...")
                    .azureUseManagedIdentity(true)
                    .gcpServiceAccountJson("...my_gcp_service_account_json...")
                    .gcpUseServiceAccount(false)
                    .headerName("...my_header_name...")
                    .headerValue("...my_header_value...")
                    .paramLocation("query")
                    .paramName("...my_param_name...")
                    .paramValue("...my_param_value...")
                    .build())
                .logging(GatewayPluginAiProxyConfigLoggingArgs.builder()
                    .logPayloads(false)
                    .logStatistics(true)
                    .build())
                // camelCase: was .max_request_body_size(10), which is not a generated builder method
                .maxRequestBodySize(10)
                .model(GatewayPluginAiProxyConfigModelArgs.builder()
                    .name("...my_name...")
                    .options(GatewayPluginAiProxyConfigModelOptionsArgs.builder()
                        .anthropicVersion("...my_anthropic_version...")
                        .azureApiVersion("...my_azure_api_version...")
                        .azureDeploymentId("...my_azure_deployment_id...")
                        .azureInstance("...my_azure_instance...")
                        .bedrock(GatewayPluginAiProxyConfigModelOptionsBedrockArgs.builder()
                            .awsRegion("...my_aws_region...")
                            .build())
                        .gemini(GatewayPluginAiProxyConfigModelOptionsGeminiArgs.builder()
                            .apiEndpoint("...my_api_endpoint...")
                            .locationId("...my_location_id...")
                            .projectId("...my_project_id...")
                            .build())
                        .huggingface(GatewayPluginAiProxyConfigModelOptionsHuggingfaceArgs.builder()
                            .useCache(true)
                            .waitForModel(false)
                            .build())
                        .inputCost(7.42)
                        .llama2Format("openai")
                        .maxTokens(9)
                        .mistralFormat("ollama")
                        .outputCost(1.81)
                        .temperature(2.26)
                        .topK(359)
                        .topP(0.14)
                        .upstreamPath("...my_upstream_path...")
                        .upstreamUrl("...my_upstream_url...")
                        .build())
                    .provider("anthropic")
                    .build())
                // camelCase: were .model_name_header / .response_streaming / .route_type
                .modelNameHeader(true)
                .responseStreaming("allow")
                .routeType("llm/v1/chat")
                .build())
            .consumer(GatewayPluginAiProxyConsumerArgs.builder()
                .id("...my_id...")
                .build())
            .consumerGroup(GatewayPluginAiProxyConsumerGroupArgs.builder()
                .id("...my_id...")
                .build())
            .controlPlaneId("9524ec7d-36d9-465d-a8c5-83a3c9390458")
            .enabled(true)
            .gatewayPluginAiProxyId("...my_id...")
            .instanceName("...my_instance_name...")
            .ordering(GatewayPluginAiProxyOrderingArgs.builder()
                .after(GatewayPluginAiProxyOrderingAfterArgs.builder()
                    // property is a list; generated setter is the varargs .accesses(...)
                    .accesses("...")
                    .build())
                .before(GatewayPluginAiProxyOrderingBeforeArgs.builder()
                    .accesses("...")
                    .build())
                .build())
            .protocols("grpcs")
            .route(GatewayPluginAiProxyRouteArgs.builder()
                .id("...my_id...")
                .build())
            .service(GatewayPluginAiProxyServiceArgs.builder()
                .id("...my_id...")
                .build())
            .tags("...")
            .build());
    }
}
resources:
  myGatewaypluginaiproxy:
    type: konnect:GatewayPluginAiProxy
    properties:
      config:
        auth:
          allowOverride: true
          awsAccessKeyId: '...my_aws_access_key_id...'
          awsSecretAccessKey: '...my_aws_secret_access_key...'
          azureClientId: '...my_azure_client_id...'
          azureClientSecret: '...my_azure_client_secret...'
          azureTenantId: '...my_azure_tenant_id...'
          azureUseManagedIdentity: true
          gcpServiceAccountJson: '...my_gcp_service_account_json...'
          gcpUseServiceAccount: false
          headerName: '...my_header_name...'
          headerValue: '...my_header_value...'
          paramLocation: query
          paramName: '...my_param_name...'
          paramValue: '...my_param_value...'
        logging:
          logPayloads: false
          logStatistics: true
        maxRequestBodySize: 10
        model:
          name: '...my_name...'
          options:
            anthropicVersion: '...my_anthropic_version...'
            azureApiVersion: '...my_azure_api_version...'
            azureDeploymentId: '...my_azure_deployment_id...'
            azureInstance: '...my_azure_instance...'
            bedrock:
              awsRegion: '...my_aws_region...'
            gemini:
              apiEndpoint: '...my_api_endpoint...'
              locationId: '...my_location_id...'
              projectId: '...my_project_id...'
            huggingface:
              useCache: true
              waitForModel: false
            inputCost: 7.42
            llama2Format: openai
            maxTokens: 9
            mistralFormat: ollama
            outputCost: 1.81
            temperature: 2.26
            topK: 359
            topP: 0.14
            upstreamPath: '...my_upstream_path...'
            upstreamUrl: '...my_upstream_url...'
          provider: anthropic
        modelNameHeader: true
        responseStreaming: allow
        routeType: llm/v1/chat
      consumer:
        id: '...my_id...'
      consumerGroup:
        id: '...my_id...'
      controlPlaneId: 9524ec7d-36d9-465d-a8c5-83a3c9390458
      enabled: true
      gatewayPluginAiProxyId: '...my_id...'
      instanceName: '...my_instance_name...'
      ordering:
        after:
          accesses:
            - '...'
        before:
          accesses:
            - '...'
      protocols:
        - grpcs
      route:
        id: '...my_id...'
      service:
        id: '...my_id...'
      tags:
        - '...'
Create GatewayPluginAiProxy Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new GatewayPluginAiProxy(name: string, args: GatewayPluginAiProxyArgs, opts?: CustomResourceOptions);
@overload
def GatewayPluginAiProxy(resource_name: str,
                         args: GatewayPluginAiProxyArgs,
                         opts: Optional[ResourceOptions] = None)
@overload
def GatewayPluginAiProxy(resource_name: str,
                         opts: Optional[ResourceOptions] = None,
                         config: Optional[GatewayPluginAiProxyConfigArgs] = None,
                         control_plane_id: Optional[str] = None,
                         consumer: Optional[GatewayPluginAiProxyConsumerArgs] = None,
                         consumer_group: Optional[GatewayPluginAiProxyConsumerGroupArgs] = None,
                         enabled: Optional[bool] = None,
                         gateway_plugin_ai_proxy_id: Optional[str] = None,
                         instance_name: Optional[str] = None,
                         ordering: Optional[GatewayPluginAiProxyOrderingArgs] = None,
                         protocols: Optional[Sequence[str]] = None,
                         route: Optional[GatewayPluginAiProxyRouteArgs] = None,
                         service: Optional[GatewayPluginAiProxyServiceArgs] = None,
                         tags: Optional[Sequence[str]] = None)
func NewGatewayPluginAiProxy(ctx *Context, name string, args GatewayPluginAiProxyArgs, opts ...ResourceOption) (*GatewayPluginAiProxy, error)
public GatewayPluginAiProxy(string name, GatewayPluginAiProxyArgs args, CustomResourceOptions? opts = null)
public GatewayPluginAiProxy(String name, GatewayPluginAiProxyArgs args)
public GatewayPluginAiProxy(String name, GatewayPluginAiProxyArgs args, CustomResourceOptions options)
type: konnect:GatewayPluginAiProxy
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args GatewayPluginAiProxyArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args GatewayPluginAiProxyArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args GatewayPluginAiProxyArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args GatewayPluginAiProxyArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args GatewayPluginAiProxyArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var gatewayPluginAiProxyResource = new Konnect.GatewayPluginAiProxy("gatewayPluginAiProxyResource", new()
{
    Config = new Konnect.Inputs.GatewayPluginAiProxyConfigArgs
    {
        Auth = new Konnect.Inputs.GatewayPluginAiProxyConfigAuthArgs
        {
            AllowOverride = false,
            AwsAccessKeyId = "string",
            AwsSecretAccessKey = "string",
            AzureClientId = "string",
            AzureClientSecret = "string",
            AzureTenantId = "string",
            AzureUseManagedIdentity = false,
            GcpServiceAccountJson = "string",
            GcpUseServiceAccount = false,
            HeaderName = "string",
            HeaderValue = "string",
            ParamLocation = "string",
            ParamName = "string",
            ParamValue = "string",
        },
        Logging = new Konnect.Inputs.GatewayPluginAiProxyConfigLoggingArgs
        {
            LogPayloads = false,
            LogStatistics = false,
        },
        MaxRequestBodySize = 0,
        Model = new Konnect.Inputs.GatewayPluginAiProxyConfigModelArgs
        {
            Name = "string",
            Options = new Konnect.Inputs.GatewayPluginAiProxyConfigModelOptionsArgs
            {
                AnthropicVersion = "string",
                AzureApiVersion = "string",
                AzureDeploymentId = "string",
                AzureInstance = "string",
                Bedrock = new Konnect.Inputs.GatewayPluginAiProxyConfigModelOptionsBedrockArgs
                {
                    AwsRegion = "string",
                },
                Gemini = new Konnect.Inputs.GatewayPluginAiProxyConfigModelOptionsGeminiArgs
                {
                    ApiEndpoint = "string",
                    LocationId = "string",
                    ProjectId = "string",
                },
                Huggingface = new Konnect.Inputs.GatewayPluginAiProxyConfigModelOptionsHuggingfaceArgs
                {
                    UseCache = false,
                    WaitForModel = false,
                },
                InputCost = 0,
                Llama2Format = "string",
                MaxTokens = 0,
                MistralFormat = "string",
                OutputCost = 0,
                Temperature = 0,
                TopK = 0,
                TopP = 0,
                UpstreamPath = "string",
                UpstreamUrl = "string",
            },
            Provider = "string",
        },
        ModelNameHeader = false,
        ResponseStreaming = "string",
        RouteType = "string",
    },
    ControlPlaneId = "string",
    Consumer = new Konnect.Inputs.GatewayPluginAiProxyConsumerArgs
    {
        Id = "string",
    },
    ConsumerGroup = new Konnect.Inputs.GatewayPluginAiProxyConsumerGroupArgs
    {
        Id = "string",
    },
    Enabled = false,
    GatewayPluginAiProxyId = "string",
    InstanceName = "string",
    Ordering = new Konnect.Inputs.GatewayPluginAiProxyOrderingArgs
    {
        After = new Konnect.Inputs.GatewayPluginAiProxyOrderingAfterArgs
        {
            Accesses = new[]
            {
                "string",
            },
        },
        Before = new Konnect.Inputs.GatewayPluginAiProxyOrderingBeforeArgs
        {
            Accesses = new[]
            {
                "string",
            },
        },
    },
    Protocols = new[]
    {
        "string",
    },
    Route = new Konnect.Inputs.GatewayPluginAiProxyRouteArgs
    {
        Id = "string",
    },
    Service = new Konnect.Inputs.GatewayPluginAiProxyServiceArgs
    {
        Id = "string",
    },
    Tags = new[]
    {
        "string",
    },
});
// Go example: the nested args structs previously used the invalid form
// `&.GatewayPluginAiProxyConfigArgs{...}` (missing package qualifier) — in Go a
// composite literal needs the package name, e.g. `&konnect.GatewayPluginAiProxyConfigArgs{...}`.
example, err := konnect.NewGatewayPluginAiProxy(ctx, "gatewayPluginAiProxyResource", &konnect.GatewayPluginAiProxyArgs{
	Config: &konnect.GatewayPluginAiProxyConfigArgs{
		Auth: &konnect.GatewayPluginAiProxyConfigAuthArgs{
			AllowOverride:           pulumi.Bool(false),
			AwsAccessKeyId:          pulumi.String("string"),
			AwsSecretAccessKey:      pulumi.String("string"),
			AzureClientId:           pulumi.String("string"),
			AzureClientSecret:       pulumi.String("string"),
			AzureTenantId:           pulumi.String("string"),
			AzureUseManagedIdentity: pulumi.Bool(false),
			GcpServiceAccountJson:   pulumi.String("string"),
			GcpUseServiceAccount:    pulumi.Bool(false),
			HeaderName:              pulumi.String("string"),
			HeaderValue:             pulumi.String("string"),
			ParamLocation:           pulumi.String("string"),
			ParamName:               pulumi.String("string"),
			ParamValue:              pulumi.String("string"),
		},
		Logging: &konnect.GatewayPluginAiProxyConfigLoggingArgs{
			LogPayloads:   pulumi.Bool(false),
			LogStatistics: pulumi.Bool(false),
		},
		MaxRequestBodySize: pulumi.Float64(0),
		Model: &konnect.GatewayPluginAiProxyConfigModelArgs{
			Name: pulumi.String("string"),
			Options: &konnect.GatewayPluginAiProxyConfigModelOptionsArgs{
				AnthropicVersion:  pulumi.String("string"),
				AzureApiVersion:   pulumi.String("string"),
				AzureDeploymentId: pulumi.String("string"),
				AzureInstance:     pulumi.String("string"),
				Bedrock: &konnect.GatewayPluginAiProxyConfigModelOptionsBedrockArgs{
					AwsRegion: pulumi.String("string"),
				},
				Gemini: &konnect.GatewayPluginAiProxyConfigModelOptionsGeminiArgs{
					ApiEndpoint: pulumi.String("string"),
					LocationId:  pulumi.String("string"),
					ProjectId:   pulumi.String("string"),
				},
				Huggingface: &konnect.GatewayPluginAiProxyConfigModelOptionsHuggingfaceArgs{
					UseCache:     pulumi.Bool(false),
					WaitForModel: pulumi.Bool(false),
				},
				InputCost:     pulumi.Float64(0),
				Llama2Format:  pulumi.String("string"),
				MaxTokens:     pulumi.Float64(0),
				MistralFormat: pulumi.String("string"),
				OutputCost:    pulumi.Float64(0),
				Temperature:   pulumi.Float64(0),
				TopK:          pulumi.Float64(0),
				TopP:          pulumi.Float64(0),
				UpstreamPath:  pulumi.String("string"),
				UpstreamUrl:   pulumi.String("string"),
			},
			Provider: pulumi.String("string"),
		},
		ModelNameHeader:   pulumi.Bool(false),
		ResponseStreaming: pulumi.String("string"),
		RouteType:         pulumi.String("string"),
	},
	ControlPlaneId: pulumi.String("string"),
	Consumer: &konnect.GatewayPluginAiProxyConsumerArgs{
		Id: pulumi.String("string"),
	},
	ConsumerGroup: &konnect.GatewayPluginAiProxyConsumerGroupArgs{
		Id: pulumi.String("string"),
	},
	Enabled:                pulumi.Bool(false),
	GatewayPluginAiProxyId: pulumi.String("string"),
	InstanceName:           pulumi.String("string"),
	Ordering: &konnect.GatewayPluginAiProxyOrderingArgs{
		After: &konnect.GatewayPluginAiProxyOrderingAfterArgs{
			Accesses: pulumi.StringArray{
				pulumi.String("string"),
			},
		},
		Before: &konnect.GatewayPluginAiProxyOrderingBeforeArgs{
			Accesses: pulumi.StringArray{
				pulumi.String("string"),
			},
		},
	},
	Protocols: pulumi.StringArray{
		pulumi.String("string"),
	},
	Route: &konnect.GatewayPluginAiProxyRouteArgs{
		Id: pulumi.String("string"),
	},
	Service: &konnect.GatewayPluginAiProxyServiceArgs{
		Id: pulumi.String("string"),
	},
	Tags: pulumi.StringArray{
		pulumi.String("string"),
	},
})
var gatewayPluginAiProxyResource = new GatewayPluginAiProxy("gatewayPluginAiProxyResource", GatewayPluginAiProxyArgs.builder()
    .config(GatewayPluginAiProxyConfigArgs.builder()
        .auth(GatewayPluginAiProxyConfigAuthArgs.builder()
            .allowOverride(false)
            .awsAccessKeyId("string")
            .awsSecretAccessKey("string")
            .azureClientId("string")
            .azureClientSecret("string")
            .azureTenantId("string")
            .azureUseManagedIdentity(false)
            .gcpServiceAccountJson("string")
            .gcpUseServiceAccount(false)
            .headerName("string")
            .headerValue("string")
            .paramLocation("string")
            .paramName("string")
            .paramValue("string")
            .build())
        .logging(GatewayPluginAiProxyConfigLoggingArgs.builder()
            .logPayloads(false)
            .logStatistics(false)
            .build())
        .maxRequestBodySize(0)
        .model(GatewayPluginAiProxyConfigModelArgs.builder()
            .name("string")
            .options(GatewayPluginAiProxyConfigModelOptionsArgs.builder()
                .anthropicVersion("string")
                .azureApiVersion("string")
                .azureDeploymentId("string")
                .azureInstance("string")
                .bedrock(GatewayPluginAiProxyConfigModelOptionsBedrockArgs.builder()
                    .awsRegion("string")
                    .build())
                .gemini(GatewayPluginAiProxyConfigModelOptionsGeminiArgs.builder()
                    .apiEndpoint("string")
                    .locationId("string")
                    .projectId("string")
                    .build())
                .huggingface(GatewayPluginAiProxyConfigModelOptionsHuggingfaceArgs.builder()
                    .useCache(false)
                    .waitForModel(false)
                    .build())
                .inputCost(0)
                .llama2Format("string")
                .maxTokens(0)
                .mistralFormat("string")
                .outputCost(0)
                .temperature(0)
                .topK(0)
                .topP(0)
                .upstreamPath("string")
                .upstreamUrl("string")
                .build())
            .provider("string")
            .build())
        .modelNameHeader(false)
        .responseStreaming("string")
        .routeType("string")
        .build())
    .controlPlaneId("string")
    .consumer(GatewayPluginAiProxyConsumerArgs.builder()
        .id("string")
        .build())
    .consumerGroup(GatewayPluginAiProxyConsumerGroupArgs.builder()
        .id("string")
        .build())
    .enabled(false)
    .gatewayPluginAiProxyId("string")
    .instanceName("string")
    .ordering(GatewayPluginAiProxyOrderingArgs.builder()
        .after(GatewayPluginAiProxyOrderingAfterArgs.builder()
            .accesses("string")
            .build())
        .before(GatewayPluginAiProxyOrderingBeforeArgs.builder()
            .accesses("string")
            .build())
        .build())
    .protocols("string")
    .route(GatewayPluginAiProxyRouteArgs.builder()
        .id("string")
        .build())
    .service(GatewayPluginAiProxyServiceArgs.builder()
        .id("string")
        .build())
    .tags("string")
    .build());
gateway_plugin_ai_proxy_resource = konnect.GatewayPluginAiProxy("gatewayPluginAiProxyResource",
    config={
        "auth": {
            "allow_override": False,
            "aws_access_key_id": "string",
            "aws_secret_access_key": "string",
            "azure_client_id": "string",
            "azure_client_secret": "string",
            "azure_tenant_id": "string",
            "azure_use_managed_identity": False,
            "gcp_service_account_json": "string",
            "gcp_use_service_account": False,
            "header_name": "string",
            "header_value": "string",
            "param_location": "string",
            "param_name": "string",
            "param_value": "string",
        },
        "logging": {
            "log_payloads": False,
            "log_statistics": False,
        },
        "max_request_body_size": 0,
        "model": {
            "name": "string",
            "options": {
                "anthropic_version": "string",
                "azure_api_version": "string",
                "azure_deployment_id": "string",
                "azure_instance": "string",
                "bedrock": {
                    "aws_region": "string",
                },
                "gemini": {
                    "api_endpoint": "string",
                    "location_id": "string",
                    "project_id": "string",
                },
                "huggingface": {
                    "use_cache": False,
                    "wait_for_model": False,
                },
                "input_cost": 0,
                "llama2_format": "string",
                "max_tokens": 0,
                "mistral_format": "string",
                "output_cost": 0,
                "temperature": 0,
                "top_k": 0,
                "top_p": 0,
                "upstream_path": "string",
                "upstream_url": "string",
            },
            "provider": "string",
        },
        "model_name_header": False,
        "response_streaming": "string",
        "route_type": "string",
    },
    control_plane_id="string",
    consumer={
        "id": "string",
    },
    consumer_group={
        "id": "string",
    },
    enabled=False,
    gateway_plugin_ai_proxy_id="string",
    instance_name="string",
    ordering={
        "after": {
            "accesses": ["string"],
        },
        "before": {
            "accesses": ["string"],
        },
    },
    protocols=["string"],
    route={
        "id": "string",
    },
    service={
        "id": "string",
    },
    tags=["string"])
const gatewayPluginAiProxyResource = new konnect.GatewayPluginAiProxy("gatewayPluginAiProxyResource", {
    config: {
        auth: {
            allowOverride: false,
            awsAccessKeyId: "string",
            awsSecretAccessKey: "string",
            azureClientId: "string",
            azureClientSecret: "string",
            azureTenantId: "string",
            azureUseManagedIdentity: false,
            gcpServiceAccountJson: "string",
            gcpUseServiceAccount: false,
            headerName: "string",
            headerValue: "string",
            paramLocation: "string",
            paramName: "string",
            paramValue: "string",
        },
        logging: {
            logPayloads: false,
            logStatistics: false,
        },
        maxRequestBodySize: 0,
        model: {
            name: "string",
            options: {
                anthropicVersion: "string",
                azureApiVersion: "string",
                azureDeploymentId: "string",
                azureInstance: "string",
                bedrock: {
                    awsRegion: "string",
                },
                gemini: {
                    apiEndpoint: "string",
                    locationId: "string",
                    projectId: "string",
                },
                huggingface: {
                    useCache: false,
                    waitForModel: false,
                },
                inputCost: 0,
                llama2Format: "string",
                maxTokens: 0,
                mistralFormat: "string",
                outputCost: 0,
                temperature: 0,
                topK: 0,
                topP: 0,
                upstreamPath: "string",
                upstreamUrl: "string",
            },
            provider: "string",
        },
        modelNameHeader: false,
        responseStreaming: "string",
        routeType: "string",
    },
    controlPlaneId: "string",
    consumer: {
        id: "string",
    },
    consumerGroup: {
        id: "string",
    },
    enabled: false,
    gatewayPluginAiProxyId: "string",
    instanceName: "string",
    ordering: {
        after: {
            accesses: ["string"],
        },
        before: {
            accesses: ["string"],
        },
    },
    protocols: ["string"],
    route: {
        id: "string",
    },
    service: {
        id: "string",
    },
    tags: ["string"],
});
type: konnect:GatewayPluginAiProxy
properties:
    config:
        auth:
            allowOverride: false
            awsAccessKeyId: string
            awsSecretAccessKey: string
            azureClientId: string
            azureClientSecret: string
            azureTenantId: string
            azureUseManagedIdentity: false
            gcpServiceAccountJson: string
            gcpUseServiceAccount: false
            headerName: string
            headerValue: string
            paramLocation: string
            paramName: string
            paramValue: string
        logging:
            logPayloads: false
            logStatistics: false
        maxRequestBodySize: 0
        model:
            name: string
            options:
                anthropicVersion: string
                azureApiVersion: string
                azureDeploymentId: string
                azureInstance: string
                bedrock:
                    awsRegion: string
                gemini:
                    apiEndpoint: string
                    locationId: string
                    projectId: string
                huggingface:
                    useCache: false
                    waitForModel: false
                inputCost: 0
                llama2Format: string
                maxTokens: 0
                mistralFormat: string
                outputCost: 0
                temperature: 0
                topK: 0
                topP: 0
                upstreamPath: string
                upstreamUrl: string
            provider: string
        modelNameHeader: false
        responseStreaming: string
        routeType: string
    consumer:
        id: string
    consumerGroup:
        id: string
    controlPlaneId: string
    enabled: false
    gatewayPluginAiProxyId: string
    instanceName: string
    ordering:
        after:
            accesses:
                - string
        before:
            accesses:
                - string
    protocols:
        - string
    route:
        id: string
    service:
        id: string
    tags:
        - string
GatewayPluginAiProxy Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The GatewayPluginAiProxy resource accepts the following input properties:
- Config
GatewayPlugin Ai Proxy Config 
- ControlPlaneId string
- The UUID of your control plane. This variable is available in the Konnect manager. Requires replacement if changed.
- Consumer
GatewayPlugin Ai Proxy Consumer 
- If set, the plugin will activate only for requests where the specified has been authenticated. (Note that some plugins can not be restricted to consumers this way.). Leave unset for the plugin to activate regardless of the authenticated Consumer.
- ConsumerGroup GatewayPlugin Ai Proxy Consumer Group 
- If set, the plugin will activate only for requests where the specified consumer group has been authenticated. (Note that some plugins can not be restricted to consumers groups this way.). Leave unset for the plugin to activate regardless of the authenticated Consumer Groups
- Enabled bool
- Whether the plugin is applied.
- GatewayPluginAiProxyId string
- The ID of this resource.
- InstanceName string
- Ordering
GatewayPlugin Ai Proxy Ordering 
- Protocols List<string>
- A set of strings representing HTTP protocols.
- Route
GatewayPlugin Ai Proxy Route 
- If set, the plugin will only activate when receiving requests via the specified route. Leave unset for the plugin to activate regardless of the route being used.
- Service
GatewayPlugin Ai Proxy Service 
- If set, the plugin will only activate when receiving requests via one of the routes belonging to the specified Service. Leave unset for the plugin to activate regardless of the Service being matched.
- Tags List<string>
- An optional set of strings associated with the Plugin for grouping and filtering.
- Config
GatewayPlugin Ai Proxy Config Args 
- ControlPlane stringId 
- The UUID of your control plane. This variable is available in the Konnect manager. Requires replacement if changed.
- Consumer
GatewayPlugin Ai Proxy Consumer Args 
- If set, the plugin will activate only for requests where the specified has been authenticated. (Note that some plugins can not be restricted to consumers this way.). Leave unset for the plugin to activate regardless of the authenticated Consumer.
- ConsumerGroup GatewayPlugin Ai Proxy Consumer Group Args 
- If set, the plugin will activate only for requests where the specified consumer group has been authenticated. (Note that some plugins can not be restricted to consumers groups this way.). Leave unset for the plugin to activate regardless of the authenticated Consumer Groups
- Enabled bool
- Whether the plugin is applied.
- GatewayPlugin stringAi Proxy Id 
- The ID of this resource.
- InstanceName string
- Ordering
GatewayPlugin Ai Proxy Ordering Args 
- Protocols []string
- A set of strings representing HTTP protocols.
- Route
GatewayPlugin Ai Proxy Route Args 
- If set, the plugin will only activate when receiving requests via the specified route. Leave unset for the plugin to activate regardless of the route being used.
- Service
GatewayPlugin Ai Proxy Service Args 
- If set, the plugin will only activate when receiving requests via one of the routes belonging to the specified Service. Leave unset for the plugin to activate regardless of the Service being matched.
- []string
- An optional set of strings associated with the Plugin for grouping and filtering.
- config
GatewayPlugin Ai Proxy Config 
- controlPlaneId String
- The UUID of your control plane. This variable is available in the Konnect manager. Requires replacement if changed.
- consumer
GatewayPlugin Ai Proxy Consumer 
- If set, the plugin will activate only for requests where the specified consumer has been authenticated. (Note that some plugins can not be restricted to consumers this way.). Leave unset for the plugin to activate regardless of the authenticated Consumer.
- consumerGroup GatewayPlugin Ai Proxy Consumer Group 
- If set, the plugin will activate only for requests where the specified consumer group has been authenticated. (Note that some plugins can not be restricted to consumer groups this way.). Leave unset for the plugin to activate regardless of the authenticated Consumer Groups.
- enabled Boolean
- Whether the plugin is applied.
- gatewayPluginAiProxyId String
- The ID of this resource.
- instanceName String
- ordering
GatewayPlugin Ai Proxy Ordering 
- protocols List<String>
- A set of strings representing HTTP protocols.
- route
GatewayPlugin Ai Proxy Route 
- If set, the plugin will only activate when receiving requests via the specified route. Leave unset for the plugin to activate regardless of the route being used.
- service
GatewayPlugin Ai Proxy Service 
- If set, the plugin will only activate when receiving requests via one of the routes belonging to the specified Service. Leave unset for the plugin to activate regardless of the Service being matched.
- List<String>
- An optional set of strings associated with the Plugin for grouping and filtering.
- config
GatewayPlugin Ai Proxy Config 
- controlPlaneId string
- The UUID of your control plane. This variable is available in the Konnect manager. Requires replacement if changed.
- consumer
GatewayPlugin Ai Proxy Consumer 
- If set, the plugin will activate only for requests where the specified consumer has been authenticated. (Note that some plugins can not be restricted to consumers this way.). Leave unset for the plugin to activate regardless of the authenticated Consumer.
- consumerGroup GatewayPlugin Ai Proxy Consumer Group 
- If set, the plugin will activate only for requests where the specified consumer group has been authenticated. (Note that some plugins can not be restricted to consumer groups this way.). Leave unset for the plugin to activate regardless of the authenticated Consumer Groups.
- enabled boolean
- Whether the plugin is applied.
- gatewayPluginAiProxyId string
- The ID of this resource.
- instanceName string
- ordering
GatewayPlugin Ai Proxy Ordering 
- protocols string[]
- A set of strings representing HTTP protocols.
- route
GatewayPlugin Ai Proxy Route 
- If set, the plugin will only activate when receiving requests via the specified route. Leave unset for the plugin to activate regardless of the route being used.
- service
GatewayPlugin Ai Proxy Service 
- If set, the plugin will only activate when receiving requests via one of the routes belonging to the specified Service. Leave unset for the plugin to activate regardless of the Service being matched.
- string[]
- An optional set of strings associated with the Plugin for grouping and filtering.
- config
GatewayPlugin Ai Proxy Config Args 
- control_plane_id str
- The UUID of your control plane. This variable is available in the Konnect manager. Requires replacement if changed.
- consumer
GatewayPlugin Ai Proxy Consumer Args 
- If set, the plugin will activate only for requests where the specified consumer has been authenticated. (Note that some plugins can not be restricted to consumers this way.). Leave unset for the plugin to activate regardless of the authenticated Consumer.
- consumer_group GatewayPlugin Ai Proxy Consumer Group Args 
- If set, the plugin will activate only for requests where the specified consumer group has been authenticated. (Note that some plugins can not be restricted to consumer groups this way.). Leave unset for the plugin to activate regardless of the authenticated Consumer Groups.
- enabled bool
- Whether the plugin is applied.
- gateway_plugin_ai_proxy_id str
- The ID of this resource.
- instance_name str
- ordering
GatewayPlugin Ai Proxy Ordering Args 
- protocols Sequence[str]
- A set of strings representing HTTP protocols.
- route
GatewayPlugin Ai Proxy Route Args 
- If set, the plugin will only activate when receiving requests via the specified route. Leave unset for the plugin to activate regardless of the route being used.
- service
GatewayPlugin Ai Proxy Service Args 
- If set, the plugin will only activate when receiving requests via one of the routes belonging to the specified Service. Leave unset for the plugin to activate regardless of the Service being matched.
- Sequence[str]
- An optional set of strings associated with the Plugin for grouping and filtering.
- config Property Map
- controlPlaneId String
- The UUID of your control plane. This variable is available in the Konnect manager. Requires replacement if changed.
- consumer Property Map
- If set, the plugin will activate only for requests where the specified consumer has been authenticated. (Note that some plugins can not be restricted to consumers this way.). Leave unset for the plugin to activate regardless of the authenticated Consumer.
- consumerGroup Property Map
- If set, the plugin will activate only for requests where the specified consumer group has been authenticated. (Note that some plugins can not be restricted to consumer groups this way.). Leave unset for the plugin to activate regardless of the authenticated Consumer Groups.
- enabled Boolean
- Whether the plugin is applied.
- gatewayPluginAiProxyId String
- The ID of this resource.
- instanceName String
- ordering Property Map
- protocols List<String>
- A set of strings representing HTTP protocols.
- route Property Map
- If set, the plugin will only activate when receiving requests via the specified route. Leave unset for the plugin to activate regardless of the route being used.
- service Property Map
- If set, the plugin will only activate when receiving requests via one of the routes belonging to the specified Service. Leave unset for the plugin to activate regardless of the Service being matched.
- List<String>
- An optional set of strings associated with the Plugin for grouping and filtering.
Outputs
All input properties are implicitly available as output properties. Additionally, the GatewayPluginAiProxy resource produces the following output properties:
- created_at float
- Unix epoch when the resource was created.
- id str
- The provider-assigned unique ID for this managed resource.
- updated_at float
- Unix epoch when the resource was last updated.
Look up Existing GatewayPluginAiProxy Resource
Get an existing GatewayPluginAiProxy resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: GatewayPluginAiProxyState, opts?: CustomResourceOptions): GatewayPluginAiProxy@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        config: Optional[GatewayPluginAiProxyConfigArgs] = None,
        consumer: Optional[GatewayPluginAiProxyConsumerArgs] = None,
        consumer_group: Optional[GatewayPluginAiProxyConsumerGroupArgs] = None,
        control_plane_id: Optional[str] = None,
        created_at: Optional[float] = None,
        enabled: Optional[bool] = None,
        gateway_plugin_ai_proxy_id: Optional[str] = None,
        instance_name: Optional[str] = None,
        ordering: Optional[GatewayPluginAiProxyOrderingArgs] = None,
        protocols: Optional[Sequence[str]] = None,
        route: Optional[GatewayPluginAiProxyRouteArgs] = None,
        service: Optional[GatewayPluginAiProxyServiceArgs] = None,
        tags: Optional[Sequence[str]] = None,
        updated_at: Optional[float] = None) -> GatewayPluginAiProxyfunc GetGatewayPluginAiProxy(ctx *Context, name string, id IDInput, state *GatewayPluginAiProxyState, opts ...ResourceOption) (*GatewayPluginAiProxy, error)public static GatewayPluginAiProxy Get(string name, Input<string> id, GatewayPluginAiProxyState? state, CustomResourceOptions? opts = null)public static GatewayPluginAiProxy get(String name, Output<String> id, GatewayPluginAiProxyState state, CustomResourceOptions options)resources:  _:    type: konnect:GatewayPluginAiProxy    get:      id: ${id}- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Config
GatewayPlugin Ai Proxy Config 
- Consumer
GatewayPlugin Ai Proxy Consumer 
- If set, the plugin will activate only for requests where the specified consumer has been authenticated. (Note that some plugins can not be restricted to consumers this way.). Leave unset for the plugin to activate regardless of the authenticated Consumer.
- ConsumerGroup GatewayPlugin Ai Proxy Consumer Group 
- If set, the plugin will activate only for requests where the specified consumer group has been authenticated. (Note that some plugins can not be restricted to consumer groups this way.). Leave unset for the plugin to activate regardless of the authenticated Consumer Groups.
- ControlPlaneId string
- The UUID of your control plane. This variable is available in the Konnect manager. Requires replacement if changed.
- CreatedAt double
- Unix epoch when the resource was created.
- Enabled bool
- Whether the plugin is applied.
- GatewayPluginAiProxyId string
- The ID of this resource.
- InstanceName string
- Ordering
GatewayPlugin Ai Proxy Ordering 
- Protocols List<string>
- A set of strings representing HTTP protocols.
- Route
GatewayPlugin Ai Proxy Route 
- If set, the plugin will only activate when receiving requests via the specified route. Leave unset for the plugin to activate regardless of the route being used.
- Service
GatewayPlugin Ai Proxy Service 
- If set, the plugin will only activate when receiving requests via one of the routes belonging to the specified Service. Leave unset for the plugin to activate regardless of the Service being matched.
- List<string>
- An optional set of strings associated with the Plugin for grouping and filtering.
- UpdatedAt double
- Unix epoch when the resource was last updated.
- Config
GatewayPlugin Ai Proxy Config Args 
- Consumer
GatewayPlugin Ai Proxy Consumer Args 
- If set, the plugin will activate only for requests where the specified consumer has been authenticated. (Note that some plugins can not be restricted to consumers this way.). Leave unset for the plugin to activate regardless of the authenticated Consumer.
- ConsumerGroup GatewayPlugin Ai Proxy Consumer Group Args 
- If set, the plugin will activate only for requests where the specified consumer group has been authenticated. (Note that some plugins can not be restricted to consumer groups this way.). Leave unset for the plugin to activate regardless of the authenticated Consumer Groups.
- ControlPlaneId string
- The UUID of your control plane. This variable is available in the Konnect manager. Requires replacement if changed.
- CreatedAt float64
- Unix epoch when the resource was created.
- Enabled bool
- Whether the plugin is applied.
- GatewayPluginAiProxyId string
- The ID of this resource.
- InstanceName string
- Ordering
GatewayPlugin Ai Proxy Ordering Args 
- Protocols []string
- A set of strings representing HTTP protocols.
- Route
GatewayPlugin Ai Proxy Route Args 
- If set, the plugin will only activate when receiving requests via the specified route. Leave unset for the plugin to activate regardless of the route being used.
- Service
GatewayPlugin Ai Proxy Service Args 
- If set, the plugin will only activate when receiving requests via one of the routes belonging to the specified Service. Leave unset for the plugin to activate regardless of the Service being matched.
- []string
- An optional set of strings associated with the Plugin for grouping and filtering.
- UpdatedAt float64
- Unix epoch when the resource was last updated.
- config
GatewayPlugin Ai Proxy Config 
- consumer
GatewayPlugin Ai Proxy Consumer 
- If set, the plugin will activate only for requests where the specified consumer has been authenticated. (Note that some plugins can not be restricted to consumers this way.). Leave unset for the plugin to activate regardless of the authenticated Consumer.
- consumerGroup GatewayPlugin Ai Proxy Consumer Group 
- If set, the plugin will activate only for requests where the specified consumer group has been authenticated. (Note that some plugins can not be restricted to consumer groups this way.). Leave unset for the plugin to activate regardless of the authenticated Consumer Groups.
- controlPlaneId String
- The UUID of your control plane. This variable is available in the Konnect manager. Requires replacement if changed.
- createdAt Double
- Unix epoch when the resource was created.
- enabled Boolean
- Whether the plugin is applied.
- gatewayPluginAiProxyId String
- The ID of this resource.
- instanceName String
- ordering
GatewayPlugin Ai Proxy Ordering 
- protocols List<String>
- A set of strings representing HTTP protocols.
- route
GatewayPlugin Ai Proxy Route 
- If set, the plugin will only activate when receiving requests via the specified route. Leave unset for the plugin to activate regardless of the route being used.
- service
GatewayPlugin Ai Proxy Service 
- If set, the plugin will only activate when receiving requests via one of the routes belonging to the specified Service. Leave unset for the plugin to activate regardless of the Service being matched.
- List<String>
- An optional set of strings associated with the Plugin for grouping and filtering.
- updatedAt Double
- Unix epoch when the resource was last updated.
- config
GatewayPlugin Ai Proxy Config 
- consumer
GatewayPlugin Ai Proxy Consumer 
- If set, the plugin will activate only for requests where the specified consumer has been authenticated. (Note that some plugins can not be restricted to consumers this way.). Leave unset for the plugin to activate regardless of the authenticated Consumer.
- consumerGroup GatewayPlugin Ai Proxy Consumer Group 
- If set, the plugin will activate only for requests where the specified consumer group has been authenticated. (Note that some plugins can not be restricted to consumer groups this way.). Leave unset for the plugin to activate regardless of the authenticated Consumer Groups.
- controlPlaneId string
- The UUID of your control plane. This variable is available in the Konnect manager. Requires replacement if changed.
- createdAt number
- Unix epoch when the resource was created.
- enabled boolean
- Whether the plugin is applied.
- gatewayPluginAiProxyId string
- The ID of this resource.
- instanceName string
- ordering
GatewayPlugin Ai Proxy Ordering 
- protocols string[]
- A set of strings representing HTTP protocols.
- route
GatewayPlugin Ai Proxy Route 
- If set, the plugin will only activate when receiving requests via the specified route. Leave unset for the plugin to activate regardless of the route being used.
- service
GatewayPlugin Ai Proxy Service 
- If set, the plugin will only activate when receiving requests via one of the routes belonging to the specified Service. Leave unset for the plugin to activate regardless of the Service being matched.
- string[]
- An optional set of strings associated with the Plugin for grouping and filtering.
- updatedAt number
- Unix epoch when the resource was last updated.
- config
GatewayPlugin Ai Proxy Config Args 
- consumer
GatewayPlugin Ai Proxy Consumer Args 
- If set, the plugin will activate only for requests where the specified consumer has been authenticated. (Note that some plugins can not be restricted to consumers this way.). Leave unset for the plugin to activate regardless of the authenticated Consumer.
- consumer_group GatewayPlugin Ai Proxy Consumer Group Args 
- If set, the plugin will activate only for requests where the specified consumer group has been authenticated. (Note that some plugins can not be restricted to consumer groups this way.). Leave unset for the plugin to activate regardless of the authenticated Consumer Groups.
- control_plane_id str
- The UUID of your control plane. This variable is available in the Konnect manager. Requires replacement if changed.
- created_at float
- Unix epoch when the resource was created.
- enabled bool
- Whether the plugin is applied.
- gateway_plugin_ai_proxy_id str
- The ID of this resource.
- instance_name str
- ordering
GatewayPlugin Ai Proxy Ordering Args 
- protocols Sequence[str]
- A set of strings representing HTTP protocols.
- route
GatewayPlugin Ai Proxy Route Args 
- If set, the plugin will only activate when receiving requests via the specified route. Leave unset for the plugin to activate regardless of the route being used.
- service
GatewayPlugin Ai Proxy Service Args 
- If set, the plugin will only activate when receiving requests via one of the routes belonging to the specified Service. Leave unset for the plugin to activate regardless of the Service being matched.
- Sequence[str]
- An optional set of strings associated with the Plugin for grouping and filtering.
- updated_at float
- Unix epoch when the resource was last updated.
- config Property Map
- consumer Property Map
- If set, the plugin will activate only for requests where the specified consumer has been authenticated. (Note that some plugins can not be restricted to consumers this way.). Leave unset for the plugin to activate regardless of the authenticated Consumer.
- consumerGroup Property Map
- If set, the plugin will activate only for requests where the specified consumer group has been authenticated. (Note that some plugins can not be restricted to consumer groups this way.). Leave unset for the plugin to activate regardless of the authenticated Consumer Groups.
- controlPlaneId String
- The UUID of your control plane. This variable is available in the Konnect manager. Requires replacement if changed.
- createdAt Number
- Unix epoch when the resource was created.
- enabled Boolean
- Whether the plugin is applied.
- gatewayPluginAiProxyId String
- The ID of this resource.
- instanceName String
- ordering Property Map
- protocols List<String>
- A set of strings representing HTTP protocols.
- route Property Map
- If set, the plugin will only activate when receiving requests via the specified route. Leave unset for the plugin to activate regardless of the route being used.
- service Property Map
- If set, the plugin will only activate when receiving requests via one of the routes belonging to the specified Service. Leave unset for the plugin to activate regardless of the Service being matched.
- List<String>
- An optional set of strings associated with the Plugin for grouping and filtering.
- updatedAt Number
- Unix epoch when the resource was last updated.
Supporting Types
GatewayPluginAiProxyConfig, GatewayPluginAiProxyConfigArgs          
- Auth
GatewayPlugin Ai Proxy Config Auth 
- Logging
GatewayPlugin Ai Proxy Config Logging 
- MaxRequestBodySize double
- Maximum allowed body size to be introspected.
- Model
GatewayPlugin Ai Proxy Config Model 
- ModelNameHeader bool
- Display the model name selected in the X-Kong-LLM-Model response header
- ResponseStreaming string
- Whether to 'optionally allow', 'deny', or 'always' (force) the streaming of answers via server sent events. must be one of ["allow", "always", "deny"]
- RouteType string
- The model's operation implementation, for this provider. Set to preserve to pass through without transformation. must be one of ["llm/v1/chat", "llm/v1/completions", "preserve"]
- Auth
GatewayPlugin Ai Proxy Config Auth 
- Logging
GatewayPlugin Ai Proxy Config Logging 
- MaxRequestBodySize float64
- Maximum allowed body size to be introspected.
- Model
GatewayPlugin Ai Proxy Config Model 
- ModelNameHeader bool
- Display the model name selected in the X-Kong-LLM-Model response header
- ResponseStreaming string
- Whether to 'optionally allow', 'deny', or 'always' (force) the streaming of answers via server sent events. must be one of ["allow", "always", "deny"]
- RouteType string
- The model's operation implementation, for this provider. Set to preserve to pass through without transformation. must be one of ["llm/v1/chat", "llm/v1/completions", "preserve"]
- auth
GatewayPlugin Ai Proxy Config Auth 
- logging
GatewayPlugin Ai Proxy Config Logging 
- maxRequestBodySize Double
- Maximum allowed body size to be introspected.
- model
GatewayPlugin Ai Proxy Config Model 
- modelNameHeader Boolean
- Display the model name selected in the X-Kong-LLM-Model response header
- responseStreaming String
- Whether to 'optionally allow', 'deny', or 'always' (force) the streaming of answers via server sent events. must be one of ["allow", "always", "deny"]
- routeType String
- The model's operation implementation, for this provider. Set to preserve to pass through without transformation. must be one of ["llm/v1/chat", "llm/v1/completions", "preserve"]
- auth
GatewayPlugin Ai Proxy Config Auth 
- logging
GatewayPlugin Ai Proxy Config Logging 
- maxRequestBodySize number
- Maximum allowed body size to be introspected.
- model
GatewayPlugin Ai Proxy Config Model 
- modelNameHeader boolean
- Display the model name selected in the X-Kong-LLM-Model response header
- responseStreaming string
- Whether to 'optionally allow', 'deny', or 'always' (force) the streaming of answers via server sent events. must be one of ["allow", "always", "deny"]
- routeType string
- The model's operation implementation, for this provider. Set to preserve to pass through without transformation. must be one of ["llm/v1/chat", "llm/v1/completions", "preserve"]
- auth
GatewayPlugin Ai Proxy Config Auth 
- logging
GatewayPlugin Ai Proxy Config Logging 
- max_request_body_size float
- Maximum allowed body size to be introspected.
- model
GatewayPlugin Ai Proxy Config Model 
- model_name_header bool
- Display the model name selected in the X-Kong-LLM-Model response header
- response_streaming str
- Whether to 'optionally allow', 'deny', or 'always' (force) the streaming of answers via server sent events. must be one of ["allow", "always", "deny"]
- route_type str
- The model's operation implementation, for this provider. Set to preserve to pass through without transformation. must be one of ["llm/v1/chat", "llm/v1/completions", "preserve"]
- auth Property Map
- logging Property Map
- maxRequestBodySize Number
- Maximum allowed body size to be introspected.
- model Property Map
- modelNameHeader Boolean
- Display the model name selected in the X-Kong-LLM-Model response header
- responseStreaming String
- Whether to 'optionally allow', 'deny', or 'always' (force) the streaming of answers via server sent events. must be one of ["allow", "always", "deny"]
- routeType String
- The model's operation implementation, for this provider. Set to preserve to pass through without transformation. must be one of ["llm/v1/chat", "llm/v1/completions", "preserve"]
GatewayPluginAiProxyConfigAuth, GatewayPluginAiProxyConfigAuthArgs            
- AllowOverride bool
- If enabled, the authorization header or parameter can be overridden in the request by the value configured in the plugin.
- AwsAccessKeyId string
- Set this if you are using an AWS provider (Bedrock) and you are authenticating using static IAM User credentials. Setting this will override the AWS_ACCESS_KEY_ID environment variable for this plugin instance.
- AwsSecretAccessKey string
- Set this if you are using an AWS provider (Bedrock) and you are authenticating using static IAM User credentials. Setting this will override the AWS_SECRET_ACCESS_KEY environment variable for this plugin instance.
- AzureClientId string
- If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the client ID.
- AzureClientSecret string
- If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the client secret.
- AzureTenantId string
- If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the tenant ID.
- AzureUseManagedIdentity bool
- Set true to use the Azure Cloud Managed Identity (or user-assigned identity) to authenticate with Azure-provider models.
- GcpServiceAccountJson string
- Set this field to the full JSON of the GCP service account to authenticate, if required. If null (and gcp_use_service_account is true), Kong will attempt to read from environment variable GCP_SERVICE_ACCOUNT.
- GcpUseServiceAccount bool
- Use service account auth for GCP-based providers and models.
- HeaderName string
- If AI model requires authentication via Authorization or API key header, specify its name here.
- HeaderValue string
- Specify the full auth header value for 'header_name', for example 'Bearer key' or just 'key'.
- ParamLocation string
- Specify whether the 'param_name' and 'param_value' options go in a query string, or the POST form/JSON body. must be one of ["body", "query"]
- ParamName string
- If AI model requires authentication via query parameter, specify its name here.
- ParamValue string
- Specify the full parameter value for 'param_name'.
- AllowOverride bool
- If enabled, the authorization header or parameter can be overridden in the request by the value configured in the plugin.
- AwsAccessKeyId string
- Set this if you are using an AWS provider (Bedrock) and you are authenticating using static IAM User credentials. Setting this will override the AWS_ACCESS_KEY_ID environment variable for this plugin instance.
- AwsSecretAccessKey string
- Set this if you are using an AWS provider (Bedrock) and you are authenticating using static IAM User credentials. Setting this will override the AWS_SECRET_ACCESS_KEY environment variable for this plugin instance.
- AzureClientId string
- If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the client ID.
- AzureClientSecret string
- If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the client secret.
- AzureTenantId string
- If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the tenant ID.
- AzureUseManagedIdentity bool
- Set true to use the Azure Cloud Managed Identity (or user-assigned identity) to authenticate with Azure-provider models.
- GcpServiceAccountJson string
- Set this field to the full JSON of the GCP service account to authenticate, if required. If null (and gcp_use_service_account is true), Kong will attempt to read from environment variable GCP_SERVICE_ACCOUNT.
- GcpUseServiceAccount bool
- Use service account auth for GCP-based providers and models.
- HeaderName string
- If AI model requires authentication via Authorization or API key header, specify its name here.
- HeaderValue string
- Specify the full auth header value for 'header_name', for example 'Bearer key' or just 'key'.
- ParamLocation string
- Specify whether the 'param_name' and 'param_value' options go in a query string, or the POST form/JSON body. must be one of ["body", "query"]
- ParamName string
- If AI model requires authentication via query parameter, specify its name here.
- ParamValue string
- Specify the full parameter value for 'param_name'.
- allowOverride Boolean
- If enabled, the authorization header or parameter can be overridden in the request by the value configured in the plugin.
- awsAccessKeyId String
- Set this if you are using an AWS provider (Bedrock) and you are authenticating using static IAM User credentials. Setting this will override the AWS_ACCESS_KEY_ID environment variable for this plugin instance.
- awsSecretAccessKey String
- Set this if you are using an AWS provider (Bedrock) and you are authenticating using static IAM User credentials. Setting this will override the AWS_SECRET_ACCESS_KEY environment variable for this plugin instance.
- azureClientId String
- If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the client ID.
- azureClientSecret String
- If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the client secret.
- azureTenantId String
- If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the tenant ID.
- azureUseManagedIdentity Boolean
- Set true to use the Azure Cloud Managed Identity (or user-assigned identity) to authenticate with Azure-provider models.
- gcpServiceAccountJson String
- Set this field to the full JSON of the GCP service account to authenticate, if required. If null (and gcp_use_service_account is true), Kong will attempt to read from environment variable GCP_SERVICE_ACCOUNT.
- gcpUseServiceAccount Boolean
- Use service account auth for GCP-based providers and models.
- headerName String
- If AI model requires authentication via Authorization or API key header, specify its name here.
- headerValue String
- Specify the full auth header value for 'header_name', for example 'Bearer key' or just 'key'.
- paramLocation String
- Specify whether the 'param_name' and 'param_value' options go in a query string, or the POST form/JSON body. must be one of ["body", "query"]
- paramName String
- If AI model requires authentication via query parameter, specify its name here.
- paramValue String
- Specify the full parameter value for 'param_name'.
- allowOverride boolean
- If enabled, the authorization header or parameter can be overridden in the request by the value configured in the plugin.
- awsAccessKeyId string
- Set this if you are using an AWS provider (Bedrock) and you are authenticating using static IAM User credentials. Setting this will override the AWS_ACCESS_KEY_ID environment variable for this plugin instance.
- awsSecretAccessKey string
- Set this if you are using an AWS provider (Bedrock) and you are authenticating using static IAM User credentials. Setting this will override the AWS_SECRET_ACCESS_KEY environment variable for this plugin instance.
- azureClientId string
- If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the client ID.
- azureClientSecret string
- If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the client secret.
- azureTenantId string
- If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the tenant ID.
- azureUseManagedIdentity boolean
- Set true to use the Azure Cloud Managed Identity (or user-assigned identity) to authenticate with Azure-provider models.
- gcpServiceAccountJson string
- Set this field to the full JSON of the GCP service account to authenticate, if required. If null (and gcp_use_service_account is true), Kong will attempt to read from environment variable GCP_SERVICE_ACCOUNT.
- gcpUseServiceAccount boolean
- Use service account auth for GCP-based providers and models.
- headerName string
- If AI model requires authentication via Authorization or API key header, specify its name here.
- headerValue string
- Specify the full auth header value for 'header_name', for example 'Bearer key' or just 'key'.
- paramLocation string
- Specify whether the 'param_name' and 'param_value' options go in a query string, or the POST form/JSON body. must be one of ["body", "query"]
- paramName string
- If AI model requires authentication via query parameter, specify its name here.
- paramValue string
- Specify the full parameter value for 'param_name'.
- allow_override bool
- If enabled, the authorization header or parameter can be overridden in the request by the value configured in the plugin.
- aws_access_key_id str
- Set this if you are using an AWS provider (Bedrock) and you are authenticating using static IAM User credentials. Setting this will override the AWS_ACCESS_KEY_ID environment variable for this plugin instance.
- aws_secret_access_key str
- Set this if you are using an AWS provider (Bedrock) and you are authenticating using static IAM User credentials. Setting this will override the AWS_SECRET_ACCESS_KEY environment variable for this plugin instance.
- azure_client_id str
- If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the client ID.
- azure_client_secret str
- If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the client secret.
- azure_tenant_id str
- If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the tenant ID.
- azure_use_managed_identity bool
- Set true to use the Azure Cloud Managed Identity (or user-assigned identity) to authenticate with Azure-provider models.
- gcp_service_account_json str
- Set this field to the full JSON of the GCP service account to authenticate, if required. If null (and gcp_use_service_account is true), Kong will attempt to read from environment variable GCP_SERVICE_ACCOUNT.
- gcp_use_service_account bool
- Use service account auth for GCP-based providers and models.
- header_name str
- If AI model requires authentication via Authorization or API key header, specify its name here.
- header_value str
- Specify the full auth header value for 'header_name', for example 'Bearer key' or just 'key'.
- param_location str
- Specify whether the 'param_name' and 'param_value' options go in a query string, or the POST form/JSON body. must be one of ["body", "query"]
- param_name str
- If AI model requires authentication via query parameter, specify its name here.
- param_value str
- Specify the full parameter value for 'param_name'.
- allowOverride Boolean
- If enabled, the authorization header or parameter can be overridden in the request by the value configured in the plugin.
- awsAccessKeyId String
- Set this if you are using an AWS provider (Bedrock) and you are authenticating using static IAM User credentials. Setting this will override the AWS_ACCESS_KEY_ID environment variable for this plugin instance.
- awsSecretAccessKey String
- Set this if you are using an AWS provider (Bedrock) and you are authenticating using static IAM User credentials. Setting this will override the AWS_SECRET_ACCESS_KEY environment variable for this plugin instance.
- azureClientId String
- If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the client ID.
- azureClientSecret String
- If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the client secret.
- azureTenantId String
- If azure_use_managed_identity is set to true, and you need to use a different user-assigned identity for this LLM instance, set the tenant ID.
- azureUseManagedIdentity Boolean
- Set true to use the Azure Cloud Managed Identity (or user-assigned identity) to authenticate with Azure-provider models.
- gcpServiceAccountJson String
- Set this field to the full JSON of the GCP service account to authenticate, if required. If null (and gcp_use_service_account is true), Kong will attempt to read from environment variable GCP_SERVICE_ACCOUNT.
- gcpUseServiceAccount Boolean
- Use service account auth for GCP-based providers and models.
- headerName String
- If AI model requires authentication via Authorization or API key header, specify its name here.
- headerValue String
- Specify the full auth header value for 'header_name', for example 'Bearer key' or just 'key'.
- paramLocation String
- Specify whether the 'param_name' and 'param_value' options go in a query string, or the POST form/JSON body. must be one of ["body", "query"]
- paramName String
- If AI model requires authentication via query parameter, specify its name here.
- paramValue String
- Specify the full parameter value for 'param_name'.
GatewayPluginAiProxyConfigLogging, GatewayPluginAiProxyConfigLoggingArgs            
- LogPayloads bool
- If enabled, will log the request and response body into the Kong log plugin(s) output.
- LogStatistics bool
- If enabled and supported by the driver, will add model usage and token metrics into the Kong log plugin(s) output.
- LogPayloads bool
- If enabled, will log the request and response body into the Kong log plugin(s) output.
- LogStatistics bool
- If enabled and supported by the driver, will add model usage and token metrics into the Kong log plugin(s) output.
- logPayloads Boolean
- If enabled, will log the request and response body into the Kong log plugin(s) output.
- logStatistics Boolean
- If enabled and supported by the driver, will add model usage and token metrics into the Kong log plugin(s) output.
- logPayloads boolean
- If enabled, will log the request and response body into the Kong log plugin(s) output.
- logStatistics boolean
- If enabled and supported by the driver, will add model usage and token metrics into the Kong log plugin(s) output.
- log_payloads bool
- If enabled, will log the request and response body into the Kong log plugin(s) output.
- log_statistics bool
- If enabled and supported by the driver, will add model usage and token metrics into the Kong log plugin(s) output.
- logPayloads Boolean
- If enabled, will log the request and response body into the Kong log plugin(s) output.
- logStatistics Boolean
- If enabled and supported by the driver, will add model usage and token metrics into the Kong log plugin(s) output.
GatewayPluginAiProxyConfigModel, GatewayPluginAiProxyConfigModelArgs            
- Name string
- Model name to execute.
- Options
GatewayPlugin Ai Proxy Config Model Options 
- Key/value settings for the model
- Provider string
- AI provider request format - Kong translates requests to and from the specified backend compatible formats. must be one of ["anthropic", "azure", "bedrock", "cohere", "gemini", "huggingface", "llama2", "mistral", "openai"]
- Name string
- Model name to execute.
- Options
GatewayPlugin Ai Proxy Config Model Options 
- Key/value settings for the model
- Provider string
- AI provider request format - Kong translates requests to and from the specified backend compatible formats. must be one of ["anthropic", "azure", "bedrock", "cohere", "gemini", "huggingface", "llama2", "mistral", "openai"]
- name String
- Model name to execute.
- options
GatewayPlugin Ai Proxy Config Model Options 
- Key/value settings for the model
- provider String
- AI provider request format - Kong translates requests to and from the specified backend compatible formats. must be one of ["anthropic", "azure", "bedrock", "cohere", "gemini", "huggingface", "llama2", "mistral", "openai"]
- name string
- Model name to execute.
- options
GatewayPlugin Ai Proxy Config Model Options 
- Key/value settings for the model
- provider string
- AI provider request format - Kong translates requests to and from the specified backend compatible formats. must be one of ["anthropic", "azure", "bedrock", "cohere", "gemini", "huggingface", "llama2", "mistral", "openai"]
- name str
- Model name to execute.
- options
GatewayPlugin Ai Proxy Config Model Options 
- Key/value settings for the model
- provider str
- AI provider request format - Kong translates requests to and from the specified backend compatible formats. must be one of ["anthropic", "azure", "bedrock", "cohere", "gemini", "huggingface", "llama2", "mistral", "openai"]
- name String
- Model name to execute.
- options Property Map
- Key/value settings for the model
- provider String
- AI provider request format - Kong translates requests to and from the specified backend compatible formats. must be one of ["anthropic", "azure", "bedrock", "cohere", "gemini", "huggingface", "llama2", "mistral", "openai"]
GatewayPluginAiProxyConfigModelOptions, GatewayPluginAiProxyConfigModelOptionsArgs              
- AnthropicVersion string
- Defines the schema/API version, if using Anthropic provider.
- AzureApiVersion string
- 'api-version' for Azure OpenAI instances.
- AzureDeploymentId string
- Deployment ID for Azure OpenAI instances.
- AzureInstance string
- Instance name for Azure OpenAI hosted models.
- Bedrock
GatewayPlugin Ai Proxy Config Model Options Bedrock 
- Gemini
GatewayPlugin Ai Proxy Config Model Options Gemini 
- Huggingface
GatewayPlugin Ai Proxy Config Model Options Huggingface 
- InputCost double
- Defines the cost per 1M tokens in your prompt.
- Llama2Format string
- If using llama2 provider, select the upstream message format. must be one of ["ollama", "openai", "raw"]
- MaxTokens double
- Defines the max_tokens, if using chat or completion models.
- MistralFormat string
- If using mistral provider, select the upstream message format. must be one of ["ollama", "openai"]
- OutputCost double
- Defines the cost per 1M tokens in the output of the AI.
- Temperature double
- Defines the matching temperature, if using chat or completion models.
- TopK double
- Defines the top-k most likely tokens, if supported.
- TopP double
- Defines the top-p probability mass, if supported.
- UpstreamPath string
- Manually specify or override the AI operation path, used when e.g. using the 'preserve' route_type.
- UpstreamUrl string
- Manually specify or override the full URL to the AI operation endpoints, when calling (self-)hosted models, or for running via a private endpoint.
- AnthropicVersion string
- Defines the schema/API version, if using Anthropic provider.
- AzureApiVersion string
- 'api-version' for Azure OpenAI instances.
- AzureDeploymentId string
- Deployment ID for Azure OpenAI instances.
- AzureInstance string
- Instance name for Azure OpenAI hosted models.
- Bedrock
GatewayPlugin Ai Proxy Config Model Options Bedrock 
- Gemini
GatewayPlugin Ai Proxy Config Model Options Gemini 
- Huggingface
GatewayPlugin Ai Proxy Config Model Options Huggingface 
- InputCost float64
- Defines the cost per 1M tokens in your prompt.
- Llama2Format string
- If using llama2 provider, select the upstream message format. must be one of ["ollama", "openai", "raw"]
- MaxTokens float64
- Defines the max_tokens, if using chat or completion models.
- MistralFormat string
- If using mistral provider, select the upstream message format. must be one of ["ollama", "openai"]
- OutputCost float64
- Defines the cost per 1M tokens in the output of the AI.
- Temperature float64
- Defines the matching temperature, if using chat or completion models.
- TopK float64
- Defines the top-k most likely tokens, if supported.
- TopP float64
- Defines the top-p probability mass, if supported.
- UpstreamPath string
- Manually specify or override the AI operation path, used when e.g. using the 'preserve' route_type.
- UpstreamUrl string
- Manually specify or override the full URL to the AI operation endpoints, when calling (self-)hosted models, or for running via a private endpoint.
- anthropicVersion String
- Defines the schema/API version, if using Anthropic provider.
- azureApiVersion String
- 'api-version' for Azure OpenAI instances.
- azureDeploymentId String
- Deployment ID for Azure OpenAI instances.
- azureInstance String
- Instance name for Azure OpenAI hosted models.
- bedrock
GatewayPlugin Ai Proxy Config Model Options Bedrock 
- gemini
GatewayPlugin Ai Proxy Config Model Options Gemini 
- huggingface
GatewayPlugin Ai Proxy Config Model Options Huggingface 
- inputCost Double
- Defines the cost per 1M tokens in your prompt.
- llama2Format String
- If using llama2 provider, select the upstream message format. must be one of ["ollama", "openai", "raw"]
- maxTokens Double
- Defines the max_tokens, if using chat or completion models.
- mistralFormat String
- If using mistral provider, select the upstream message format. must be one of ["ollama", "openai"]
- outputCost Double
- Defines the cost per 1M tokens in the output of the AI.
- temperature Double
- Defines the matching temperature, if using chat or completion models.
- topK Double
- Defines the top-k most likely tokens, if supported.
- topP Double
- Defines the top-p probability mass, if supported.
- upstreamPath String
- Manually specify or override the AI operation path, used when e.g. using the 'preserve' route_type.
- upstreamUrl String
- Manually specify or override the full URL to the AI operation endpoints, when calling (self-)hosted models, or for running via a private endpoint.
- anthropicVersion string
- Defines the schema/API version, if using Anthropic provider.
- azureApiVersion string
- 'api-version' for Azure OpenAI instances.
- azureDeploymentId string
- Deployment ID for Azure OpenAI instances.
- azureInstance string
- Instance name for Azure OpenAI hosted models.
- bedrock
GatewayPlugin Ai Proxy Config Model Options Bedrock 
- gemini
GatewayPlugin Ai Proxy Config Model Options Gemini 
- huggingface
GatewayPlugin Ai Proxy Config Model Options Huggingface 
- inputCost number
- Defines the cost per 1M tokens in your prompt.
- llama2Format string
- If using llama2 provider, select the upstream message format. must be one of ["ollama", "openai", "raw"]
- maxTokens number
- Defines the max_tokens, if using chat or completion models.
- mistralFormat string
- If using mistral provider, select the upstream message format. must be one of ["ollama", "openai"]
- outputCost number
- Defines the cost per 1M tokens in the output of the AI.
- temperature number
- Defines the matching temperature, if using chat or completion models.
- topK number
- Defines the top-k most likely tokens, if supported.
- topP number
- Defines the top-p probability mass, if supported.
- upstreamPath string
- Manually specify or override the AI operation path, used when e.g. using the 'preserve' route_type.
- upstreamUrl string
- Manually specify or override the full URL to the AI operation endpoints, when calling (self-)hosted models, or for running via a private endpoint.
- anthropic_version str
- Defines the schema/API version, if using Anthropic provider.
- azure_api_version str
- 'api-version' for Azure OpenAI instances.
- azure_deployment_id str
- Deployment ID for Azure OpenAI instances.
- azure_instance str
- Instance name for Azure OpenAI hosted models.
- bedrock
GatewayPlugin Ai Proxy Config Model Options Bedrock 
- gemini
GatewayPlugin Ai Proxy Config Model Options Gemini 
- huggingface
GatewayPlugin Ai Proxy Config Model Options Huggingface 
- input_cost float
- Defines the cost per 1M tokens in your prompt.
- llama2_format str
- If using llama2 provider, select the upstream message format. must be one of ["ollama", "openai", "raw"]
- max_tokens float
- Defines the max_tokens, if using chat or completion models.
- mistral_format str
- If using mistral provider, select the upstream message format. must be one of ["ollama", "openai"]
- output_cost float
- Defines the cost per 1M tokens in the output of the AI.
- temperature float
- Defines the matching temperature, if using chat or completion models.
- top_k float
- Defines the top-k most likely tokens, if supported.
- top_p float
- Defines the top-p probability mass, if supported.
- upstream_path str
- Manually specify or override the AI operation path, used when e.g. using the 'preserve' route_type.
- upstream_url str
- Manually specify or override the full URL to the AI operation endpoints, when calling (self-)hosted models, or for running via a private endpoint.
- anthropicVersion String
- Defines the schema/API version, if using Anthropic provider.
- azureApiVersion String
- 'api-version' for Azure OpenAI instances.
- azureDeploymentId String
- Deployment ID for Azure OpenAI instances.
- azureInstance String
- Instance name for Azure OpenAI hosted models.
- bedrock Property Map
- gemini Property Map
- huggingface Property Map
- inputCost Number
- Defines the cost per 1M tokens in your prompt.
- llama2Format String
- If using llama2 provider, select the upstream message format. must be one of ["ollama", "openai", "raw"]
- maxTokens Number
- Defines the max_tokens, if using chat or completion models.
- mistralFormat String
- If using mistral provider, select the upstream message format. must be one of ["ollama", "openai"]
- outputCost Number
- Defines the cost per 1M tokens in the output of the AI.
- temperature Number
- Defines the matching temperature, if using chat or completion models.
- topK Number
- Defines the top-k most likely tokens, if supported.
- topP Number
- Defines the top-p probability mass, if supported.
- upstreamPath String
- Manually specify or override the AI operation path, used when e.g. using the 'preserve' route_type.
- upstreamUrl String
- Manually specify or override the full URL to the AI operation endpoints, when calling (self-)hosted models, or for running via a private endpoint.
GatewayPluginAiProxyConfigModelOptionsBedrock, GatewayPluginAiProxyConfigModelOptionsBedrockArgs                
- AwsRegion string
- If using AWS providers (Bedrock) you can override the AWS_REGION environment variable by setting this option.
- AwsRegion string
- If using AWS providers (Bedrock) you can override the AWS_REGION environment variable by setting this option.
- awsRegion String
- If using AWS providers (Bedrock) you can override the AWS_REGION environment variable by setting this option.
- awsRegion string
- If using AWS providers (Bedrock) you can override the AWS_REGION environment variable by setting this option.
- aws_region str
- If using AWS providers (Bedrock) you can override the AWS_REGION environment variable by setting this option.
- awsRegion String
- If using AWS providers (Bedrock) you can override the AWS_REGION environment variable by setting this option.
GatewayPluginAiProxyConfigModelOptionsGemini, GatewayPluginAiProxyConfigModelOptionsGeminiArgs                
- ApiEndpoint string
- If running Gemini on Vertex, specify the regional API endpoint (hostname only).
- LocationId string
- If running Gemini on Vertex, specify the location ID.
- ProjectId string
- If running Gemini on Vertex, specify the project ID.
- ApiEndpoint string
- If running Gemini on Vertex, specify the regional API endpoint (hostname only).
- LocationId string
- If running Gemini on Vertex, specify the location ID.
- ProjectId string
- If running Gemini on Vertex, specify the project ID.
- apiEndpoint String
- If running Gemini on Vertex, specify the regional API endpoint (hostname only).
- locationId String
- If running Gemini on Vertex, specify the location ID.
- projectId String
- If running Gemini on Vertex, specify the project ID.
- apiEndpoint string
- If running Gemini on Vertex, specify the regional API endpoint (hostname only).
- locationId string
- If running Gemini on Vertex, specify the location ID.
- projectId string
- If running Gemini on Vertex, specify the project ID.
- api_endpoint str
- If running Gemini on Vertex, specify the regional API endpoint (hostname only).
- location_id str
- If running Gemini on Vertex, specify the location ID.
- project_id str
- If running Gemini on Vertex, specify the project ID.
- apiEndpoint String
- If running Gemini on Vertex, specify the regional API endpoint (hostname only).
- locationId String
- If running Gemini on Vertex, specify the location ID.
- projectId String
- If running Gemini on Vertex, specify the project ID.
GatewayPluginAiProxyConfigModelOptionsHuggingface, GatewayPluginAiProxyConfigModelOptionsHuggingfaceArgs                
- UseCache bool
- Use the cache layer on the inference API
- WaitForModel bool
- Wait for the model if it is not ready
- UseCache bool
- Use the cache layer on the inference API
- WaitForModel bool
- Wait for the model if it is not ready
- useCache Boolean
- Use the cache layer on the inference API
- waitForModel Boolean
- Wait for the model if it is not ready
- useCache boolean
- Use the cache layer on the inference API
- waitForModel boolean
- Wait for the model if it is not ready
- use_cache bool
- Use the cache layer on the inference API
- wait_for_model bool
- Wait for the model if it is not ready
- useCache Boolean
- Use the cache layer on the inference API
- waitForModel Boolean
- Wait for the model if it is not ready
GatewayPluginAiProxyConsumer, GatewayPluginAiProxyConsumerArgs          
- Id string
- Id string
- id String
- id string
- id str
- id String
GatewayPluginAiProxyConsumerGroup, GatewayPluginAiProxyConsumerGroupArgs            
- Id string
- Id string
- id String
- id string
- id str
- id String
GatewayPluginAiProxyOrdering, GatewayPluginAiProxyOrderingArgs          
GatewayPluginAiProxyOrderingAfter, GatewayPluginAiProxyOrderingAfterArgs            
- Accesses List<string>
- Accesses []string
- accesses List<String>
- accesses string[]
- accesses Sequence[str]
- accesses List<String>
GatewayPluginAiProxyOrderingBefore, GatewayPluginAiProxyOrderingBeforeArgs            
- Accesses List<string>
- Accesses []string
- accesses List<String>
- accesses string[]
- accesses Sequence[str]
- accesses List<String>
GatewayPluginAiProxyRoute, GatewayPluginAiProxyRouteArgs          
- Id string
- Id string
- id String
- id string
- id str
- id String
GatewayPluginAiProxyService, GatewayPluginAiProxyServiceArgs          
- Id string
- Id string
- id String
- id string
- id str
- id String
Import
$ pulumi import konnect:index/gatewayPluginAiProxy:GatewayPluginAiProxy my_konnect_gateway_plugin_ai_proxy "{ \"control_plane_id\": \"9524ec7d-36d9-465d-a8c5-83a3c9390458\", \"plugin_id\": \"3473c251-5b6c-4f45-b1ff-7ede735a366d\"}"
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- konnect kong/terraform-provider-konnect
- License
- Notes
- This Pulumi package is based on the konnect Terraform Provider.