1. Packages
  2. AWS Classic
  3. API Docs
  4. bedrockfoundation
  5. getModels

Try AWS Native preview for resources not in the classic version.

AWS Classic v6.42.0 published on Wednesday, Jun 26, 2024 by Pulumi

aws.bedrockfoundation.getModels

Explore with Pulumi AI

aws logo

Try AWS Native preview for resources not in the classic version.

AWS Classic v6.42.0 published on Wednesday, Jun 26, 2024 by Pulumi

    Data source for retrieving information about available AWS Bedrock Foundation Models. This is a read-only data source; it lists models and does not manage any resources.

    Example Usage

    Basic Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as aws from "@pulumi/aws";
    
    const test = aws.bedrockfoundation.getModels({});
    
    import pulumi
    import pulumi_aws as aws
    
    test = aws.bedrockfoundation.get_models()
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/bedrockfoundation"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := bedrockfoundation.GetModels(ctx, nil, nil)
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Aws = Pulumi.Aws;
    
    return await Deployment.RunAsync(() => 
    {
        var test = Aws.BedrockFoundation.GetModels.Invoke();
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.aws.bedrockfoundation.BedrockfoundationFunctions;
    import com.pulumi.aws.bedrockfoundation.inputs.GetModelsArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            final var test = BedrockfoundationFunctions.getModels();
    
        }
    }
    
    variables:
      test:
        fn::invoke:
          Function: aws:bedrockfoundation:getModels
          Arguments: {}
    

    Filter by Inference Type

    import * as pulumi from "@pulumi/pulumi";
    import * as aws from "@pulumi/aws";
    
    const test = aws.bedrockfoundation.getModels({
        byInferenceType: "ON_DEMAND",
    });
    
    import pulumi
    import pulumi_aws as aws
    
    test = aws.bedrockfoundation.get_models(by_inference_type="ON_DEMAND")
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/bedrockfoundation"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := bedrockfoundation.GetModels(ctx, &bedrockfoundation.GetModelsArgs{
    			ByInferenceType: pulumi.StringRef("ON_DEMAND"),
    		}, nil)
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Aws = Pulumi.Aws;
    
    return await Deployment.RunAsync(() => 
    {
        var test = Aws.BedrockFoundation.GetModels.Invoke(new()
        {
            ByInferenceType = "ON_DEMAND",
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.aws.bedrockfoundation.BedrockfoundationFunctions;
    import com.pulumi.aws.bedrockfoundation.inputs.GetModelsArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            final var test = BedrockfoundationFunctions.getModels(GetModelsArgs.builder()
                .byInferenceType("ON_DEMAND")
                .build());
    
        }
    }
    
    variables:
      test:
        fn::invoke:
          Function: aws:bedrockfoundation:getModels
          Arguments:
            byInferenceType: ON_DEMAND
    

    Using getModels

    Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.

    function getModels(args: GetModelsArgs, opts?: InvokeOptions): Promise<GetModelsResult>
    function getModelsOutput(args: GetModelsOutputArgs, opts?: InvokeOptions): Output<GetModelsResult>
    def get_models(by_customization_type: Optional[str] = None,
                   by_inference_type: Optional[str] = None,
                   by_output_modality: Optional[str] = None,
                   by_provider: Optional[str] = None,
                   opts: Optional[InvokeOptions] = None) -> GetModelsResult
    def get_models_output(by_customization_type: Optional[pulumi.Input[str]] = None,
                   by_inference_type: Optional[pulumi.Input[str]] = None,
                   by_output_modality: Optional[pulumi.Input[str]] = None,
                   by_provider: Optional[pulumi.Input[str]] = None,
                   opts: Optional[InvokeOptions] = None) -> Output[GetModelsResult]
    func GetModels(ctx *Context, args *GetModelsArgs, opts ...InvokeOption) (*GetModelsResult, error)
    func GetModelsOutput(ctx *Context, args *GetModelsOutputArgs, opts ...InvokeOption) GetModelsResultOutput

    > Note: This function is named GetModels in the Go SDK.

    public static class GetModels 
    {
        public static Task<GetModelsResult> InvokeAsync(GetModelsArgs args, InvokeOptions? opts = null)
        public static Output<GetModelsResult> Invoke(GetModelsInvokeArgs args, InvokeOptions? opts = null)
    }
    public static CompletableFuture<GetModelsResult> getModels(GetModelsArgs args, InvokeOptions options)
    // Output-based functions aren't available in Java yet
    
    fn::invoke:
      function: aws:bedrockfoundation/getModels:getModels
      arguments:
        # arguments dictionary

    The following arguments are supported:

    ByCustomizationType string
    Customization type to filter on. The only valid value is FINE_TUNING.
    ByInferenceType string
    Inference type to filter on. Valid values are ON_DEMAND and PROVISIONED.
    ByOutputModality string
    Output modality to filter on. Valid values are TEXT, IMAGE, and EMBEDDING.
    ByProvider string
    Model provider to filter on.
    ByCustomizationType string
    Customization type to filter on. The only valid value is FINE_TUNING.
    ByInferenceType string
    Inference type to filter on. Valid values are ON_DEMAND and PROVISIONED.
    ByOutputModality string
    Output modality to filter on. Valid values are TEXT, IMAGE, and EMBEDDING.
    ByProvider string
    Model provider to filter on.
    byCustomizationType String
    Customization type to filter on. The only valid value is FINE_TUNING.
    byInferenceType String
    Inference type to filter on. Valid values are ON_DEMAND and PROVISIONED.
    byOutputModality String
    Output modality to filter on. Valid values are TEXT, IMAGE, and EMBEDDING.
    byProvider String
    Model provider to filter on.
    byCustomizationType string
    Customization type to filter on. The only valid value is FINE_TUNING.
    byInferenceType string
    Inference type to filter on. Valid values are ON_DEMAND and PROVISIONED.
    byOutputModality string
    Output modality to filter on. Valid values are TEXT, IMAGE, and EMBEDDING.
    byProvider string
    Model provider to filter on.
    by_customization_type str
    Customization type to filter on. The only valid value is FINE_TUNING.
    by_inference_type str
    Inference type to filter on. Valid values are ON_DEMAND and PROVISIONED.
    by_output_modality str
    Output modality to filter on. Valid values are TEXT, IMAGE, and EMBEDDING.
    by_provider str
    Model provider to filter on.
    byCustomizationType String
    Customization type to filter on. The only valid value is FINE_TUNING.
    byInferenceType String
    Inference type to filter on. Valid values are ON_DEMAND and PROVISIONED.
    byOutputModality String
    Output modality to filter on. Valid values are TEXT, IMAGE, and EMBEDDING.
    byProvider String
    Model provider to filter on.

    getModels Result

    The following output properties are available:

    Id string
    AWS region.
    ModelSummaries List<GetModelsModelSummary>
    List of model summary objects. See model_summaries.
    ByCustomizationType string
    ByInferenceType string
    ByOutputModality string
    ByProvider string
    Id string
    AWS region.
    ModelSummaries []GetModelsModelSummary
    List of model summary objects. See model_summaries.
    ByCustomizationType string
    ByInferenceType string
    ByOutputModality string
    ByProvider string
    id String
    AWS region.
    modelSummaries List<GetModelsModelSummary>
    List of model summary objects. See model_summaries.
    byCustomizationType String
    byInferenceType String
    byOutputModality String
    byProvider String
    id string
    AWS region.
    modelSummaries GetModelsModelSummary[]
    List of model summary objects. See model_summaries.
    byCustomizationType string
    byInferenceType string
    byOutputModality string
    byProvider string
    id str
    AWS region.
    model_summaries Sequence[GetModelsModelSummary]
    List of model summary objects. See model_summaries.
    by_customization_type str
    by_inference_type str
    by_output_modality str
    by_provider str
    id String
    AWS region.
    modelSummaries List<Property Map>
    List of model summary objects. See model_summaries.
    byCustomizationType String
    byInferenceType String
    byOutputModality String
    byProvider String

    Supporting Types

    GetModelsModelSummary

    CustomizationsSupporteds List<object>
    Customizations that the model supports.
    InferenceTypesSupporteds List<object>
    Inference types that the model supports.
    InputModalities List<object>
    Input modalities that the model supports.
    ModelArn string
    Model ARN.
    ModelId string
    Model identifier.
    ModelName string
    Model name.
    OutputModalities List<object>
    Output modalities that the model supports.
    ProviderName string
    Model provider name.
    ResponseStreamingSupported bool
    Indicates whether the model supports streaming.
    CustomizationsSupporteds []interface{}
    Customizations that the model supports.
    InferenceTypesSupporteds []interface{}
    Inference types that the model supports.
    InputModalities []interface{}
    Input modalities that the model supports.
    ModelArn string
    Model ARN.
    ModelId string
    Model identifier.
    ModelName string
    Model name.
    OutputModalities []interface{}
    Output modalities that the model supports.
    ProviderName string
    Model provider name.
    ResponseStreamingSupported bool
    Indicates whether the model supports streaming.
    customizationsSupporteds List<Object>
    Customizations that the model supports.
    inferenceTypesSupporteds List<Object>
    Inference types that the model supports.
    inputModalities List<Object>
    Input modalities that the model supports.
    modelArn String
    Model ARN.
    modelId String
    Model identifier.
    modelName String
    Model name.
    outputModalities List<Object>
    Output modalities that the model supports.
    providerName String
    Model provider name.
    responseStreamingSupported Boolean
    Indicates whether the model supports streaming.
    customizationsSupporteds any[]
    Customizations that the model supports.
    inferenceTypesSupporteds any[]
    Inference types that the model supports.
    inputModalities any[]
    Input modalities that the model supports.
    modelArn string
    Model ARN.
    modelId string
    Model identifier.
    modelName string
    Model name.
    outputModalities any[]
    Output modalities that the model supports.
    providerName string
    Model provider name.
    responseStreamingSupported boolean
    Indicates whether the model supports streaming.
    customizations_supporteds Sequence[Any]
    Customizations that the model supports.
    inference_types_supporteds Sequence[Any]
    Inference types that the model supports.
    input_modalities Sequence[Any]
    Input modalities that the model supports.
    model_arn str
    Model ARN.
    model_id str
    Model identifier.
    model_name str
    Model name.
    output_modalities Sequence[Any]
    Output modalities that the model supports.
    provider_name str
    Model provider name.
    response_streaming_supported bool
    Indicates whether the model supports streaming.
    customizationsSupporteds List<Any>
    Customizations that the model supports.
    inferenceTypesSupporteds List<Any>
    Inference types that the model supports.
    inputModalities List<Any>
    Input modalities that the model supports.
    modelArn String
    Model ARN.
    modelId String
    Model identifier.
    modelName String
    Model name.
    outputModalities List<Any>
    Output modalities that the model supports.
    providerName String
    Model provider name.
    responseStreamingSupported Boolean
    Indicates whether the model supports streaming.

    Package Details

    Repository
    AWS Classic pulumi/pulumi-aws
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the aws Terraform Provider.
    aws logo

    Try AWS Native preview for resources not in the classic version.

    AWS Classic v6.42.0 published on Wednesday, Jun 26, 2024 by Pulumi