AWS v6.77.1 published on Friday, Apr 18, 2025 by Pulumi

aws.bedrock.getInferenceProfile

Data source for retrieving information about an AWS Bedrock Inference Profile.

Example Usage

Basic Usage

import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";

const test = aws.bedrock.getInferenceProfiles({});
const testGetInferenceProfile = test.then(test => aws.bedrock.getInferenceProfile({
    inferenceProfileId: test.inferenceProfileSummaries?.[0]?.inferenceProfileId,
}));
import pulumi
import pulumi_aws as aws

test = aws.bedrock.get_inference_profiles()
test_get_inference_profile = aws.bedrock.get_inference_profile(inference_profile_id=test.inference_profile_summaries[0].inference_profile_id)
package main

import (
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/bedrock"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		test, err := bedrock.GetInferenceProfiles(ctx, &bedrock.GetInferenceProfilesArgs{}, nil)
		if err != nil {
			return err
		}
		_, err = bedrock.LookupInferenceProfile(ctx, &bedrock.LookupInferenceProfileArgs{
			InferenceProfileId: test.InferenceProfileSummaries[0].InferenceProfileId,
		}, nil)
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;

return await Deployment.RunAsync(() => 
{
    var test = Aws.Bedrock.GetInferenceProfiles.Invoke();

    var testGetInferenceProfile = Aws.Bedrock.GetInferenceProfile.Invoke(new()
    {
        InferenceProfileId = test.Apply(getInferenceProfilesResult => getInferenceProfilesResult.InferenceProfileSummaries[0]?.InferenceProfileId),
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.bedrock.BedrockFunctions;
import com.pulumi.aws.bedrock.inputs.GetInferenceProfileArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        final var test = BedrockFunctions.getInferenceProfiles();

        final var testGetInferenceProfile = BedrockFunctions.getInferenceProfile(GetInferenceProfileArgs.builder()
            .inferenceProfileId(test.applyValue(result -> result.inferenceProfileSummaries().get(0).inferenceProfileId()))
            .build());

    }
}
variables:
  test:
    fn::invoke:
      function: aws:bedrock:getInferenceProfiles
      arguments: {}
  testGetInferenceProfile:
    fn::invoke:
      function: aws:bedrock:getInferenceProfile
      arguments:
        inferenceProfileId: ${test.inferenceProfileSummaries[0].inferenceProfileId}

Using getInferenceProfile

Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.

function getInferenceProfile(args: GetInferenceProfileArgs, opts?: InvokeOptions): Promise<GetInferenceProfileResult>
function getInferenceProfileOutput(args: GetInferenceProfileOutputArgs, opts?: InvokeOptions): Output<GetInferenceProfileResult>
def get_inference_profile(inference_profile_id: Optional[str] = None,
                          opts: Optional[InvokeOptions] = None) -> GetInferenceProfileResult
def get_inference_profile_output(inference_profile_id: Optional[pulumi.Input[str]] = None,
                          opts: Optional[InvokeOptions] = None) -> Output[GetInferenceProfileResult]
func LookupInferenceProfile(ctx *Context, args *LookupInferenceProfileArgs, opts ...InvokeOption) (*LookupInferenceProfileResult, error)
func LookupInferenceProfileOutput(ctx *Context, args *LookupInferenceProfileOutputArgs, opts ...InvokeOption) LookupInferenceProfileResultOutput

> Note: This function is named LookupInferenceProfile in the Go SDK.

public static class GetInferenceProfile 
{
    public static Task<GetInferenceProfileResult> InvokeAsync(GetInferenceProfileArgs args, InvokeOptions? opts = null)
    public static Output<GetInferenceProfileResult> Invoke(GetInferenceProfileInvokeArgs args, InvokeOptions? opts = null)
}
public static CompletableFuture<GetInferenceProfileResult> getInferenceProfilePlain(GetInferenceProfilePlainArgs args, InvokeOptions options)
public static Output<GetInferenceProfileResult> getInferenceProfile(GetInferenceProfileArgs args, InvokeOptions options)
fn::invoke:
  function: aws:bedrock/getInferenceProfile:getInferenceProfile
  arguments:
    # arguments dictionary
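
To make the two forms concrete, here is a minimal TypeScript sketch of both invocations. The inference profile ID is a placeholder for illustration, not a real identifier:

import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";

// Direct form: plain arguments in, Promise-wrapped result out.
const direct = aws.bedrock.getInferenceProfile({
    inferenceProfileId: "example-profile-id", // placeholder
});
export const directArn = direct.then(profile => profile.inferenceProfileArn);

// Output form: Input-wrapped arguments in, Output-wrapped result out.
// Useful when the ID flows from another resource or invoke.
const profileId = pulumi.output("example-profile-id"); // placeholder
const viaOutput = aws.bedrock.getInferenceProfileOutput({
    inferenceProfileId: profileId,
});
export const outputArn = viaOutput.inferenceProfileArn;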

The following arguments are supported:

InferenceProfileId This property is required. string
Inference Profile identifier.
InferenceProfileId This property is required. string
Inference Profile identifier.
inferenceProfileId This property is required. String
Inference Profile identifier.
inferenceProfileId This property is required. string
Inference Profile identifier.
inference_profile_id This property is required. str
Inference Profile identifier.
inferenceProfileId This property is required. String
Inference Profile identifier.

getInferenceProfile Result

The following output properties are available:

CreatedAt string
The time at which the inference profile was created.
Description string
The description of the inference profile.
Id string
The provider-assigned unique ID for this managed resource.
InferenceProfileArn string
The Amazon Resource Name (ARN) of the inference profile.
InferenceProfileId string
Inference Profile identifier.
InferenceProfileName string
The name of the inference profile.
Models List<GetInferenceProfileModel>
A list of information about each model in the inference profile. See models.
Status string
The status of the inference profile. ACTIVE means that the inference profile is available to use.
Type string
The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock. APPLICATION means that the inference profile is defined by the user.
UpdatedAt string
The time at which the inference profile was last updated.
CreatedAt string
The time at which the inference profile was created.
Description string
The description of the inference profile.
Id string
The provider-assigned unique ID for this managed resource.
InferenceProfileArn string
The Amazon Resource Name (ARN) of the inference profile.
InferenceProfileId string
Inference Profile identifier.
InferenceProfileName string
The name of the inference profile.
Models []GetInferenceProfileModel
A list of information about each model in the inference profile. See models.
Status string
The status of the inference profile. ACTIVE means that the inference profile is available to use.
Type string
The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock. APPLICATION means that the inference profile is defined by the user.
UpdatedAt string
The time at which the inference profile was last updated.
createdAt String
The time at which the inference profile was created.
description String
The description of the inference profile.
id String
The provider-assigned unique ID for this managed resource.
inferenceProfileArn String
The Amazon Resource Name (ARN) of the inference profile.
inferenceProfileId String
Inference Profile identifier.
inferenceProfileName String
The name of the inference profile.
models List<GetInferenceProfileModel>
A list of information about each model in the inference profile. See models.
status String
The status of the inference profile. ACTIVE means that the inference profile is available to use.
type String
The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock. APPLICATION means that the inference profile is defined by the user.
updatedAt String
The time at which the inference profile was last updated.
createdAt string
The time at which the inference profile was created.
description string
The description of the inference profile.
id string
The provider-assigned unique ID for this managed resource.
inferenceProfileArn string
The Amazon Resource Name (ARN) of the inference profile.
inferenceProfileId string
Inference Profile identifier.
inferenceProfileName string
The name of the inference profile.
models GetInferenceProfileModel[]
A list of information about each model in the inference profile. See models.
status string
The status of the inference profile. ACTIVE means that the inference profile is available to use.
type string
The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock. APPLICATION means that the inference profile is defined by the user.
updatedAt string
The time at which the inference profile was last updated.
created_at str
The time at which the inference profile was created.
description str
The description of the inference profile.
id str
The provider-assigned unique ID for this managed resource.
inference_profile_arn str
The Amazon Resource Name (ARN) of the inference profile.
inference_profile_id str
Inference Profile identifier.
inference_profile_name str
The name of the inference profile.
models Sequence[GetInferenceProfileModel]
A list of information about each model in the inference profile. See models.
status str
The status of the inference profile. ACTIVE means that the inference profile is available to use.
type str
The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock. APPLICATION means that the inference profile is defined by the user.
updated_at str
The time at which the inference profile was last updated.
createdAt String
The time at which the inference profile was created.
description String
The description of the inference profile.
id String
The provider-assigned unique ID for this managed resource.
inferenceProfileArn String
The Amazon Resource Name (ARN) of the inference profile.
inferenceProfileId String
Inference Profile identifier.
inferenceProfileName String
The name of the inference profile.
models List<Property Map>
A list of information about each model in the inference profile. See models.
status String
The status of the inference profile. ACTIVE means that the inference profile is available to use.
type String
The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock. APPLICATION means that the inference profile is defined by the user.
updatedAt String
The time at which the inference profile was last updated.

Supporting Types

GetInferenceProfileModel

ModelArn This property is required. string
The Amazon Resource Name (ARN) of the model.
ModelArn This property is required. string
The Amazon Resource Name (ARN) of the model.
modelArn This property is required. String
The Amazon Resource Name (ARN) of the model.
modelArn This property is required. string
The Amazon Resource Name (ARN) of the model.
model_arn This property is required. str
The Amazon Resource Name (ARN) of the model.
modelArn This property is required. String
The Amazon Resource Name (ARN) of the model.

Package Details

Repository
AWS Classic pulumi/pulumi-aws
License
Apache-2.0
Notes
This Pulumi package is based on the aws Terraform Provider.