diff --git a/.doc_gen/metadata/bedrock-runtime_metadata.yaml b/.doc_gen/metadata/bedrock-runtime_metadata.yaml
index 3f985d8b278..f867b82cec4 100644
--- a/.doc_gen/metadata/bedrock-runtime_metadata.yaml
+++ b/.doc_gen/metadata/bedrock-runtime_metadata.yaml
@@ -22,19 +22,6 @@ bedrock-runtime_Hello:
             snippet_files:
               - javascriptv3/example_code/bedrock-runtime/hello.js
-    Python:
-      versions:
-        - sdk_version: 3
-          github: python/example_code/bedrock-runtime
-          sdkguide:
-          excerpts:
-            - description: Send a prompt to a model with the InvokeModel operation.
-              snippet_tags:
-                - bedrock-runtime.example_code.hello_bedrock_invoke.complete
-            - description: Send a user message to a model with the Converse operation.
-              snippet_tags:
-                - bedrock-runtime.example_code.hello_bedrock_converse.complete
-
   services:
     bedrock-runtime: {InvokeModel}
@@ -115,50 +102,6 @@ bedrock-runtime_Converse_AmazonNovaText:
   services:
     bedrock-runtime: {Converse}
 
-bedrock-runtime_Converse_AmazonTitanText:
-  title: Invoke Amazon Titan Text on &BR; using Bedrock's Converse API
-  title_abbrev: "Converse"
-  synopsis: send a text message to Amazon Titan Text, using Bedrock's Converse API.
-  category: Amazon Titan Text
-  languages:
-    Java:
-      versions:
-        - sdk_version: 2
-          github: javav2/example_code/bedrock-runtime
-          excerpts:
-            - description: Send a text message to Amazon Titan Text, using Bedrock's Converse API.
-              snippet_tags:
-                - bedrock-runtime.java2.Converse_AmazonTitanText
-            - description: Send a text message to Amazon Titan Text, using Bedrock's Converse API with the async Java client.
-              snippet_tags:
-                - bedrock-runtime.java2.ConverseAsync_AmazonTitanText
-    .NET:
-      versions:
-        - sdk_version: 4
-          github: dotnetv4/Bedrock-runtime
-          excerpts:
-            - description: Send a text message to Amazon Titan Text, using Bedrock's Converse API.
-              snippet_tags:
-                - BedrockRuntime.dotnetv4.Converse_AmazonTitanText
-    Python:
-      versions:
-        - sdk_version: 3
-          github: python/example_code/bedrock-runtime
-          excerpts:
-            - description: Send a text message to Amazon Titan Text, using Bedrock's Converse API.
-              snippet_tags:
-                - python.example_code.bedrock-runtime.Converse_AmazonTitanText
-    JavaScript:
-      versions:
-        - sdk_version: 3
-          github: javascriptv3/example_code/bedrock-runtime
-          excerpts:
-            - description: Send a text message to Amazon Titan Text, using Bedrock's Converse API.
-              snippet_tags:
-                - javascript.v3.bedrock-runtime.Converse_AmazonTitanText
-  services:
-    bedrock-runtime: {Converse}
-
 bedrock-runtime_Scenario_ToolUse:
   title: "A tool use example illustrating how to connect AI models on &BR; with a custom tool or API"
   title_abbrev: "Tool use with the Converse API"
@@ -508,47 +451,6 @@ bedrock-runtime_ConverseStream_AmazonNovaText:
   services:
     bedrock-runtime: {ConverseStream}
 
-bedrock-runtime_ConverseStream_AmazonTitanText:
-  title: Invoke Amazon Titan Text on &BR; using Bedrock's Converse API with a response stream
-  title_abbrev: "ConverseStream"
-  synopsis: send a text message to Amazon Titan Text, using Bedrock's Converse API and process the response stream in real-time.
-  category: Amazon Titan Text
-  languages:
-    Java:
-      versions:
-        - sdk_version: 2
-          github: javav2/example_code/bedrock-runtime
-          excerpts:
-            - description: Send a text message to Amazon Titan Text, using Bedrock's Converse API and process the response stream in real-time.
-              snippet_tags:
-                - bedrock-runtime.java2.ConverseStream_AmazonTitanText
-    .NET:
-      versions:
-        - sdk_version: 4
-          github: dotnetv4/Bedrock-runtime
-          excerpts:
-            - description: Send a text message to Amazon Titan Text, using Bedrock's Converse API and process the response stream in real-time.
-              snippet_tags:
-                - BedrockRuntime.dotnetv4.ConverseStream_AmazonTitanText
-    Python:
-      versions:
-        - sdk_version: 3
-          github: python/example_code/bedrock-runtime
-          excerpts:
-            - description: Send a text message to Amazon Titan Text, using Bedrock's Converse API and process the response stream in real-time.
-              snippet_tags:
-                - python.example_code.bedrock-runtime.ConverseStream_AmazonTitanText
-    JavaScript:
-      versions:
-        - sdk_version: 3
-          github: javascriptv3/example_code/bedrock-runtime
-          excerpts:
-            - description: Send a text message to Amazon Titan Text, using Bedrock's Converse API and process the response stream in real-time.
-              snippet_tags:
-                - javascript.v3.bedrock-runtime.ConverseStream_AmazonTitanText
-  services:
-    bedrock-runtime: {ConverseStream}
-
 bedrock-runtime_ConverseStream_AnthropicClaude:
   title: Invoke Anthropic Claude on &BR; using Bedrock's Converse API with a response stream
   title_abbrev: "ConverseStream"
@@ -741,64 +643,6 @@ bedrock-runtime_ConverseStream_Mistral:
     bedrock-runtime: {ConverseStream}
 
 # Invoke Model
-bedrock-runtime_InvokeModel_TitanText:
-  title: Invoke Amazon Titan Text models on &BR; using the Invoke Model API
-  title_abbrev: "InvokeModel"
-  synopsis: send a text message to Amazon Titan Text, using the Invoke Model API.
-  category: Amazon Titan Text
-  languages:
-    Java:
-      versions:
-        - sdk_version: 2
-          github: javav2/example_code/bedrock-runtime
-          excerpts:
-            - description: Use the Invoke Model API to send a text message.
-              snippet_tags:
-                - bedrock-runtime.java2.InvokeModel_AmazonTitanText
-    Kotlin:
-      versions:
-        - sdk_version: 1
-          github: kotlin/services/bedrock-runtime
-          excerpts:
-            - description: Use the Invoke Model API to generate a short story.
-              snippet_tags:
-                - bedrock-runtime.kotlin.InvokeModel_AmazonTitanText
-    .NET:
-      versions:
-        - sdk_version: 4
-          github: dotnetv4/Bedrock-runtime
-          excerpts:
-            - description: Use the Invoke Model API to send a text message.
-              snippet_tags:
-                - BedrockRuntime.dotnetv4.InvokeModel_AmazonTitanText
-    Python:
-      versions:
-        - sdk_version: 3
-          github: python/example_code/bedrock-runtime
-          excerpts:
-            - description: Use the Invoke Model API to send a text message.
-              snippet_tags:
-                - python.example_code.bedrock-runtime.InvokeModel_TitanText
-    JavaScript:
-      versions:
-        - sdk_version: 3
-          github: javascriptv3/example_code/bedrock-runtime
-          excerpts:
-            - description: Use the Invoke Model API to send a text message.
-              snippet_files:
-                - javascriptv3/example_code/bedrock-runtime/models/amazonTitanText/invoke_model.js
-    Go:
-      versions:
-        - sdk_version: 2
-          github: gov2/bedrock-runtime
-          excerpts:
-            - description: Use the Invoke Model API to send a text message.
-              snippet_tags:
-                - gov2.bedrock-runtime.InvokeModelWrapper.struct
-                - gov2.bedrock-runtime.InvokeTitanText
-  services:
-    bedrock-runtime: {InvokeModel}
-
 bedrock-runtime_InvokeModel_AnthropicClaude:
   title: Invoke Anthropic Claude on &BR; using the Invoke Model API
   title_abbrev: "InvokeModel"
@@ -872,39 +716,6 @@ bedrock-runtime_InvokeModel_AnthropicClaude:
   services:
     bedrock-runtime: {InvokeModel}
 
-bedrock-runtime_InvokeModel_CohereCommand:
-  title: Invoke Cohere Command on &BR; using the Invoke Model API
-  title_abbrev: "InvokeModel: Command and Command Light"
-  synopsis: send a text message to Cohere Command, using the Invoke Model API.
-  category: Cohere Command
-  languages:
-    Java:
-      versions:
-        - sdk_version: 2
-          github: javav2/example_code/bedrock-runtime
-          excerpts:
-            - description: Use the Invoke Model API to send a text message.
-              snippet_tags:
-                - bedrock-runtime.java2.InvokeModel_CohereCommand
-    .NET:
-      versions:
-        - sdk_version: 3
-          github: dotnetv3/Bedrock-runtime
-          excerpts:
-            - description: Use the Invoke Model API to send a text message.
-              snippet_tags:
-                - BedrockRuntime.dotnetv3.InvokeModel_CohereCommand
-    Python:
-      versions:
-        - sdk_version: 3
-          github: python/example_code/bedrock-runtime
-          excerpts:
-            - description: Use the Invoke Model API to send a text message.
-              snippet_tags:
-                - python.example_code.bedrock-runtime.InvokeModel_CohereCommand
-  services:
-    bedrock-runtime: {InvokeModel}
-
 bedrock-runtime_InvokeModel_CohereCommandR:
   title: Invoke Cohere Command R and R+ on &BR; using the Invoke Model API
   title_abbrev: "InvokeModel: Command R and R+"
@@ -1021,39 +832,6 @@ bedrock-runtime_InvokeModel_MistralAi:
     bedrock-runtime: {InvokeModel}
 
 # Invoke Model with Response Stream
-bedrock-runtime_InvokeModelWithResponseStream_TitanText:
-  title: Invoke Amazon Titan Text models on &BR; using the Invoke Model API with a response stream
-  title_abbrev: "InvokeModelWithResponseStream"
-  synopsis: send a text message to Amazon Titan Text models, using the Invoke Model API, and print the response stream.
-  category: Amazon Titan Text
-  languages:
-    Java:
-      versions:
-        - sdk_version: 2
-          github: javav2/example_code/bedrock-runtime
-          excerpts:
-            - description: Use the Invoke Model API to send a text message and process the response stream in real-time.
-              snippet_tags:
-                - bedrock-runtime.java2.InvokeModelWithResponseStream_AmazonTitanText
-    .NET:
-      versions:
-        - sdk_version: 3
-          github: dotnetv3/Bedrock-runtime
-          excerpts:
-            - description: Use the Invoke Model API to send a text message and process the response stream in real-time.
-              snippet_tags:
-                - BedrockRuntime.dotnetv3.InvokeModelWithResponseStream_AmazonTitanText
-    Python:
-      versions:
-        - sdk_version: 3
-          github: python/example_code/bedrock-runtime
-          excerpts:
-            - description: Use the Invoke Model API to send a text message and process the response stream in real-time.
-              snippet_tags:
-                - python.example_code.bedrock-runtime.InvokeModelWithResponseStream_TitanText
-  services:
-    bedrock-runtime: {InvokeModelWithResponseStream}
-
 bedrock-runtime_InvokeModelWithResponseStream_AnthropicClaude:
   title: Invoke Anthropic Claude models on &BR; using the Invoke Model API with a response stream
   title_abbrev: "InvokeModelWithResponseStream"
@@ -1104,39 +882,6 @@ bedrock-runtime_InvokeModelWithResponseStream_AnthropicClaude:
   services:
     bedrock-runtime: {InvokeModelWithResponseStream}
 
-bedrock-runtime_InvokeModelWithResponseStream_CohereCommand:
-  title: Invoke Cohere Command on &BR; using the Invoke Model API with a response stream
-  title_abbrev: "InvokeModelWithResponseStream: Command and Command Light"
-  synopsis: send a text message to Cohere Command, using the Invoke Model API with a response stream.
-  category: Cohere Command
-  languages:
-    Java:
-      versions:
-        - sdk_version: 2
-          github: javav2/example_code/bedrock-runtime
-          excerpts:
-            - description: Use the Invoke Model API to send a text message and process the response stream in real-time.
-              snippet_tags:
-                - bedrock-runtime.java2.InvokeModelWithResponseStream_CohereCommand
-    .NET:
-      versions:
-        - sdk_version: 3
-          github: dotnetv3/Bedrock-runtime
-          excerpts:
-            - description: Use the Invoke Model API to send a text message and process the response stream in real-time.
-              snippet_tags:
-                - BedrockRuntime.dotnetv3.InvokeModelWithResponseStream_CohereCommand
-    Python:
-      versions:
-        - sdk_version: 3
-          github: python/example_code/bedrock-runtime
-          excerpts:
-            - description: Use the Invoke Model API to send a text message and process the response stream in real-time.
-              snippet_tags:
-                - python.example_code.bedrock-runtime.InvokeModelWithResponseStream_CohereCommand
-  services:
-    bedrock-runtime: {InvokeModel}
-
 bedrock-runtime_InvokeModelWithResponseStream_CohereCommandR:
   title: Invoke Cohere Command R and R+ on &BR; using the Invoke Model API with a response stream
   title_abbrev: "InvokeModelWithResponseStream: Command R and R+"
diff --git a/dotnetv3/Bedrock-runtime/Actions/HelloBedrockRuntime.cs b/dotnetv3/Bedrock-runtime/Actions/HelloBedrockRuntime.cs
index 1ee5a0bf9ea..34036fad7d9 100644
--- a/dotnetv3/Bedrock-runtime/Actions/HelloBedrockRuntime.cs
+++ b/dotnetv3/Bedrock-runtime/Actions/HelloBedrockRuntime.cs
@@ -28,7 +28,7 @@ private static async Task Invoke(string modelId, string prompt)
                 default:
                     Console.WriteLine($"Unknown model ID: {modelId}. Valid model IDs are: {CLAUDE}.");
                     break;
-            };
+            }
         }
     }
 }
\ No newline at end of file
diff --git a/dotnetv3/Bedrock-runtime/BedrockRuntimeExamples.sln b/dotnetv3/Bedrock-runtime/BedrockRuntimeExamples.sln
index f396e0c4bd8..7cfd5ba1493 100644
--- a/dotnetv3/Bedrock-runtime/BedrockRuntimeExamples.sln
+++ b/dotnetv3/Bedrock-runtime/BedrockRuntimeExamples.sln
@@ -10,11 +10,7 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BedrockRuntimeTests", "Test
   {02823466-F5FF-43A2-B70A-EF3482A0CBDD} = {02823466-F5FF-43A2-B70A-EF3482A0CBDD}
   {0574B2F4-D4BE-4155-902B-BF3D7CE4804E} = {0574B2F4-D4BE-4155-902B-BF3D7CE4804E}
   {1E62D4FB-CC59-4F1E-BB22-574CEC08C94B} = {1E62D4FB-CC59-4F1E-BB22-574CEC08C94B}
-  {2A6989CB-B273-4841-BD3E-7B1BBA4DD25F} = {2A6989CB-B273-4841-BD3E-7B1BBA4DD25F}
-  {3D6441FC-0FE8-4D0C-910D-3D9310599C71} = {3D6441FC-0FE8-4D0C-910D-3D9310599C71}
   {4B5A00D6-B9F1-449F-A9D2-80E860D6BD75} = {4B5A00D6-B9F1-449F-A9D2-80E860D6BD75}
-  {52CDA3F4-F090-4224-978A-5F42388DCF92} = {52CDA3F4-F090-4224-978A-5F42388DCF92}
-  {63984664-8230-40F3-BFF5-7AC4988D7FE7} = {63984664-8230-40F3-BFF5-7AC4988D7FE7}
   {8D40F644-A2EB-46F0-B0A0-C7B7B563E6BA} = {8D40F644-A2EB-46F0-B0A0-C7B7B563E6BA}
   {9018AC00-1D39-41DB-9B3F-A0A7FC60AF18} = {9018AC00-1D39-41DB-9B3F-A0A7FC60AF18}
   {AAE5224C-4947-44C2-8BFB-70AF1F91A6EE} = {AAE5224C-4947-44C2-8BFB-70AF1F91A6EE}
@@ -24,26 +20,20 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BedrockRuntimeTests", "Test
   {CDF1A045-0888-418C-8656-2BF5E3348A48} = {CDF1A045-0888-418C-8656-2BF5E3348A48}
   {D1B0719F-4F84-4DBC-BCAD-E856FB3193D7} = {D1B0719F-4F84-4DBC-BCAD-E856FB3193D7}
   {D3BA31F5-FF20-4321-9494-3F01439C4F61} = {D3BA31F5-FF20-4321-9494-3F01439C4F61}
-  {D96E9BC2-3143-4F95-835C-5F3AAC414B9C} = {D96E9BC2-3143-4F95-835C-5F3AAC414B9C}
   {EFC7D088-EF45-464B-97CD-0BBA486B224A} = {EFC7D088-EF45-464B-97CD-0BBA486B224A}
   {F6E2F781-D0C6-4912-8E2F-F6C36FDE4785} = {F6E2F781-D0C6-4912-8E2F-F6C36FDE4785}
-  {FCA6CE8F-531C-4C53-A7C1-3E3028A98CC4} = {FCA6CE8F-531C-4C53-A7C1-3E3028A98CC4}
  EndProjectSection
 EndProject
 Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Models", "Models", "{41B69207-8F29-41BC-9114-78EE740485C8}"
 EndProject
 Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "AnthropicClaude", "AnthropicClaude", "{8BAC2322-AD3C-484A-B51D-8263BC4E6646}"
 EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "AmazonTitanText", "AmazonTitanText", "{3F96ECB4-1644-43E8-8643-2CDCF9E679F1}"
-EndProject
 Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "CohereCommand", "CohereCommand", "{EF45C0B9-ED76-4B7A-A0A7-F102E979B71C}"
 EndProject
 Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "MetaLlama", "MetaLlama", "{65504C76-7E32-4A12-A42E-BCDA4FE79BC1}"
 EndProject
 Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Mistral", "Mistral", "{BBB79D3E-5DF2-4FF6-B467-52D0EEB91C4B}"
 EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Converse", "Models\AmazonTitanText\Converse\Converse.csproj", "{D96E9BC2-3143-4F95-835C-5F3AAC414B9C}"
-EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Converse", "Models\AnthropicClaude\Converse\Converse.csproj", "{8D40F644-A2EB-46F0-B0A0-C7B7B563E6BA}"
 EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Converse", "Models\CohereCommand\Converse\Converse.csproj", "{CDF1A045-0888-418C-8656-2BF5E3348A48}"
@@ -52,8 +42,6 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Converse", "Models\MetaLlam
 EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Converse", "Models\Mistral\Converse\Converse.csproj", "{9018AC00-1D39-41DB-9B3F-A0A7FC60AF18}"
 EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ConverseStream", "Models\AmazonTitanText\ConverseStream\ConverseStream.csproj", "{FCA6CE8F-531C-4C53-A7C1-3E3028A98CC4}"
-EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ConverseStream", "Models\AnthropicClaude\ConverseStream\ConverseStream.csproj", "{AAE5224C-4947-44C2-8BFB-70AF1F91A6EE}"
 EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ConverseStream", "Models\CohereCommand\ConverseStream\ConverseStream.csproj", "{0574B2F4-D4BE-4155-902B-BF3D7CE4804E}"
@@ -62,22 +50,14 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ConverseStream", "Models\Me
 EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ConverseStream", "Models\Mistral\ConverseStream\ConverseStream.csproj", "{B6924BBB-9993-44C1-BEF9-DEDEA42A12B2}"
 EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "InvokeModel", "Models\AmazonTitanText\InvokeModel\InvokeModel.csproj", "{3D6441FC-0FE8-4D0C-910D-3D9310599C71}"
-EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "InvokeModel", "Models\AnthropicClaude\InvokeModel\InvokeModel.csproj", "{D1B0719F-4F84-4DBC-BCAD-E856FB3193D7}"
 EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "InvokeModel", "Models\Mistral\InvokeModel\InvokeModel.csproj", "{1E62D4FB-CC59-4F1E-BB22-574CEC08C94B}"
 EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Llama3_InvokeModel", "Models\MetaLlama\Llama3_InvokeModel\Llama3_InvokeModel.csproj", "{B753CEB9-EA53-4AE1-997E-B7D54A299D58}"
 EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Command_InvokeModel", "Models\CohereCommand\Command_InvokeModel\Command_InvokeModel.csproj", "{2A6989CB-B273-4841-BD3E-7B1BBA4DD25F}"
-EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Command_R_InvokeModel", "Models\CohereCommand\Command_R_InvokeModel\Command_R_InvokeModel.csproj", "{BCC66C37-4980-484F-819D-066D2FF2669C}"
 EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "InvokeModelWithResponseStream", "Models\AmazonTitanText\InvokeModelWithResponseStream\InvokeModelWithResponseStream.csproj", "{52CDA3F4-F090-4224-978A-5F42388DCF92}"
-EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Command_InvokeModelWithResponseStream", "Models\CohereCommand\Command_InvokeModelWithResponseStream\Command_InvokeModelWithResponseStream.csproj", "{63984664-8230-40F3-BFF5-7AC4988D7FE7}"
-EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Command_R_InvokeModelWithResponseStream", "Models\CohereCommand\Command_R_InvokeModelWithResponseStream\Command_R_InvokeModelWithResponseStream.csproj", "{02823466-F5FF-43A2-B70A-EF3482A0CBDD}"
 EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Llama3_InvokeModelWithResponseStream", "Models\MetaLlama\Llama3_InvokeModelWithResponseStream\Llama3_InvokeModelWithResponseStream.csproj", "{4B5A00D6-B9F1-449F-A9D2-80E860D6BD75}"
@@ -116,10 +96,6 @@ Global
   {6DF749A5-67E4-4F4E-BA64-A2D3F00E4700}.Debug|Any CPU.Build.0 = Debug|Any CPU
   {6DF749A5-67E4-4F4E-BA64-A2D3F00E4700}.Release|Any CPU.ActiveCfg = Release|Any CPU
   {6DF749A5-67E4-4F4E-BA64-A2D3F00E4700}.Release|Any CPU.Build.0 = Release|Any CPU
-  {D96E9BC2-3143-4F95-835C-5F3AAC414B9C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-  {D96E9BC2-3143-4F95-835C-5F3AAC414B9C}.Debug|Any CPU.Build.0 = Debug|Any CPU
-  {D96E9BC2-3143-4F95-835C-5F3AAC414B9C}.Release|Any CPU.ActiveCfg = Release|Any CPU
-  {D96E9BC2-3143-4F95-835C-5F3AAC414B9C}.Release|Any CPU.Build.0 = Release|Any CPU
   {8D40F644-A2EB-46F0-B0A0-C7B7B563E6BA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
   {8D40F644-A2EB-46F0-B0A0-C7B7B563E6BA}.Debug|Any CPU.Build.0 = Debug|Any CPU
   {8D40F644-A2EB-46F0-B0A0-C7B7B563E6BA}.Release|Any CPU.ActiveCfg = Release|Any CPU
@@ -136,10 +112,6 @@ Global
   {9018AC00-1D39-41DB-9B3F-A0A7FC60AF18}.Debug|Any CPU.Build.0 = Debug|Any CPU
   {9018AC00-1D39-41DB-9B3F-A0A7FC60AF18}.Release|Any CPU.ActiveCfg = Release|Any CPU
   {9018AC00-1D39-41DB-9B3F-A0A7FC60AF18}.Release|Any CPU.Build.0 = Release|Any CPU
-  {FCA6CE8F-531C-4C53-A7C1-3E3028A98CC4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-  {FCA6CE8F-531C-4C53-A7C1-3E3028A98CC4}.Debug|Any CPU.Build.0 = Debug|Any CPU
-  {FCA6CE8F-531C-4C53-A7C1-3E3028A98CC4}.Release|Any CPU.ActiveCfg = Release|Any CPU
-  {FCA6CE8F-531C-4C53-A7C1-3E3028A98CC4}.Release|Any CPU.Build.0 = Release|Any CPU
   {AAE5224C-4947-44C2-8BFB-70AF1F91A6EE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
   {AAE5224C-4947-44C2-8BFB-70AF1F91A6EE}.Debug|Any CPU.Build.0 = Debug|Any CPU
   {AAE5224C-4947-44C2-8BFB-70AF1F91A6EE}.Release|Any CPU.ActiveCfg = Release|Any CPU
@@ -156,10 +128,6 @@ Global
   {B6924BBB-9993-44C1-BEF9-DEDEA42A12B2}.Debug|Any CPU.Build.0 = Debug|Any CPU
   {B6924BBB-9993-44C1-BEF9-DEDEA42A12B2}.Release|Any CPU.ActiveCfg = Release|Any CPU
   {B6924BBB-9993-44C1-BEF9-DEDEA42A12B2}.Release|Any CPU.Build.0 = Release|Any CPU
-  {3D6441FC-0FE8-4D0C-910D-3D9310599C71}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-  {3D6441FC-0FE8-4D0C-910D-3D9310599C71}.Debug|Any CPU.Build.0 = Debug|Any CPU
-  {3D6441FC-0FE8-4D0C-910D-3D9310599C71}.Release|Any CPU.ActiveCfg = Release|Any CPU
-  {3D6441FC-0FE8-4D0C-910D-3D9310599C71}.Release|Any CPU.Build.0 = Release|Any CPU
   {D1B0719F-4F84-4DBC-BCAD-E856FB3193D7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
   {D1B0719F-4F84-4DBC-BCAD-E856FB3193D7}.Debug|Any CPU.Build.0 = Debug|Any CPU
   {D1B0719F-4F84-4DBC-BCAD-E856FB3193D7}.Release|Any CPU.ActiveCfg = Release|Any CPU
@@ -172,22 +140,10 @@ Global
   {B753CEB9-EA53-4AE1-997E-B7D54A299D58}.Debug|Any CPU.Build.0 = Debug|Any CPU
   {B753CEB9-EA53-4AE1-997E-B7D54A299D58}.Release|Any CPU.ActiveCfg = Release|Any CPU
   {B753CEB9-EA53-4AE1-997E-B7D54A299D58}.Release|Any CPU.Build.0 = Release|Any CPU
-  {2A6989CB-B273-4841-BD3E-7B1BBA4DD25F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-  {2A6989CB-B273-4841-BD3E-7B1BBA4DD25F}.Debug|Any CPU.Build.0 = Debug|Any CPU
-  {2A6989CB-B273-4841-BD3E-7B1BBA4DD25F}.Release|Any CPU.ActiveCfg = Release|Any CPU
-  {2A6989CB-B273-4841-BD3E-7B1BBA4DD25F}.Release|Any CPU.Build.0 = Release|Any CPU
   {BCC66C37-4980-484F-819D-066D2FF2669C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
   {BCC66C37-4980-484F-819D-066D2FF2669C}.Debug|Any CPU.Build.0 = Debug|Any CPU
   {BCC66C37-4980-484F-819D-066D2FF2669C}.Release|Any CPU.ActiveCfg = Release|Any CPU
   {BCC66C37-4980-484F-819D-066D2FF2669C}.Release|Any CPU.Build.0 = Release|Any CPU
-  {52CDA3F4-F090-4224-978A-5F42388DCF92}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-  {52CDA3F4-F090-4224-978A-5F42388DCF92}.Debug|Any CPU.Build.0 = Debug|Any CPU
-  {52CDA3F4-F090-4224-978A-5F42388DCF92}.Release|Any CPU.ActiveCfg = Release|Any CPU
-  {52CDA3F4-F090-4224-978A-5F42388DCF92}.Release|Any CPU.Build.0 = Release|Any CPU
-  {63984664-8230-40F3-BFF5-7AC4988D7FE7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-  {63984664-8230-40F3-BFF5-7AC4988D7FE7}.Debug|Any CPU.Build.0 = Debug|Any CPU
-  {63984664-8230-40F3-BFF5-7AC4988D7FE7}.Release|Any CPU.ActiveCfg = Release|Any CPU
-  {63984664-8230-40F3-BFF5-7AC4988D7FE7}.Release|Any CPU.Build.0 = Release|Any CPU
   {02823466-F5FF-43A2-B70A-EF3482A0CBDD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
   {02823466-F5FF-43A2-B70A-EF3482A0CBDD}.Debug|Any CPU.Build.0 = Debug|Any CPU
   {02823466-F5FF-43A2-B70A-EF3482A0CBDD}.Release|Any CPU.ActiveCfg = Release|Any CPU
@@ -231,28 +187,21 @@ Global
  GlobalSection(NestedProjects) = preSolution
   {6DF749A5-67E4-4F4E-BA64-A2D3F00E4700} = {E2AD9F91-E6D4-412D-A07A-E004042ADF30}
   {8BAC2322-AD3C-484A-B51D-8263BC4E6646} = {41B69207-8F29-41BC-9114-78EE740485C8}
-  {3F96ECB4-1644-43E8-8643-2CDCF9E679F1} = {41B69207-8F29-41BC-9114-78EE740485C8}
   {EF45C0B9-ED76-4B7A-A0A7-F102E979B71C} = {41B69207-8F29-41BC-9114-78EE740485C8}
   {65504C76-7E32-4A12-A42E-BCDA4FE79BC1} = {41B69207-8F29-41BC-9114-78EE740485C8}
   {BBB79D3E-5DF2-4FF6-B467-52D0EEB91C4B} = {41B69207-8F29-41BC-9114-78EE740485C8}
-  {D96E9BC2-3143-4F95-835C-5F3AAC414B9C} = {3F96ECB4-1644-43E8-8643-2CDCF9E679F1}
   {8D40F644-A2EB-46F0-B0A0-C7B7B563E6BA} = {8BAC2322-AD3C-484A-B51D-8263BC4E6646}
   {CDF1A045-0888-418C-8656-2BF5E3348A48} = {EF45C0B9-ED76-4B7A-A0A7-F102E979B71C}
   {F6E2F781-D0C6-4912-8E2F-F6C36FDE4785} = {65504C76-7E32-4A12-A42E-BCDA4FE79BC1}
   {9018AC00-1D39-41DB-9B3F-A0A7FC60AF18} = {BBB79D3E-5DF2-4FF6-B467-52D0EEB91C4B}
-  {FCA6CE8F-531C-4C53-A7C1-3E3028A98CC4} = {3F96ECB4-1644-43E8-8643-2CDCF9E679F1}
   {AAE5224C-4947-44C2-8BFB-70AF1F91A6EE} = {8BAC2322-AD3C-484A-B51D-8263BC4E6646}
   {0574B2F4-D4BE-4155-902B-BF3D7CE4804E} = {EF45C0B9-ED76-4B7A-A0A7-F102E979B71C}
   {D3BA31F5-FF20-4321-9494-3F01439C4F61} = {65504C76-7E32-4A12-A42E-BCDA4FE79BC1}
   {B6924BBB-9993-44C1-BEF9-DEDEA42A12B2} = {BBB79D3E-5DF2-4FF6-B467-52D0EEB91C4B}
-  {3D6441FC-0FE8-4D0C-910D-3D9310599C71} = {3F96ECB4-1644-43E8-8643-2CDCF9E679F1}
   {D1B0719F-4F84-4DBC-BCAD-E856FB3193D7} = {8BAC2322-AD3C-484A-B51D-8263BC4E6646}
   {1E62D4FB-CC59-4F1E-BB22-574CEC08C94B} = {BBB79D3E-5DF2-4FF6-B467-52D0EEB91C4B}
   {B753CEB9-EA53-4AE1-997E-B7D54A299D58} = {65504C76-7E32-4A12-A42E-BCDA4FE79BC1}
-  {2A6989CB-B273-4841-BD3E-7B1BBA4DD25F} = {EF45C0B9-ED76-4B7A-A0A7-F102E979B71C}
   {BCC66C37-4980-484F-819D-066D2FF2669C} = {EF45C0B9-ED76-4B7A-A0A7-F102E979B71C}
-  {52CDA3F4-F090-4224-978A-5F42388DCF92} = {3F96ECB4-1644-43E8-8643-2CDCF9E679F1}
-  {63984664-8230-40F3-BFF5-7AC4988D7FE7} = {EF45C0B9-ED76-4B7A-A0A7-F102E979B71C}
   {02823466-F5FF-43A2-B70A-EF3482A0CBDD} = {EF45C0B9-ED76-4B7A-A0A7-F102E979B71C}
   {4B5A00D6-B9F1-449F-A9D2-80E860D6BD75} = {65504C76-7E32-4A12-A42E-BCDA4FE79BC1}
   {EFC7D088-EF45-464B-97CD-0BBA486B224A} = {BBB79D3E-5DF2-4FF6-B467-52D0EEB91C4B}
diff --git a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/Converse/Converse.cs b/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/Converse/Converse.cs
deleted file mode 100644
index 21bf4dca724..00000000000
--- a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/Converse/Converse.cs
+++ /dev/null
@@ -1,60 +0,0 @@
-// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
-// SPDX-License-Identifier: Apache-2.0
-
-// snippet-start:[BedrockRuntime.dotnetv3.Converse_AmazonTitanText]
-// Use the Converse API to send a text message to Amazon Titan Text.
-
-using System;
-using System.Collections.Generic;
-using Amazon;
-using Amazon.BedrockRuntime;
-using Amazon.BedrockRuntime.Model;
-
-// Create a Bedrock Runtime client in the AWS Region you want to use.
-var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
-
-// Set the model ID, e.g., Titan Text Premier.
-var modelId = "amazon.titan-text-premier-v1:0";
-
-// Define the user message.
-var userMessage = "Describe the purpose of a 'hello world' program in one line.";
-
-// Create a request with the model ID, the user message, and an inference configuration.
-var request = new ConverseRequest
-{
-    ModelId = modelId,
-    Messages = new List<Message>
-    {
-        new Message
-        {
-            Role = ConversationRole.User,
-            Content = new List<ContentBlock> { new ContentBlock { Text = userMessage } }
-        }
-    },
-    InferenceConfig = new InferenceConfiguration()
-    {
-        MaxTokens = 512,
-        Temperature = 0.5F,
-        TopP = 0.9F
-    }
-};
-
-try
-{
-    // Send the request to the Bedrock Runtime and wait for the result.
-    var response = await client.ConverseAsync(request);
-
-    // Extract and print the response text.
-    string responseText = response?.Output?.Message?.Content?[0]?.Text ?? "";
-    Console.WriteLine(responseText);
-}
-catch (AmazonBedrockRuntimeException e)
-{
-    Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
-    throw;
-}
-
-// snippet-end:[BedrockRuntime.dotnetv3.Converse_AmazonTitanText]
-
-// Create a partial class to make the top-level script testable.
-namespace AmazonTitanText { public partial class Converse { } }
\ No newline at end of file
diff --git a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/Converse/Converse.csproj b/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/Converse/Converse.csproj
deleted file mode 100644
index 3651f4be200..00000000000
--- a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/Converse/Converse.csproj
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-Exe
-net8.0
-AmazonTitanText.$(MSBuildProjectName)
-
-
-
-
-
-
-
diff --git a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/ConverseStream/ConverseStream.cs b/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/ConverseStream/ConverseStream.cs
deleted file mode 100644
index 5a0d0444b38..00000000000
--- a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/ConverseStream/ConverseStream.cs
+++ /dev/null
@@ -1,67 +0,0 @@
-// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
-// SPDX-License-Identifier: Apache-2.0
-
-// snippet-start:[BedrockRuntime.dotnetv3.ConverseStream_AmazonTitanText]
-// Use the Converse API to send a text message to Amazon Titan Text
-// and print the response stream.
-
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using Amazon;
-using Amazon.BedrockRuntime;
-using Amazon.BedrockRuntime.Model;
-
-// Create a Bedrock Runtime client in the AWS Region you want to use.
-var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
-
-// Set the model ID, e.g., Titan Text Premier.
-var modelId = "amazon.titan-text-premier-v1:0";
-
-// Define the user message.
-var userMessage = "Describe the purpose of a 'hello world' program in one line.";
-
-// Create a request with the model ID, the user message, and an inference configuration.
-var request = new ConverseStreamRequest
-{
-    ModelId = modelId,
-    Messages = new List<Message>
-    {
-        new Message
-        {
-            Role = ConversationRole.User,
-            Content = new List<ContentBlock> { new ContentBlock { Text = userMessage } }
-        }
-    },
-    InferenceConfig = new InferenceConfiguration()
-    {
-        MaxTokens = 512,
-        Temperature = 0.5F,
-        TopP = 0.9F
-    }
-};
-
-try
-{
-    // Send the request to the Bedrock Runtime and wait for the result.
-    var response = await client.ConverseStreamAsync(request);
-
-    // Extract and print the streamed response text in real-time.
-    foreach (var chunk in response.Stream.AsEnumerable())
-    {
-        if (chunk is ContentBlockDeltaEvent)
-        {
-            Console.Write((chunk as ContentBlockDeltaEvent).Delta.Text);
-        }
-    }
-}
-catch (AmazonBedrockRuntimeException e)
-{
-    Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
-    throw;
-}
-
-// snippet-end:[BedrockRuntime.dotnetv3.ConverseStream_AmazonTitanText]
-
-// Create a partial class to make the top-level script testable.
-namespace AmazonTitanText { public partial class ConverseStream { } }
\ No newline at end of file
diff --git a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/ConverseStream/ConverseStream.csproj b/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/ConverseStream/ConverseStream.csproj
deleted file mode 100644
index 662bf35f3e1..00000000000
--- a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/ConverseStream/ConverseStream.csproj
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-Exe
-net8.0
-AmazonTitanText.$(MSBuildProjectName)
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/InvokeModel/InvokeModel.cs b/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/InvokeModel/InvokeModel.cs
deleted file mode 100644
index 9ec0d69dde2..00000000000
--- a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/InvokeModel/InvokeModel.cs
+++ /dev/null
@@ -1,64 +0,0 @@
-// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
-// SPDX-License-Identifier: Apache-2.0
-
-// snippet-start:[BedrockRuntime.dotnetv3.InvokeModel_AmazonTitanText]
-// Use the native inference API to send a text message to Amazon Titan Text.
-
-using System;
-using System.IO;
-using System.Text.Json;
-using System.Text.Json.Nodes;
-using Amazon;
-using Amazon.BedrockRuntime;
-using Amazon.BedrockRuntime.Model;
-
-// Create a Bedrock Runtime client in the AWS Region you want to use.
-var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
-
-// Set the model ID, e.g., Titan Text Premier.
-var modelId = "amazon.titan-text-premier-v1:0";
-
-// Define the user message.
-var userMessage = "Describe the purpose of a 'hello world' program in one line.";
-
-//Format the request payload using the model's native structure.
-var nativeRequest = JsonSerializer.Serialize(new
-{
-    inputText = userMessage,
-    textGenerationConfig = new
-    {
-        maxTokenCount = 512,
-        temperature = 0.5
-    }
-});
-
-// Create a request with the model ID and the model's native request payload.
-var request = new InvokeModelRequest()
-{
-    ModelId = modelId,
-    Body = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(nativeRequest)),
-    ContentType = "application/json"
-};
-
-try
-{
-    // Send the request to the Bedrock Runtime and wait for the response.
-    var response = await client.InvokeModelAsync(request);
-
-    // Decode the response body.
-    var modelResponse = await JsonNode.ParseAsync(response.Body);
-
-    // Extract and print the response text.
-    var responseText = modelResponse["results"]?[0]?["outputText"] ?? "";
-    Console.WriteLine(responseText);
-}
-catch (AmazonBedrockRuntimeException e)
-{
-    Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
-    throw;
-}
-
-// snippet-end:[BedrockRuntime.dotnetv3.InvokeModel_AmazonTitanText]
-
-// Create a partial class to make the top-level script testable.
-namespace AmazonTitanText { public partial class InvokeModel { } }
\ No newline at end of file
diff --git a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/InvokeModel/InvokeModel.csproj b/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/InvokeModel/InvokeModel.csproj
deleted file mode 100644
index 662bf35f3e1..00000000000
--- a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/InvokeModel/InvokeModel.csproj
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-Exe
-net8.0
-AmazonTitanText.$(MSBuildProjectName)
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs b/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs
deleted file mode 100644
index 2247c06671e..00000000000
--- a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs
+++ /dev/null
@@ -1,66 +0,0 @@
-// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
-// SPDX-License-Identifier: Apache-2.0
-
-// snippet-start:[BedrockRuntime.dotnetv3.InvokeModelWithResponseStream_AmazonTitanText]
-// Use the native inference API to send a text message to Amazon Titan Text
-// and print the response stream.
-
-using System;
-using System.IO;
-using System.Text.Json;
-using System.Text.Json.Nodes;
-using Amazon;
-using Amazon.BedrockRuntime;
-using Amazon.BedrockRuntime.Model;
-
-// Create a Bedrock Runtime client in the AWS Region you want to use.
-var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
-
-// Set the model ID, e.g., Titan Text Premier.
-var modelId = "amazon.titan-text-premier-v1:0";
-
-// Define the user message.
-var userMessage = "Describe the purpose of a 'hello world' program in one line.";
-
-//Format the request payload using the model's native structure.
-var nativeRequest = JsonSerializer.Serialize(new
-{
-    inputText = userMessage,
-    textGenerationConfig = new
-    {
-        maxTokenCount = 512,
-        temperature = 0.5
-    }
-});
-
-// Create a request with the model ID and the model's native request payload.
-var request = new InvokeModelWithResponseStreamRequest()
-{
-    ModelId = modelId,
-    Body = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(nativeRequest)),
-    ContentType = "application/json"
-};
-
-try
-{
-    // Send the request to the Bedrock Runtime and wait for the response.
-    var streamingResponse = await client.InvokeModelWithResponseStreamAsync(request);
-
-    // Extract and print the streamed response text in real-time.
-    foreach (var item in streamingResponse.Body)
-    {
-        var chunk = JsonSerializer.Deserialize<JsonNode>((item as PayloadPart).Bytes);
-        var text = chunk["outputText"] ?? "";
-        Console.Write(text);
-    }
-}
-catch (AmazonBedrockRuntimeException e)
-{
-    Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
-    throw;
-}
-
-// snippet-end:[BedrockRuntime.dotnetv3.InvokeModelWithResponseStream_AmazonTitanText]
-
-// Create a partial class to make the top-level script testable.
-namespace AmazonTitanText { public partial class InvokeModelWithResponseStream { } }
\ No newline at end of file
diff --git a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj b/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj
deleted file mode 100644
index 662bf35f3e1..00000000000
--- a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-Exe
-net8.0
-AmazonTitanText.$(MSBuildProjectName)
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/dotnetv3/Bedrock-runtime/README.md b/dotnetv3/Bedrock-runtime/README.md
index dd5a6ce566a..40a37068273 100644
--- a/dotnetv3/Bedrock-runtime/README.md
+++ b/dotnetv3/Bedrock-runtime/README.md
@@ -45,10 +45,6 @@ functions within the same service.
 
 - [InvokeModel](Models/AmazonNova/AmazonNovaCanvas/InvokeModel/InvokeModel.cs#L4)
 
-### Amazon Titan Text
-
-- [InvokeModelWithResponseStream](Models/AmazonTitanText/InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs#L4)
-
 ### Anthropic Claude
 
 - [InvokeModelWithResponseStream](Models/AnthropicClaude/InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs#L4)
@@ -56,9 +52,7 @@ functions within the same service.
 ### Cohere Command
 
 - [InvokeModel: Command R and R+](Models/CohereCommand/Command_R_InvokeModel/InvokeModel.cs#L4)
-- [InvokeModel: Command and Command Light](Models/CohereCommand/Command_InvokeModel/InvokeModel.cs#L4)
 - [InvokeModelWithResponseStream: Command R and R+](Models/CohereCommand/Command_R_InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs#L4)
-- [InvokeModelWithResponseStream: Command and Command Light](Models/CohereCommand/Command_InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs#L4)
 
 ### Meta Llama
diff --git a/dotnetv3/Bedrock-runtime/Tests/ActionTest_Converse.cs b/dotnetv3/Bedrock-runtime/Tests/ActionTest_Converse.cs
index 10ee89efba5..0b1483600ea 100644
--- a/dotnetv3/Bedrock-runtime/Tests/ActionTest_Converse.cs
+++ b/dotnetv3/Bedrock-runtime/Tests/ActionTest_Converse.cs
@@ -1,4 +1,4 @@
-// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 // SPDX-License-Identifier: Apache-2.0
 
 namespace BedrockRuntimeTests;
@@ -11,7 +11,6 @@ public class ActionTest_Converse
     [InlineData(typeof(CohereCommand.Converse))]
     [InlineData(typeof(AnthropicClaude.Converse))]
    [InlineData(typeof(AmazonNovaText.Converse))]
-    [InlineData(typeof(AmazonTitanText.Converse))]
     public void ConverseDoesNotThrow(Type type)
     {
diff --git a/dotnetv3/Bedrock-runtime/Tests/ActionTest_ConverseStream.cs b/dotnetv3/Bedrock-runtime/Tests/ActionTest_ConverseStream.cs
index 0f6ca41ccac..96f2547f714 100644
--- a/dotnetv3/Bedrock-runtime/Tests/ActionTest_ConverseStream.cs
+++ b/dotnetv3/Bedrock-runtime/Tests/ActionTest_ConverseStream.cs
@@ -1,4 +1,4 @@
-// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 // SPDX-License-Identifier: Apache-2.0
 
 namespace BedrockRuntimeTests;
@@ -11,7 +11,6 @@ public class ActionTest_ConverseStream
     [InlineData(typeof(CohereCommand.ConverseStream))]
     [InlineData(typeof(AnthropicClaude.ConverseStream))]
     [InlineData(typeof(AmazonNovaText.ConverseStream))]
-    [InlineData(typeof(AmazonTitanText.ConverseStream))]
     public void ConverseStreamDoesNotThrow(Type type)
     {
         var entryPoint = type.Assembly.EntryPoint!;
diff --git a/dotnetv3/Bedrock-runtime/Tests/ActionTest_InvokeModelWithResponseStream.cs b/dotnetv3/Bedrock-runtime/Tests/ActionTest_InvokeModelWithResponseStream.cs
index c0520fa0d25..cc89834d635 100644
--- a/dotnetv3/Bedrock-runtime/Tests/ActionTest_InvokeModelWithResponseStream.cs
+++ b/dotnetv3/Bedrock-runtime/Tests/ActionTest_InvokeModelWithResponseStream.cs
@@ -1,4 +1,4 @@
-// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 // SPDX-License-Identifier: Apache-2.0
 
 namespace BedrockRuntimeTests
@@ -8,10 +8,8 @@ public class ActionTest_InvokeModelWithResponseStream
     [Theory, Trait("Category", "Integration")]
     [InlineData(typeof(Mistral.InvokeModelWithResponseStream))]
     [InlineData(typeof(MetaLlama3.InvokeModelWithResponseStream))]
-    [InlineData(typeof(CohereCommand.InvokeModelWithResponseStream))]
     [InlineData(typeof(CohereCommandR.InvokeModelWithResponseStream))]
     [InlineData(typeof(AnthropicClaude.InvokeModelWithResponseStream))]
-    [InlineData(typeof(AmazonTitanText.InvokeModelWithResponseStream))]
     public void InvokeModelWithResponseStreamDoesNotThrow(Type type)
     {
         var entryPoint = type.Assembly.EntryPoint!;
diff --git a/dotnetv3/Bedrock-runtime/Tests/ActionTests_InvokeModel.cs b/dotnetv3/Bedrock-runtime/Tests/ActionTests_InvokeModel.cs
index 35358828d1c..9e9ccc84454 100644
--- a/dotnetv3/Bedrock-runtime/Tests/ActionTests_InvokeModel.cs
+++ b/dotnetv3/Bedrock-runtime/Tests/ActionTests_InvokeModel.cs
@@ -1,4 +1,4 @@
-// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 // SPDX-License-Identifier: Apache-2.0
 
 namespace BedrockRuntimeTests;
@@ -8,10 +8,8 @@ public class ActionTest_InvokeModel
     [Theory, Trait("Category", "Integration")]
     [InlineData(typeof(Mistral.InvokeModel))]
     [InlineData(typeof(MetaLlama3.InvokeModel))]
-    [InlineData(typeof(CohereCommand.InvokeModel))]
     [InlineData(typeof(CohereCommandR.InvokeModel))]
     [InlineData(typeof(AnthropicClaude.InvokeModel))]
-    [InlineData(typeof(AmazonTitanText.InvokeModel))]
     [InlineData(typeof(AmazonNovaCanvas.InvokeModel))]
     public void InvokeModelDoesNotThrow(Type type)
     {
diff --git a/dotnetv3/Bedrock-runtime/Tests/BedrockRuntimeTests.csproj b/dotnetv3/Bedrock-runtime/Tests/BedrockRuntimeTests.csproj
index c46d5b1ec25..55387bce833 100644
--- a/dotnetv3/Bedrock-runtime/Tests/BedrockRuntimeTests.csproj
+++ b/dotnetv3/Bedrock-runtime/Tests/BedrockRuntimeTests.csproj
@@ -30,11 +30,7 @@
-
-
-
-
-
+
diff --git a/dotnetv3/DotNetV3Examples.sln b/dotnetv3/DotNetV3Examples.sln
index 105e2a922a7..26cf2e3be0a 100644
--- a/dotnetv3/DotNetV3Examples.sln
+++ b/dotnetv3/DotNetV3Examples.sln
@@ -791,14 +791,7 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Converse", "Bedrock-runtime
 EndProject
 Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "AmazonTitanText", "AmazonTitanText", "{D6E28F78-4CFB-4C9F-B6E4-29B5E1AAFC5B}"
 EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "InvokeModelWithResponseStream", "Bedrock-runtime\Models\AmazonTitanText\InvokeModelWithResponseStream\InvokeModelWithResponseStream.csproj", "{4DC36E10-B568-484D-82B7-691BCC161A54}"
-EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "InvokeModel", "Bedrock-runtime\Models\AmazonTitanText\InvokeModel\InvokeModel.csproj", "{11287DA7-B956-4857-A733-BC1890788BFF}"
-EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ConverseStream", "Bedrock-runtime\Models\AmazonTitanText\ConverseStream\ConverseStream.csproj", "{6DC5F00F-4EA5-44A7-8842-977F1D9E32B3}"
-EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Converse", "Bedrock-runtime\Models\AmazonTitanText\Converse\Converse.csproj", "{BDF20166-BEC9-4D00-A4AE-D7A6BFC527FB}"
-EndProject
+
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BedrockRuntimeActions", "Bedrock-runtime\Actions\BedrockRuntimeActions.csproj", "{2BE9EAF8-C8C7-4B84-B4BC-C6C91A1D582D}"
 EndProject
 Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "EventBridge Scheduler", "EventBridge Scheduler", "{BEF5CCF6-92DC-40AD-8789-5A00DE2CDD4F}"
@@ -1859,22 +1852,7 @@ Global
   {FB03331C-0985-4B89-8D21-E59D996106BF}.Debug|Any CPU.Build.0 = Debug|Any CPU
   {FB03331C-0985-4B89-8D21-E59D996106BF}.Release|Any CPU.ActiveCfg = Release|Any CPU
   {FB03331C-0985-4B89-8D21-E59D996106BF}.Release|Any CPU.Build.0 = Release|Any CPU
-  {4DC36E10-B568-484D-82B7-691BCC161A54}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-  {4DC36E10-B568-484D-82B7-691BCC161A54}.Debug|Any CPU.Build.0 = Debug|Any CPU
-  {4DC36E10-B568-484D-82B7-691BCC161A54}.Release|Any CPU.ActiveCfg = Release|Any CPU
-  {4DC36E10-B568-484D-82B7-691BCC161A54}.Release|Any CPU.Build.0 = Release|Any CPU
-  {11287DA7-B956-4857-A733-BC1890788BFF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-  {11287DA7-B956-4857-A733-BC1890788BFF}.Debug|Any CPU.Build.0 = Debug|Any CPU
-  {11287DA7-B956-4857-A733-BC1890788BFF}.Release|Any CPU.ActiveCfg = Release|Any CPU
-  {11287DA7-B956-4857-A733-BC1890788BFF}.Release|Any CPU.Build.0 = Release|Any CPU
-  {6DC5F00F-4EA5-44A7-8842-977F1D9E32B3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-  {6DC5F00F-4EA5-44A7-8842-977F1D9E32B3}.Debug|Any CPU.Build.0 = Debug|Any CPU
-  {6DC5F00F-4EA5-44A7-8842-977F1D9E32B3}.Release|Any CPU.ActiveCfg = Release|Any CPU
-  {6DC5F00F-4EA5-44A7-8842-977F1D9E32B3}.Release|Any CPU.Build.0 = Release|Any CPU
-  {BDF20166-BEC9-4D00-A4AE-D7A6BFC527FB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-  {BDF20166-BEC9-4D00-A4AE-D7A6BFC527FB}.Debug|Any CPU.Build.0 = Debug|Any CPU
-  {BDF20166-BEC9-4D00-A4AE-D7A6BFC527FB}.Release|Any CPU.ActiveCfg = Release|Any CPU
-  {BDF20166-BEC9-4D00-A4AE-D7A6BFC527FB}.Release|Any CPU.Build.0 = Release|Any CPU
+
   {2BE9EAF8-C8C7-4B84-B4BC-C6C91A1D582D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
   {2BE9EAF8-C8C7-4B84-B4BC-C6C91A1D582D}.Debug|Any CPU.Build.0 = Debug|Any CPU
   {2BE9EAF8-C8C7-4B84-B4BC-C6C91A1D582D}.Release|Any CPU.ActiveCfg = Release|Any CPU
@@ -2300,10 +2278,7 @@ Global
   {DA8272A9-08F2-4542-BE04-30E93F3FDBFC} = {072C8EA9-4695-4621-80D1-AAED8794B2C4}
   {FB03331C-0985-4B89-8D21-E59D996106BF} = {072C8EA9-4695-4621-80D1-AAED8794B2C4}
   {D6E28F78-4CFB-4C9F-B6E4-29B5E1AAFC5B} = {6520EB28-F7B4-4581-B3D8-A06E9303B16B}
-  {4DC36E10-B568-484D-82B7-691BCC161A54} = {D6E28F78-4CFB-4C9F-B6E4-29B5E1AAFC5B}
-  {11287DA7-B956-4857-A733-BC1890788BFF} = {D6E28F78-4CFB-4C9F-B6E4-29B5E1AAFC5B}
-  {6DC5F00F-4EA5-44A7-8842-977F1D9E32B3} = {D6E28F78-4CFB-4C9F-B6E4-29B5E1AAFC5B}
-  {BDF20166-BEC9-4D00-A4AE-D7A6BFC527FB} = {D6E28F78-4CFB-4C9F-B6E4-29B5E1AAFC5B}
+
   {2BE9EAF8-C8C7-4B84-B4BC-C6C91A1D582D} = {BA23BB28-EC63-4330-8CA7-DEB1B6489580}
   {C419E2E9-960A-4EF2-A287-39404624BD20} = {BEF5CCF6-92DC-40AD-8789-5A00DE2CDD4F}
   {18FA46A8-B626-467A-8F82-B641A8F549D5} = {BEF5CCF6-92DC-40AD-8789-5A00DE2CDD4F}
diff --git a/dotnetv4/Bedrock-runtime/BedrockRuntimeExamples.sln b/dotnetv4/Bedrock-runtime/BedrockRuntimeExamples.sln
index a2947fd6703..28de597447d 100644
--- a/dotnetv4/Bedrock-runtime/BedrockRuntimeExamples.sln
+++ b/dotnetv4/Bedrock-runtime/BedrockRuntimeExamples.sln
@@ -10,11 +10,7 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BedrockRuntimeTests", "Test
   {02823466-F5FF-43A2-B70A-EF3482A0CBDD} = {02823466-F5FF-43A2-B70A-EF3482A0CBDD}
   {0574B2F4-D4BE-4155-902B-BF3D7CE4804E} = {0574B2F4-D4BE-4155-902B-BF3D7CE4804E}
   {1E62D4FB-CC59-4F1E-BB22-574CEC08C94B} = {1E62D4FB-CC59-4F1E-BB22-574CEC08C94B}
-  {2A6989CB-B273-4841-BD3E-7B1BBA4DD25F} = {2A6989CB-B273-4841-BD3E-7B1BBA4DD25F}
-  {3D6441FC-0FE8-4D0C-910D-3D9310599C71} = {3D6441FC-0FE8-4D0C-910D-3D9310599C71}
   {4B5A00D6-B9F1-449F-A9D2-80E860D6BD75} = {4B5A00D6-B9F1-449F-A9D2-80E860D6BD75}
-  {52CDA3F4-F090-4224-978A-5F42388DCF92} = {52CDA3F4-F090-4224-978A-5F42388DCF92}
-  {63984664-8230-40F3-BFF5-7AC4988D7FE7} = {63984664-8230-40F3-BFF5-7AC4988D7FE7}
   {8D40F644-A2EB-46F0-B0A0-C7B7B563E6BA} = {8D40F644-A2EB-46F0-B0A0-C7B7B563E6BA}
   {9018AC00-1D39-41DB-9B3F-A0A7FC60AF18} = {9018AC00-1D39-41DB-9B3F-A0A7FC60AF18}
   {AAE5224C-4947-44C2-8BFB-70AF1F91A6EE} = {AAE5224C-4947-44C2-8BFB-70AF1F91A6EE}
@@ -24,26 +20,20 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BedrockRuntimeTests", "Test
   {CDF1A045-0888-418C-8656-2BF5E3348A48} = {CDF1A045-0888-418C-8656-2BF5E3348A48}
   {D1B0719F-4F84-4DBC-BCAD-E856FB3193D7} = {D1B0719F-4F84-4DBC-BCAD-E856FB3193D7}
   {D3BA31F5-FF20-4321-9494-3F01439C4F61} = {D3BA31F5-FF20-4321-9494-3F01439C4F61}
-  {D96E9BC2-3143-4F95-835C-5F3AAC414B9C} = {D96E9BC2-3143-4F95-835C-5F3AAC414B9C}
   {EFC7D088-EF45-464B-97CD-0BBA486B224A} = {EFC7D088-EF45-464B-97CD-0BBA486B224A}
   {F6E2F781-D0C6-4912-8E2F-F6C36FDE4785} = {F6E2F781-D0C6-4912-8E2F-F6C36FDE4785}
-  {FCA6CE8F-531C-4C53-A7C1-3E3028A98CC4} = {FCA6CE8F-531C-4C53-A7C1-3E3028A98CC4}
  EndProjectSection
 EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Models", "Models", "{41B69207-8F29-41BC-9114-78EE740485C8}" EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "AnthropicClaude", "AnthropicClaude", "{8BAC2322-AD3C-484A-B51D-8263BC4E6646}" EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "AmazonTitanText", "AmazonTitanText", "{3F96ECB4-1644-43E8-8643-2CDCF9E679F1}" -EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "CohereCommand", "CohereCommand", "{EF45C0B9-ED76-4B7A-A0A7-F102E979B71C}" EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "MetaLlama", "MetaLlama", "{65504C76-7E32-4A12-A42E-BCDA4FE79BC1}" EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Mistral", "Mistral", "{BBB79D3E-5DF2-4FF6-B467-52D0EEB91C4B}" EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Converse", "Models\AmazonTitanText\Converse\Converse.csproj", "{D96E9BC2-3143-4F95-835C-5F3AAC414B9C}" -EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Converse", "Models\AnthropicClaude\Converse\Converse.csproj", "{8D40F644-A2EB-46F0-B0A0-C7B7B563E6BA}" EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Converse", "Models\CohereCommand\Converse\Converse.csproj", "{CDF1A045-0888-418C-8656-2BF5E3348A48}" @@ -52,8 +42,6 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Converse", "Models\MetaLlam EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Converse", "Models\Mistral\Converse\Converse.csproj", "{9018AC00-1D39-41DB-9B3F-A0A7FC60AF18}" EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ConverseStream", "Models\AmazonTitanText\ConverseStream\ConverseStream.csproj", "{FCA6CE8F-531C-4C53-A7C1-3E3028A98CC4}" -EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ConverseStream", "Models\AnthropicClaude\ConverseStream\ConverseStream.csproj", "{AAE5224C-4947-44C2-8BFB-70AF1F91A6EE}" EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ConverseStream", "Models\CohereCommand\ConverseStream\ConverseStream.csproj", "{0574B2F4-D4BE-4155-902B-BF3D7CE4804E}" @@ -62,22 +50,14 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ConverseStream", "Models\Me EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ConverseStream", "Models\Mistral\ConverseStream\ConverseStream.csproj", "{B6924BBB-9993-44C1-BEF9-DEDEA42A12B2}" EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "InvokeModel", "Models\AmazonTitanText\InvokeModel\InvokeModel.csproj", "{3D6441FC-0FE8-4D0C-910D-3D9310599C71}" -EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "InvokeModel", "Models\AnthropicClaude\InvokeModel\InvokeModel.csproj", "{D1B0719F-4F84-4DBC-BCAD-E856FB3193D7}" EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "InvokeModel", "Models\Mistral\InvokeModel\InvokeModel.csproj", "{1E62D4FB-CC59-4F1E-BB22-574CEC08C94B}" EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Llama3_InvokeModel", "Models\MetaLlama\Llama3_InvokeModel\Llama3_InvokeModel.csproj", "{B753CEB9-EA53-4AE1-997E-B7D54A299D58}" EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Command_InvokeModel", "Models\CohereCommand\Command_InvokeModel\Command_InvokeModel.csproj", "{2A6989CB-B273-4841-BD3E-7B1BBA4DD25F}" -EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Command_R_InvokeModel", "Models\CohereCommand\Command_R_InvokeModel\Command_R_InvokeModel.csproj", "{BCC66C37-4980-484F-819D-066D2FF2669C}" EndProject 
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "InvokeModelWithResponseStream", "Models\AmazonTitanText\InvokeModelWithResponseStream\InvokeModelWithResponseStream.csproj", "{52CDA3F4-F090-4224-978A-5F42388DCF92}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Command_InvokeModelWithResponseStream", "Models\CohereCommand\Command_InvokeModelWithResponseStream\Command_InvokeModelWithResponseStream.csproj", "{63984664-8230-40F3-BFF5-7AC4988D7FE7}" -EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Command_R_InvokeModelWithResponseStream", "Models\CohereCommand\Command_R_InvokeModelWithResponseStream\Command_R_InvokeModelWithResponseStream.csproj", "{02823466-F5FF-43A2-B70A-EF3482A0CBDD}" EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Llama3_InvokeModelWithResponseStream", "Models\MetaLlama\Llama3_InvokeModelWithResponseStream\Llama3_InvokeModelWithResponseStream.csproj", "{4B5A00D6-B9F1-449F-A9D2-80E860D6BD75}" @@ -96,10 +76,6 @@ Global {6DF749A5-67E4-4F4E-BA64-A2D3F00E4700}.Debug|Any CPU.Build.0 = Debug|Any CPU {6DF749A5-67E4-4F4E-BA64-A2D3F00E4700}.Release|Any CPU.ActiveCfg = Release|Any CPU {6DF749A5-67E4-4F4E-BA64-A2D3F00E4700}.Release|Any CPU.Build.0 = Release|Any CPU - {D96E9BC2-3143-4F95-835C-5F3AAC414B9C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {D96E9BC2-3143-4F95-835C-5F3AAC414B9C}.Debug|Any CPU.Build.0 = Debug|Any CPU - {D96E9BC2-3143-4F95-835C-5F3AAC414B9C}.Release|Any CPU.ActiveCfg = Release|Any CPU - {D96E9BC2-3143-4F95-835C-5F3AAC414B9C}.Release|Any CPU.Build.0 = Release|Any CPU {8D40F644-A2EB-46F0-B0A0-C7B7B563E6BA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {8D40F644-A2EB-46F0-B0A0-C7B7B563E6BA}.Debug|Any CPU.Build.0 = Debug|Any CPU {8D40F644-A2EB-46F0-B0A0-C7B7B563E6BA}.Release|Any CPU.ActiveCfg = Release|Any CPU @@ -116,10 +92,6 @@ Global {9018AC00-1D39-41DB-9B3F-A0A7FC60AF18}.Debug|Any CPU.Build.0 = Debug|Any CPU {9018AC00-1D39-41DB-9B3F-A0A7FC60AF18}.Release|Any CPU.ActiveCfg = Release|Any CPU {9018AC00-1D39-41DB-9B3F-A0A7FC60AF18}.Release|Any CPU.Build.0 = Release|Any CPU - {FCA6CE8F-531C-4C53-A7C1-3E3028A98CC4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {FCA6CE8F-531C-4C53-A7C1-3E3028A98CC4}.Debug|Any CPU.Build.0 = Debug|Any CPU - {FCA6CE8F-531C-4C53-A7C1-3E3028A98CC4}.Release|Any CPU.ActiveCfg = Release|Any CPU - {FCA6CE8F-531C-4C53-A7C1-3E3028A98CC4}.Release|Any CPU.Build.0 = Release|Any CPU {AAE5224C-4947-44C2-8BFB-70AF1F91A6EE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {AAE5224C-4947-44C2-8BFB-70AF1F91A6EE}.Debug|Any CPU.Build.0 = Debug|Any CPU {AAE5224C-4947-44C2-8BFB-70AF1F91A6EE}.Release|Any CPU.ActiveCfg = Release|Any CPU @@ -136,10 +108,6 @@ Global {B6924BBB-9993-44C1-BEF9-DEDEA42A12B2}.Debug|Any CPU.Build.0 = Debug|Any CPU {B6924BBB-9993-44C1-BEF9-DEDEA42A12B2}.Release|Any CPU.ActiveCfg = Release|Any CPU {B6924BBB-9993-44C1-BEF9-DEDEA42A12B2}.Release|Any CPU.Build.0 = Release|Any CPU - {3D6441FC-0FE8-4D0C-910D-3D9310599C71}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {3D6441FC-0FE8-4D0C-910D-3D9310599C71}.Debug|Any CPU.Build.0 = Debug|Any CPU - {3D6441FC-0FE8-4D0C-910D-3D9310599C71}.Release|Any CPU.ActiveCfg = Release|Any CPU - {3D6441FC-0FE8-4D0C-910D-3D9310599C71}.Release|Any CPU.Build.0 = Release|Any CPU {D1B0719F-4F84-4DBC-BCAD-E856FB3193D7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {D1B0719F-4F84-4DBC-BCAD-E856FB3193D7}.Debug|Any CPU.Build.0 = Debug|Any CPU {D1B0719F-4F84-4DBC-BCAD-E856FB3193D7}.Release|Any CPU.ActiveCfg = Release|Any CPU @@ -152,22 +120,10 @@ Global 
   {B753CEB9-EA53-4AE1-997E-B7D54A299D58}.Debug|Any CPU.Build.0 = Debug|Any CPU
   {B753CEB9-EA53-4AE1-997E-B7D54A299D58}.Release|Any CPU.ActiveCfg = Release|Any CPU
   {B753CEB9-EA53-4AE1-997E-B7D54A299D58}.Release|Any CPU.Build.0 = Release|Any CPU
-  {2A6989CB-B273-4841-BD3E-7B1BBA4DD25F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-  {2A6989CB-B273-4841-BD3E-7B1BBA4DD25F}.Debug|Any CPU.Build.0 = Debug|Any CPU
-  {2A6989CB-B273-4841-BD3E-7B1BBA4DD25F}.Release|Any CPU.ActiveCfg = Release|Any CPU
-  {2A6989CB-B273-4841-BD3E-7B1BBA4DD25F}.Release|Any CPU.Build.0 = Release|Any CPU
   {BCC66C37-4980-484F-819D-066D2FF2669C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
   {BCC66C37-4980-484F-819D-066D2FF2669C}.Debug|Any CPU.Build.0 = Debug|Any CPU
   {BCC66C37-4980-484F-819D-066D2FF2669C}.Release|Any CPU.ActiveCfg = Release|Any CPU
   {BCC66C37-4980-484F-819D-066D2FF2669C}.Release|Any CPU.Build.0 = Release|Any CPU
-  {52CDA3F4-F090-4224-978A-5F42388DCF92}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-  {52CDA3F4-F090-4224-978A-5F42388DCF92}.Debug|Any CPU.Build.0 = Debug|Any CPU
-  {52CDA3F4-F090-4224-978A-5F42388DCF92}.Release|Any CPU.ActiveCfg = Release|Any CPU
-  {52CDA3F4-F090-4224-978A-5F42388DCF92}.Release|Any CPU.Build.0 = Release|Any CPU
-  {63984664-8230-40F3-BFF5-7AC4988D7FE7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-  {63984664-8230-40F3-BFF5-7AC4988D7FE7}.Debug|Any CPU.Build.0 = Debug|Any CPU
-  {63984664-8230-40F3-BFF5-7AC4988D7FE7}.Release|Any CPU.ActiveCfg = Release|Any CPU
-  {63984664-8230-40F3-BFF5-7AC4988D7FE7}.Release|Any CPU.Build.0 = Release|Any CPU
   {02823466-F5FF-43A2-B70A-EF3482A0CBDD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
   {02823466-F5FF-43A2-B70A-EF3482A0CBDD}.Debug|Any CPU.Build.0 = Debug|Any CPU
   {02823466-F5FF-43A2-B70A-EF3482A0CBDD}.Release|Any CPU.ActiveCfg = Release|Any CPU
@@ -191,28 +147,21 @@ Global
  GlobalSection(NestedProjects) = preSolution
   {6DF749A5-67E4-4F4E-BA64-A2D3F00E4700} = {E2AD9F91-E6D4-412D-A07A-E004042ADF30}
   {8BAC2322-AD3C-484A-B51D-8263BC4E6646} = {41B69207-8F29-41BC-9114-78EE740485C8}
-  {3F96ECB4-1644-43E8-8643-2CDCF9E679F1} = {41B69207-8F29-41BC-9114-78EE740485C8}
   {EF45C0B9-ED76-4B7A-A0A7-F102E979B71C} = {41B69207-8F29-41BC-9114-78EE740485C8}
   {65504C76-7E32-4A12-A42E-BCDA4FE79BC1} = {41B69207-8F29-41BC-9114-78EE740485C8}
   {BBB79D3E-5DF2-4FF6-B467-52D0EEB91C4B} = {41B69207-8F29-41BC-9114-78EE740485C8}
-  {D96E9BC2-3143-4F95-835C-5F3AAC414B9C} = {3F96ECB4-1644-43E8-8643-2CDCF9E679F1}
   {8D40F644-A2EB-46F0-B0A0-C7B7B563E6BA} = {8BAC2322-AD3C-484A-B51D-8263BC4E6646}
   {CDF1A045-0888-418C-8656-2BF5E3348A48} = {EF45C0B9-ED76-4B7A-A0A7-F102E979B71C}
   {F6E2F781-D0C6-4912-8E2F-F6C36FDE4785} = {65504C76-7E32-4A12-A42E-BCDA4FE79BC1}
   {9018AC00-1D39-41DB-9B3F-A0A7FC60AF18} = {BBB79D3E-5DF2-4FF6-B467-52D0EEB91C4B}
-  {FCA6CE8F-531C-4C53-A7C1-3E3028A98CC4} = {3F96ECB4-1644-43E8-8643-2CDCF9E679F1}
   {AAE5224C-4947-44C2-8BFB-70AF1F91A6EE} = {8BAC2322-AD3C-484A-B51D-8263BC4E6646}
   {0574B2F4-D4BE-4155-902B-BF3D7CE4804E} = {EF45C0B9-ED76-4B7A-A0A7-F102E979B71C}
   {D3BA31F5-FF20-4321-9494-3F01439C4F61} = {65504C76-7E32-4A12-A42E-BCDA4FE79BC1}
   {B6924BBB-9993-44C1-BEF9-DEDEA42A12B2} = {BBB79D3E-5DF2-4FF6-B467-52D0EEB91C4B}
-  {3D6441FC-0FE8-4D0C-910D-3D9310599C71} = {3F96ECB4-1644-43E8-8643-2CDCF9E679F1}
   {D1B0719F-4F84-4DBC-BCAD-E856FB3193D7} = {8BAC2322-AD3C-484A-B51D-8263BC4E6646}
   {1E62D4FB-CC59-4F1E-BB22-574CEC08C94B} = {BBB79D3E-5DF2-4FF6-B467-52D0EEB91C4B}
   {B753CEB9-EA53-4AE1-997E-B7D54A299D58} = {65504C76-7E32-4A12-A42E-BCDA4FE79BC1}
-  {2A6989CB-B273-4841-BD3E-7B1BBA4DD25F} = {EF45C0B9-ED76-4B7A-A0A7-F102E979B71C}
 	{BCC66C37-4980-484F-819D-066D2FF2669C} = {EF45C0B9-ED76-4B7A-A0A7-F102E979B71C}
-	{52CDA3F4-F090-4224-978A-5F42388DCF92} = {3F96ECB4-1644-43E8-8643-2CDCF9E679F1}
-	{63984664-8230-40F3-BFF5-7AC4988D7FE7} = {EF45C0B9-ED76-4B7A-A0A7-F102E979B71C}
 	{02823466-F5FF-43A2-B70A-EF3482A0CBDD} = {EF45C0B9-ED76-4B7A-A0A7-F102E979B71C}
 	{4B5A00D6-B9F1-449F-A9D2-80E860D6BD75} = {65504C76-7E32-4A12-A42E-BCDA4FE79BC1}
 	{EFC7D088-EF45-464B-97CD-0BBA486B224A} = {BBB79D3E-5DF2-4FF6-B467-52D0EEB91C4B}
diff --git a/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/Converse/Converse.cs b/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/Converse/Converse.cs
deleted file mode 100644
index 0986193cb91..00000000000
--- a/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/Converse/Converse.cs
+++ /dev/null
@@ -1,60 +0,0 @@
-// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
-// SPDX-License-Identifier: Apache-2.0
-
-// snippet-start:[BedrockRuntime.dotnetv4.Converse_AmazonTitanText]
-// Use the Converse API to send a text message to Amazon Titan Text.
-
-using System;
-using System.Collections.Generic;
-using Amazon;
-using Amazon.BedrockRuntime;
-using Amazon.BedrockRuntime.Model;
-
-// Create a Bedrock Runtime client in the AWS Region you want to use.
-var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
-
-// Set the model ID, e.g., Titan Text Premier.
-var modelId = "amazon.titan-text-premier-v1:0";
-
-// Define the user message.
-var userMessage = "Describe the purpose of a 'hello world' program in one line.";
-
-// Create a request with the model ID, the user message, and an inference configuration.
-var request = new ConverseRequest
-{
-    ModelId = modelId,
-    Messages = new List<Message>
-    {
-        new Message
-        {
-            Role = ConversationRole.User,
-            Content = new List<ContentBlock> { new ContentBlock { Text = userMessage } }
-        }
-    },
-    InferenceConfig = new InferenceConfiguration()
-    {
-        MaxTokens = 512,
-        Temperature = 0.5F,
-        TopP = 0.9F
-    }
-};
-
-try
-{
-    // Send the request to the Bedrock Runtime and wait for the result.
-    var response = await client.ConverseAsync(request);
-
-    // Extract and print the response text.
-    string responseText = response?.Output?.Message?.Content?[0]?.Text ?? "";
-    Console.WriteLine(responseText);
-}
-catch (AmazonBedrockRuntimeException e)
-{
-    Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
-    throw;
-}
-
-// snippet-end:[BedrockRuntime.dotnetv4.Converse_AmazonTitanText]
-
-// Create a partial class to make the top-level script testable.
-namespace AmazonTitanText { public partial class Converse { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/Converse/Converse.csproj b/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/Converse/Converse.csproj
deleted file mode 100644
index 54fc07f593e..00000000000
--- a/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/Converse/Converse.csproj
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-    Exe
-    net8.0
-    AmazonTitanText.$(MSBuildProjectName)
-
-
-
-
-
-
-
diff --git a/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/ConverseStream/ConverseStream.cs b/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/ConverseStream/ConverseStream.cs
deleted file mode 100644
index 80ab82e5b05..00000000000
--- a/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/ConverseStream/ConverseStream.cs
+++ /dev/null
@@ -1,67 +0,0 @@
-// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
-// SPDX-License-Identifier: Apache-2.0
-
-// snippet-start:[BedrockRuntime.dotnetv4.ConverseStream_AmazonTitanText]
-// Use the Converse API to send a text message to Amazon Titan Text
-// and print the response stream.
-
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using Amazon;
-using Amazon.BedrockRuntime;
-using Amazon.BedrockRuntime.Model;
-
-// Create a Bedrock Runtime client in the AWS Region you want to use.
-var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
-
-// Set the model ID, e.g., Titan Text Premier.
-var modelId = "amazon.titan-text-premier-v1:0";
-
-// Define the user message.
-var userMessage = "Describe the purpose of a 'hello world' program in one line.";
-
-// Create a request with the model ID, the user message, and an inference configuration.
-var request = new ConverseStreamRequest
-{
-    ModelId = modelId,
-    Messages = new List<Message>
-    {
-        new Message
-        {
-            Role = ConversationRole.User,
-            Content = new List<ContentBlock> { new ContentBlock { Text = userMessage } }
-        }
-    },
-    InferenceConfig = new InferenceConfiguration()
-    {
-        MaxTokens = 512,
-        Temperature = 0.5F,
-        TopP = 0.9F
-    }
-};
-
-try
-{
-    // Send the request to the Bedrock Runtime and wait for the result.
-    var response = await client.ConverseStreamAsync(request);
-
-    // Extract and print the streamed response text in real-time.
-    foreach (var chunk in response.Stream.AsEnumerable())
-    {
-        if (chunk is ContentBlockDeltaEvent)
-        {
-            Console.Write((chunk as ContentBlockDeltaEvent).Delta.Text);
-        }
-    }
-}
-catch (AmazonBedrockRuntimeException e)
-{
-    Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
-    throw;
-}
-
-// snippet-end:[BedrockRuntime.dotnetv4.ConverseStream_AmazonTitanText]
-
-// Create a partial class to make the top-level script testable.
-namespace AmazonTitanText { public partial class ConverseStream { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/ConverseStream/ConverseStream.csproj b/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/ConverseStream/ConverseStream.csproj
deleted file mode 100644
index 6b33c77774e..00000000000
--- a/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/ConverseStream/ConverseStream.csproj
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-    Exe
-    net8.0
-    AmazonTitanText.$(MSBuildProjectName)
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/InvokeModel/InvokeModel.cs b/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/InvokeModel/InvokeModel.cs
deleted file mode 100644
index d5c19059944..00000000000
--- a/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/InvokeModel/InvokeModel.cs
+++ /dev/null
@@ -1,64 +0,0 @@
-// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
-// SPDX-License-Identifier: Apache-2.0
-
-// snippet-start:[BedrockRuntime.dotnetv4.InvokeModel_AmazonTitanText]
-// Use the native inference API to send a text message to Amazon Titan Text.
-
-using System;
-using System.IO;
-using System.Text.Json;
-using System.Text.Json.Nodes;
-using Amazon;
-using Amazon.BedrockRuntime;
-using Amazon.BedrockRuntime.Model;
-
-// Create a Bedrock Runtime client in the AWS Region you want to use.
-var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
-
-// Set the model ID, e.g., Titan Text Premier.
-var modelId = "amazon.titan-text-premier-v1:0";
-
-// Define the user message.
-var userMessage = "Describe the purpose of a 'hello world' program in one line.";
-
-//Format the request payload using the model's native structure.
-var nativeRequest = JsonSerializer.Serialize(new
-{
-    inputText = userMessage,
-    textGenerationConfig = new
-    {
-        maxTokenCount = 512,
-        temperature = 0.5
-    }
-});
-
-// Create a request with the model ID and the model's native request payload.
-var request = new InvokeModelRequest()
-{
-    ModelId = modelId,
-    Body = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(nativeRequest)),
-    ContentType = "application/json"
-};
-
-try
-{
-    // Send the request to the Bedrock Runtime and wait for the response.
-    var response = await client.InvokeModelAsync(request);
-
-    // Decode the response body.
-    var modelResponse = await JsonNode.ParseAsync(response.Body);
-
-    // Extract and print the response text.
-    var responseText = modelResponse["results"]?[0]?["outputText"] ?? "";
-    Console.WriteLine(responseText);
-}
-catch (AmazonBedrockRuntimeException e)
-{
-    Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
-    throw;
-}
-
-// snippet-end:[BedrockRuntime.dotnetv4.InvokeModel_AmazonTitanText]
-
-// Create a partial class to make the top-level script testable.
-namespace AmazonTitanText { public partial class InvokeModel { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/InvokeModel/InvokeModel.csproj b/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/InvokeModel/InvokeModel.csproj
deleted file mode 100644
index 6b33c77774e..00000000000
--- a/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/InvokeModel/InvokeModel.csproj
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-    Exe
-    net8.0
-    AmazonTitanText.$(MSBuildProjectName)
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs b/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs
deleted file mode 100644
index 9aa49c27ba8..00000000000
--- a/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs
+++ /dev/null
@@ -1,66 +0,0 @@
-// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
-// SPDX-License-Identifier: Apache-2.0
-
-// snippet-start:[BedrockRuntime.dotnetv4.InvokeModelWithResponseStream_AmazonTitanText]
-// Use the native inference API to send a text message to Amazon Titan Text
-// and print the response stream.
-
-using System;
-using System.IO;
-using System.Text.Json;
-using System.Text.Json.Nodes;
-using Amazon;
-using Amazon.BedrockRuntime;
-using Amazon.BedrockRuntime.Model;
-
-// Create a Bedrock Runtime client in the AWS Region you want to use.
-var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
-
-// Set the model ID, e.g., Titan Text Premier.
-var modelId = "amazon.titan-text-premier-v1:0";
-
-// Define the user message.
-var userMessage = "Describe the purpose of a 'hello world' program in one line.";
-
-//Format the request payload using the model's native structure.
-var nativeRequest = JsonSerializer.Serialize(new
-{
-    inputText = userMessage,
-    textGenerationConfig = new
-    {
-        maxTokenCount = 512,
-        temperature = 0.5
-    }
-});
-
-// Create a request with the model ID and the model's native request payload.
-var request = new InvokeModelWithResponseStreamRequest()
-{
-    ModelId = modelId,
-    Body = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(nativeRequest)),
-    ContentType = "application/json"
-};
-
-try
-{
-    // Send the request to the Bedrock Runtime and wait for the response.
-    var streamingResponse = await client.InvokeModelWithResponseStreamAsync(request);
-
-    // Extract and print the streamed response text in real-time.
-    foreach (var item in streamingResponse.Body)
-    {
-        var chunk = JsonSerializer.Deserialize<JsonNode>((item as PayloadPart).Bytes);
-        var text = chunk["outputText"] ?? "";
-        Console.Write(text);
-    }
-}
-catch (AmazonBedrockRuntimeException e)
-{
-    Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
-    throw;
-}
-
-// snippet-end:[BedrockRuntime.dotnetv4.InvokeModelWithResponseStream_AmazonTitanText]
-
-// Create a partial class to make the top-level script testable.
-namespace AmazonTitanText { public partial class InvokeModelWithResponseStream { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj b/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj
deleted file mode 100644
index 6b33c77774e..00000000000
--- a/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-    Exe
-    net8.0
-    AmazonTitanText.$(MSBuildProjectName)
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/Converse/Converse.csproj b/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/Converse/Converse.csproj
index 8da69b86bc4..df2cbf6090c 100644
--- a/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/Converse/Converse.csproj
+++ b/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/Converse/Converse.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/ConverseStream/ConverseStream.csproj b/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/ConverseStream/ConverseStream.csproj
index 81ab5d08f39..668ed2c2f1a 100644
--- a/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/ConverseStream/ConverseStream.csproj
+++ b/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/ConverseStream/ConverseStream.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/InvokeModel/InvokeModel.csproj b/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/InvokeModel/InvokeModel.csproj
index 81ab5d08f39..668ed2c2f1a 100644
--- a/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/InvokeModel/InvokeModel.csproj
+++ b/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/InvokeModel/InvokeModel.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj b/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj
index 49a65a040be..ba1b471b33c 100644
--- a/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj
+++ b/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj
@@ -6,7 +6,7 @@
-
-
+
+
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_InvokeModel/Command_InvokeModel.csproj b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_InvokeModel/Command_InvokeModel.csproj
deleted file mode 100644
index 058b325f013..00000000000
--- a/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_InvokeModel/Command_InvokeModel.csproj
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-    Exe
-    net8.0
-    CohereCommand.$(MSBuildProjectName)
-
-
-
-
-
-
-
diff --git a/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_InvokeModel/InvokeModel.cs b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_InvokeModel/InvokeModel.cs
deleted file mode 100644
index e0b6b9399e7..00000000000
--- a/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_InvokeModel/InvokeModel.cs
+++ /dev/null
@@ -1,61 +0,0 @@
-// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
-// SPDX-License-Identifier: Apache-2.0
-
-// snippet-start:[BedrockRuntime.dotnetv4.InvokeModel_CohereCommand]
-// Use the native inference API to send a text message to Cohere Command.
-
-using System;
-using System.IO;
-using System.Text.Json;
-using System.Text.Json.Nodes;
-using Amazon;
-using Amazon.BedrockRuntime;
-using Amazon.BedrockRuntime.Model;
-
-// Create a Bedrock Runtime client in the AWS Region you want to use.
-var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
-
-// Set the model ID, e.g., Command Light.
-var modelId = "cohere.command-light-text-v14";
-
-// Define the user message.
-var userMessage = "Describe the purpose of a 'hello world' program in one line.";
-
-//Format the request payload using the model's native structure.
-var nativeRequest = JsonSerializer.Serialize(new
-{
-    prompt = userMessage,
-    max_tokens = 512,
-    temperature = 0.5
-});
-
-// Create a request with the model ID and the model's native request payload.
-var request = new InvokeModelRequest()
-{
-    ModelId = modelId,
-    Body = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(nativeRequest)),
-    ContentType = "application/json"
-};
-
-try
-{
-    // Send the request to the Bedrock Runtime and wait for the response.
-    var response = await client.InvokeModelAsync(request);
-
-    // Decode the response body.
-    var modelResponse = await JsonNode.ParseAsync(response.Body);
-
-    // Extract and print the response text.
-    var responseText = modelResponse["generations"]?[0]?["text"] ?? "";
-    Console.WriteLine(responseText);
-}
-catch (AmazonBedrockRuntimeException e)
-{
-    Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
-    throw;
-}
-
-// snippet-end:[BedrockRuntime.dotnetv4.InvokeModel_CohereCommand]
-
-// Create a partial class to make the top-level script testable.
-namespace CohereCommand { public partial class InvokeModel { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_InvokeModelWithResponseStream/Command_InvokeModelWithResponseStream.csproj b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_InvokeModelWithResponseStream/Command_InvokeModelWithResponseStream.csproj
deleted file mode 100644
index 058b325f013..00000000000
--- a/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_InvokeModelWithResponseStream/Command_InvokeModelWithResponseStream.csproj
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-    Exe
-    net8.0
-    CohereCommand.$(MSBuildProjectName)
-
-
-
-
-
-
-
diff --git a/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs
deleted file mode 100644
index 5562ff5e7e2..00000000000
--- a/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs
+++ /dev/null
@@ -1,63 +0,0 @@
-// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
-// SPDX-License-Identifier: Apache-2.0
-
-// snippet-start:[BedrockRuntime.dotnetv4.InvokeModelWithResponseStream_CohereCommand]
-// Use the native inference API to send a text message to Cohere Command
-// and print the response stream.
-
-using System;
-using System.IO;
-using System.Text.Json;
-using System.Text.Json.Nodes;
-using Amazon;
-using Amazon.BedrockRuntime;
-using Amazon.BedrockRuntime.Model;
-
-// Create a Bedrock Runtime client in the AWS Region you want to use.
-var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
-
-// Set the model ID, e.g., Command Light.
-var modelId = "cohere.command-light-text-v14";
-
-// Define the user message.
-var userMessage = "Describe the purpose of a 'hello world' program in one line.";
-
-//Format the request payload using the model's native structure.
-var nativeRequest = JsonSerializer.Serialize(new
-{
-    prompt = userMessage,
-    max_tokens = 512,
-    temperature = 0.5
-});
-
-// Create a request with the model ID and the model's native request payload.
-var request = new InvokeModelWithResponseStreamRequest()
-{
-    ModelId = modelId,
-    Body = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(nativeRequest)),
-    ContentType = "application/json"
-};
-
-try
-{
-    // Send the request to the Bedrock Runtime and wait for the response.
-    var streamingResponse = await client.InvokeModelWithResponseStreamAsync(request);
-
-    // Extract and print the streamed response text in real-time.
-    foreach (var item in streamingResponse.Body)
-    {
-        var chunk = JsonSerializer.Deserialize<JsonNode>((item as PayloadPart).Bytes);
-        var text = chunk["generations"]?[0]?["text"] ?? "";
-        Console.Write(text);
-    }
-}
-catch (AmazonBedrockRuntimeException e)
-{
-    Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
-    throw;
-}
-
-// snippet-end:[BedrockRuntime.dotnetv4.InvokeModelWithResponseStream_CohereCommand]
-
-// Create a partial class to make the top-level script testable.
-namespace CohereCommand { public partial class InvokeModelWithResponseStream { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModel/Command_R_InvokeModel.csproj b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModel/Command_R_InvokeModel.csproj
index 058b325f013..0ae5f0bd353 100644
--- a/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModel/Command_R_InvokeModel.csproj
+++ b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModel/Command_R_InvokeModel.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModelWithResponseStream/Command_R_InvokeModelWithResponseStream.csproj b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModelWithResponseStream/Command_R_InvokeModelWithResponseStream.csproj
index 058b325f013..0ae5f0bd353 100644
--- a/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModelWithResponseStream/Command_R_InvokeModelWithResponseStream.csproj
+++ b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModelWithResponseStream/Command_R_InvokeModelWithResponseStream.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv4/Bedrock-runtime/Models/CohereCommand/Converse/Converse.csproj b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Converse/Converse.csproj
index 058b325f013..0ae5f0bd353 100644
--- a/dotnetv4/Bedrock-runtime/Models/CohereCommand/Converse/Converse.csproj
+++ b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Converse/Converse.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv4/Bedrock-runtime/Models/CohereCommand/ConverseStream/ConverseStream.csproj b/dotnetv4/Bedrock-runtime/Models/CohereCommand/ConverseStream/ConverseStream.csproj
index 058b325f013..0ae5f0bd353 100644
--- a/dotnetv4/Bedrock-runtime/Models/CohereCommand/ConverseStream/ConverseStream.csproj
+++ b/dotnetv4/Bedrock-runtime/Models/CohereCommand/ConverseStream/ConverseStream.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv4/Bedrock-runtime/Models/MetaLlama/Converse/Converse.csproj b/dotnetv4/Bedrock-runtime/Models/MetaLlama/Converse/Converse.csproj
index a1950f2ec36..aea9d9871db 100644
--- a/dotnetv4/Bedrock-runtime/Models/MetaLlama/Converse/Converse.csproj
+++ b/dotnetv4/Bedrock-runtime/Models/MetaLlama/Converse/Converse.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv4/Bedrock-runtime/Models/MetaLlama/ConverseStream/ConverseStream.csproj b/dotnetv4/Bedrock-runtime/Models/MetaLlama/ConverseStream/ConverseStream.csproj
index a1950f2ec36..aea9d9871db 100644
--- a/dotnetv4/Bedrock-runtime/Models/MetaLlama/ConverseStream/ConverseStream.csproj
+++ b/dotnetv4/Bedrock-runtime/Models/MetaLlama/ConverseStream/ConverseStream.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv4/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModel/Llama3_InvokeModel.csproj b/dotnetv4/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModel/Llama3_InvokeModel.csproj
index a1950f2ec36..aea9d9871db 100644
--- a/dotnetv4/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModel/Llama3_InvokeModel.csproj
+++ b/dotnetv4/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModel/Llama3_InvokeModel.csproj
@@ -6,7 +6,7 @@
-
-
+
+
diff --git a/dotnetv4/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModelWithResponseStream/Llama3_InvokeModelWithResponseStream.csproj b/dotnetv4/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModelWithResponseStream/Llama3_InvokeModelWithResponseStream.csproj
index a1950f2ec36..aea9d9871db 100644
---
a/dotnetv4/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModelWithResponseStream/Llama3_InvokeModelWithResponseStream.csproj +++ b/dotnetv4/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModelWithResponseStream/Llama3_InvokeModelWithResponseStream.csproj @@ -6,7 +6,7 @@ - - + + diff --git a/dotnetv4/Bedrock-runtime/Models/Mistral/Converse/Converse.csproj b/dotnetv4/Bedrock-runtime/Models/Mistral/Converse/Converse.csproj index 88c91e1c353..6e9e7dc1b9d 100644 --- a/dotnetv4/Bedrock-runtime/Models/Mistral/Converse/Converse.csproj +++ b/dotnetv4/Bedrock-runtime/Models/Mistral/Converse/Converse.csproj @@ -6,7 +6,7 @@ - - + + diff --git a/dotnetv4/Bedrock-runtime/Models/Mistral/ConverseStream/ConverseStream.csproj b/dotnetv4/Bedrock-runtime/Models/Mistral/ConverseStream/ConverseStream.csproj index c11471e4a6c..73a360f70d2 100644 --- a/dotnetv4/Bedrock-runtime/Models/Mistral/ConverseStream/ConverseStream.csproj +++ b/dotnetv4/Bedrock-runtime/Models/Mistral/ConverseStream/ConverseStream.csproj @@ -6,7 +6,7 @@ - - + + diff --git a/dotnetv4/Bedrock-runtime/Models/Mistral/InvokeModel/InvokeModel.csproj b/dotnetv4/Bedrock-runtime/Models/Mistral/InvokeModel/InvokeModel.csproj index c11471e4a6c..73a360f70d2 100644 --- a/dotnetv4/Bedrock-runtime/Models/Mistral/InvokeModel/InvokeModel.csproj +++ b/dotnetv4/Bedrock-runtime/Models/Mistral/InvokeModel/InvokeModel.csproj @@ -6,7 +6,7 @@ - - + + diff --git a/dotnetv4/Bedrock-runtime/Models/Mistral/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj b/dotnetv4/Bedrock-runtime/Models/Mistral/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj index c11471e4a6c..73a360f70d2 100644 --- a/dotnetv4/Bedrock-runtime/Models/Mistral/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj +++ b/dotnetv4/Bedrock-runtime/Models/Mistral/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj @@ -6,7 +6,7 @@ - - + + diff --git a/dotnetv4/Bedrock-runtime/README.md b/dotnetv4/Bedrock-runtime/README.md index c3b416815b2..ad92c3b4212 100644 --- a/dotnetv4/Bedrock-runtime/README.md +++ b/dotnetv4/Bedrock-runtime/README.md @@ -28,12 +28,6 @@ For prerequisites, see the [README](../README.md#Prerequisites) in the `dotnetv4 -### Amazon Titan Text - -- [Converse](Models/AmazonTitanText/Converse/Converse.cs#L4) -- [ConverseStream](Models/AmazonTitanText/ConverseStream/ConverseStream.cs#L4) -- [InvokeModel](Models/AmazonTitanText/InvokeModel/InvokeModel.cs#L4) - ### Anthropic Claude - [Converse](Models/AnthropicClaude/Converse/Converse.cs#L4) diff --git a/dotnetv4/Bedrock-runtime/Tests/ActionTest_Converse.cs b/dotnetv4/Bedrock-runtime/Tests/ActionTest_Converse.cs index 1495e42b66d..b2474e44041 100644 --- a/dotnetv4/Bedrock-runtime/Tests/ActionTest_Converse.cs +++ b/dotnetv4/Bedrock-runtime/Tests/ActionTest_Converse.cs @@ -1,4 +1,4 @@ -// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 namespace BedrockRuntimeTests; @@ -6,7 +6,6 @@ namespace BedrockRuntimeTests; public class ActionTest_Converse { [Theory, Trait("Category", "Integration")] - [InlineData(typeof(AmazonTitanText.Converse))] [InlineData(typeof(Mistral.Converse))] [InlineData(typeof(MetaLlama.Converse))] [InlineData(typeof(CohereCommand.Converse))] diff --git a/dotnetv4/Bedrock-runtime/Tests/ActionTest_ConverseStream.cs b/dotnetv4/Bedrock-runtime/Tests/ActionTest_ConverseStream.cs index d5c0a5f452f..83bf0460e39 100644 --- a/dotnetv4/Bedrock-runtime/Tests/ActionTest_ConverseStream.cs +++ b/dotnetv4/Bedrock-runtime/Tests/ActionTest_ConverseStream.cs @@ -1,4 +1,4 @@ -// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 namespace BedrockRuntimeTests; @@ -6,7 +6,6 @@ namespace BedrockRuntimeTests; public class ActionTest_ConverseStream { [Theory, Trait("Category", "Integration")] - [InlineData(typeof(AmazonTitanText.ConverseStream))] [InlineData(typeof(Mistral.ConverseStream))] [InlineData(typeof(MetaLlama.ConverseStream))] [InlineData(typeof(CohereCommand.ConverseStream))] diff --git a/dotnetv4/Bedrock-runtime/Tests/ActionTest_InvokeModelWithResponseStream.cs b/dotnetv4/Bedrock-runtime/Tests/ActionTest_InvokeModelWithResponseStream.cs index f9c56201200..306cd8d585f 100644 --- a/dotnetv4/Bedrock-runtime/Tests/ActionTest_InvokeModelWithResponseStream.cs +++ b/dotnetv4/Bedrock-runtime/Tests/ActionTest_InvokeModelWithResponseStream.cs @@ -1,4 +1,4 @@ -// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 namespace BedrockRuntimeTests @@ -6,10 +6,8 @@ namespace BedrockRuntimeTests public class ActionTest_InvokeModelWithResponseStream { [Theory, Trait("Category", "Integration")] - [InlineData(typeof(AmazonTitanText.InvokeModelWithResponseStream))] [InlineData(typeof(Mistral.InvokeModelWithResponseStream))] [InlineData(typeof(MetaLlama3.InvokeModelWithResponseStream))] - [InlineData(typeof(CohereCommand.InvokeModelWithResponseStream))] [InlineData(typeof(CohereCommandR.InvokeModelWithResponseStream))] [InlineData(typeof(AnthropicClaude.InvokeModelWithResponseStream))] diff --git a/dotnetv4/Bedrock-runtime/Tests/ActionTests_InvokeModel.cs b/dotnetv4/Bedrock-runtime/Tests/ActionTests_InvokeModel.cs index 9512876fc37..9d540bcfd82 100644 --- a/dotnetv4/Bedrock-runtime/Tests/ActionTests_InvokeModel.cs +++ b/dotnetv4/Bedrock-runtime/Tests/ActionTests_InvokeModel.cs @@ -1,4 +1,4 @@ -// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 namespace BedrockRuntimeTests; @@ -6,10 +6,8 @@ namespace BedrockRuntimeTests; public class ActionTest_InvokeModel { [Theory, Trait("Category", "Integration")] - [InlineData(typeof(AmazonTitanText.InvokeModel))] [InlineData(typeof(Mistral.InvokeModel))] [InlineData(typeof(MetaLlama3.InvokeModel))] - [InlineData(typeof(CohereCommand.InvokeModel))] [InlineData(typeof(CohereCommandR.InvokeModel))] [InlineData(typeof(AnthropicClaude.InvokeModel))] diff --git a/dotnetv4/Bedrock-runtime/Tests/BedrockRuntimeTests.csproj b/dotnetv4/Bedrock-runtime/Tests/BedrockRuntimeTests.csproj index b41309ed432..d039c02e868 100644 --- a/dotnetv4/Bedrock-runtime/Tests/BedrockRuntimeTests.csproj +++ b/dotnetv4/Bedrock-runtime/Tests/BedrockRuntimeTests.csproj @@ -11,33 +11,27 @@ - - - - + + + + - - + + runtime; build; native; contentfiles; analyzers; buildtransitive all - + runtime; build; native; contentfiles; analyzers; buildtransitive all - - - - - - diff --git a/dotnetv4/DotNetV4Examples.sln b/dotnetv4/DotNetV4Examples.sln index 8b243f70231..e4e1cf6f809 100644 --- a/dotnetv4/DotNetV4Examples.sln +++ b/dotnetv4/DotNetV4Examples.sln @@ -63,10 +63,7 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Command_R_InvokeModelWithRe EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Command_R_InvokeModel", "Bedrock-runtime\Models\CohereCommand\Command_R_InvokeModel\Command_R_InvokeModel.csproj", "{6FCC8A6C-A172-4AAF-A0FC-66C3BD9E8716}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Command_InvokeModelWithResponseStream", "Bedrock-runtime\Models\CohereCommand\Command_InvokeModelWithResponseStream\Command_InvokeModelWithResponseStream.csproj", "{6E0B8FF0-0D03-4424-86D5-CA01437C6814}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Command_InvokeModel", "Bedrock-runtime\Models\CohereCommand\Command_InvokeModel\Command_InvokeModel.csproj", "{5C91FECD-E8B6-4659-8691-60CA676E8F68}" -EndProject + Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "AnthropicClaude", "AnthropicClaude", "{6FF2EDB6-D1B8-4EE0-B1F0-2BCE66972E39}" EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "InvokeModelWithResponseStream", "Bedrock-runtime\Models\AnthropicClaude\InvokeModelWithResponseStream\InvokeModelWithResponseStream.csproj", "{345DA0D1-C762-49EF-9953-6F4D57CB7FC7}" @@ -79,14 +76,7 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Converse", "Bedrock-runtime EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "AmazonTitanText", "AmazonTitanText", "{74979310-8A92-47DC-B5CA-EFA7970E1202}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "InvokeModelWithResponseStream", "Bedrock-runtime\Models\AmazonTitanText\InvokeModelWithResponseStream\InvokeModelWithResponseStream.csproj", "{7FA90AFA-ED17-43CD-94EF-314B43095C10}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "InvokeModel", "Bedrock-runtime\Models\AmazonTitanText\InvokeModel\InvokeModel.csproj", "{18E636A2-A383-487C-AB19-B133B50173F2}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ConverseStream", "Bedrock-runtime\Models\AmazonTitanText\ConverseStream\ConverseStream.csproj", "{AD2DCA34-3627-43EC-BEE7-7D1104FC521A}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Converse", "Bedrock-runtime\Models\AmazonTitanText\Converse\Converse.csproj", "{3EA8A897-A32D-42B6-B87E-CE269E4597D5}" -EndProject + Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "BedrockRuntimeActions", 
"Bedrock-runtime\Actions\BedrockRuntimeActions.csproj", "{05E93A3E-CFA0-4980-8EE5-CD25C7ED766D}" EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "CloudFormation", "CloudFormation", "{5FBEAD92-9234-4824-9320-2052D236C9CD}" @@ -247,14 +237,7 @@ Global {6FCC8A6C-A172-4AAF-A0FC-66C3BD9E8716}.Debug|Any CPU.Build.0 = Debug|Any CPU {6FCC8A6C-A172-4AAF-A0FC-66C3BD9E8716}.Release|Any CPU.ActiveCfg = Release|Any CPU {6FCC8A6C-A172-4AAF-A0FC-66C3BD9E8716}.Release|Any CPU.Build.0 = Release|Any CPU - {6E0B8FF0-0D03-4424-86D5-CA01437C6814}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {6E0B8FF0-0D03-4424-86D5-CA01437C6814}.Debug|Any CPU.Build.0 = Debug|Any CPU - {6E0B8FF0-0D03-4424-86D5-CA01437C6814}.Release|Any CPU.ActiveCfg = Release|Any CPU - {6E0B8FF0-0D03-4424-86D5-CA01437C6814}.Release|Any CPU.Build.0 = Release|Any CPU - {5C91FECD-E8B6-4659-8691-60CA676E8F68}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {5C91FECD-E8B6-4659-8691-60CA676E8F68}.Debug|Any CPU.Build.0 = Debug|Any CPU - {5C91FECD-E8B6-4659-8691-60CA676E8F68}.Release|Any CPU.ActiveCfg = Release|Any CPU - {5C91FECD-E8B6-4659-8691-60CA676E8F68}.Release|Any CPU.Build.0 = Release|Any CPU + {345DA0D1-C762-49EF-9953-6F4D57CB7FC7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {345DA0D1-C762-49EF-9953-6F4D57CB7FC7}.Debug|Any CPU.Build.0 = Debug|Any CPU {345DA0D1-C762-49EF-9953-6F4D57CB7FC7}.Release|Any CPU.ActiveCfg = Release|Any CPU @@ -271,22 +254,7 @@ Global {874C7405-ED8D-477D-9362-0C69CF56F213}.Debug|Any CPU.Build.0 = Debug|Any CPU {874C7405-ED8D-477D-9362-0C69CF56F213}.Release|Any CPU.ActiveCfg = Release|Any CPU {874C7405-ED8D-477D-9362-0C69CF56F213}.Release|Any CPU.Build.0 = Release|Any CPU - {7FA90AFA-ED17-43CD-94EF-314B43095C10}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {7FA90AFA-ED17-43CD-94EF-314B43095C10}.Debug|Any CPU.Build.0 = Debug|Any CPU - {7FA90AFA-ED17-43CD-94EF-314B43095C10}.Release|Any CPU.ActiveCfg = Release|Any CPU - {7FA90AFA-ED17-43CD-94EF-314B43095C10}.Release|Any CPU.Build.0 = Release|Any CPU - {18E636A2-A383-487C-AB19-B133B50173F2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {18E636A2-A383-487C-AB19-B133B50173F2}.Debug|Any CPU.Build.0 = Debug|Any CPU - {18E636A2-A383-487C-AB19-B133B50173F2}.Release|Any CPU.ActiveCfg = Release|Any CPU - {18E636A2-A383-487C-AB19-B133B50173F2}.Release|Any CPU.Build.0 = Release|Any CPU - {AD2DCA34-3627-43EC-BEE7-7D1104FC521A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {AD2DCA34-3627-43EC-BEE7-7D1104FC521A}.Debug|Any CPU.Build.0 = Debug|Any CPU - {AD2DCA34-3627-43EC-BEE7-7D1104FC521A}.Release|Any CPU.ActiveCfg = Release|Any CPU - {AD2DCA34-3627-43EC-BEE7-7D1104FC521A}.Release|Any CPU.Build.0 = Release|Any CPU - {3EA8A897-A32D-42B6-B87E-CE269E4597D5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {3EA8A897-A32D-42B6-B87E-CE269E4597D5}.Debug|Any CPU.Build.0 = Debug|Any CPU - {3EA8A897-A32D-42B6-B87E-CE269E4597D5}.Release|Any CPU.ActiveCfg = Release|Any CPU - {3EA8A897-A32D-42B6-B87E-CE269E4597D5}.Release|Any CPU.Build.0 = Release|Any CPU + {05E93A3E-CFA0-4980-8EE5-CD25C7ED766D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {05E93A3E-CFA0-4980-8EE5-CD25C7ED766D}.Debug|Any CPU.Build.0 = Debug|Any CPU {05E93A3E-CFA0-4980-8EE5-CD25C7ED766D}.Release|Any CPU.ActiveCfg = Release|Any CPU @@ -410,18 +378,14 @@ Global {81EA8494-176C-4178-A1C3-6FA3B1222B74} = {39EAAA32-53A8-4641-873C-976FD5963360} {085F3A30-A788-48D6-8067-74D71C29A941} = {39EAAA32-53A8-4641-873C-976FD5963360} {6FCC8A6C-A172-4AAF-A0FC-66C3BD9E8716} = {39EAAA32-53A8-4641-873C-976FD5963360} - {6E0B8FF0-0D03-4424-86D5-CA01437C6814} = 
{39EAAA32-53A8-4641-873C-976FD5963360} - {5C91FECD-E8B6-4659-8691-60CA676E8F68} = {39EAAA32-53A8-4641-873C-976FD5963360} + {6FF2EDB6-D1B8-4EE0-B1F0-2BCE66972E39} = {4429C078-35C8-4E2B-9C7B-F0C619741B67} {345DA0D1-C762-49EF-9953-6F4D57CB7FC7} = {6FF2EDB6-D1B8-4EE0-B1F0-2BCE66972E39} {C95689B5-C0A1-4C1F-9E97-369D3D397930} = {6FF2EDB6-D1B8-4EE0-B1F0-2BCE66972E39} {8551C158-60B4-4594-8B1D-5BE851F90EE4} = {6FF2EDB6-D1B8-4EE0-B1F0-2BCE66972E39} {874C7405-ED8D-477D-9362-0C69CF56F213} = {6FF2EDB6-D1B8-4EE0-B1F0-2BCE66972E39} {74979310-8A92-47DC-B5CA-EFA7970E1202} = {4429C078-35C8-4E2B-9C7B-F0C619741B67} - {7FA90AFA-ED17-43CD-94EF-314B43095C10} = {74979310-8A92-47DC-B5CA-EFA7970E1202} - {18E636A2-A383-487C-AB19-B133B50173F2} = {74979310-8A92-47DC-B5CA-EFA7970E1202} - {AD2DCA34-3627-43EC-BEE7-7D1104FC521A} = {74979310-8A92-47DC-B5CA-EFA7970E1202} - {3EA8A897-A32D-42B6-B87E-CE269E4597D5} = {74979310-8A92-47DC-B5CA-EFA7970E1202} + {05E93A3E-CFA0-4980-8EE5-CD25C7ED766D} = {D859B39C-9106-4D3D-8C57-11B15FA8106B} {AAFC86EB-49D7-4FD8-8C79-C42C129EB75A} = {5FBEAD92-9234-4824-9320-2052D236C9CD} {98A11016-DD41-4848-A848-51D703951A91} = {5FBEAD92-9234-4824-9320-2052D236C9CD} diff --git a/gov2/bedrock-runtime/README.md b/gov2/bedrock-runtime/README.md index 8efda951903..10797326d1e 100644 --- a/gov2/bedrock-runtime/README.md +++ b/gov2/bedrock-runtime/README.md @@ -44,10 +44,6 @@ functions within the same service. - [InvokeModel](actions/invoke_model.go#L7) -### Amazon Titan Text - -- [InvokeModel](actions/invoke_model.go#L7) - ### Anthropic Claude - [Converse](actions/converse.go#L7) diff --git a/gov2/bedrock-runtime/actions/invoke_model.go b/gov2/bedrock-runtime/actions/invoke_model.go index ed210723ddd..338a9e84ae1 100644 --- a/gov2/bedrock-runtime/actions/invoke_model.go +++ b/gov2/bedrock-runtime/actions/invoke_model.go @@ -80,63 +80,6 @@ func (wrapper InvokeModelWrapper) InvokeClaude(ctx context.Context, prompt strin // snippet-end:[gov2.bedrock-runtime.InvokeClaude] -// snippet-start:[gov2.bedrock-runtime.InvokeJurassic2] - -// Each model provider has their own individual request and response formats. -// For the format, ranges, and default values for AI21 Labs Jurassic-2, refer to: -// https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-jurassic2.html - -type Jurassic2Request struct { - Prompt string `json:"prompt"` - MaxTokens int `json:"maxTokens,omitempty"` - Temperature float64 `json:"temperature,omitempty"` -} - -type Jurassic2Response struct { - Completions []Completion `json:"completions"` -} -type Completion struct { - Data Data `json:"data"` -} -type Data struct { - Text string `json:"text"` -} - -// Invokes AI21 Labs Jurassic-2 on Amazon Bedrock to run an inference using the input -// provided in the request body. 
-func (wrapper InvokeModelWrapper) InvokeJurassic2(ctx context.Context, prompt string) (string, error) { - modelId := "ai21.j2-mid-v1" - - body, err := json.Marshal(Jurassic2Request{ - Prompt: prompt, - MaxTokens: 200, - Temperature: 0.5, - }) - - if err != nil { - log.Fatal("failed to marshal", err) - } - - output, err := wrapper.BedrockRuntimeClient.InvokeModel(ctx, &bedrockruntime.InvokeModelInput{ - ModelId: aws.String(modelId), - ContentType: aws.String("application/json"), - Body: body, - }) - - if err != nil { - ProcessError(err, modelId) - } - - var response Jurassic2Response - if err := json.Unmarshal(output.Body, &response); err != nil { - log.Fatal("failed to unmarshal", err) - } - - return response.Completions[0].Data.Text, nil -} - -// snippet-end:[gov2.bedrock-runtime.InvokeJurassic2] - // snippet-start:[gov2.bedrock-runtime.InvokeTitanImage] type TitanImageRequest struct { @@ -207,70 +150,6 @@ func (wrapper InvokeModelWrapper) InvokeTitanImage(ctx context.Context, prompt s // snippet-end:[gov2.bedrock-runtime.InvokeTitanImage] -// snippet-start:[gov2.bedrock-runtime.InvokeTitanText] - -// Each model provider has their own individual request and response formats. -// For the format, ranges, and default values for Amazon Titan Text, refer to: -// https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-titan-text.html -type TitanTextRequest struct { - InputText string `json:"inputText"` - TextGenerationConfig TextGenerationConfig `json:"textGenerationConfig"` -} - -type TextGenerationConfig struct { - Temperature float64 `json:"temperature"` - TopP float64 `json:"topP"` - MaxTokenCount int `json:"maxTokenCount"` - StopSequences []string `json:"stopSequences,omitempty"` -} - -type TitanTextResponse struct { - InputTextTokenCount int `json:"inputTextTokenCount"` - Results []Result `json:"results"` -} - -type Result struct { - TokenCount int `json:"tokenCount"` - OutputText string `json:"outputText"` - CompletionReason string `json:"completionReason"` -} - -func (wrapper InvokeModelWrapper) InvokeTitanText(ctx context.Context, prompt string) (string, error) { - modelId := "amazon.titan-text-express-v1" - - body, err := json.Marshal(TitanTextRequest{ - InputText: prompt, - TextGenerationConfig: TextGenerationConfig{ - Temperature: 0, - TopP: 1, - MaxTokenCount: 4096, - }, - }) - - if err != nil { - log.Fatal("failed to marshal", err) - } - - output, err := wrapper.BedrockRuntimeClient.InvokeModel(ctx, &bedrockruntime.InvokeModelInput{ - ModelId: aws.String(modelId), - ContentType: aws.String("application/json"), - Body: body, - }) - - if err != nil { - ProcessError(err, modelId) - } - - var response TitanTextResponse - if err := json.Unmarshal(output.Body, &response); err != nil { - log.Fatal("failed to unmarshal", err) - } - - return response.Results[0].OutputText, nil -} - -// snippet-end:[gov2.bedrock-runtime.InvokeTitanText] - func ProcessError(err error, modelId string) { errMsg := err.Error() if strings.Contains(errMsg, "no such host") { diff --git a/gov2/bedrock-runtime/actions/invoke_model_test.go b/gov2/bedrock-runtime/actions/invoke_model_test.go index 10a371692fa..c0aede5a8d2 100644 --- a/gov2/bedrock-runtime/actions/invoke_model_test.go +++ b/gov2/bedrock-runtime/actions/invoke_model_test.go @@ -18,9 +18,7 @@ import ( ) const CLAUDE_MODEL_ID = "anthropic.claude-v2" -const JURASSIC2_MODEL_ID = "ai21.j2-mid-v1" const TITAN_IMAGE_MODEL_ID = "amazon.titan-image-generator-v1" -const TITAN_TEXT_EXPRESS_MODEL_ID = "amazon.titan-text-express-v1" const prompt = "A 
test prompt" @@ -41,12 +39,6 @@ func CallInvokeModelActions(sdkConfig aws.Config) { } log.Println(claudeCompletion) - jurassic2Completion, err := wrapper.InvokeJurassic2(ctx, prompt) - if err != nil { - panic(err) - } - log.Println(jurassic2Completion) - seed := int64(0) titanImageCompletion, err := wrapper.InvokeTitanImage(ctx, prompt, seed) if err != nil { @@ -54,12 +46,6 @@ func CallInvokeModelActions(sdkConfig aws.Config) { } log.Println(titanImageCompletion) - titanTextCompletion, err := wrapper.InvokeTitanText(ctx, prompt) - if err != nil { - panic(err) - } - log.Println(titanTextCompletion) - log.Printf("Thanks for watching!") } @@ -73,9 +59,7 @@ type InvokeModelActionsTest struct{} func (scenTest *InvokeModelActionsTest) SetupDataAndStubs() []testtools.Stub { var stubList []testtools.Stub stubList = append(stubList, stubInvokeModel(CLAUDE_MODEL_ID)) - stubList = append(stubList, stubInvokeModel(JURASSIC2_MODEL_ID)) stubList = append(stubList, stubInvokeModel(TITAN_IMAGE_MODEL_ID)) - stubList = append(stubList, stubInvokeModel(TITAN_TEXT_EXPRESS_MODEL_ID)) return stubList } @@ -102,18 +86,6 @@ func stubInvokeModel(modelId string) testtools.Stub { Completion: "A fake response", }) - case JURASSIC2_MODEL_ID: - request, _ = json.Marshal(Jurassic2Request{ - Prompt: prompt, - MaxTokens: 200, - Temperature: 0.5, - }) - response, _ = json.Marshal(Jurassic2Response{ - Completions: []Completion{ - {Data: Data{Text: "A fake response"}}, - }, - }) - case TITAN_IMAGE_MODEL_ID: request, _ = json.Marshal(TitanImageRequest{ TaskType: "TEXT_IMAGE", @@ -133,23 +105,6 @@ func stubInvokeModel(modelId string) testtools.Stub { Images: []string{"FakeBase64String=="}, }) - case TITAN_TEXT_EXPRESS_MODEL_ID: - request, _ = json.Marshal(TitanTextRequest{ - InputText: prompt, - TextGenerationConfig: TextGenerationConfig{ - Temperature: 0, - TopP: 1, - MaxTokenCount: 4096, - }, - }) - response, _ = json.Marshal(TitanTextResponse{ - Results: []Result{ - { - OutputText: "A fake response", - }, - }, - }) - default: return testtools.Stub{} } diff --git a/gov2/bedrock-runtime/scenarios/scenario_invoke_models.go b/gov2/bedrock-runtime/scenarios/scenario_invoke_models.go index 56c0e96c590..c995d71d4bc 100644 --- a/gov2/bedrock-runtime/scenarios/scenario_invoke_models.go +++ b/gov2/bedrock-runtime/scenarios/scenario_invoke_models.go @@ -25,11 +25,9 @@ import ( // to invoke various foundation models for text and image generation // // 1. Generate text with Anthropic Claude 2 -// 2. Generate text with AI21 Labs Jurassic-2 -// 3. Generate text with Meta Llama 2 Chat -// 4. Generate text and asynchronously process the response stream with Anthropic Claude 2 -// 5. Generate an image with the Amazon Titan image generation model -// 6. Generate text with Amazon Titan Text G1 Express model +// 2. Generate text with Meta Llama 2 Chat +// 3. Generate text and asynchronously process the response stream with Anthropic Claude 2 +// 4. 
Generate an image with the Amazon Titan image generation model type InvokeModelsScenario struct { sdkConfig aws.Config invokeModelWrapper actions.InvokeModelWrapper @@ -70,10 +68,6 @@ func (scenario InvokeModelsScenario) Run(ctx context.Context) { log.Printf("Invoking Claude with prompt: %v\n", text2textPrompt) scenario.InvokeClaude(ctx, text2textPrompt) - log.Println(strings.Repeat("-", 77)) - log.Printf("Invoking Jurassic-2 with prompt: %v\n", text2textPrompt) - scenario.InvokeJurassic2(ctx, text2textPrompt) - log.Println(strings.Repeat("=", 77)) log.Printf("Now, let's invoke Claude with the asynchronous client and process the response stream:\n\n") @@ -91,10 +85,6 @@ func (scenario InvokeModelsScenario) Run(ctx context.Context) { log.Printf("Invoking Amazon Titan with prompt: %v\n", text2ImagePrompt) scenario.InvokeTitanImage(ctx, text2ImagePrompt, seed) - log.Println(strings.Repeat("-", 77)) - log.Printf("Invoking Titan Text Express with prompt: %v\n", text2textPrompt) - scenario.InvokeTitanText(ctx, text2textPrompt) - log.Println(strings.Repeat("=", 77)) log.Println("Thanks for watching!") log.Println(strings.Repeat("=", 77)) @@ -108,14 +98,6 @@ func (scenario InvokeModelsScenario) InvokeClaude(ctx context.Context, prompt st log.Printf("\nClaude : %v\n", strings.TrimSpace(completion)) } -func (scenario InvokeModelsScenario) InvokeJurassic2(ctx context.Context, prompt string) { - completion, err := scenario.invokeModelWrapper.InvokeJurassic2(ctx, prompt) - if err != nil { - panic(err) - } - log.Printf("\nJurassic-2 : %v\n", strings.TrimSpace(completion)) -} - func (scenario InvokeModelsScenario) InvokeWithResponseStream(ctx context.Context, prompt string) { log.Println("\nClaude with response stream:") _, err := scenario.responseStreamWrapper.InvokeModelWithResponseStream(ctx, prompt) @@ -134,14 +116,6 @@ func (scenario InvokeModelsScenario) InvokeTitanImage(ctx context.Context, promp fmt.Printf("The generated image has been saved to %s\n", imagePath) } -func (scenario InvokeModelsScenario) InvokeTitanText(ctx context.Context, prompt string) { - completion, err := scenario.invokeModelWrapper.InvokeTitanText(ctx, prompt) - if err != nil { - panic(err) - } - log.Printf("\nTitan Text Express : %v\n\n", strings.TrimSpace(completion)) -} - // snippet-end:[gov2.bedrock-runtime.Scenario_InvokeModels] func saveImage(base64ImageData string, modelId string) string { diff --git a/javascriptv3/example_code/bedrock-runtime/README.md b/javascriptv3/example_code/bedrock-runtime/README.md index 12cb74ff458..d67d42a88a3 100644 --- a/javascriptv3/example_code/bedrock-runtime/README.md +++ b/javascriptv3/example_code/bedrock-runtime/README.md @@ -56,12 +56,6 @@ functions within the same service. 
- [InvokeModel](models/amazonNovaCanvas/invokeModel.js#L4) -### Amazon Titan Text - -- [Converse](models/amazonTitanText/converse.js#L4) -- [ConverseStream](models/amazonTitanText/converseStream.js#L4) -- [InvokeModel](models/amazonTitanText/invoke_model.js) - ### Anthropic Claude - [Converse](models/anthropicClaude/converse.js#L4) diff --git a/javascriptv3/example_code/bedrock-runtime/config/foundation_models.js b/javascriptv3/example_code/bedrock-runtime/config/foundation_models.js index 8759211664c..c78090da294 100644 --- a/javascriptv3/example_code/bedrock-runtime/config/foundation_models.js +++ b/javascriptv3/example_code/bedrock-runtime/config/foundation_models.js @@ -20,37 +20,6 @@ export const FoundationModels = Object.freeze({ module: () => import("../models/anthropicClaude/invoke_claude_3.js"), invoker: (/** @type {Module} */ module) => module.invokeModel, }, - CLAUDE_2_1: { - modelId: "anthropic.claude-v2:1", - modelName: "Anthropic Claude 2.1", - module: () => import("../models/anthropicClaude/invoke_claude_2.js"), - invoker: (/** @type {Module} */ module) => module.invokeModel, - }, - CLAUDE_2: { - modelId: "anthropic.claude-v2", - modelName: "Anthropic Claude 2.0", - module: () => import("../models/anthropicClaude/invoke_claude_2.js"), - invoker: (/** @type {Module} */ module) => module.invokeModel, - }, - CLAUDE_INSTANT: { - modelId: "anthropic.claude-instant-v1", - modelName: "Anthropic Claude Instant", - module: () => - import("../models/anthropicClaude/invoke_claude_instant_1.js"), - invoker: (/** @type {Module} */ module) => module.invokeModel, - }, - JURASSIC2_MID: { - modelId: "ai21.j2-mid-v1", - modelName: "Jurassic-2 Mid", - module: () => import("../models/ai21LabsJurassic2/invoke_model.js"), - invoker: (/** @type {Module} */ module) => module.invokeModel, - }, - JURASSIC2_ULTRA: { - modelId: "ai21.j2-ultra-v1", - modelName: "Jurassic-2 Ultra", - module: () => import("../models/ai21LabsJurassic2/invoke_model.js"), - invoker: (/** @type {Module} */ module) => module.invokeModel, - }, MISTRAL_7B: { modelId: "mistral.mistral-7b-instruct-v0:2", modelName: "Mistral 7B Instruct", @@ -63,16 +32,4 @@ export const FoundationModels = Object.freeze({ module: () => import("../models/mistral/invoke_mixtral_8x7b.js"), invoker: (/** @type {Module} */ module) => module.invokeModel, }, - TITAN_TEXT_G1_EXPRESS: { - modelId: "amazon.titan-text-express-v1", - modelName: "Titan Text G1 - Express", - module: () => import("../models/amazonTitanText/invoke_model.js"), - invoker: (/** @type {Module} */ module) => module.invokeModel, - }, - TITAN_TEXT_G1_LITE: { - modelId: "amazon.titan-text-lite-v1", - modelName: "Titan Text G1 - Lite", - module: () => import("../models/amazonTitanText/invoke_model.js"), - invoker: (/** @type {Module} */ module) => module.invokeModel, - }, }); diff --git a/javascriptv3/example_code/bedrock-runtime/models/amazonTitanText/converse.js b/javascriptv3/example_code/bedrock-runtime/models/amazonTitanText/converse.js deleted file mode 100644 index faad66eeeef..00000000000 --- a/javascriptv3/example_code/bedrock-runtime/models/amazonTitanText/converse.js +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -// SPDX-License-Identifier: Apache-2.0 - -// snippet-start:[javascript.v3.bedrock-runtime.Converse_AmazonTitanText] -// Use the Conversation API to send a text message to Amazon Titan Text. 
- -import { - BedrockRuntimeClient, - ConverseCommand, -} from "@aws-sdk/client-bedrock-runtime"; - -// Create a Bedrock Runtime client in the AWS Region you want to use. -const client = new BedrockRuntimeClient({ region: "us-east-1" }); - -// Set the model ID, e.g., Titan Text Premier. -const modelId = "amazon.titan-text-premier-v1:0"; - -// Start a conversation with the user message. -const userMessage = - "Describe the purpose of a 'hello world' program in one line."; -const conversation = [ - { - role: "user", - content: [{ text: userMessage }], - }, -]; - -// Create a command with the model ID, the message, and a basic configuration. -const command = new ConverseCommand({ - modelId, - messages: conversation, - inferenceConfig: { maxTokens: 512, temperature: 0.5, topP: 0.9 }, -}); - -try { - // Send the command to the model and wait for the response - const response = await client.send(command); - - // Extract and print the response text. - const responseText = response.output.message.content[0].text; - console.log(responseText); -} catch (err) { - console.log(`ERROR: Can't invoke '${modelId}'. Reason: ${err}`); - process.exit(1); -} - -// snippet-end:[javascript.v3.bedrock-runtime.Converse_AmazonTitanText] diff --git a/javascriptv3/example_code/bedrock-runtime/models/amazonTitanText/converseStream.js b/javascriptv3/example_code/bedrock-runtime/models/amazonTitanText/converseStream.js deleted file mode 100644 index c7733b7f738..00000000000 --- a/javascriptv3/example_code/bedrock-runtime/models/amazonTitanText/converseStream.js +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -// SPDX-License-Identifier: Apache-2.0 - -// snippet-start:[javascript.v3.bedrock-runtime.ConverseStream_AmazonTitanText] -// Use the Conversation API to send a text message to Amazon Titan Text. - -import { - BedrockRuntimeClient, - ConverseStreamCommand, -} from "@aws-sdk/client-bedrock-runtime"; - -// Create a Bedrock Runtime client in the AWS Region you want to use. -const client = new BedrockRuntimeClient({ region: "us-east-1" }); - -// Set the model ID, e.g., Titan Text Premier. -const modelId = "amazon.titan-text-premier-v1:0"; - -// Start a conversation with the user message. -const userMessage = - "Describe the purpose of a 'hello world' program in one line."; -const conversation = [ - { - role: "user", - content: [{ text: userMessage }], - }, -]; - -// Create a command with the model ID, the message, and a basic configuration. -const command = new ConverseStreamCommand({ - modelId, - messages: conversation, - inferenceConfig: { maxTokens: 512, temperature: 0.5, topP: 0.9 }, -}); - -try { - // Send the command to the model and wait for the response - const response = await client.send(command); - - // Extract and print the streamed response text in real-time. - for await (const item of response.stream) { - if (item.contentBlockDelta) { - process.stdout.write(item.contentBlockDelta.delta?.text); - } - } -} catch (err) { - console.log(`ERROR: Can't invoke '${modelId}'. 
Reason: ${err}`); - process.exit(1); -} - -// snippet-end:[javascript.v3.bedrock-runtime.ConverseStream_AmazonTitanText] diff --git a/javascriptv3/example_code/bedrock-runtime/models/amazonTitanText/invoke_model.js b/javascriptv3/example_code/bedrock-runtime/models/amazonTitanText/invoke_model.js deleted file mode 100644 index d8aee0eb299..00000000000 --- a/javascriptv3/example_code/bedrock-runtime/models/amazonTitanText/invoke_model.js +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -// SPDX-License-Identifier: Apache-2.0 - -import { fileURLToPath } from "node:url"; - -import { FoundationModels } from "../../config/foundation_models.js"; -import { - BedrockRuntimeClient, - InvokeModelCommand, -} from "@aws-sdk/client-bedrock-runtime"; - -/** - * @typedef {Object} ResponseBody - * @property {Object[]} results - */ - -/** - * Invokes an Amazon Titan Text generation model. - * - * @param {string} prompt - The input text prompt for the model to complete. - * @param {string} [modelId] - The ID of the model to use. Defaults to "amazon.titan-text-express-v1". - */ -export const invokeModel = async ( - prompt, - modelId = "amazon.titan-text-express-v1", -) => { - // Create a new Bedrock Runtime client instance. - const client = new BedrockRuntimeClient({ region: "us-east-1" }); - - // Prepare the payload for the model. - const payload = { - inputText: prompt, - textGenerationConfig: { - maxTokenCount: 4096, - stopSequences: [], - temperature: 0, - topP: 1, - }, - }; - - // Invoke the model with the payload and wait for the response. - const command = new InvokeModelCommand({ - contentType: "application/json", - body: JSON.stringify(payload), - modelId, - }); - const apiResponse = await client.send(command); - - // Decode and return the response. - const decodedResponseBody = new TextDecoder().decode(apiResponse.body); - /** @type {ResponseBody} */ - const responseBody = JSON.parse(decodedResponseBody); - return responseBody.results[0].outputText; -}; - -// Invoke the function if this file was run directly. 
-if (process.argv[1] === fileURLToPath(import.meta.url)) { - const prompt = - 'Complete the following in one sentence: "Once upon a time..."'; - const modelId = FoundationModels.TITAN_TEXT_G1_EXPRESS.modelId; - console.log(`Prompt: ${prompt}`); - console.log(`Model ID: ${modelId}`); - - try { - console.log("-".repeat(53)); - const response = await invokeModel(prompt, modelId); - console.log(response); - } catch (err) { - console.log(err); - } -} diff --git a/javascriptv3/example_code/bedrock-runtime/package.json b/javascriptv3/example_code/bedrock-runtime/package.json index 57f8dfc88e3..f310d5783e8 100644 --- a/javascriptv3/example_code/bedrock-runtime/package.json +++ b/javascriptv3/example_code/bedrock-runtime/package.json @@ -8,9 +8,9 @@ "integration-test": "vitest run integration --reporter=junit --outputFile=test_results/bedrock-runtime-test-results.junit.xml" }, "devDependencies": { - "vitest": "^1.6.1" + "vitest": "^3.2.4" }, "dependencies": { - "@aws-sdk/client-bedrock-runtime": "^3.785.0" + "@aws-sdk/client-bedrock-runtime": "^3.899.0" } } diff --git a/javascriptv3/example_code/bedrock-runtime/scenarios/converse_tool_scenario/converse-tool-scenario.js b/javascriptv3/example_code/bedrock-runtime/scenarios/converse_tool_scenario/converse-tool-scenario.js index 8093d8108cb..6aad2629fec 100644 --- a/javascriptv3/example_code/bedrock-runtime/scenarios/converse_tool_scenario/converse-tool-scenario.js +++ b/javascriptv3/example_code/bedrock-runtime/scenarios/converse_tool_scenario/converse-tool-scenario.js @@ -21,11 +21,11 @@ import { import { parseArgs } from "node:util"; import { fileURLToPath } from "node:url"; -import { dirname } from "node:path"; -const __filename = fileURLToPath(import.meta.url); import data from "./questions.json" with { type: "json" }; import toolConfig from "./tool_config.json" with { type: "json" }; +const __filename = fileURLToPath(import.meta.url); + const systemPrompt = [ { text: @@ -198,6 +198,7 @@ async function callWeatherTool(longitude, latitude) { */ const pressEnter = new ScenarioInput("continue", "Press Enter to continue", { type: "input", + default: "", }); const greet = new ScenarioOutput( diff --git a/javascriptv3/example_code/bedrock-runtime/tests/amazon_titan.integration.test.js b/javascriptv3/example_code/bedrock-runtime/tests/amazon_titan.integration.test.js deleted file mode 100644 index 08f50a5176d..00000000000 --- a/javascriptv3/example_code/bedrock-runtime/tests/amazon_titan.integration.test.js +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
-// SPDX-License-Identifier: Apache-2.0 - -import { describe, it } from "vitest"; -import { FoundationModels } from "../config/foundation_models.js"; -import { expectToBeANonEmptyString } from "./test_tools.js"; -import { invokeModel } from "../models/amazonTitanText/invoke_model.js"; - -const TEXT_PROMPT = "Hello, this is a test prompt"; - -describe("Invoke Titan Text G1 - Express", () => { - it("should return a response", async () => { - const modelId = FoundationModels.TITAN_TEXT_G1_EXPRESS.modelId; - const response = await invokeModel(TEXT_PROMPT, modelId); - expectToBeANonEmptyString(response); - }); -}); - -describe("Invoke Titan Text G1 - Lite", () => { - it("should return a response", async () => { - const modelId = FoundationModels.TITAN_TEXT_G1_LITE.modelId; - const response = await invokeModel(TEXT_PROMPT, modelId); - expectToBeANonEmptyString(response); - }); -}); diff --git a/javascriptv3/example_code/bedrock-runtime/tests/anthropic_claude.integration.test.js b/javascriptv3/example_code/bedrock-runtime/tests/anthropic_claude.integration.test.js index a2cac307af0..d7f6bab28f1 100644 --- a/javascriptv3/example_code/bedrock-runtime/tests/anthropic_claude.integration.test.js +++ b/javascriptv3/example_code/bedrock-runtime/tests/anthropic_claude.integration.test.js @@ -4,14 +4,6 @@ import { describe, it } from "vitest"; import { FoundationModels } from "../config/foundation_models.js"; import { expectToBeANonEmptyString } from "./test_tools.js"; -import { - invokeModel as invokeClaudeInstantMessagesApi, - invokeTextCompletionsApi as invokeClaudeInstantTextCompletionsApi, -} from "../models/anthropicClaude/invoke_claude_instant_1.js"; -import { - invokeModel as invokeClaude2MessagesApi, - invokeTextCompletionsApi as invokeClaude2TextCompletionsApi, -} from "../models/anthropicClaude/invoke_claude_2.js"; import { invokeModel as invokeClaude3, invokeModelWithResponseStream as invokeClaude3WithResponseStream, @@ -19,63 +11,6 @@ import { const TEXT_PROMPT = "Hello, this is a test prompt"; -describe("Invoke Anthropic Claude Instant using the Text Completions API", () => { - it("should return a response", async () => { - const modelId = FoundationModels.CLAUDE_INSTANT.modelId; - const response = await invokeClaudeInstantTextCompletionsApi( - TEXT_PROMPT, - modelId, - ); - expectToBeANonEmptyString(response); - }); -}); - -describe("Invoke Anthropic Claude Instant using the Messages API", () => { - it("should return a response", async () => { - const modelId = FoundationModels.CLAUDE_INSTANT.modelId; - const response = await invokeClaudeInstantMessagesApi(TEXT_PROMPT, modelId); - expectToBeANonEmptyString(response); - }); -}); - -describe("Invoke Anthropic Claude 2.0 using the Text Completions API", () => { - it("should return a response", async () => { - const modelId = FoundationModels.CLAUDE_2.modelId; - const response = await invokeClaude2TextCompletionsApi( - TEXT_PROMPT, - modelId, - ); - expectToBeANonEmptyString(response); - }); -}); - -describe("Invoke Anthropic Claude 2.0 using the Messages API", () => { - it("should return a response", async () => { - const modelId = FoundationModels.CLAUDE_2.modelId; - const response = await invokeClaude2MessagesApi(TEXT_PROMPT, modelId); - expectToBeANonEmptyString(response); - }); -}); - -describe("Invoke Anthropic Claude 2.1 using the Text Completions API", () => { - it("should return a response", async () => { - const modelId = FoundationModels.CLAUDE_2_1.modelId; - const response = await invokeClaude2TextCompletionsApi( - TEXT_PROMPT, - 
modelId, - ); - expectToBeANonEmptyString(response); - }); -}); - -describe("Invoke Anthropic Claude 2.1 using the Messages API", () => { - it("should return a response", async () => { - const modelId = FoundationModels.CLAUDE_2_1.modelId; - const response = await invokeClaude2MessagesApi(TEXT_PROMPT, modelId); - expectToBeANonEmptyString(response); - }); -}); - describe("Invoke Anthropic Claude 3 Haiku using the Messages API", () => { it("should return a response", async () => { const modelId = FoundationModels.CLAUDE_3_HAIKU.modelId; diff --git a/javascriptv3/example_code/bedrock-runtime/tests/converse.integration.test.js b/javascriptv3/example_code/bedrock-runtime/tests/converse.integration.test.js index c771d09ff91..6f9bd173c7c 100644 --- a/javascriptv3/example_code/bedrock-runtime/tests/converse.integration.test.js +++ b/javascriptv3/example_code/bedrock-runtime/tests/converse.integration.test.js @@ -10,7 +10,6 @@ describe("Converse with text generation models", () => { const models = { amazonNovaText: "Amazon Nova", - amazonTitanText: "Amazon Titan", anthropicClaude: "Anthropic Claude", cohereCommand: "Cohere Command", metaLlama: "Meta Llama", diff --git a/javascriptv3/example_code/bedrock-runtime/tests/converse_stream.integration.test.js b/javascriptv3/example_code/bedrock-runtime/tests/converse_stream.integration.test.js index 77f0c0a48ce..8e1e93e61aa 100644 --- a/javascriptv3/example_code/bedrock-runtime/tests/converse_stream.integration.test.js +++ b/javascriptv3/example_code/bedrock-runtime/tests/converse_stream.integration.test.js @@ -11,7 +11,6 @@ describe("ConverseStream with text generation models", () => { const models = { amazonNovaText: "Amazon Nova", - amazonTitanText: "Amazon Titan", anthropicClaude: "Anthropic Claude", cohereCommand: "Cohere Command", metaLlama: "Meta Llama", diff --git a/javascriptv3/example_code/bedrock-runtime/tests/image_generation.integration.test.js b/javascriptv3/example_code/bedrock-runtime/tests/image_generation.integration.test.js index fccb5495126..3139a2ed6d7 100644 --- a/javascriptv3/example_code/bedrock-runtime/tests/image_generation.integration.test.js +++ b/javascriptv3/example_code/bedrock-runtime/tests/image_generation.integration.test.js @@ -6,8 +6,12 @@ import { invokeModel } from "../models/amazonNovaCanvas/invokeModel.js"; import { expectToBeANonEmptyString } from "./test_tools.js"; describe("Invoking Amazon Nova Canvas", () => { - it("should return a response", async () => { - const response = await invokeModel(); - expectToBeANonEmptyString(response); - }); + it( + "should return a response", + async () => { + const response = await invokeModel(); + expectToBeANonEmptyString(response); + }, + { timeout: 600000 }, + ); }); diff --git a/javascriptv3/example_code/codepipeline/MyCodePipelineFunction.py b/javascriptv3/example_code/codepipeline/MyCodePipelineFunction.py index ab6e6ec527c..f9058f03976 100644 --- a/javascriptv3/example_code/codepipeline/MyCodePipelineFunction.py +++ b/javascriptv3/example_code/codepipeline/MyCodePipelineFunction.py @@ -1,405 +1,405 @@ -# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
-# SPDX-License-Identifier: Apache-2.0 - -from __future__ import print_function -from boto3.session import Session - -import json -import urllib -import boto3 -import zipfile -import tempfile -import botocore -import traceback - -print("Loading function") - -cf = boto3.client("cloudformation") -code_pipeline = boto3.client("codepipeline") - - -def find_artifact(artifacts, name): - """Finds the artifact 'name' among the 'artifacts' - - Args: - artifacts: The list of artifacts available to the function - name: The artifact we wish to use - Returns: - The artifact dictionary found - Raises: - Exception: If no matching artifact is found - - """ - for artifact in artifacts: - if artifact["name"] == name: - return artifact - - raise Exception('Input artifact named "{0}" not found in event'.format(name)) - - -def get_template(s3, artifact, file_in_zip): - """Gets the template artifact - - Downloads the artifact from the S3 artifact store to a temporary file - then extracts the zip and returns the file containing the CloudFormation - template. - - Args: - artifact: The artifact to download - file_in_zip: The path to the file within the zip containing the template - - Returns: - The CloudFormation template as a string - - Raises: - Exception: Any exception thrown while downloading the artifact or unzipping it - - """ - tmp_file = tempfile.NamedTemporaryFile() - bucket = artifact["location"]["s3Location"]["bucketName"] - key = artifact["location"]["s3Location"]["objectKey"] - - with tempfile.NamedTemporaryFile() as tmp_file: - s3.download_file(bucket, key, tmp_file.name) - with zipfile.ZipFile(tmp_file.name, "r") as zip: - return zip.read(file_in_zip) - - -def update_stack(stack, template): - """Start a CloudFormation stack update - - Args: - stack: The stack to update - template: The template to apply - - Returns: - True if an update was started, false if there were no changes - to the template since the last update. - - Raises: - Exception: Any exception besides "No updates are to be performed." - - """ - try: - cf.update_stack(StackName=stack, TemplateBody=template) - return True - - except botocore.exceptions.ClientError as e: - if e.response["Error"]["Message"] == "No updates are to be performed.": - return False - else: - raise Exception( - 'Error updating CloudFormation stack "{0}"'.format(stack), e - ) - - -def stack_exists(stack): - """Check if a stack exists or not - - Args: - stack: The stack to check - - Returns: - True or False depending on whether the stack exists - - Raises: - Any exceptions raised .describe_stacks() besides that - the stack doesn't exist. 
- - """ - try: - cf.describe_stacks(StackName=stack) - return True - except botocore.exceptions.ClientError as e: - if "does not exist" in e.response["Error"]["Message"]: - return False - else: - raise e - - -def create_stack(stack, template): - """Starts a new CloudFormation stack creation - - Args: - stack: The stack to be created - template: The template for the stack to be created with - - Throws: - Exception: Any exception thrown by .create_stack() - """ - cf.create_stack(StackName=stack, TemplateBody=template) - - -def get_stack_status(stack): - """Get the status of an existing CloudFormation stack - - Args: - stack: The name of the stack to check - - Returns: - The CloudFormation status string of the stack such as CREATE_COMPLETE - - Raises: - Exception: Any exception thrown by .describe_stacks() - - """ - stack_description = cf.describe_stacks(StackName=stack) - return stack_description["Stacks"][0]["StackStatus"] - - -def put_job_success(job, message): - """Notify CodePipeline of a successful job - - Args: - job: The CodePipeline job ID - message: A message to be logged relating to the job status - - Raises: - Exception: Any exception thrown by .put_job_success_result() - - """ - print("Putting job success") - print(message) - code_pipeline.put_job_success_result(jobId=job) - - -def put_job_failure(job, message): - """Notify CodePipeline of a failed job - - Args: - job: The CodePipeline job ID - message: A message to be logged relating to the job status - - Raises: - Exception: Any exception thrown by .put_job_failure_result() - - """ - print("Putting job failure") - print(message) - code_pipeline.put_job_failure_result( - jobId=job, failureDetails={"message": message, "type": "JobFailed"} - ) - - -def continue_job_later(job, message): - """Notify CodePipeline of a continuing job - - This will cause CodePipeline to invoke the function again with the - supplied continuation token. - - Args: - job: The JobID - message: A message to be logged relating to the job status - continuation_token: The continuation token - - Raises: - Exception: Any exception thrown by .put_job_success_result() - - """ - - # Use the continuation token to keep track of any job execution state - # This data will be available when a new job is scheduled to continue the current execution - continuation_token = json.dumps({"previous_job_id": job}) - - print("Putting job continuation") - print(message) - code_pipeline.put_job_success_result( - jobId=job, continuationToken=continuation_token - ) - - -def start_update_or_create(job_id, stack, template): - """Starts the stack update or create process - - If the stack exists then update, otherwise create. - - Args: - job_id: The ID of the CodePipeline job - stack: The stack to create or update - template: The template to create/update the stack with - - """ - if stack_exists(stack): - status = get_stack_status(stack) - if status not in ["CREATE_COMPLETE", "ROLLBACK_COMPLETE", "UPDATE_COMPLETE"]: - # If the CloudFormation stack is not in a state where - # it can be updated again then fail the job right away. - put_job_failure(job_id, "Stack cannot be updated when status is: " + status) - return - - were_updates = update_stack(stack, template) - - if were_updates: - # If there were updates then continue the job so it can monitor - # the progress of the update. 
- continue_job_later(job_id, "Stack update started") - - else: - # If there were no updates then succeed the job immediately - put_job_success(job_id, "There were no stack updates") - else: - # If the stack doesn't already exist then create it instead - # of updating it. - create_stack(stack, template) - # Continue the job so the pipeline will wait for the CloudFormation - # stack to be created. - continue_job_later(job_id, "Stack create started") - - -def check_stack_update_status(job_id, stack): - """Monitor an already-running CloudFormation update/create - - Succeeds, fails or continues the job depending on the stack status. - - Args: - job_id: The CodePipeline job ID - stack: The stack to monitor - - """ - status = get_stack_status(stack) - if status in ["UPDATE_COMPLETE", "CREATE_COMPLETE"]: - # If the update/create finished successfully then - # succeed the job and don't continue. - put_job_success(job_id, "Stack update complete") - - elif status in [ - "UPDATE_IN_PROGRESS", - "UPDATE_ROLLBACK_IN_PROGRESS", - "UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS", - "CREATE_IN_PROGRESS", - "ROLLBACK_IN_PROGRESS", - ]: - # If the job isn't finished yet then continue it - continue_job_later(job_id, "Stack update still in progress") - - else: - # If the Stack is a state which isn't "in progress" or "complete" - # then the stack update/create has failed so end the job with - # a failed result. - put_job_failure(job_id, "Update failed: " + status) - - -def get_user_params(job_data): - """Decodes the JSON user parameters and validates the required properties. - - Args: - job_data: The job data structure containing the UserParameters string which should be a valid JSON structure - - Returns: - The JSON parameters decoded as a dictionary. - - Raises: - Exception: The JSON can't be decoded or a property is missing. - - """ - try: - # Get the user parameters which contain the stack, artifact and file settings - user_parameters = job_data["actionConfiguration"]["configuration"][ - "UserParameters" - ] - decoded_parameters = json.loads(user_parameters) - - except Exception as e: - # We're expecting the user parameters to be encoded as JSON - # so we can pass multiple values. If the JSON can't be decoded - # then fail the job with a helpful message. - raise Exception("UserParameters could not be decoded as JSON") - - if "stack" not in decoded_parameters: - # Validate that the stack is provided, otherwise fail the job - # with a helpful message. - raise Exception("Your UserParameters JSON must include the stack name") - - if "artifact" not in decoded_parameters: - # Validate that the artifact name is provided, otherwise fail the job - # with a helpful message. - raise Exception("Your UserParameters JSON must include the artifact name") - - if "file" not in decoded_parameters: - # Validate that the template file is provided, otherwise fail the job - # with a helpful message. - raise Exception("Your UserParameters JSON must include the template file name") - - return decoded_parameters - - -def setup_s3_client(job_data): - """Creates an S3 client - - Uses the credentials passed in the event by CodePipeline. These - credentials can be used to access the artifact bucket. 
- - Args: - job_data: The job data structure - - Returns: - An S3 client with the appropriate credentials - - """ - key_id = job_data["artifactCredentials"]["accessKeyId"] - key_secret = job_data["artifactCredentials"]["secretAccessKey"] - session_token = job_data["artifactCredentials"]["sessionToken"] - - session = Session( - aws_access_key_id=key_id, - aws_secret_access_key=key_secret, - aws_session_token=session_token, - ) - return session.client("s3", config=botocore.client.Config(signature_version="s3v4")) - - -def lambda_handler(event, context): - """The Lambda function handler - - If a continuing job then checks the CloudFormation stack status - and updates the job accordingly. - - If a new job then kick of an update or creation of the target - CloudFormation stack. - - Args: - event: The event passed by Lambda - context: The context passed by Lambda - - """ - try: - # Extract the Job ID - job_id = event["CodePipeline.job"]["id"] - - # Extract the Job Data - job_data = event["CodePipeline.job"]["data"] - - # Extract the params - params = get_user_params(job_data) - - # Get the list of artifacts passed to the function - artifacts = job_data["inputArtifacts"] - - stack = params["stack"] - artifact = params["artifact"] - template_file = params["file"] - - if "continuationToken" in job_data: - # If we're continuing then the create/update has already been triggered - # we just need to check if it has finished. - check_stack_update_status(job_id, stack) - else: - # Get the artifact details - artifact_data = find_artifact(artifacts, artifact) - # Get S3 client to access artifact with - s3 = setup_s3_client(job_data) - # Get the JSON template file out of the artifact - template = get_template(s3, artifact_data, template_file) - # Kick off a stack update or create - start_update_or_create(job_id, stack, template) - - except Exception as e: - # If any other exceptions which we didn't expect are raised - # then fail the job and log the exception message. - print("Function failed due to exception.") - print(e) - traceback.print_exc() - put_job_failure(job_id, "Function exception: " + str(e)) - - print("Function complete.") - return "Complete." +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +from __future__ import print_function + +import boto3 +import botocore +import json +import tempfile +import traceback +import urllib +import zipfile +from boto3.session import Session + +print("Loading function") + +cf = boto3.client("cloudformation") +code_pipeline = boto3.client("codepipeline") + + +def find_artifact(artifacts, name): + """Finds the artifact 'name' among the 'artifacts' + + Args: + artifacts: The list of artifacts available to the function + name: The artifact we wish to use + Returns: + The artifact dictionary found + Raises: + Exception: If no matching artifact is found + + """ + for artifact in artifacts: + if artifact["name"] == name: + return artifact + + raise Exception('Input artifact named "{0}" not found in event'.format(name)) + + +def get_template(s3, artifact, file_in_zip): + """Gets the template artifact + + Downloads the artifact from the S3 artifact store to a temporary file + then extracts the zip and returns the file containing the CloudFormation + template. 
+
+    Args:
+        artifact: The artifact to download
+        file_in_zip: The path to the file within the zip containing the template
+
+    Returns:
+        The CloudFormation template as a string
+
+    Raises:
+        Exception: Any exception thrown while downloading the artifact or unzipping it
+
+    """
+    bucket = artifact["location"]["s3Location"]["bucketName"]
+    key = artifact["location"]["s3Location"]["objectKey"]
+
+    with tempfile.NamedTemporaryFile() as tmp_file:
+        s3.download_file(bucket, key, tmp_file.name)
+        with zipfile.ZipFile(tmp_file.name, "r") as zip:
+            return zip.read(file_in_zip)
+
+
+def update_stack(stack, template):
+    """Start a CloudFormation stack update
+
+    Args:
+        stack: The stack to update
+        template: The template to apply
+
+    Returns:
+        True if an update was started, false if there were no changes
+        to the template since the last update.
+
+    Raises:
+        Exception: Any exception besides "No updates are to be performed."
+
+    """
+    try:
+        cf.update_stack(StackName=stack, TemplateBody=template)
+        return True
+
+    except botocore.exceptions.ClientError as e:
+        if e.response["Error"]["Message"] == "No updates are to be performed.":
+            return False
+        else:
+            raise Exception(
+                'Error updating CloudFormation stack "{0}"'.format(stack), e
+            )
+
+
+def stack_exists(stack):
+    """Check if a stack exists or not
+
+    Args:
+        stack: The stack to check
+
+    Returns:
+        True or False depending on whether the stack exists
+
+    Raises:
+        Any exceptions raised by .describe_stacks() besides that
+        the stack doesn't exist.
+
+    """
+    try:
+        cf.describe_stacks(StackName=stack)
+        return True
+    except botocore.exceptions.ClientError as e:
+        if "does not exist" in e.response["Error"]["Message"]:
+            return False
+        else:
+            raise e
+
+
+def create_stack(stack, template):
+    """Starts a new CloudFormation stack creation
+
+    Args:
+        stack: The stack to be created
+        template: The template for the stack to be created with
+
+    Raises:
+        Exception: Any exception thrown by .create_stack()
+    """
+    cf.create_stack(StackName=stack, TemplateBody=template)
+
+
+def get_stack_status(stack):
+    """Get the status of an existing CloudFormation stack
+
+    Args:
+        stack: The name of the stack to check
+
+    Returns:
+        The CloudFormation status string of the stack such as CREATE_COMPLETE
+
+    Raises:
+        Exception: Any exception thrown by .describe_stacks()
+
+    """
+    stack_description = cf.describe_stacks(StackName=stack)
+    return stack_description["Stacks"][0]["StackStatus"]
+
+
+def put_job_success(job, message):
+    """Notify CodePipeline of a successful job
+
+    Args:
+        job: The CodePipeline job ID
+        message: A message to be logged relating to the job status
+
+    Raises:
+        Exception: Any exception thrown by .put_job_success_result()
+
+    """
+    print("Putting job success")
+    print(message)
+    code_pipeline.put_job_success_result(jobId=job)
+
+
+def put_job_failure(job, message):
+    """Notify CodePipeline of a failed job
+
+    Args:
+        job: The CodePipeline job ID
+        message: A message to be logged relating to the job status
+
+    Raises:
+        Exception: Any exception thrown by .put_job_failure_result()
+
+    """
+    print("Putting job failure")
+    print(message)
+    code_pipeline.put_job_failure_result(
+        jobId=job, failureDetails={"message": message, "type": "JobFailed"}
+    )
+
+
+def continue_job_later(job, message):
+    """Notify CodePipeline of a continuing job
+
+    This will cause CodePipeline to invoke the function again with the
+    supplied continuation token.
+
+    Args:
+        job: The JobID
+        message: A message to be logged relating to the job status
+
+    Raises:
+        Exception: Any exception thrown by .put_job_success_result()
+
+    """
+
+    # Use the continuation token to keep track of any job execution state
+    # This data will be available when a new job is scheduled to continue the current execution
+    continuation_token = json.dumps({"previous_job_id": job})
+
+    print("Putting job continuation")
+    print(message)
+    code_pipeline.put_job_success_result(
+        jobId=job, continuationToken=continuation_token
+    )
+
+
+def start_update_or_create(job_id, stack, template):
+    """Starts the stack update or create process
+
+    If the stack exists then update, otherwise create.
+
+    Args:
+        job_id: The ID of the CodePipeline job
+        stack: The stack to create or update
+        template: The template to create/update the stack with
+
+    """
+    if stack_exists(stack):
+        status = get_stack_status(stack)
+        if status not in ["CREATE_COMPLETE", "ROLLBACK_COMPLETE", "UPDATE_COMPLETE"]:
+            # If the CloudFormation stack is not in a state where
+            # it can be updated again then fail the job right away.
+            put_job_failure(job_id, "Stack cannot be updated when status is: " + status)
+            return
+
+        were_updates = update_stack(stack, template)
+
+        if were_updates:
+            # If there were updates then continue the job so it can monitor
+            # the progress of the update.
+            continue_job_later(job_id, "Stack update started")
+
+        else:
+            # If there were no updates then succeed the job immediately
+            put_job_success(job_id, "There were no stack updates")
+    else:
+        # If the stack doesn't already exist then create it instead
+        # of updating it.
+        create_stack(stack, template)
+        # Continue the job so the pipeline will wait for the CloudFormation
+        # stack to be created.
+        continue_job_later(job_id, "Stack create started")
+
+
+def check_stack_update_status(job_id, stack):
+    """Monitor an already-running CloudFormation update/create
+
+    Succeeds, fails or continues the job depending on the stack status.
+
+    Args:
+        job_id: The CodePipeline job ID
+        stack: The stack to monitor
+
+    """
+    status = get_stack_status(stack)
+    if status in ["UPDATE_COMPLETE", "CREATE_COMPLETE"]:
+        # If the update/create finished successfully then
+        # succeed the job and don't continue.
+        put_job_success(job_id, "Stack update complete")
+
+    elif status in [
+        "UPDATE_IN_PROGRESS",
+        "UPDATE_ROLLBACK_IN_PROGRESS",
+        "UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS",
+        "CREATE_IN_PROGRESS",
+        "ROLLBACK_IN_PROGRESS",
+    ]:
+        # If the job isn't finished yet then continue it
+        continue_job_later(job_id, "Stack update still in progress")
+
+    else:
+        # If the stack is in a state which isn't "in progress" or "complete"
+        # then the stack update/create has failed so end the job with
+        # a failed result.
+        put_job_failure(job_id, "Update failed: " + status)
+
+
+def get_user_params(job_data):
+    """Decodes the JSON user parameters and validates the required properties.
+
+    Args:
+        job_data: The job data structure containing the UserParameters string which should be a valid JSON structure
+
+    Returns:
+        The JSON parameters decoded as a dictionary.
+
+    Raises:
+        Exception: The JSON can't be decoded or a property is missing.
+ + """ + try: + # Get the user parameters which contain the stack, artifact and file settings + user_parameters = job_data["actionConfiguration"]["configuration"][ + "UserParameters" + ] + decoded_parameters = json.loads(user_parameters) + + except Exception as e: + # We're expecting the user parameters to be encoded as JSON + # so we can pass multiple values. If the JSON can't be decoded + # then fail the job with a helpful message. + raise Exception("UserParameters could not be decoded as JSON") + + if "stack" not in decoded_parameters: + # Validate that the stack is provided, otherwise fail the job + # with a helpful message. + raise Exception("Your UserParameters JSON must include the stack name") + + if "artifact" not in decoded_parameters: + # Validate that the artifact name is provided, otherwise fail the job + # with a helpful message. + raise Exception("Your UserParameters JSON must include the artifact name") + + if "file" not in decoded_parameters: + # Validate that the template file is provided, otherwise fail the job + # with a helpful message. + raise Exception("Your UserParameters JSON must include the template file name") + + return decoded_parameters + + +def setup_s3_client(job_data): + """Creates an S3 client + + Uses the credentials passed in the event by CodePipeline. These + credentials can be used to access the artifact bucket. + + Args: + job_data: The job data structure + + Returns: + An S3 client with the appropriate credentials + + """ + key_id = job_data["artifactCredentials"]["accessKeyId"] + key_secret = job_data["artifactCredentials"]["secretAccessKey"] + session_token = job_data["artifactCredentials"]["sessionToken"] + + session = Session( + aws_access_key_id=key_id, + aws_secret_access_key=key_secret, + aws_session_token=session_token, + ) + return session.client("s3", config=botocore.client.Config(signature_version="s3v4")) + + +def lambda_handler(event, context): + """The Lambda function handler + + If a continuing job then checks the CloudFormation stack status + and updates the job accordingly. + + If a new job then kick of an update or creation of the target + CloudFormation stack. + + Args: + event: The event passed by Lambda + context: The context passed by Lambda + + """ + try: + # Extract the Job ID + job_id = event["CodePipeline.job"]["id"] + + # Extract the Job Data + job_data = event["CodePipeline.job"]["data"] + + # Extract the params + params = get_user_params(job_data) + + # Get the list of artifacts passed to the function + artifacts = job_data["inputArtifacts"] + + stack = params["stack"] + artifact = params["artifact"] + template_file = params["file"] + + if "continuationToken" in job_data: + # If we're continuing then the create/update has already been triggered + # we just need to check if it has finished. + check_stack_update_status(job_id, stack) + else: + # Get the artifact details + artifact_data = find_artifact(artifacts, artifact) + # Get S3 client to access artifact with + s3 = setup_s3_client(job_data) + # Get the JSON template file out of the artifact + template = get_template(s3, artifact_data, template_file) + # Kick off a stack update or create + start_update_or_create(job_id, stack, template) + + except Exception as e: + # If any other exceptions which we didn't expect are raised + # then fail the job and log the exception message. + print("Function failed due to exception.") + print(e) + traceback.print_exc() + put_job_failure(job_id, "Function exception: " + str(e)) + + print("Function complete.") + return "Complete." 
diff --git a/javascriptv3/example_code/reactnative/ReactNativeApp/android/gradlew.bat b/javascriptv3/example_code/reactnative/ReactNativeApp/android/gradlew.bat index ac1b06f9382..107acd32c4e 100644 --- a/javascriptv3/example_code/reactnative/ReactNativeApp/android/gradlew.bat +++ b/javascriptv3/example_code/reactnative/ReactNativeApp/android/gradlew.bat @@ -1,89 +1,89 @@ -@rem -@rem Copyright 2015 the original author or authors. -@rem -@rem Licensed under the Apache License, Version 2.0 (the "License"); -@rem you may not use this file except in compliance with the License. -@rem You may obtain a copy of the License at -@rem -@rem https://www.apache.org/licenses/LICENSE-2.0 -@rem -@rem Unless required by applicable law or agreed to in writing, software -@rem distributed under the License is distributed on an "AS IS" BASIS, -@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -@rem See the License for the specific language governing permissions and -@rem limitations under the License. -@rem - -@if "%DEBUG%" == "" @echo off -@rem ########################################################################## -@rem -@rem Gradle startup script for Windows -@rem -@rem ########################################################################## - -@rem Set local scope for the variables with windows NT shell -if "%OS%"=="Windows_NT" setlocal - -set DIRNAME=%~dp0 -if "%DIRNAME%" == "" set DIRNAME=. -set APP_BASE_NAME=%~n0 -set APP_HOME=%DIRNAME% - -@rem Resolve any "." and ".." in APP_HOME to make it shorter. -for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi - -@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" - -@rem Find java.exe -if defined JAVA_HOME goto findJavaFromJavaHome - -set JAVA_EXE=java.exe -%JAVA_EXE% -version >NUL 2>&1 -if "%ERRORLEVEL%" == "0" goto execute - -echo. -echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. - -goto fail - -:findJavaFromJavaHome -set JAVA_HOME=%JAVA_HOME:"=% -set JAVA_EXE=%JAVA_HOME%/bin/java.exe - -if exist "%JAVA_EXE%" goto execute - -echo. -echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. - -goto fail - -:execute -@rem Setup the command line - -set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar - - -@rem Execute Gradle -"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* - -:end -@rem End local scope for the variables with windows NT shell -if "%ERRORLEVEL%"=="0" goto mainEnd - -:fail -rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of -rem the _cmd.exe /c_ return code! -if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 -exit /b 1 - -:mainEnd -if "%OS%"=="Windows_NT" endlocal - -:omega +@rem +@rem Copyright 2015 the original author or authors. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. 
+@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. +@rem + +@if "%DEBUG%" == "" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Resolve any "." and ".." in APP_HOME to make it shorter. +for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto execute + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto execute + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* + +:end +@rem End local scope for the variables with windows NT shell +if "%ERRORLEVEL%"=="0" goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! +if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 +exit /b 1 + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/javav2/example_code/bedrock-runtime/README.md b/javav2/example_code/bedrock-runtime/README.md index 7a9d19de970..40cd519b55b 100644 --- a/javav2/example_code/bedrock-runtime/README.md +++ b/javav2/example_code/bedrock-runtime/README.md @@ -56,13 +56,6 @@ functions within the same service. 
- [InvokeModel](src/main/java/com/example/bedrockruntime/models/amazonTitanImage/InvokeModel.java#L6) -### Amazon Titan Text - -- [Converse](src/main/java/com/example/bedrockruntime/models/amazonTitanText/Converse.java#L7) -- [ConverseStream](src/main/java/com/example/bedrockruntime/models/amazonTitanText/ConverseStream.java#L6) -- [InvokeModel](src/main/java/com/example/bedrockruntime/models/amazonTitanText/InvokeModel.java#L6) -- [InvokeModelWithResponseStream](src/main/java/com/example/bedrockruntime/models/amazonTitanText/InvokeModelWithResponseStream.java#L6) - ### Amazon Titan Text Embeddings - [InvokeModel](src/main/java/com/example/bedrockruntime/models/amazonTitanTextEmbeddings/InvokeModel.java#L6) @@ -81,9 +74,7 @@ functions within the same service. - [Converse](src/main/java/com/example/bedrockruntime/models/cohereCommand/Converse.java#L6) - [ConverseStream](src/main/java/com/example/bedrockruntime/models/cohereCommand/ConverseStream.java#L6) - [InvokeModel: Command R and R+](src/main/java/com/example/bedrockruntime/models/cohereCommand/Command_R_InvokeModel.java#L6) -- [InvokeModel: Command and Command Light](src/main/java/com/example/bedrockruntime/models/cohereCommand/Command_InvokeModel.java#L6) - [InvokeModelWithResponseStream: Command R and R+](src/main/java/com/example/bedrockruntime/models/cohereCommand/Command_R_InvokeModelWithResponseStream.java#L6) -- [InvokeModelWithResponseStream: Command and Command Light](src/main/java/com/example/bedrockruntime/models/cohereCommand/Command_InvokeModelWithResponseStream.java#L6) ### Meta Llama diff --git a/javav2/example_code/bedrock-runtime/pom.xml b/javav2/example_code/bedrock-runtime/pom.xml index 6de2271c975..335d4358281 100644 --- a/javav2/example_code/bedrock-runtime/pom.xml +++ b/javav2/example_code/bedrock-runtime/pom.xml @@ -17,7 +17,7 @@ org.apache.maven.plugins maven-compiler-plugin - 3.1 + 3.13.0 ${java.version} ${java.version} @@ -30,7 +30,7 @@ software.amazon.awssdk bom - 2.30.27 + 2.34.7 pom import @@ -52,38 +52,38 @@ org.json json - 20240303 + 20250517 commons-io commons-io - 2.16.1 + 2.20.0 org.apache.commons commons-text - 1.12.0 + 1.14.0 com.ibm.icu icu4j - 75.1 + 77.1 com.fasterxml.jackson.core jackson-databind - 2.17.0 + 2.20.0 org.junit.jupiter junit-jupiter-api - 5.10.2 + 6.0.0 test org.junit.jupiter junit-jupiter-params - 5.10.0 + 6.0.0 test diff --git a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/ai21LabsJurassic2/Converse.java b/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/ai21LabsJurassic2/Converse.java deleted file mode 100644 index a1270b62c32..00000000000 --- a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/ai21LabsJurassic2/Converse.java +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -// SPDX-License-Identifier: Apache-2.0 - -package com.example.bedrockruntime.models.ai21LabsJurassic2; - -// snippet-start:[bedrock-runtime.java2.Converse_Ai21LabsJurassic2] -// Use the Converse API to send a text message to AI21 Labs Jurassic-2. 
- -import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider; -import software.amazon.awssdk.core.exception.SdkClientException; -import software.amazon.awssdk.regions.Region; -import software.amazon.awssdk.services.bedrockruntime.BedrockRuntimeClient; -import software.amazon.awssdk.services.bedrockruntime.model.ContentBlock; -import software.amazon.awssdk.services.bedrockruntime.model.ConversationRole; -import software.amazon.awssdk.services.bedrockruntime.model.ConverseResponse; -import software.amazon.awssdk.services.bedrockruntime.model.Message; - -public class Converse { - - public static String converse() { - - // Create a Bedrock Runtime client in the AWS Region you want to use. - // Replace the DefaultCredentialsProvider with your preferred credentials provider. - var client = BedrockRuntimeClient.builder() - .credentialsProvider(DefaultCredentialsProvider.create()) - .region(Region.US_EAST_1) - .build(); - - // Set the model ID, e.g., Jurassic-2 Mid. - var modelId = "ai21.j2-mid-v1"; - - // Create the input text and embed it in a message object with the user role. - var inputText = "Describe the purpose of a 'hello world' program in one line."; - var message = Message.builder() - .content(ContentBlock.fromText(inputText)) - .role(ConversationRole.USER) - .build(); - - try { - // Send the message with a basic inference configuration. - ConverseResponse response = client.converse(request -> request - .modelId(modelId) - .messages(message) - .inferenceConfig(config -> config - .maxTokens(512) - .temperature(0.5F) - .topP(0.9F))); - - // Retrieve the generated text from Bedrock's response object. - var responseText = response.output().message().content().get(0).text(); - System.out.println(responseText); - - return responseText; - - } catch (SdkClientException e) { - System.err.printf("ERROR: Can't invoke '%s'. Reason: %s", modelId, e.getMessage()); - throw new RuntimeException(e); - } - } - - public static void main(String[] args) { - converse(); - } -} -// snippet-end:[bedrock-runtime.java2.Converse_Ai21LabsJurassic2] diff --git a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/ai21LabsJurassic2/ConverseAsync.java b/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/ai21LabsJurassic2/ConverseAsync.java deleted file mode 100644 index 187f03cfef2..00000000000 --- a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/ai21LabsJurassic2/ConverseAsync.java +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -// SPDX-License-Identifier: Apache-2.0 - -package com.example.bedrockruntime.models.ai21LabsJurassic2; - -// snippet-start:[bedrock-runtime.java2.ConverseAsync_Ai21LabsJurassic2] -// Use the Converse API to send a text message to AI21 Labs Jurassic-2 -// with the async Java client. 
- -import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider; -import software.amazon.awssdk.regions.Region; -import software.amazon.awssdk.services.bedrockruntime.BedrockRuntimeAsyncClient; -import software.amazon.awssdk.services.bedrockruntime.model.ContentBlock; -import software.amazon.awssdk.services.bedrockruntime.model.ConversationRole; -import software.amazon.awssdk.services.bedrockruntime.model.Message; - -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutionException; - -public class ConverseAsync { - - public static String converseAsync() { - - // Create a Bedrock Runtime client in the AWS Region you want to use. - // Replace the DefaultCredentialsProvider with your preferred credentials provider. - var client = BedrockRuntimeAsyncClient.builder() - .credentialsProvider(DefaultCredentialsProvider.create()) - .region(Region.US_EAST_1) - .build(); - - // Set the model ID, e.g., Jurassic-2 Mid. - var modelId = "ai21.j2-mid-v1"; - - // Create the input text and embed it in a message object with the user role. - var inputText = "Describe the purpose of a 'hello world' program in one line."; - var message = Message.builder() - .content(ContentBlock.fromText(inputText)) - .role(ConversationRole.USER) - .build(); - - // Send the message with a basic inference configuration. - var request = client.converse(params -> params - .modelId(modelId) - .messages(message) - .inferenceConfig(config -> config - .maxTokens(512) - .temperature(0.5F) - .topP(0.9F)) - ); - - // Prepare a future object to handle the asynchronous response. - CompletableFuture future = new CompletableFuture<>(); - - // Handle the response or error using the future object. - request.whenComplete((response, error) -> { - if (error == null) { - // Extract the generated text from Bedrock's response object. - String responseText = response.output().message().content().get(0).text(); - future.complete(responseText); - } else { - future.completeExceptionally(error); - } - }); - - try { - // Wait for the future object to complete and retrieve the generated text. - String responseText = future.get(); - System.out.println(responseText); - - return responseText; - - } catch (ExecutionException | InterruptedException e) { - System.err.printf("Can't invoke '%s': %s", modelId, e.getMessage()); - throw new RuntimeException(e); - } - } - - public static void main(String[] args) { - converseAsync(); - } -} -// snippet-end:[bedrock-runtime.java2.ConverseAsync_Ai21LabsJurassic2] diff --git a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/ai21LabsJurassic2/InvokeModel.java b/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/ai21LabsJurassic2/InvokeModel.java deleted file mode 100644 index 634ede3e575..00000000000 --- a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/ai21LabsJurassic2/InvokeModel.java +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -// SPDX-License-Identifier: Apache-2.0 - -package com.example.bedrockruntime.models.ai21LabsJurassic2; - -// snippet-start:[bedrock-runtime.java2.InvokeModel_Ai21LabsJurassic2] -// Use the native inference API to send a text message to AI21 Labs Jurassic-2. 
- -import org.json.JSONObject; -import org.json.JSONPointer; -import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider; -import software.amazon.awssdk.core.SdkBytes; -import software.amazon.awssdk.core.exception.SdkClientException; -import software.amazon.awssdk.regions.Region; -import software.amazon.awssdk.services.bedrockruntime.BedrockRuntimeClient; - -public class InvokeModel { - - public static String invokeModel() { - - // Create a Bedrock Runtime client in the AWS Region you want to use. - // Replace the DefaultCredentialsProvider with your preferred credentials provider. - var client = BedrockRuntimeClient.builder() - .credentialsProvider(DefaultCredentialsProvider.create()) - .region(Region.US_EAST_1) - .build(); - - // Set the model ID, e.g., Jurassic-2 Mid. - var modelId = "ai21.j2-mid-v1"; - - // The InvokeModel API uses the model's native payload. - // Learn more about the available inference parameters and response fields at: - // https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-jurassic2.html - var nativeRequestTemplate = "{ \"prompt\": \"{{prompt}}\" }"; - - // Define the prompt for the model. - var prompt = "Describe the purpose of a 'hello world' program in one line."; - - // Embed the prompt in the model's native request payload. - String nativeRequest = nativeRequestTemplate.replace("{{prompt}}", prompt); - - try { - // Encode and send the request to the Bedrock Runtime. - var response = client.invokeModel(request -> request - .body(SdkBytes.fromUtf8String(nativeRequest)) - .modelId(modelId) - ); - - // Decode the response body. - var responseBody = new JSONObject(response.body().asUtf8String()); - - // Retrieve the generated text from the model's response. - var text = new JSONPointer("/completions/0/data/text").queryFrom(responseBody).toString(); - System.out.println(text); - - return text; - - } catch (SdkClientException e) { - System.err.printf("ERROR: Can't invoke '%s'. Reason: %s", modelId, e.getMessage()); - throw new RuntimeException(e); - } - } - - public static void main(String[] args) { - invokeModel(); - } -} -// snippet-end:[bedrock-runtime.java2.InvokeModel_Ai21LabsJurassic2] diff --git a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazonTitanText/Converse.java b/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazonTitanText/Converse.java deleted file mode 100644 index 89fe1e32c9e..00000000000 --- a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazonTitanText/Converse.java +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -// SPDX-License-Identifier: Apache-2.0 - -package com.example.bedrockruntime.models.amazonTitanText; - - -// snippet-start:[bedrock-runtime.java2.Converse_AmazonTitanText] -// Use the Converse API to send a text message to Amazon Titan Text. 
- -import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider; -import software.amazon.awssdk.core.exception.SdkClientException; -import software.amazon.awssdk.regions.Region; -import software.amazon.awssdk.services.bedrockruntime.BedrockRuntimeClient; -import software.amazon.awssdk.services.bedrockruntime.model.ContentBlock; -import software.amazon.awssdk.services.bedrockruntime.model.ConversationRole; -import software.amazon.awssdk.services.bedrockruntime.model.ConverseResponse; -import software.amazon.awssdk.services.bedrockruntime.model.Message; - -public class Converse { - - public static String converse() { - - // Create a Bedrock Runtime client in the AWS Region you want to use. - // Replace the DefaultCredentialsProvider with your preferred credentials provider. - var client = BedrockRuntimeClient.builder() - .credentialsProvider(DefaultCredentialsProvider.create()) - .region(Region.US_EAST_1) - .build(); - - // Set the model ID, e.g., Titan Text Premier. - var modelId = "amazon.titan-text-premier-v1:0"; - - // Create the input text and embed it in a message object with the user role. - var inputText = "Describe the purpose of a 'hello world' program in one line."; - var message = Message.builder() - .content(ContentBlock.fromText(inputText)) - .role(ConversationRole.USER) - .build(); - - - try { - // Send the message with a basic inference configuration. - ConverseResponse response = client.converse(request -> request - .modelId(modelId) - .messages(message) - .inferenceConfig(config -> config - .maxTokens(512) - .temperature(0.5F) - .topP(0.9F))); - - // Retrieve the generated text from Bedrock's response object. - var responseText = response.output().message().content().get(0).text(); - System.out.println(responseText); - - return responseText; - - } catch (SdkClientException e) { - System.err.printf("ERROR: Can't invoke '%s'. Reason: %s", modelId, e.getMessage()); - throw new RuntimeException(e); - } - } - - public static void main(String[] args) { - converse(); - } -} - -// snippet-end:[bedrock-runtime.java2.Converse_AmazonTitanText] diff --git a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazonTitanText/ConverseAsync.java b/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazonTitanText/ConverseAsync.java deleted file mode 100644 index 8cf144518dd..00000000000 --- a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazonTitanText/ConverseAsync.java +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -// SPDX-License-Identifier: Apache-2.0 - -package com.example.bedrockruntime.models.amazonTitanText; - -// snippet-start:[bedrock-runtime.java2.ConverseAsync_AmazonTitanText] -// Use the Converse API to send a text message to Amazon Titan Text -// with the async Java client. 
- -import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider; -import software.amazon.awssdk.regions.Region; -import software.amazon.awssdk.services.bedrockruntime.BedrockRuntimeAsyncClient; -import software.amazon.awssdk.services.bedrockruntime.model.ContentBlock; -import software.amazon.awssdk.services.bedrockruntime.model.ConversationRole; -import software.amazon.awssdk.services.bedrockruntime.model.Message; - -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutionException; - -public class ConverseAsync { - - public static String converseAsync() { - - // Create a Bedrock Runtime client in the AWS Region you want to use. - // Replace the DefaultCredentialsProvider with your preferred credentials provider. - var client = BedrockRuntimeAsyncClient.builder() - .credentialsProvider(DefaultCredentialsProvider.create()) - .region(Region.US_EAST_1) - .build(); - - // Set the model ID, e.g., Titan Text Premier. - var modelId = "amazon.titan-text-premier-v1:0"; - - // Create the input text and embed it in a message object with the user role. - var inputText = "Describe the purpose of a 'hello world' program in one line."; - var message = Message.builder() - .content(ContentBlock.fromText(inputText)) - .role(ConversationRole.USER) - .build(); - - // Send the message with a basic inference configuration. - var request = client.converse(params -> params - .modelId(modelId) - .messages(message) - .inferenceConfig(config -> config - .maxTokens(512) - .temperature(0.5F) - .topP(0.9F)) - ); - - // Prepare a future object to handle the asynchronous response. - CompletableFuture future = new CompletableFuture<>(); - - // Handle the response or error using the future object. - request.whenComplete((response, error) -> { - if (error == null) { - // Extract the generated text from Bedrock's response object. - String responseText = response.output().message().content().get(0).text(); - future.complete(responseText); - } else { - future.completeExceptionally(error); - } - }); - - try { - // Wait for the future object to complete and retrieve the generated text. - String responseText = future.get(); - System.out.println(responseText); - - return responseText; - - } catch (ExecutionException | InterruptedException e) { - System.err.printf("Can't invoke '%s': %s", modelId, e.getMessage()); - throw new RuntimeException(e); - } - } - - public static void main(String[] args) { - converseAsync(); - } -} -// snippet-end:[bedrock-runtime.java2.ConverseAsync_AmazonTitanText] diff --git a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazonTitanText/ConverseStream.java b/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazonTitanText/ConverseStream.java deleted file mode 100644 index 42b9627ca9a..00000000000 --- a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazonTitanText/ConverseStream.java +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -// SPDX-License-Identifier: Apache-2.0 - -package com.example.bedrockruntime.models.amazonTitanText; - -// snippet-start:[bedrock-runtime.java2.ConverseStream_AmazonTitanText] -// Use the Converse API to send a text message to Amazon Titan Text -// and print the response stream. 
- -import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider; -import software.amazon.awssdk.regions.Region; -import software.amazon.awssdk.services.bedrockruntime.BedrockRuntimeAsyncClient; -import software.amazon.awssdk.services.bedrockruntime.model.ContentBlock; -import software.amazon.awssdk.services.bedrockruntime.model.ConversationRole; -import software.amazon.awssdk.services.bedrockruntime.model.ConverseStreamResponseHandler; -import software.amazon.awssdk.services.bedrockruntime.model.Message; - -import java.util.concurrent.ExecutionException; - -public class ConverseStream { - - public static void main(String[] args) { - - // Create a Bedrock Runtime client in the AWS Region you want to use. - // Replace the DefaultCredentialsProvider with your preferred credentials provider. - var client = BedrockRuntimeAsyncClient.builder() - .credentialsProvider(DefaultCredentialsProvider.create()) - .region(Region.US_EAST_1) - .build(); - - // Set the model ID, e.g., Titan Text Premier. - var modelId = "amazon.titan-text-premier-v1:0"; - - // Create the input text and embed it in a message object with the user role. - var inputText = "Describe the purpose of a 'hello world' program in one line."; - var message = Message.builder() - .content(ContentBlock.fromText(inputText)) - .role(ConversationRole.USER) - .build(); - - // Create a handler to extract and print the response text in real-time. - var responseStreamHandler = ConverseStreamResponseHandler.builder() - .subscriber(ConverseStreamResponseHandler.Visitor.builder() - .onContentBlockDelta(chunk -> { - String responseText = chunk.delta().text(); - System.out.print(responseText); - }).build() - ).onError(err -> - System.err.printf("Can't invoke '%s': %s", modelId, err.getMessage()) - ).build(); - - try { - // Send the message with a basic inference configuration and attach the handler. - client.converseStream(request -> request - .modelId(modelId) - .messages(message) - .inferenceConfig(config -> config - .maxTokens(512) - .temperature(0.5F) - .topP(0.9F) - ), responseStreamHandler).get(); - - } catch (ExecutionException | InterruptedException e) { - System.err.printf("Can't invoke '%s': %s", modelId, e.getCause().getMessage()); - } - } -} -// snippet-end:[bedrock-runtime.java2.ConverseStream_AmazonTitanText] diff --git a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazonTitanText/InvokeModel.java b/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazonTitanText/InvokeModel.java deleted file mode 100644 index 8b8093c81d0..00000000000 --- a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazonTitanText/InvokeModel.java +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -// SPDX-License-Identifier: Apache-2.0 - -package com.example.bedrockruntime.models.amazonTitanText; - -// snippet-start:[bedrock-runtime.java2.InvokeModel_AmazonTitanText] -// Use the native inference API to send a text message to Amazon Titan Text. 
- -import org.json.JSONObject; -import org.json.JSONPointer; -import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider; -import software.amazon.awssdk.core.SdkBytes; -import software.amazon.awssdk.core.exception.SdkClientException; -import software.amazon.awssdk.regions.Region; -import software.amazon.awssdk.services.bedrockruntime.BedrockRuntimeClient; - -public class InvokeModel { - - public static String invokeModel() { - - // Create a Bedrock Runtime client in the AWS Region you want to use. - // Replace the DefaultCredentialsProvider with your preferred credentials provider. - var client = BedrockRuntimeClient.builder() - .credentialsProvider(DefaultCredentialsProvider.create()) - .region(Region.US_EAST_1) - .build(); - - // Set the model ID, e.g., Titan Text Premier. - var modelId = "amazon.titan-text-premier-v1:0"; - - // The InvokeModel API uses the model's native payload. - // Learn more about the available inference parameters and response fields at: - // https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-titan-text.html - var nativeRequestTemplate = "{ \"inputText\": \"{{prompt}}\" }"; - - // Define the prompt for the model. - var prompt = "Describe the purpose of a 'hello world' program in one line."; - - // Embed the prompt in the model's native request payload. - String nativeRequest = nativeRequestTemplate.replace("{{prompt}}", prompt); - - try { - // Encode and send the request to the Bedrock Runtime. - var response = client.invokeModel(request -> request - .body(SdkBytes.fromUtf8String(nativeRequest)) - .modelId(modelId) - ); - - // Decode the response body. - var responseBody = new JSONObject(response.body().asUtf8String()); - - // Retrieve the generated text from the model's response. - var text = new JSONPointer("/results/0/outputText").queryFrom(responseBody).toString(); - System.out.println(text); - - return text; - - } catch (SdkClientException e) { - System.err.printf("ERROR: Can't invoke '%s'. Reason: %s", modelId, e.getMessage()); - throw new RuntimeException(e); - } - } - - public static void main(String[] args) { - invokeModel(); - } -} -// snippet-end:[bedrock-runtime.java2.InvokeModel_AmazonTitanText] diff --git a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazonTitanText/InvokeModelWithResponseStream.java b/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazonTitanText/InvokeModelWithResponseStream.java deleted file mode 100644 index 084c00ef738..00000000000 --- a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazonTitanText/InvokeModelWithResponseStream.java +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -// SPDX-License-Identifier: Apache-2.0 - -package com.example.bedrockruntime.models.amazonTitanText; - -// snippet-start:[bedrock-runtime.java2.InvokeModelWithResponseStream_AmazonTitanText] -// Use the native inference API to send a text message to Amazon Titan Text -// and print the response stream. 
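The streaming variant below delivers that same native payload in chunks. Mirroring the Java handler's StringBuilder, here is a boto3 sketch that accumulates Titan's outputText deltas while printing them in real time; it is based on the Python file removed later in this diff, so the model ID and Region come from there.

```
# Accumulate-and-print loop for the native streaming API, mirroring the
# StringBuilder used by the Java handler below.
import json

import boto3

client = boto3.client("bedrock-runtime", region_name="us-east-1")

request = json.dumps({"inputText": "Describe the purpose of a 'hello world' program in one line."})

streaming_response = client.invoke_model_with_response_stream(
    modelId="amazon.titan-text-premier-v1:0", body=request
)

# Each event carries a JSON chunk; Titan puts partial text under "outputText".
complete_text = []
for event in streaming_response["body"]:
    chunk = json.loads(event["chunk"]["bytes"])
    if "outputText" in chunk:
        print(chunk["outputText"], end="")
        complete_text.append(chunk["outputText"])

full_response = "".join(complete_text)
```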
- -import org.json.JSONObject; -import org.json.JSONPointer; -import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider; -import software.amazon.awssdk.core.SdkBytes; -import software.amazon.awssdk.regions.Region; -import software.amazon.awssdk.services.bedrockruntime.BedrockRuntimeAsyncClient; -import software.amazon.awssdk.services.bedrockruntime.model.InvokeModelWithResponseStreamRequest; -import software.amazon.awssdk.services.bedrockruntime.model.InvokeModelWithResponseStreamResponseHandler; - -import java.util.concurrent.ExecutionException; - -import static software.amazon.awssdk.services.bedrockruntime.model.InvokeModelWithResponseStreamResponseHandler.Visitor; - -public class InvokeModelWithResponseStream { - - public static String invokeModelWithResponseStream() { - - // Create a Bedrock Runtime client in the AWS Region you want to use. - // Replace the DefaultCredentialsProvider with your preferred credentials provider. - var client = BedrockRuntimeAsyncClient.builder() - .credentialsProvider(DefaultCredentialsProvider.create()) - .region(Region.US_EAST_1) - .build(); - - // Set the model ID, e.g., Titan Text Premier. - var modelId = "amazon.titan-text-premier-v1:0"; - - // The InvokeModelWithResponseStream API uses the model's native payload. - // Learn more about the available inference parameters and response fields at: - // https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-titan-text.html - var nativeRequestTemplate = "{ \"inputText\": \"{{prompt}}\" }"; - - // Define the prompt for the model. - var prompt = "Describe the purpose of a 'hello world' program in one line."; - - // Embed the prompt in the model's native request payload. - String nativeRequest = nativeRequestTemplate.replace("{{prompt}}", prompt); - - // Create a request with the model ID and the model's native request payload. - var request = InvokeModelWithResponseStreamRequest.builder() - .body(SdkBytes.fromUtf8String(nativeRequest)) - .modelId(modelId) - .build(); - - // Prepare a buffer to accumulate the generated response text. - var completeResponseTextBuffer = new StringBuilder(); - - // Prepare a handler to extract, accumulate, and print the response text in real-time. - var responseStreamHandler = InvokeModelWithResponseStreamResponseHandler.builder() - .subscriber(Visitor.builder().onChunk(chunk -> { - // Extract and print the text from the model's native response. - var response = new JSONObject(chunk.bytes().asUtf8String()); - var text = new JSONPointer("/outputText").queryFrom(response); - System.out.print(text); - - // Append the text to the response text buffer. - completeResponseTextBuffer.append(text); - }).build()).build(); - - try { - // Send the request and wait for the handler to process the response. - client.invokeModelWithResponseStream(request, responseStreamHandler).get(); - - // Return the complete response text. 
- return completeResponseTextBuffer.toString(); - - } catch (ExecutionException | InterruptedException e) { - System.err.printf("Can't invoke '%s': %s", modelId, e.getCause().getMessage()); - throw new RuntimeException(e); - } - } - - public static void main(String[] args) throws ExecutionException, InterruptedException { - invokeModelWithResponseStream(); - } -} -// snippet-end:[bedrock-runtime.java2.InvokeModelWithResponseStream_AmazonTitanText] diff --git a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazonTitanText/TextScenarios.java b/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazonTitanText/TextScenarios.java deleted file mode 100644 index 18262ba8103..00000000000 --- a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazonTitanText/TextScenarios.java +++ /dev/null @@ -1,149 +0,0 @@ -// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -// SPDX-License-Identifier: Apache-2.0 - -package com.example.bedrockruntime.models.amazonTitanText; - -import com.example.bedrockruntime.libs.demo.DemoRunner; -import com.example.bedrockruntime.libs.demo.scenarios.SystemPromptScenario; -import com.example.bedrockruntime.libs.demo.scenarios.TitanConversationScenario; -import org.json.JSONObject; -import software.amazon.awssdk.core.SdkBytes; -import software.amazon.awssdk.regions.Region; -import software.amazon.awssdk.services.bedrockruntime.BedrockRuntimeClient; - -import java.io.IOException; -import java.util.List; - -/** - * This program demonstrates how to use InvokeModel with Amazon Titan Text models on Amazon Bedrock, - * using Titan's native request/response structure. - *
- * For more examples in different programming languages check out the Amazon Bedrock User Guide at: - * https://docs.aws.amazon.com/bedrock/latest/userguide/service_code_examples.html - */ - -public class TextScenarios { - // snippet-start:[bedrock-runtime.java2.InvokeModel_TitanText_SingleMessage] - - /** - * Invoke Titan Text with a system prompt and additional inference parameters, - * using Titan's native request/response structure. - * - * @param userPrompt - The text prompt to send to the model. - * @param systemPrompt - A system prompt to provide additional context and instructions. - * @return The {@link JSONObject} representing the model's response. - */ - public static JSONObject invokeWithSystemPrompt(String userPrompt, String systemPrompt) { - - // Create a Bedrock Runtime client in the AWS Region of your choice. - var client = BedrockRuntimeClient.builder() - .region(Region.US_EAST_1) - .build(); - - // Set the model ID, e.g., Titan Text Premier. - var modelId = "amazon.titan-text-premier-v1:0"; - - /* Assemble the input text. - * For best results, use the following input text format: - * {{ system instruction }} - * User: {{ user input }} - * Bot: - */ - var inputText = """ - %s - User: %s - Bot: - """.formatted(systemPrompt, userPrompt); - - // Format the request payload using the model's native structure. - var nativeRequest = new JSONObject() - .put("inputText", inputText) - .put("textGenerationConfig", new JSONObject() - .put("maxTokenCount", 512) - .put("temperature", 0.7F) - .put("topP", 0.9F) - ) - .toString(); - - // Encode and send the request. - var response = client.invokeModel(request -> { - request.body(SdkBytes.fromUtf8String(nativeRequest)); - request.modelId(modelId); - }); - - // Decode the native response body. - var nativeResponse = new JSONObject(response.body().asUtf8String()); - - // Extract and print the response text. - var responseText = nativeResponse.getJSONArray("results").getJSONObject(0).getString("outputText"); - System.out.println(responseText); - - // Return the model's native response. - return nativeResponse; - } - // snippet-end:[bedrock-runtime.java2.InvokeModel_TitanText_SingleMessage] - - // snippet-start:[bedrock-runtime.java2.InvokeModel_TitanText_Conversation] - - /** - * Create a chat-like experience with a conversation history, using Titan's native - * request/response structure. - * - * @param prompt - The text prompt to send to the model. - * @param conversation - A String representing previous conversational turns in the format - * User: {{ previous user prompt}} - * Bot: {{ previous model response }} - * ... - * @return The {@link JSONObject} representing the model's response. - */ - public static JSONObject invokeWithConversation(String prompt, String conversation) { - - // Create a Bedrock Runtime client in the AWS Region of your choice. - var client = BedrockRuntimeClient.builder() - .region(Region.US_EAST_1) - .build(); - - // Set the model ID, e.g., Titan Text Premier. - var modelId = "amazon.titan-text-premier-v1:0"; - - /* Append the new prompt to the conversation. - * For best results, use the following text format: - * User: {{ previous user prompt}} - * Bot: {{ previous model response }} - * User: {{ new user prompt }} - * Bot: """ - */ - conversation = conversation + """ - %nUser: %s - Bot: - """.formatted(prompt); - - // Format the request payload using the model's native structure. - var nativeRequest = new JSONObject().put("inputText", conversation); - - // Encode and send the request. 
- var response = client.invokeModel(request -> { - request.body(SdkBytes.fromUtf8String(nativeRequest.toString())); - request.modelId(modelId); - }); - - // Decode the native response body. - var nativeResponse = new JSONObject(response.body().asUtf8String()); - - // Extract and print the response text. - var responseText = nativeResponse.getJSONArray("results").getJSONObject(0).getString("outputText"); - System.out.println(responseText); - - // Return the model's native response. - return nativeResponse; - } - // snippet-end:[bedrock-runtime.java2.InvokeModel_TitanText_Conversation] - - - public static void main(String[] args) throws IOException { - new DemoRunner(List.of( - new SystemPromptScenario(TextScenarios::invokeWithSystemPrompt), - new TitanConversationScenario(TextScenarios::invokeWithConversation) - )).run(); - } -} diff --git a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/cohereCommand/Command_InvokeModel.java b/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/cohereCommand/Command_InvokeModel.java deleted file mode 100644 index c0a2d678d09..00000000000 --- a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/cohereCommand/Command_InvokeModel.java +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -// SPDX-License-Identifier: Apache-2.0 - -package com.example.bedrockruntime.models.cohereCommand; - -// snippet-start:[bedrock-runtime.java2.InvokeModel_CohereCommand] -// Use the native inference API to send a text message to Cohere Command. - -import org.json.JSONObject; -import org.json.JSONPointer; -import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider; -import software.amazon.awssdk.core.SdkBytes; -import software.amazon.awssdk.core.exception.SdkClientException; -import software.amazon.awssdk.regions.Region; -import software.amazon.awssdk.services.bedrockruntime.BedrockRuntimeClient; - -public class Command_InvokeModel { - - public static String invokeModel() { - - // Create a Bedrock Runtime client in the AWS Region you want to use. - // Replace the DefaultCredentialsProvider with your preferred credentials provider. - var client = BedrockRuntimeClient.builder() - .credentialsProvider(DefaultCredentialsProvider.create()) - .region(Region.US_EAST_1) - .build(); - - // Set the model ID, e.g., Command Light. - var modelId = "cohere.command-light-text-v14"; - - // The InvokeModel API uses the model's native payload. - // Learn more about the available inference parameters and response fields at: - // https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-cohere-command.html - var nativeRequestTemplate = "{ \"prompt\": \"{{prompt}}\" }"; - - // Define the prompt for the model. - var prompt = "Describe the purpose of a 'hello world' program in one line."; - - // Embed the prompt in the model's native request payload. - String nativeRequest = nativeRequestTemplate.replace("{{prompt}}", prompt); - - try { - // Encode and send the request to the Bedrock Runtime. - var response = client.invokeModel(request -> request - .body(SdkBytes.fromUtf8String(nativeRequest)) - .modelId(modelId) - ); - - // Decode the response body. - var responseBody = new JSONObject(response.body().asUtf8String()); - - // Retrieve the generated text from the model's response. 
- var text = new JSONPointer("/generations/0/text").queryFrom(responseBody).toString(); - System.out.println(text); - - return text; - - } catch (SdkClientException e) { - System.err.printf("ERROR: Can't invoke '%s'. Reason: %s", modelId, e.getMessage()); - throw new RuntimeException(e); - } - } - - public static void main(String[] args) { - invokeModel(); - } -} -// snippet-end:[bedrock-runtime.java2.InvokeModel_CohereCommand] diff --git a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/cohereCommand/Command_InvokeModelWithResponseStream.java b/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/cohereCommand/Command_InvokeModelWithResponseStream.java deleted file mode 100644 index 1ca32fcf63b..00000000000 --- a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/cohereCommand/Command_InvokeModelWithResponseStream.java +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -// SPDX-License-Identifier: Apache-2.0 - -package com.example.bedrockruntime.models.cohereCommand; - -// snippet-start:[bedrock-runtime.java2.InvokeModelWithResponseStream_CohereCommand] -// Use the native inference API to send a text message to Cohere Command -// and print the response stream. - -import org.json.JSONObject; -import org.json.JSONPointer; -import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider; -import software.amazon.awssdk.core.SdkBytes; -import software.amazon.awssdk.regions.Region; -import software.amazon.awssdk.services.bedrockruntime.BedrockRuntimeAsyncClient; -import software.amazon.awssdk.services.bedrockruntime.model.InvokeModelWithResponseStreamRequest; -import software.amazon.awssdk.services.bedrockruntime.model.InvokeModelWithResponseStreamResponseHandler; - -import java.util.concurrent.ExecutionException; - -import static software.amazon.awssdk.services.bedrockruntime.model.InvokeModelWithResponseStreamResponseHandler.Visitor; - -public class Command_InvokeModelWithResponseStream { - - public static String invokeModelWithResponseStream() { - - // Create a Bedrock Runtime client in the AWS Region you want to use. - // Replace the DefaultCredentialsProvider with your preferred credentials provider. - var client = BedrockRuntimeAsyncClient.builder() - .credentialsProvider(DefaultCredentialsProvider.create()) - .region(Region.US_EAST_1) - .build(); - - // Set the model ID, e.g., Command Light. - var modelId = "cohere.command-light-text-v14"; - - // The InvokeModelWithResponseStream API uses the model's native payload. - // Learn more about the available inference parameters and response fields at: - // https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-cohere-command.html - var nativeRequestTemplate = "{ \"prompt\": \"{{prompt}}\" }"; - - // Define the prompt for the model. - var prompt = "Describe the purpose of a 'hello world' program in one line."; - - // Embed the prompt in the model's native request payload. - String nativeRequest = nativeRequestTemplate.replace("{{prompt}}", prompt); - - // Create a request with the model ID and the model's native request payload. - var request = InvokeModelWithResponseStreamRequest.builder() - .body(SdkBytes.fromUtf8String(nativeRequest)) - .modelId(modelId) - .build(); - - // Prepare a buffer to accumulate the generated response text. 
-        var completeResponseTextBuffer = new StringBuilder();
-
-        // Prepare a handler to extract, accumulate, and print the response text in real-time.
-        var responseStreamHandler = InvokeModelWithResponseStreamResponseHandler.builder()
-                .subscriber(Visitor.builder().onChunk(chunk -> {
-                    // Extract and print the text from the model's native response.
-                    var response = new JSONObject(chunk.bytes().asUtf8String());
-                    var text = new JSONPointer("/generations/0/text").queryFrom(response);
-                    System.out.print(text);
-
-                    // Append the text to the response text buffer.
-                    completeResponseTextBuffer.append(text);
-                }).build()).build();
-
-        try {
-            // Send the request and wait for the handler to process the response.
-            client.invokeModelWithResponseStream(request, responseStreamHandler).get();
-
-            // Return the complete response text.
-            return completeResponseTextBuffer.toString();
-
-        } catch (ExecutionException | InterruptedException e) {
-            System.err.printf("Can't invoke '%s': %s", modelId, e.getCause().getMessage());
-            throw new RuntimeException(e);
-        }
-    }
-
-    public static void main(String[] args) throws ExecutionException, InterruptedException {
-        invokeModelWithResponseStream();
-    }
-}
-// snippet-end:[bedrock-runtime.java2.InvokeModelWithResponseStream_CohereCommand]
diff --git a/javav2/example_code/bedrock-runtime/src/test/java/actions/TestConverse.java b/javav2/example_code/bedrock-runtime/src/test/java/actions/TestConverse.java
index ff0a87d27ff..aa6db3bfadb 100644
--- a/javav2/example_code/bedrock-runtime/src/test/java/actions/TestConverse.java
+++ b/javav2/example_code/bedrock-runtime/src/test/java/actions/TestConverse.java
@@ -10,10 +10,8 @@ protected Stream<ModelTest> modelProvider() {
         return Stream.of(
                 new ModelTest("Claude", com.example.bedrockruntime.models.anthropicClaude.Converse::converse),
                 new ModelTest("CohereCommand", com.example.bedrockruntime.models.cohereCommand.Converse::converse),
-                new ModelTest("Jurassic2", com.example.bedrockruntime.models.ai21LabsJurassic2.Converse::converse),
                 new ModelTest("Mistral", com.example.bedrockruntime.models.mistral.Converse::converse),
-                new ModelTest("NovaText", com.example.bedrockruntime.models.amazon.nova.text.Converse::converse),
-                new ModelTest("TitanText", com.example.bedrockruntime.models.amazonTitanText.Converse::converse)
+                new ModelTest("NovaText", com.example.bedrockruntime.models.amazon.nova.text.Converse::converse)
         );
     }
 }
\ No newline at end of file
diff --git a/javav2/example_code/bedrock-runtime/src/test/java/actions/TestConverseAsync.java b/javav2/example_code/bedrock-runtime/src/test/java/actions/TestConverseAsync.java
index 89193026375..f5c45d53f95 100644
--- a/javav2/example_code/bedrock-runtime/src/test/java/actions/TestConverseAsync.java
+++ b/javav2/example_code/bedrock-runtime/src/test/java/actions/TestConverseAsync.java
@@ -9,9 +9,7 @@ public class TestConverseAsync extends AbstractModelTest {
     protected Stream<ModelTest> modelProvider() {
         return Stream.of(
                 new ModelTest("Claude", com.example.bedrockruntime.models.anthropicClaude.ConverseAsync::converseAsync),
-                new ModelTest("Jurassic2", com.example.bedrockruntime.models.ai21LabsJurassic2.ConverseAsync::converseAsync),
                 new ModelTest("NovaText", com.example.bedrockruntime.models.amazon.nova.text.ConverseAsync::converseAsync),
-                new ModelTest("TitanText", com.example.bedrockruntime.models.amazonTitanText.ConverseAsync::converseAsync),
                 new ModelTest("CohereCommand", com.example.bedrockruntime.models.cohereCommand.ConverseAsync::converseAsync),
                 new ModelTest("Mistral",
                         com.example.bedrockruntime.models.mistral.ConverseAsync::converseAsync)
         );
     }
diff --git a/javav2/example_code/bedrock-runtime/src/test/java/actions/TestImageGeneration.java b/javav2/example_code/bedrock-runtime/src/test/java/actions/TestImageGeneration.java
index b45139b9ef4..03b868828f3 100644
--- a/javav2/example_code/bedrock-runtime/src/test/java/actions/TestImageGeneration.java
+++ b/javav2/example_code/bedrock-runtime/src/test/java/actions/TestImageGeneration.java
@@ -9,8 +9,7 @@ protected Stream<ModelTest> modelProvider() {
         return Stream.of(
                 new ModelTest("NovaCanvas", com.example.bedrockruntime.models.amazon.nova.canvas.InvokeModel::invokeModel),
-                new ModelTest("StableDiffusion", com.example.bedrockruntime.models.stabilityAi.InvokeModel::invokeModel),
-                new ModelTest("TitanImage", com.example.bedrockruntime.models.amazonTitanText.InvokeModel::invokeModel)
+                new ModelTest("StableDiffusion", com.example.bedrockruntime.models.stabilityAi.InvokeModel::invokeModel)
         );
     }
 }
\ No newline at end of file
diff --git a/javav2/example_code/bedrock-runtime/src/test/java/actions/TestInvokeModel.java b/javav2/example_code/bedrock-runtime/src/test/java/actions/TestInvokeModel.java
index b86bdfe18b8..5b6cfc818be 100644
--- a/javav2/example_code/bedrock-runtime/src/test/java/actions/TestInvokeModel.java
+++ b/javav2/example_code/bedrock-runtime/src/test/java/actions/TestInvokeModel.java
@@ -9,13 +9,9 @@ protected Stream<ModelTest> modelProvider() {
         return Stream.of(
                 new ModelTest("Claude", com.example.bedrockruntime.models.anthropicClaude.InvokeModel::invokeModel),
-                new ModelTest("CohereCommand", com.example.bedrockruntime.models.cohereCommand.Command_InvokeModel::invokeModel),
                 new ModelTest("CohereCommandR", com.example.bedrockruntime.models.cohereCommand.Command_R_InvokeModel::invokeModel),
-                new ModelTest("Jurassic2", com.example.bedrockruntime.models.ai21LabsJurassic2.InvokeModel::invokeModel),
                 new ModelTest("Llama", com.example.bedrockruntime.models.metaLlama.Llama3_InvokeModel::invokeModel),
-                new ModelTest("Mistral", com.example.bedrockruntime.models.mistral.InvokeModel::invokeModel),
-                new ModelTest("TitanText", com.example.bedrockruntime.models.amazonTitanText.InvokeModel::invokeModel),
-                new ModelTest("TitanTextEmbeddings", com.example.bedrockruntime.models.amazonTitanText.InvokeModel::invokeModel)
+                new ModelTest("Mistral", com.example.bedrockruntime.models.mistral.InvokeModel::invokeModel)
         );
     }
 }
\ No newline at end of file
diff --git a/javav2/example_code/bedrock-runtime/src/test/java/actions/TestInvokeModelWithResponseStream.java b/javav2/example_code/bedrock-runtime/src/test/java/actions/TestInvokeModelWithResponseStream.java
index 2f8bd717488..2cfd76fd440 100644
--- a/javav2/example_code/bedrock-runtime/src/test/java/actions/TestInvokeModelWithResponseStream.java
+++ b/javav2/example_code/bedrock-runtime/src/test/java/actions/TestInvokeModelWithResponseStream.java
@@ -9,11 +9,9 @@ protected Stream<ModelTest> modelProvider() {
         return Stream.of(
                 new ModelTest("Claude", com.example.bedrockruntime.models.anthropicClaude.InvokeModelWithResponseStream::invokeModelWithResponseStream),
-                new ModelTest("CohereCommand", com.example.bedrockruntime.models.cohereCommand.Command_InvokeModelWithResponseStream::invokeModelWithResponseStream),
                 new ModelTest("CohereCommandR",
com.example.bedrockruntime.models.cohereCommand.Command_R_InvokeModelWithResponseStream::invokeModelWithResponseStream), new ModelTest("Llama", com.example.bedrockruntime.models.metaLlama.Llama3_InvokeModelWithResponseStream::invokeModelWithResponseStream), - new ModelTest("Mistral", com.example.bedrockruntime.models.mistral.InvokeModelWithResponseStream::invokeModelWithResponseStream), - new ModelTest("TitanText", com.example.bedrockruntime.models.amazonTitanText.InvokeModelWithResponseStream::invokeModelWithResponseStream) + new ModelTest("Mistral", com.example.bedrockruntime.models.mistral.InvokeModelWithResponseStream::invokeModelWithResponseStream) ); } } diff --git a/javav2/example_code/bedrock-runtime/src/test/java/scenarios/TestAmazonTitanTextScenarios.java b/javav2/example_code/bedrock-runtime/src/test/java/scenarios/TestAmazonTitanTextScenarios.java deleted file mode 100644 index 2787bf67c9a..00000000000 --- a/javav2/example_code/bedrock-runtime/src/test/java/scenarios/TestAmazonTitanTextScenarios.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -// SPDX-License-Identifier: Apache-2.0 - -package scenarios; - -import org.junit.jupiter.api.Test; - -import static com.example.bedrockruntime.models.amazonTitanText.TextScenarios.invokeWithConversation; -import static com.example.bedrockruntime.models.amazonTitanText.TextScenarios.invokeWithSystemPrompt; -import static org.junit.jupiter.api.Assertions.assertFalse; - -class TestAmazonTitanTextScenarios { - - @Test - void invokeWithSystemPromptScenario() { - var inputText = "Hello, how are you today?"; - var systemPrompt = "Only respond with 'OK'"; - var response = invokeWithSystemPrompt(inputText, systemPrompt); - assertFalse(response.getJSONArray("results").isEmpty()); - } - - @Test - void invokeWithConversationScenario() { - var conversation = """ - User: Hello, how are you today? - Bot: OK - """; - - var inputText = "What was my first question? Respond with 'Your question was \"[QUESTION]\n'"; - - var response = invokeWithConversation(inputText, conversation); - assertFalse(response.getJSONArray("results").isEmpty()); - } -} diff --git a/kotlin/services/bedrock-runtime/README.md b/kotlin/services/bedrock-runtime/README.md index 420906a1ddd..8f021db4244 100644 --- a/kotlin/services/bedrock-runtime/README.md +++ b/kotlin/services/bedrock-runtime/README.md @@ -35,10 +35,6 @@ For prerequisites, see the [README](../../README.md#Prerequisites) in the `kotli - [Converse](src/main/kotlin/com/example/bedrockruntime/models/amazon/nova/text/Converse.kt#L6) - [ConverseStream](src/main/kotlin/com/example/bedrockruntime/models/amazon/nova/text/ConverseStream.kt#L6) -### Amazon Titan Text - -- [InvokeModel](src/main/kotlin/com/example/bedrockruntime/models/amazon/titan/text/InvokeModel.kt#L6) - diff --git a/kotlin/services/bedrock-runtime/src/main/kotlin/com/example/bedrockruntime/models/amazon/titan/text/InvokeModel.kt b/kotlin/services/bedrock-runtime/src/main/kotlin/com/example/bedrockruntime/models/amazon/titan/text/InvokeModel.kt deleted file mode 100644 index d5ae1b07398..00000000000 --- a/kotlin/services/bedrock-runtime/src/main/kotlin/com/example/bedrockruntime/models/amazon/titan/text/InvokeModel.kt +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
-// SPDX-License-Identifier: Apache-2.0
-
-package com.example.bedrockruntime.models.amazon.titan.text
-
-// snippet-start:[bedrock-runtime.kotlin.InvokeModel_AmazonTitanText]
-
-import aws.sdk.kotlin.services.bedrockruntime.BedrockRuntimeClient
-import aws.sdk.kotlin.services.bedrockruntime.model.InvokeModelRequest
-import kotlinx.serialization.Serializable
-import kotlinx.serialization.json.Json
-
-/**
- * This example demonstrates how to use the Amazon Titan foundation models to generate text.
- * It shows how to:
- * - Set up the Amazon Bedrock runtime client
- * - Create a request payload
- * - Configure and send a request
- * - Process the response
- */
-suspend fun main() {
-    invokeModel().also { println(it) }
-}
-
-// Data class for parsing the model's response
-@Serializable
-private data class BedrockResponse(val results: List<Result>) {
-    @Serializable
-    data class Result(
-        val outputText: String,
-    )
-}
-
-// Initialize JSON parser with relaxed configuration
-private val json = Json { ignoreUnknownKeys = true }
-
-suspend fun invokeModel(): String {
-    // Create and configure the Bedrock runtime client
-    BedrockRuntimeClient { region = "us-east-1" }.use { client ->
-
-        // Specify the model ID. For the latest available models, see:
-        // https://docs.aws.amazon.com/bedrock/latest/userguide/models-supported.html
-        val modelId = "amazon.titan-text-lite-v1"
-
-        // Create the request payload with optional configuration parameters
-        // For detailed parameter descriptions, see:
-        // https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-titan-text.html
-        val prompt = "Describe the purpose of a 'hello world' program in one line."
-        val request = """
-            {
-                "inputText": "$prompt",
-                "textGenerationConfig": {
-                    "maxTokenCount": 500,
-                    "temperature": 0.5
-                }
-            }
-        """.trimIndent()
-
-        // Send the request and process the model's response
-        runCatching {
-            // Send the request to the model
-            val response = client.invokeModel(
-                InvokeModelRequest {
-                    this.modelId = modelId
-                    body = request.toByteArray()
-                },
-            )
-
-            // Convert the response bytes to a JSON string
-            val jsonResponse = response.body.toString(Charsets.UTF_8)
-
-            // Parse the JSON into a Kotlin object
-            val parsedResponse = json.decodeFromString<BedrockResponse>(jsonResponse)
-
-            // Extract and return the generated text
-            return parsedResponse.results.firstOrNull()!!.outputText
-        }.getOrElse { error ->
-            error.message?.let { msg ->
-                System.err.println("ERROR: Can't invoke '$modelId'. Reason: $msg")
-            }
-            throw RuntimeException("Failed to generate text with model $modelId", error)
-        }
-    }
-}
-
-// snippet-end:[bedrock-runtime.kotlin.InvokeModel_AmazonTitanText]
diff --git a/kotlin/services/bedrock-runtime/src/test/kotlin/models/TestInvokeModel.kt b/kotlin/services/bedrock-runtime/src/test/kotlin/models/TestInvokeModel.kt
index 2c14291e782..adec12e30bb 100644
--- a/kotlin/services/bedrock-runtime/src/test/kotlin/models/TestInvokeModel.kt
+++ b/kotlin/services/bedrock-runtime/src/test/kotlin/models/TestInvokeModel.kt
@@ -15,7 +15,6 @@ class TestInvokeModel : AbstractModelTest() {
      * and return text or byte[] responses.
     */
    override fun modelProvider(): Stream<ModelTest> = listOf(
-        ModelTest("Amazon Titan Text") { com.example.bedrockruntime.models.amazon.titan.text.invokeModel() },
         ModelTest("Amazon Nova Canvas") { com.example.bedrockruntime.models.amazon.nova.canvas.invokeModel() },
     ).stream()
 }
diff --git a/python/example_code/bedrock-runtime/README.md b/python/example_code/bedrock-runtime/README.md
index 18c7a076e87..7a74274ab38 100644
--- a/python/example_code/bedrock-runtime/README.md
+++ b/python/example_code/bedrock-runtime/README.md
@@ -38,11 +38,6 @@ python -m pip install -r requirements.txt
 > see [Model access](https://docs.aws.amazon.com/bedrock/latest/userguide/model-access.html).
 >
-
-### Get started
-
-- [Hello Amazon Bedrock Runtime](hello/hello_bedrock_runtime_invoke.py#L5) (`InvokeModel`)
-
 ### Scenarios
 
 Code examples that show you how to accomplish a specific task by calling multiple
@@ -69,13 +64,6 @@ functions within the same service.
 
 - [InvokeModel](models/amazon_titan_image_generator/invoke_model.py#L4)
 
-### Amazon Titan Text
-
-- [Converse](models/amazon_titan_text/converse.py#L4)
-- [ConverseStream](models/amazon_titan_text/converse_stream.py#L4)
-- [InvokeModel](models/amazon_titan_text/invoke_model.py#L4)
-- [InvokeModelWithResponseStream](models/amazon_titan_text/invoke_model_with_response_stream.py#L4)
-
 ### Amazon Titan Text Embeddings
 
 - [InvokeModel](models/amazon_titan_text_embeddings/invoke_model.py#L4)
@@ -95,9 +83,7 @@ functions within the same service.
 - [ConverseStream](models/cohere_command/converse_stream.py#L4)
 - [Document understanding](models/cohere_command/document_understanding.py#L4)
 - [InvokeModel: Command R and R+](models/cohere_command/command_r_invoke_model.py#L4)
-- [InvokeModel: Command and Command Light](models/cohere_command/command_invoke_model.py#L4)
 - [InvokeModelWithResponseStream: Command R and R+](models/cohere_command/command_r_invoke_model_with_response_stream.py#L4)
-- [InvokeModelWithResponseStream: Command and Command Light](models/cohere_command/command_invoke_model_with_response_stream.py#L4)
 - [Scenario: Tool use with the Converse API](cross-model-scenarios/tool_use_demo/tool_use_demo.py)
 
 ### DeepSeek
@@ -150,13 +136,6 @@ Mistral AI.
 
-
-#### Hello Amazon Bedrock Runtime
-
-This example shows you how to get started using Amazon Bedrock Runtime.
-
-```
-python hello/hello_bedrock_runtime_invoke.py
-```
 
 #### Create and invoke a managed prompt
diff --git a/python/example_code/bedrock-runtime/hello/hello_bedrock_runtime_converse.py b/python/example_code/bedrock-runtime/hello/hello_bedrock_runtime_converse.py
deleted file mode 100644
index e6139428532..00000000000
--- a/python/example_code/bedrock-runtime/hello/hello_bedrock_runtime_converse.py
+++ /dev/null
@@ -1,83 +0,0 @@
-# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
-# SPDX-License-Identifier: Apache-2.0
-
-
-# snippet-start:[bedrock-runtime.example_code.hello_bedrock_converse.complete]
-
-"""
-Uses the Amazon Bedrock runtime client Converse operation to send a user message to a model.
-"""
-import logging
-import boto3
-
-from botocore.exceptions import ClientError
-
-
-logging.basicConfig(level=logging.INFO)
-logger = logging.getLogger(__name__)
-
-
-def converse(brt, model_id, user_message):
-    """
-    Uses the Converse operation to send a user message to the supplied model.
-    :param brt: A Bedrock Runtime boto3 client.
-    :param model_id: The model ID for the model that you want to use.
-    :param user_message: The user message that you want to send to the model.
-
-    :return: The text response from the model.
-    """
-
-    # Format the request payload using the model's native structure.
-    conversation = [
-        {
-            "role": "user",
-            "content": [{"text": user_message}],
-        }
-    ]
-
-    try:
-        # Send the message to the model, using a basic inference configuration.
-        response = brt.converse(
-            modelId=model_id,
-            messages=conversation,
-            inferenceConfig={"maxTokens": 512, "temperature": 0.5, "topP": 0.9},
-        )
-
-        # Extract and return the response text.
-        response_text = response["output"]["message"]["content"][0]["text"]
-        return response_text
-
-    except (ClientError, Exception) as e:
-        print(f"ERROR: Can't invoke '{model_id}'. Reason: {e}")
-        raise
-
-
-def main():
-    """Entry point for the example. Uses the AWS SDK for Python (Boto3)
-    to create an Amazon Bedrock runtime client. Then sends a user message to a model
-    in the region set in the caller's profile and credentials.
-    """
-
-    # Create an Amazon Bedrock Runtime client.
-    brt = boto3.client("bedrock-runtime")
-
-    # Set the model ID, e.g., Amazon Titan Text G1 - Express.
-    model_id = "amazon.titan-text-express-v1"
-
-    # Define the message for the model.
-    message = "Describe the purpose of a 'hello world' program in one line."
-
-    # Send the message to the model.
-    response = converse(brt, model_id, message)
-
-    print(f"Response: {response}")
-
-    logger.info("Done.")
-
-
-if __name__ == "__main__":
-    main()
-
-    # snippet-end:[bedrock-runtime.example_code.hello_bedrock_converse.complete]
-
-
diff --git a/python/example_code/bedrock-runtime/hello/hello_bedrock_runtime_invoke.py b/python/example_code/bedrock-runtime/hello/hello_bedrock_runtime_invoke.py
deleted file mode 100644
index d86268579dc..00000000000
--- a/python/example_code/bedrock-runtime/hello/hello_bedrock_runtime_invoke.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
-# SPDX-License-Identifier: Apache-2.0
-
-
-# snippet-start:[bedrock-runtime.example_code.hello_bedrock_invoke.complete]
-
-"""
-Uses the Amazon Bedrock runtime client InvokeModel operation to send a prompt to a model.
-"""
-import logging
-import json
-import boto3
-
-
-from botocore.exceptions import ClientError
-
-
-logging.basicConfig(level=logging.INFO)
-logger = logging.getLogger(__name__)
-
-
-def invoke_model(brt, model_id, prompt):
-    """
-    Invokes the specified model with the supplied prompt.
-    :param brt: A Bedrock Runtime boto3 client.
-    :param model_id: The model ID for the model that you want to use.
-    :param prompt: The prompt that you want to send to the model.
-
-    :return: The text response from the model.
-    """
-
-    # Format the request payload using the model's native structure.
-    native_request = {
-        "inputText": prompt,
-        "textGenerationConfig": {
-            "maxTokenCount": 512,
-            "temperature": 0.5,
-            "topP": 0.9
-        }
-    }
-
-    # Convert the native request to JSON.
-    request = json.dumps(native_request)
-
-    try:
-        # Invoke the model with the request.
-        response = brt.invoke_model(modelId=model_id, body=request)
-
-        # Decode the response body.
-        model_response = json.loads(response["body"].read())
-
-        # Extract and return the response text.
-        response_text = model_response["results"][0]["outputText"]
-        return response_text
-
-    except (ClientError, Exception) as e:
-        print(f"ERROR: Can't invoke '{model_id}'. Reason: {e}")
-        raise
-
-
-def main():
-    """Entry point for the example. Uses the AWS SDK for Python (Boto3)
-    to create an Amazon Bedrock runtime client. Then sends a prompt to a model
-    in the region set in the caller's profile and credentials.
-    """
-
-    # Create an Amazon Bedrock Runtime client.
-    brt = boto3.client("bedrock-runtime")
-
-    # Set the model ID, e.g., Amazon Titan Text G1 - Express.
-    model_id = "amazon.titan-text-express-v1"
-
-    # Define the prompt for the model.
-    prompt = "Describe the purpose of a 'hello world' program in one line."
-
-    # Send the prompt to the model.
-    response = invoke_model(brt, model_id, prompt)
-
-    print(f"Response: {response}")
-
-    logger.info("Done.")
-
-
-if __name__ == "__main__":
-    main()
-
-    # snippet-end:[bedrock-runtime.example_code.hello_bedrock_invoke.complete]
diff --git a/python/example_code/bedrock-runtime/models/amazon_titan_text/converse.py b/python/example_code/bedrock-runtime/models/amazon_titan_text/converse.py
deleted file mode 100644
index 7728c244eb5..00000000000
--- a/python/example_code/bedrock-runtime/models/amazon_titan_text/converse.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
-# SPDX-License-Identifier: Apache-2.0
-
-# snippet-start:[python.example_code.bedrock-runtime.Converse_AmazonTitanText]
-# Use the Conversation API to send a text message to Amazon Titan Text.
-
-import boto3
-from botocore.exceptions import ClientError
-
-# Create a Bedrock Runtime client in the AWS Region you want to use.
-client = boto3.client("bedrock-runtime", region_name="us-east-1")
-
-# Set the model ID, e.g., Titan Text Premier.
-model_id = "amazon.titan-text-premier-v1:0"
-
-# Start a conversation with the user message.
-user_message = "Describe the purpose of a 'hello world' program in one line."
-conversation = [
-    {
-        "role": "user",
-        "content": [{"text": user_message}],
-    }
-]
-
-try:
-    # Send the message to the model, using a basic inference configuration.
-    response = client.converse(
-        modelId=model_id,
-        messages=conversation,
-        inferenceConfig={"maxTokens": 512, "temperature": 0.5, "topP": 0.9},
-    )
-
-    # Extract and print the response text.
-    response_text = response["output"]["message"]["content"][0]["text"]
-    print(response_text)
-
-except (ClientError, Exception) as e:
-    print(f"ERROR: Can't invoke '{model_id}'. Reason: {e}")
-    exit(1)
-
-# snippet-end:[python.example_code.bedrock-runtime.Converse_AmazonTitanText]
diff --git a/python/example_code/bedrock-runtime/models/amazon_titan_text/converse_stream.py b/python/example_code/bedrock-runtime/models/amazon_titan_text/converse_stream.py
deleted file mode 100644
index e396d8f350f..00000000000
--- a/python/example_code/bedrock-runtime/models/amazon_titan_text/converse_stream.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
-# SPDX-License-Identifier: Apache-2.0
-
-# snippet-start:[python.example_code.bedrock-runtime.ConverseStream_AmazonTitanText]
-# Use the Conversation API to send a text message to Amazon Titan Text
-# and print the response stream.
-
-import boto3
-from botocore.exceptions import ClientError
-
-# Create a Bedrock Runtime client in the AWS Region you want to use.
-client = boto3.client("bedrock-runtime", region_name="us-east-1")
-
-# Set the model ID, e.g., Titan Text Premier.
-model_id = "amazon.titan-text-premier-v1:0"
-
-# Start a conversation with the user message.
-user_message = "Describe the purpose of a 'hello world' program in one line."
-conversation = [ - { - "role": "user", - "content": [{"text": user_message}], - } -] - -try: - # Send the message to the model, using a basic inference configuration. - streaming_response = client.converse_stream( - modelId=model_id, - messages=conversation, - inferenceConfig={"maxTokens": 512, "temperature": 0.5, "topP": 0.9}, - ) - - # Extract and print the streamed response text in real-time. - for chunk in streaming_response["stream"]: - if "contentBlockDelta" in chunk: - text = chunk["contentBlockDelta"]["delta"]["text"] - print(text, end="") - -except (ClientError, Exception) as e: - print(f"ERROR: Can't invoke '{model_id}'. Reason: {e}") - exit(1) - -# snippet-end:[python.example_code.bedrock-runtime.ConverseStream_AmazonTitanText] diff --git a/python/example_code/bedrock-runtime/models/amazon_titan_text/invoke_model.py b/python/example_code/bedrock-runtime/models/amazon_titan_text/invoke_model.py deleted file mode 100644 index 88c5b009524..00000000000 --- a/python/example_code/bedrock-runtime/models/amazon_titan_text/invoke_model.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -# SPDX-License-Identifier: Apache-2.0 - -# snippet-start:[python.example_code.bedrock-runtime.InvokeModel_TitanText] -# Use the native inference API to send a text message to Amazon Titan Text. - -import boto3 -import json - -from botocore.exceptions import ClientError - -# Create a Bedrock Runtime client in the AWS Region of your choice. -client = boto3.client("bedrock-runtime", region_name="us-east-1") - -# Set the model ID, e.g., Titan Text Premier. -model_id = "amazon.titan-text-premier-v1:0" - -# Define the prompt for the model. -prompt = "Describe the purpose of a 'hello world' program in one line." - -# Format the request payload using the model's native structure. -native_request = { - "inputText": prompt, - "textGenerationConfig": { - "maxTokenCount": 512, - "temperature": 0.5, - }, -} - -# Convert the native request to JSON. -request = json.dumps(native_request) - -try: - # Invoke the model with the request. - response = client.invoke_model(modelId=model_id, body=request) - -except (ClientError, Exception) as e: - print(f"ERROR: Can't invoke '{model_id}'. Reason: {e}") - exit(1) - -# Decode the response body. -model_response = json.loads(response["body"].read()) - -# Extract and print the response text. -response_text = model_response["results"][0]["outputText"] -print(response_text) - -# snippet-end:[python.example_code.bedrock-runtime.InvokeModel_TitanText] diff --git a/python/example_code/bedrock-runtime/models/amazon_titan_text/invoke_model_with_response_stream.py b/python/example_code/bedrock-runtime/models/amazon_titan_text/invoke_model_with_response_stream.py deleted file mode 100644 index d8a92bbc182..00000000000 --- a/python/example_code/bedrock-runtime/models/amazon_titan_text/invoke_model_with_response_stream.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -# SPDX-License-Identifier: Apache-2.0 - -# snippet-start:[python.example_code.bedrock-runtime.InvokeModelWithResponseStream_TitanText] -# Use the native inference API to send a text message to Amazon Titan Text -# and print the response stream. - -import boto3 -import json - -# Create a Bedrock Runtime client in the AWS Region of your choice. -client = boto3.client("bedrock-runtime", region_name="us-east-1") - -# Set the model ID, e.g., Titan Text Premier. 
-model_id = "amazon.titan-text-premier-v1:0" - -# Define the prompt for the model. -prompt = "Describe the purpose of a 'hello world' program in one line." - -# Format the request payload using the model's native structure. -native_request = { - "inputText": prompt, - "textGenerationConfig": { - "maxTokenCount": 512, - "temperature": 0.5, - }, -} - -# Convert the native request to JSON. -request = json.dumps(native_request) - -# Invoke the model with the request. -streaming_response = client.invoke_model_with_response_stream( - modelId=model_id, body=request -) - -# Extract and print the response text in real-time. -for event in streaming_response["body"]: - chunk = json.loads(event["chunk"]["bytes"]) - if "outputText" in chunk: - print(chunk["outputText"], end="") - -# snippet-end:[python.example_code.bedrock-runtime.InvokeModelWithResponseStream_TitanText] diff --git a/python/example_code/bedrock-runtime/models/cohere_command/command_invoke_model.py b/python/example_code/bedrock-runtime/models/cohere_command/command_invoke_model.py deleted file mode 100644 index 4b3c6b51a6d..00000000000 --- a/python/example_code/bedrock-runtime/models/cohere_command/command_invoke_model.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -# SPDX-License-Identifier: Apache-2.0 - -# snippet-start:[python.example_code.bedrock-runtime.InvokeModel_CohereCommand] -# Use the native inference API to send a text message to Cohere Command. - -import boto3 -import json - -from botocore.exceptions import ClientError - -# Create a Bedrock Runtime client in the AWS Region of your choice. -client = boto3.client("bedrock-runtime", region_name="us-east-1") - -# Set the model ID, e.g., Command Light. -model_id = "cohere.command-light-text-v14" - -# Define the prompt for the model. -prompt = "Describe the purpose of a 'hello world' program in one line." - -# Format the request payload using the model's native structure. -native_request = { - "prompt": prompt, - "max_tokens": 512, - "temperature": 0.5, -} - -# Convert the native request to JSON. -request = json.dumps(native_request) - -try: - # Invoke the model with the request. - response = client.invoke_model(modelId=model_id, body=request) - -except (ClientError, Exception) as e: - print(f"ERROR: Can't invoke '{model_id}'. Reason: {e}") - exit(1) - -# Decode the response body. -model_response = json.loads(response["body"].read()) - -# Extract and print the response text. -response_text = model_response["generations"][0]["text"] -print(response_text) - -# snippet-end:[python.example_code.bedrock-runtime.InvokeModel_CohereCommand] diff --git a/python/example_code/bedrock-runtime/models/cohere_command/command_invoke_model_with_response_stream.py b/python/example_code/bedrock-runtime/models/cohere_command/command_invoke_model_with_response_stream.py deleted file mode 100644 index de28ffed1bb..00000000000 --- a/python/example_code/bedrock-runtime/models/cohere_command/command_invoke_model_with_response_stream.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -# SPDX-License-Identifier: Apache-2.0 - -# snippet-start:[python.example_code.bedrock-runtime.InvokeModelWithResponseStream_CohereCommand] -# Use the native inference API to send a text message to Cohere Command -# and print the response stream. - -import boto3 -import json - -from botocore.exceptions import ClientError - -# Create a Bedrock Runtime client in the AWS Region of your choice. 
-client = boto3.client("bedrock-runtime", region_name="us-east-1") - -# Set the model ID, e.g., Command Light. -model_id = "cohere.command-light-text-v14" - -# Define the prompt for the model. -prompt = "Describe the purpose of a 'hello world' program in one line." - -# Format the request payload using the model's native structure. -native_request = { - "prompt": prompt, - "max_tokens": 512, - "temperature": 0.5, -} - -# Convert the native request to JSON. -request = json.dumps(native_request) - -try: - # Invoke the model with the request. - streaming_response = client.invoke_model_with_response_stream( - modelId=model_id, body=request - ) - - # Extract and print the response text in real-time. - for event in streaming_response["body"]: - chunk = json.loads(event["chunk"]["bytes"]) - if "generations" in chunk: - print(chunk["generations"][0]["text"], end="") - -except (ClientError, Exception) as e: - print(f"ERROR: Can't invoke '{model_id}'. Reason: {e}") - exit(1) - -# snippet-end:[python.example_code.bedrock-runtime.InvokeModelWithResponseStream_CohereCommand] diff --git a/python/example_code/bedrock-runtime/requirements.txt b/python/example_code/bedrock-runtime/requirements.txt index 8ff36118066..cab38d23bac 100644 --- a/python/example_code/bedrock-runtime/requirements.txt +++ b/python/example_code/bedrock-runtime/requirements.txt @@ -1,12 +1,13 @@ -boto3==1.38.15 -botocore==1.38.15 +boto3==1.40.42 +botocore==1.40.42 colorama==0.4.6 iniconfig==2.0.0 jmespath==1.0.1 packaging==24.2 pluggy==1.5.0 -pytest==8.3.4 +Pygments==2.19.2 +pytest==8.4.2 python-dateutil==2.9.0.post0 -s3transfer==0.12.0 +s3transfer==0.14.0 six==1.17.0 urllib3==2.5.0 diff --git a/python/example_code/bedrock-runtime/test/test_converse.py b/python/example_code/bedrock-runtime/test/test_converse.py index d781c7e6f2d..eb107c0a48e 100644 --- a/python/example_code/bedrock-runtime/test/test_converse.py +++ b/python/example_code/bedrock-runtime/test/test_converse.py @@ -1,14 +1,12 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 +import pytest import subprocess import sys -import pytest - files_under_test = [ "models/amazon_nova/amazon_nova_text/converse.py", - "models/amazon_titan_text/converse.py", "models/anthropic_claude/converse.py", "models/cohere_command/converse.py", "models/meta_llama/converse.py", diff --git a/python/example_code/bedrock-runtime/test/test_hello_bedrock_runtime.py b/python/example_code/bedrock-runtime/test/test_hello_bedrock_runtime.py deleted file mode 100644 index f69602ba462..00000000000 --- a/python/example_code/bedrock-runtime/test/test_hello_bedrock_runtime.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
-# SPDX-License-Identifier: Apache-2.0 - -import pytest -import subprocess -import sys - -files_under_test = [ - # Text models - "hello/hello_bedrock_runtime_invoke.py" -] - - -@pytest.mark.integ -@pytest.mark.parametrize("file", files_under_test) -def test_hello_bedrock(file): - result = subprocess.run( - [sys.executable, file], - capture_output=True, - text=True, - ) - assert result.stdout != "" - assert result.returncode == 0 diff --git a/python/example_code/bedrock-runtime/test/test_hello_bedrock_runtime_invoke.py b/python/example_code/bedrock-runtime/test/test_hello_bedrock_runtime_invoke.py deleted file mode 100644 index 8de7d56b722..00000000000 --- a/python/example_code/bedrock-runtime/test/test_hello_bedrock_runtime_invoke.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -# SPDX-License-Identifier: Apache-2.0 - -import pytest -import subprocess -import sys - -files_under_test = [ - # Text models - "hello/hello_bedrock_runtime_invoke.py", - "hello/hello_bedrock_runtime_converse.py" -] - - -@pytest.mark.integ -@pytest.mark.parametrize("file", files_under_test) -def test_hello_bedrock(file): - result = subprocess.run( - [sys.executable, file], - capture_output=True, - text=True, - ) - assert result.stdout != "" - assert result.returncode == 0 diff --git a/python/example_code/bedrock-runtime/test/test_invoke_model.py b/python/example_code/bedrock-runtime/test/test_invoke_model.py index f9c28e1c65e..734332d8694 100644 --- a/python/example_code/bedrock-runtime/test/test_invoke_model.py +++ b/python/example_code/bedrock-runtime/test/test_invoke_model.py @@ -1,17 +1,15 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 +import pytest import subprocess import sys - -import pytest +import time files_under_test = [ # Text models "models/amazon_nova/amazon_nova_canvas/invoke_model.py", - "models/amazon_titan_text/invoke_model.py", "models/anthropic_claude/invoke_model.py", - "models/cohere_command/command_invoke_model.py", "models/cohere_command/command_r_invoke_model.py", "models/meta_llama/llama3_invoke_model.py", "models/mistral_ai/invoke_model.py", @@ -33,3 +31,4 @@ def test_invoke_model(file): ) assert result.stdout != "" assert result.returncode == 0 + time.sleep(1) # Wait a second to prevent throttling
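A closing note on the last hunk: the invoke-model test now pauses for a fixed second after each run to stay under Bedrock's request rate. If throttling still causes flaky failures, retrying the subprocess with exponential backoff is a common alternative; the helper below is only a sketch, and its attempt count and delays are illustrative assumptions rather than part of this PR.

```
# Sketch: retry a throttled example script with exponential backoff instead
# of a fixed sleep. Attempt count and delays are illustrative assumptions.
import subprocess
import sys
import time


def run_with_backoff(file, attempts=3):
    """Run the example, pausing 1s, 2s, 4s between failed attempts."""
    for attempt in range(attempts):
        result = subprocess.run(
            [sys.executable, file], capture_output=True, text=True
        )
        if result.returncode == 0:
            return result
        time.sleep(2**attempt)
    return result
```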