Building an AI Dev Space With a Little Assistance from Aspire
public class chatDialog
{
    public string? systemMessage;
    public string? inputText;
    public string? outputText;
    public int maxTokens = 400;
    public float temperature = 0.7f;
}
// This view is hardwired to use the simulator so we can adjust accordingly
private string oaiEndpoint = string.Empty;
private string oaiDeploymentName = string.Empty;
private string oaiKey = string.Empty;
public static chatDialog dialog = new();

protected override void OnInitialized()
{
    oaiEndpoint = "http://localhost:8000";
    oaiDeploymentName = Configuration["oaiDeploymentName"] ?? "gpt-4o";
    oaiKey = Configuration["oaiKey"] ?? string.Empty;
    dialog = new()
    {
        systemMessage = "I am a hiking enthusiast named Forest who helps people discover hikes in their area. If no area is specified, I will default to near Rainier National Park. I will then provide three suggestions for nearby hikes that vary in length. I will also share an interesting fact about the local nature on the hikes when making a recommendation.",
        inputText = "Can you recommend some good hikes in the Redmond area?",
        outputText = string.Empty,
        temperature = 0.7f,
        maxTokens = 400,
    };
}
protected async Task chat()
{
    AzureOpenAIClient client = new AzureOpenAIClient(new Uri(oaiEndpoint), new System.ClientModel.ApiKeyCredential(oaiKey));
    OpenAI.Chat.ChatClient chatClient = client.GetChatClient(oaiDeploymentName);
    OpenAI.Chat.ChatCompletionOptions chatCompletionOptions = new()
    {
        MaxOutputTokenCount = dialog.maxTokens,
        Temperature = dialog.temperature,
    };
    OpenAI.Chat.ChatCompletion completion = await chatClient.CompleteChatAsync(
        [
            new OpenAI.Chat.SystemChatMessage(dialog.systemMessage),
            new OpenAI.Chat.UserChatMessage(dialog.inputText),
        ], chatCompletionOptions);
    var response = $"Response:\r\n{completion.Content[0].Text}\r\nOutput tokens: {completion.Usage.OutputTokenCount}\r\nTotal tokens: {completion.Usage.TotalTokenCount}";
    dialog.outputText = response;
}
}
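The AppHost snippets that follow reference oaiSimulatorPort and localOaiKey without showing their declarations. A minimal sketch of plausible declarations, assuming the simulator listens on its default port and the key is a throwaway local-only value:

// Assumed AppHost values; only the names come from the snippets below.
var oaiSimulatorPort = 8000;        // container port the API simulator listens on
var localOaiKey = "local-dev-key";  // throwaway key shared between the simulator and the app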
builder.AddDockerfile("aoai-simulator-generate", "../AOAI_API_Simulator") // resource name assumed; the original excerpt omits the AddDockerfile call
    .WithHttpEndpoint(port: 8000, targetPort: oaiSimulatorPort)
    .WithEnvironment("SIMULATOR_MODE", "generate")
    .WithEnvironment("SIMULATOR_API_KEY", localOaiKey)
    .ExcludeFromManifest();
var azaoai = builder.AddBicepTemplate( // variable name taken from the GetOutput calls below; the AddBicepTemplate call itself is assumed
        name: "AI",
        bicepFile: "../infra/ai.bicep")
    .WithParameter(AzureBicepResource.KnownParameters.KeyVaultName);
var cloudEndpoint = azaoai.GetOutput("endpoint");
var accountName = azaoai.GetOutput("accountName");
var cloudKey = azaoai.GetSecretOutput("accountKey");
var cloudDeployment = "gpt-4o";
builder.AddDockerfile("aoai-simulator-record", "../AOAI_API_Simulator")
    .WithBindMount("recordings", "/app/.recording")
    .WithHttpEndpoint(port: 8001, targetPort: oaiSimulatorPort)
    .WithEnvironment("SIMULATOR_API_KEY", localOaiKey)
    .WithEnvironment("SIMULATOR_MODE", "record")
    .WithEnvironment("AZURE_OPENAI_ENDPOINT", cloudEndpoint)
    .WithEnvironment("AZURE_OPENAI_KEY", cloudKey)
    .WithEnvironment("AZURE_OPENAI_DEPLOYMENT", cloudDeployment)
    .WithEnvironment("AZURE_OPENAI_EMBEDDING_DEPLOYMENT", cloudDeployment)
    .ExcludeFromManifest();
builder.AddDockerfile("aoai-simulator-replay", "../AOAI_API_Simulator") // resource name and AddDockerfile call assumed; the excerpt starts mid-chain
    .WithBindMount("recordings", "/app/.recording")
    .WithHttpEndpoint(port: 8002, targetPort: oaiSimulatorPort)
    .WithEnvironment("SIMULATOR_API_KEY", localOaiKey)
    .WithEnvironment("SIMULATOR_MODE", "replay")
    .ExcludeFromManifest(); // assumed to close the chain, matching the other simulator resources
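Because each simulator instance is exposed on a different host port (8000 for generate, 8001 for record, 8002 for replay), the Blazor page shown earlier could read its endpoint from configuration instead of hardcoding port 8000. A minimal sketch, assuming a configuration key named oaiEndpoint:

// In OnInitialized(); "oaiEndpoint" as a configuration key is an assumption.
oaiEndpoint = Configuration["oaiEndpoint"] ?? "http://localhost:8000";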
try
{
    OpenAI.Chat.ChatCompletion completion = await chatClient.CompleteChatAsync(
        [
            new OpenAI.Chat.SystemChatMessage(dialog.systemMessage),
            new OpenAI.Chat.UserChatMessage(dialog.inputText),
        ], chatCompletionOptions);
    var response = $"Response:\r\n{completion.Content[0].Text}\r\nOutput tokens: {completion.Usage.OutputTokenCount}\r\nTotal tokens: {completion.Usage.TotalTokenCount}";
    dialog.outputText = response;
}
catch (Exception)
{
    dialog.outputText = "I don't know what you are talking about.";
}
var apimai = builder.AddBicepTemplate( // variable name taken from the GetOutput calls below; the AddBicepTemplate call itself is assumed
        name: "APIM",
        bicepFile: "../infra/apim.bicep")
    .WithParameter(AzureBicepResource.KnownParameters.KeyVaultName)
    .WithParameter("apimResourceName", "apim")
    .WithParameter("apimSku", "Basicv2")
    .WithParameter("openAIAccountName", accountName);
var apimEndpoint = apimai.GetOutput("apimResourceGatewayURL");
var apimKey = apimai.GetSecretOutput("subscriptionKey");
builder.AddDockerfile("aoai-simulator-record", "../AOAI_API_Simulator")
    .WithBindMount("recordings", "/app/.recording")
    .WithHttpEndpoint(port: 8001, targetPort: oaiSimulatorPort)
    .WithEnvironment("SIMULATOR_API_KEY", localOaiKey)
    .WithEnvironment("SIMULATOR_MODE", "record")
    .WithEnvironment("AZURE_OPENAI_ENDPOINT", apimEndpoint)
    .WithEnvironment("AZURE_OPENAI_KEY", apimKey)
    .WithEnvironment("AZURE_OPENAI_DEPLOYMENT", cloudDeployment)
    .WithEnvironment("AZURE_OPENAI_EMBEDDING_DEPLOYMENT", cloudDeployment)
    .ExcludeFromManifest();
{
  "Logging": {
    "LogLevel": {
      "Default": "Information",
      "Microsoft.AspNetCore": "Warning",
      "Aspire.Hosting.Dcp": "Warning"
    }
  },
  "Parameters": {
    "TenantId": "guid",
    "ClientId": "guid",
    "ClientSecret": "secret"
  },
  "Azure": {
    "SubscriptionId": "<Your subscription id>",
    "AllowResourceGroupCreation": true,
    "ResourceGroup": "<Valid resource group name>",
    "Location": "<Valid Azure location>"
  }
}
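The values under "Parameters" can be surfaced to the AppHost as Aspire parameters. A minimal sketch, assuming the parameter names above are the ones the Bicep templates expect:

// Names match the "Parameters" section above; wiring them into a resource is omitted here.
var tenantId = builder.AddParameter("TenantId");
var clientId = builder.AddParameter("ClientId");
var clientSecret = builder.AddParameter("ClientSecret", secret: true);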