-
Notifications
You must be signed in to change notification settings - Fork 1.9k
Expand file tree
/
Copy pathProgram.cs
More file actions
85 lines (69 loc) · 2.83 KB
/
Program.cs
File metadata and controls
85 lines (69 loc) · 2.83 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
using Microsoft.Extensions.Configuration;
using OpenCvSharp;
using Azure.AI.OpenAI;
using System.ClientModel;
using Microsoft.Extensions.AI;
using System;
// define video file and data folder
string videoFile = VideosHelper.GetVideoFilePathFireTruck();
string dataFolderPath = VideosHelper.CreateDataFolder();

//////////////////////////////////////////////////////
/// VIDEO ANALYSIS using OpenCV
//////////////////////////////////////////////////////

// Extract all frames from the video into memory.
// NOTE(review): the Mat instances in `frames` are kept alive for the sampling
// loop below and are never disposed — acceptable for a short-lived sample
// app, but a native-memory leak in a long-running process.
// `using var` guarantees the capture handle is released even if reading throws.
using var video = new VideoCapture(videoFile);
var frames = new List<Mat>();
while (video.IsOpened())
{
    var frame = new Mat();
    if (!video.Read(frame) || frame.Empty())
    {
        // end of stream (or read failure): dispose the unused Mat we just allocated
        frame.Dispose();
        break;
    }
    // resize the frame to half of its size if the width is greater than 800
    if (frame.Width > 800)
    {
        Cv2.Resize(frame, frame, new OpenCvSharp.Size(frame.Width / 2, frame.Height / 2));
    }
    frames.Add(frame);
}
video.Release();
// Load configuration from .NET user secrets and build the Azure OpenAI chat client.
var config = new ConfigurationBuilder().AddUserSecrets<Program>().Build();
var endpoint = config["AZURE_OPENAI_ENDPOINT"];
var modelId = config["AzureOpenAI:Deployment"];
// create client using API Keys
var apiKey = config["AZURE_OPENAI_APIKEY"];

// Fail fast with actionable messages when a required secret is missing;
// otherwise Uri/ApiKeyCredential throw opaque ArgumentExceptions.
if (string.IsNullOrWhiteSpace(endpoint))
    throw new InvalidOperationException("Missing user secret 'AZURE_OPENAI_ENDPOINT'.");
if (string.IsNullOrWhiteSpace(modelId))
    throw new InvalidOperationException("Missing user secret 'AzureOpenAI:Deployment'.");
if (string.IsNullOrWhiteSpace(apiKey))
    throw new InvalidOperationException("Missing user secret 'AZURE_OPENAI_APIKEY'.");

var credential = new ApiKeyCredential(apiKey);
IChatClient chatClient =
    new AzureOpenAIClient(new Uri(endpoint), credential)
        .GetChatClient(modelId)
        .AsIChatClient();
// Seed the conversation: system prompt plus the instruction to describe the video.
List<ChatMessage> messages =
[
    new ChatMessage(ChatRole.System, PromptsHelper.SystemPrompt),
    new ChatMessage(ChatRole.User, PromptsHelper.UserPromptDescribeVideo),
];

// Sample frames evenly: taking 1 frame every `step` yields roughly
// PromptsHelper.NumberOfFrames frames for the analysis.
int step = (int)Math.Ceiling((double)frames.Count / PromptsHelper.NumberOfFrames);
// show in the console the total number of frames and the step that needs to be taken to get the desired number of frames for the video analysis
Console.WriteLine($"Video total number of frames: {frames.Count}");
Console.WriteLine($"Get 1 frame every [{step}] to get the [{PromptsHelper.NumberOfFrames}] frames for analysis");

// Ensure the "frames" output folder exists — Cv2.ImWrite does not create
// missing directories, it just fails to write the file.
string framesFolder = Path.Combine(dataFolderPath, "frames");
Directory.CreateDirectory(framesFolder);

for (int i = 0; i < frames.Count; i += step)
{
    // save the sampled frame to the "data/frames" folder as a JPEG
    string framePath = Path.Combine(framesFolder, $"{i}.jpg");
    Cv2.ImWrite(framePath, frames[i]);
    // read the image bytes, create a new image content part and add it to the messages
    AIContent aic = new DataContent(File.ReadAllBytes(framePath), "image/jpeg");
    var message = new ChatMessage(ChatRole.User, [aic]);
    messages.Add(message);
}
// send the messages to the chat client and stream the reply token-by-token
var completionUpdates = chatClient.GetStreamingResponseAsync(messages);

// print the assistant responses as they arrive
Console.WriteLine($"\n[Azure OpenAI Services response using Microsoft Extensions for AI]: ");
await foreach (var completionUpdate in completionUpdates)
{
    // ChatResponseUpdate.Text concatenates every text part in the update;
    // indexing Contents[0] would silently drop any additional parts and
    // relies on AIContent.ToString for rendering.
    Console.Write(completionUpdate.Text);
}