diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 000000000..40f8f2492
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,6 @@
+[submodule "webrtc-c/webrtc-and-directstream/amazon-kinesis-video-streams-producer-sdk-cpp"]
+	path = webrtc-c/webrtc-and-directstream/amazon-kinesis-video-streams-producer-sdk-cpp
+	url = https://github.com/awslabs/amazon-kinesis-video-streams-producer-sdk-cpp.git
+[submodule "webrtc-c/webrtc-and-directstream/amazon-kinesis-video-streams-webrtc-sdk-c"]
+	path = webrtc-c/webrtc-and-directstream/amazon-kinesis-video-streams-webrtc-sdk-c
+	url = https://github.com/awslabs/amazon-kinesis-video-streams-webrtc-sdk-c.git
diff --git a/webrtc-c/webrtc-and-directstream/.gitmodules b/webrtc-c/webrtc-and-directstream/.gitmodules
new file mode 100644
index 000000000..cbafca7a9
--- /dev/null
+++ b/webrtc-c/webrtc-and-directstream/.gitmodules
@@ -0,0 +1,3 @@
+[submodule "amazon-kinesis-video-streams-producer-sdk-cpp"]
+	path = amazon-kinesis-video-streams-producer-sdk-cpp
+	url = https://github.com/awslabs/amazon-kinesis-video-streams-producer-sdk-cpp.git
diff --git a/webrtc-c/webrtc-and-directstream/README.md b/webrtc-c/webrtc-and-directstream/README.md
new file mode 100644
index 000000000..7f1e81f0a
--- /dev/null
+++ b/webrtc-c/webrtc-and-directstream/README.md
@@ -0,0 +1,94 @@
+# KVS WebRTC and Direct Stream Simultaneously
+## Overview
+
+Added a GStreamer app sample (`kvsWebRTCAndDirectStream.c`) to support streaming video to KVS with the WebRTC SDK (for real-time use) and the Stream Producer SDK (for video ingestion) simultaneously, from a single camera source.
+
+This sample is currently only tested on a Raspberry Pi 3B equipped with a USB camera.
+
+## Prerequisites
+
+- AWS Account with configured:
+  - Kinesis Video Streams
+  - IAM role with appropriate permissions
+- **Physical webcam required**: The application uses `autovideosrc` and will not fall back to `videotestsrc` if no camera is connected
+
+## Installation and Configuration
+
+1.
+Clone this repository to your Raspberry Pi with submodules:
+   ```
+   git clone --recurse-submodules https://github.com/aws-samples/amazon-kinesis-video-streams-demos.git
+   ```
+
+   Or if already cloned, initialize submodules:
+   ```
+   git submodule update --init --recursive
+   ```
+
+2. Copy the sample file to the WebRTC SDK samples directory:
+   ```
+   cp kvsWebRTCAndDirectStream.c amazon-kinesis-video-streams-webrtc-sdk-c/samples/
+   ```
+
+3. Configure your AWS credentials on the Raspberry Pi
+
+### Building the AWS SDKs
+
+Refer to each SDK's main repository for the details of how to build. The `-DALIGNED_MEMORY_MODEL=ON` flag might need to be used on Raspberry Pi 4B and 5.
+
+- https://github.com/awslabs/amazon-kinesis-video-streams-webrtc-sdk-c
+- https://github.com/awslabs/amazon-kinesis-video-streams-producer-sdk-cpp
+
+#### Building the KVS Producer SDK
+
+```
+cd amazon-kinesis-video-streams-producer-sdk-cpp
+mkdir -p build
+cd build
+cmake .. -DBUILD_DEPENDENCIES=OFF
+make -j
+```
+
+#### Building the KVS WebRTC SDK
+
+```
+cd amazon-kinesis-video-streams-webrtc-sdk-c
+mkdir -p build
+cd build
+cmake .. -DBUILD_DEPENDENCIES=OFF
+make -j
+```
+
+## Usage
+
+1. Configure your AWS credentials on the Raspberry Pi
+
+2. Set the GStreamer plugin path to the directory where you built the KVS Producer SDK, e.g., `export GST_PLUGIN_PATH=/home/pi/amazon-kinesis-video-streams-producer-sdk-cpp/build`
+
+3.
+Go to `amazon-kinesis-video-streams-webrtc-sdk-c/build/` and run `./samples/kvsWebRTCAndDirectStream <channel-name> <stream-name>`
+
+## Screenshots
+
+The following screenshots demonstrate WebRTC and Stream Producer running simultaneously:
+
+### WebRTC Signaling Channel Viewer
+![WebRTC Console View](screenshots/Screenshot-WebRTC.png)
+*Real-time WebRTC streaming through the signaling channel*
+
+### KVS Stream Viewer
+![KVS Stream Console View](screenshots/Screenshot-KVSStream.png)
+*Near real-time video ingestion to Kinesis Video Stream*
+
+### Application Logs
+
+#### WebRTC Logs
+![WebRTC Logs](screenshots/Screenshot-Log-WebRTC.png)
+*WebRTC connection and streaming logs*
+
+#### KVS Stream Logs
+![KVS Stream Logs 1](screenshots/Screenshot-Log-KVSStream-1.png)
+![KVS Stream Logs 2](screenshots/Screenshot-Log-KVSStream-2.png)
+*Kinesis Video Stream ingestion logs showing successful simultaneous operation*
+
+## License
+
+This project uses components from AWS Kinesis Video Streams SDKs which are licensed under the Apache License 2.0.
\ No newline at end of file diff --git a/webrtc-c/webrtc-and-directstream/amazon-kinesis-video-streams-producer-sdk-cpp b/webrtc-c/webrtc-and-directstream/amazon-kinesis-video-streams-producer-sdk-cpp new file mode 160000 index 000000000..777159852 --- /dev/null +++ b/webrtc-c/webrtc-and-directstream/amazon-kinesis-video-streams-producer-sdk-cpp @@ -0,0 +1 @@ +Subproject commit 7771598527883545b2161b56a78160429def4fc1 diff --git a/webrtc-c/webrtc-and-directstream/amazon-kinesis-video-streams-webrtc-sdk-c b/webrtc-c/webrtc-and-directstream/amazon-kinesis-video-streams-webrtc-sdk-c new file mode 160000 index 000000000..cf817bc5d --- /dev/null +++ b/webrtc-c/webrtc-and-directstream/amazon-kinesis-video-streams-webrtc-sdk-c @@ -0,0 +1 @@ +Subproject commit cf817bc5d18f3e4bd499c6b0f9a68c6f4d7e01de diff --git a/webrtc-c/webrtc-and-directstream/kvsWebRTCAndDirectStream.c b/webrtc-c/webrtc-and-directstream/kvsWebRTCAndDirectStream.c new file mode 100644 index 000000000..6b8c388b4 --- /dev/null +++ b/webrtc-c/webrtc-and-directstream/kvsWebRTCAndDirectStream.c @@ -0,0 +1,402 @@ +#include +#include +#include +#include +#include +#include +#include +#include "Samples.h" + +extern PSampleConfiguration gSampleConfiguration; + +// Global variables +GstElement *pipeline = NULL; +GMainLoop *main_loop = NULL; +volatile ATOMIC_BOOL terminate = FALSE; + +// Signal handler for graceful termination +void signal_handler(int signum) { + printf("Caught signal %d, terminating...\n", signum); + ATOMIC_STORE_BOOL(&terminate, TRUE); + if (main_loop != NULL) { + g_main_loop_quit(main_loop); + } +} + +// Custom message handler to handle ICE candidates properly +VOID customMessageReceived(UINT64 customData, PReceivedSignalingMessage pReceivedSignalingMessage) { + // Forward all messages directly to the standard handler without filtering + // This ensures all ICE candidates are processed properly + signalingMessageReceived(customData, pReceivedSignalingMessage); +} + +// Bus watch callback +static 
gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data) { + switch (GST_MESSAGE_TYPE(msg)) { + case GST_MESSAGE_ERROR: { + GError *err; + gchar *debug; + gst_message_parse_error(msg, &err, &debug); + g_print("Error: %s\n", err->message); + g_error_free(err); + g_free(debug); + g_main_loop_quit(main_loop); + break; + } + case GST_MESSAGE_EOS: + g_print("End of stream\n"); + g_main_loop_quit(main_loop); + break; + default: + break; + } + return TRUE; +} + +// WebRTC callback for new video samples +GstFlowReturn on_new_webrtc_sample(GstElement *sink, gpointer data) { + GstSample *sample; + GstBuffer *buffer; + GstMapInfo map; + Frame frame; + STATUS status; + PSampleConfiguration pSampleConfiguration = (PSampleConfiguration) data; + PSampleStreamingSession pSampleStreamingSession = NULL; + PRtcRtpTransceiver pRtcRtpTransceiver = NULL; + UINT32 i; + gboolean isDroppable, delta; + + // Pull the sample from the sink + sample = gst_app_sink_pull_sample(GST_APP_SINK(sink)); + if (sample == NULL) { + return GST_FLOW_ERROR; + } + + buffer = gst_sample_get_buffer(sample); + isDroppable = GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_CORRUPTED) || + GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DECODE_ONLY) || + (GST_BUFFER_FLAGS(buffer) == GST_BUFFER_FLAG_DISCONT) || + (GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DISCONT) && + GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT)) || + !GST_BUFFER_PTS_IS_VALID(buffer); + + if (!isDroppable) { + delta = GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT); + frame.flags = delta ? 
FRAME_FLAG_NONE : FRAME_FLAG_KEY_FRAME; + + if (gst_buffer_map(buffer, &map, GST_MAP_READ)) { + frame.trackId = DEFAULT_VIDEO_TRACK_ID; + frame.duration = 0; + frame.version = FRAME_CURRENT_VERSION; + frame.size = (UINT32) map.size; + frame.frameData = (PBYTE) map.data; + + MUTEX_LOCK(pSampleConfiguration->streamingSessionListReadLock); + for (i = 0; i < pSampleConfiguration->streamingSessionCount; ++i) { + pSampleStreamingSession = pSampleConfiguration->sampleStreamingSessionList[i]; + frame.index = (UINT32) ATOMIC_INCREMENT(&pSampleStreamingSession->frameIndex); + + pRtcRtpTransceiver = pSampleStreamingSession->pVideoRtcRtpTransceiver; + frame.presentationTs = pSampleStreamingSession->videoTimestamp; + frame.decodingTs = frame.presentationTs; + pSampleStreamingSession->videoTimestamp += SAMPLE_VIDEO_FRAME_DURATION; + + status = writeFrame(pRtcRtpTransceiver, &frame); + if (status != STATUS_SRTP_NOT_READY_YET && status != STATUS_SUCCESS) { + printf("writeFrame failed with 0x%08x\n", status); + } else if (status == STATUS_SUCCESS && pSampleStreamingSession->firstFrame) { + pSampleStreamingSession->firstFrame = FALSE; + printf("First frame sent successfully\n"); + } + } + MUTEX_UNLOCK(pSampleConfiguration->streamingSessionListReadLock); + + gst_buffer_unmap(buffer, &map); + } + } + + gst_sample_unref(sample); + return GST_FLOW_OK; +} + +int main(int argc, char *argv[]) { + STATUS retStatus = STATUS_SUCCESS; + PSampleConfiguration pSampleConfiguration = NULL; + SignalingClientMetrics signalingClientMetrics; + PCHAR pChannelName; + PCHAR pStreamName; + GstElement *source, *capsfilter, *convert, *tee; + + // WebRTC branch elements + GstElement *webrtc_queue, *webrtc_convert, *webrtc_encoder, *webrtc_caps, *webrtc_appsink; + + // KVS branch elements + GstElement *kvs_queue, *kvs_convert, *kvs_encoder, *kvs_parse, *kvs_caps, *kvs_sink; + + GstBus *bus; + GstPad *tee_webrtc_pad = NULL, *tee_kvs_pad = NULL; + GstPad *webrtc_queue_pad, *kvs_queue_pad; + GstCaps *caps, 
*webrtc_h264_caps, *kvs_h264_caps; + + // Initialize GStreamer first + gst_init(&argc, &argv); + + // Check arguments + if (argc < 3) { + g_printerr("Usage: %s \n", argv[0]); + return 1; + } + + pChannelName = argv[1]; + pStreamName = argv[2]; + + // Set up signal handlers + signal(SIGINT, signal_handler); + signal(SIGTERM, signal_handler); + + // Create the main loop + main_loop = g_main_loop_new(NULL, FALSE); + + // Create WebRTC sample configuration - video only, no audio + retStatus = createSampleConfiguration(pChannelName, SIGNALING_CHANNEL_ROLE_TYPE_MASTER, TRUE, FALSE, TRUE, &pSampleConfiguration); + if (retStatus != STATUS_SUCCESS) { + goto CleanUp; + } + + // Enable media storage + pSampleConfiguration->channelInfo.useMediaStorage = TRUE; + + // Configure for better connectivity + pSampleConfiguration->trickleIce = TRUE; + pSampleConfiguration->useTurn = TRUE; + + // Set media type to video only + pSampleConfiguration->mediaType = SAMPLE_STREAMING_VIDEO_ONLY; + + // Set custom data for callbacks + pSampleConfiguration->customData = (UINT64) pSampleConfiguration; + + // Set KVS log configuration path + setenv("KVS_LOG_CONFIG", "../../../kvs_log_configuration", 1); + + // Initialize KVS WebRTC + retStatus = initKvsWebRtc(); + if (retStatus != STATUS_SUCCESS) { + printf("initKvsWebRtc(): operation returned status code: 0x%08x\n", retStatus); + goto CleanUp; + } + printf("KVS WebRTC initialization completed successfully\n"); + + // Set up signaling client with custom message handler + pSampleConfiguration->signalingClientCallbacks.messageReceivedFn = customMessageReceived; + pSampleConfiguration->signalingClientCallbacks.customData = (UINT64) pSampleConfiguration; + + // Generate a unique client ID with timestamp to avoid conflicts between multiple devices + char uniqueClientId[MAX_SIGNALING_CLIENT_ID_LEN]; + snprintf(uniqueClientId, MAX_SIGNALING_CLIENT_ID_LEN, "%s-%llu", SAMPLE_MASTER_CLIENT_ID, (unsigned long long)time(NULL)); + 
strcpy(pSampleConfiguration->clientInfo.clientId, uniqueClientId); + + printf("Using unique client ID: %s\n", uniqueClientId); + + retStatus = createSignalingClientSync(&pSampleConfiguration->clientInfo, &pSampleConfiguration->channelInfo, + &pSampleConfiguration->signalingClientCallbacks, pSampleConfiguration->pCredentialProvider, + &pSampleConfiguration->signalingClientHandle); + if (retStatus != STATUS_SUCCESS) { + printf("createSignalingClientSync(): operation returned status code: 0x%08x\n", retStatus); + goto CleanUp; + } + printf("Signaling client created successfully\n"); + + // Enable the processing of the messages + retStatus = signalingClientFetchSync(pSampleConfiguration->signalingClientHandle); + if (retStatus != STATUS_SUCCESS) { + printf("signalingClientFetchSync(): operation returned status code: 0x%08x\n", retStatus); + goto CleanUp; + } + + retStatus = signalingClientConnectSync(pSampleConfiguration->signalingClientHandle); + if (retStatus != STATUS_SUCCESS) { + printf("signalingClientConnectSync(): operation returned status code: 0x%08x\n", retStatus); + goto CleanUp; + } + printf("Signaling client connection to socket established\n"); + + // Join storage session if media storage is enabled + if (pSampleConfiguration->channelInfo.useMediaStorage == TRUE) { + printf("Invoking join storage session\n"); + retStatus = signalingClientJoinSessionSync(pSampleConfiguration->signalingClientHandle); + if (retStatus != STATUS_SUCCESS) { + goto CleanUp; + } + printf("Joined storage session successfully\n"); + } + + gSampleConfiguration = pSampleConfiguration; + + // Create GStreamer pipeline with tee for both WebRTC and KVS direct streaming + pipeline = gst_pipeline_new("dual-streaming-pipeline"); + + // Create the common elements + source = gst_element_factory_make("autovideosrc", "source"); + capsfilter = gst_element_factory_make("capsfilter", "capsfilter"); + convert = gst_element_factory_make("videoconvert", "convert"); + tee = 
gst_element_factory_make("tee", "tee"); + + // WebRTC branch elements + webrtc_queue = gst_element_factory_make("queue", "webrtc_queue"); + webrtc_convert = gst_element_factory_make("videoconvert", "webrtc_convert"); + webrtc_encoder = gst_element_factory_make("x264enc", "webrtc_encoder"); + webrtc_caps = gst_element_factory_make("capsfilter", "webrtc_caps"); + webrtc_appsink = gst_element_factory_make("appsink", "webrtc_appsink"); + + // KVS branch elements + kvs_queue = gst_element_factory_make("queue", "kvs_queue"); + kvs_convert = gst_element_factory_make("videoconvert", "kvs_convert"); + kvs_encoder = gst_element_factory_make("x264enc", "kvs_encoder"); + kvs_parse = gst_element_factory_make("h264parse", "kvs_parse"); + kvs_caps = gst_element_factory_make("capsfilter", "kvs_caps"); + kvs_sink = gst_element_factory_make("kvssink", "kvs_sink"); + + // Check if all elements were created successfully + if (!source || !capsfilter || !convert || !tee || + !webrtc_queue || !webrtc_convert || !webrtc_encoder || !webrtc_caps || !webrtc_appsink || + !kvs_queue || !kvs_convert || !kvs_encoder || !kvs_parse || !kvs_caps || !kvs_sink) { + g_printerr("Not all elements could be created. 
Exiting.\n"); + goto CleanUp; + } + + // Configure common elements + caps = gst_caps_from_string("video/x-raw,width=640,height=480,framerate=25/1"); + g_object_set(capsfilter, "caps", caps, NULL); + gst_caps_unref(caps); + + // Configure WebRTC branch - using settings from kvsWebrtcClientMasterGstSample.c + g_object_set(webrtc_encoder, + "bframes", 0, + "speed-preset", 1, // veryfast + "bitrate", 150, + "byte-stream", TRUE, + "tune", 0x04, // zerolatency + NULL); + + webrtc_h264_caps = gst_caps_from_string("video/x-h264,stream-format=byte-stream,alignment=au,profile=baseline"); + g_object_set(webrtc_caps, "caps", webrtc_h264_caps, NULL); + gst_caps_unref(webrtc_h264_caps); + + g_object_set(webrtc_appsink, "sync", TRUE, "emit-signals", TRUE, NULL); + g_signal_connect(webrtc_appsink, "new-sample", G_CALLBACK(on_new_webrtc_sample), pSampleConfiguration); + + // Configure KVS branch - using default settings that worked before + g_object_set(kvs_encoder, + "bitrate", 150, + "key-int-max", 45, + "tune", 0x00000004, // zerolatency + NULL); + + kvs_h264_caps = gst_caps_from_string("video/x-h264,profile=baseline"); + g_object_set(kvs_caps, "caps", kvs_h264_caps, NULL); + gst_caps_unref(kvs_h264_caps); + + // Configure KVS sink + g_object_set(kvs_sink, "stream-name", pStreamName, NULL); + + // Add all elements to the pipeline + gst_bin_add_many(GST_BIN(pipeline), source, capsfilter, convert, tee, NULL); + + // Add WebRTC branch elements + gst_bin_add_many(GST_BIN(pipeline), + webrtc_queue, webrtc_convert, webrtc_encoder, webrtc_caps, webrtc_appsink, + NULL); + + // Add KVS branch elements + gst_bin_add_many(GST_BIN(pipeline), + kvs_queue, kvs_convert, kvs_encoder, kvs_parse, kvs_caps, kvs_sink, + NULL); + + // Link common elements + if (!gst_element_link_many(source, capsfilter, convert, tee, NULL)) { + g_printerr("Common elements could not be linked. 
Exiting.\n"); + goto CleanUp; + } + + // Link WebRTC branch + if (!gst_element_link_many(webrtc_queue, webrtc_convert, webrtc_encoder, webrtc_caps, webrtc_appsink, NULL)) { + g_printerr("WebRTC branch elements could not be linked. Exiting.\n"); + goto CleanUp; + } + + tee_webrtc_pad = gst_element_get_request_pad(tee, "src_%u"); + webrtc_queue_pad = gst_element_get_static_pad(webrtc_queue, "sink"); + if (gst_pad_link(tee_webrtc_pad, webrtc_queue_pad) != GST_PAD_LINK_OK) { + g_printerr("WebRTC branch could not be linked to tee. Exiting.\n"); + goto CleanUp; + } + gst_object_unref(webrtc_queue_pad); + + // Link KVS branch + if (!gst_element_link_many(kvs_queue, kvs_convert, kvs_encoder, kvs_parse, kvs_caps, kvs_sink, NULL)) { + g_printerr("KVS branch elements could not be linked. Exiting.\n"); + goto CleanUp; + } + + tee_kvs_pad = gst_element_get_request_pad(tee, "src_%u"); + kvs_queue_pad = gst_element_get_static_pad(kvs_queue, "sink"); + if (gst_pad_link(tee_kvs_pad, kvs_queue_pad) != GST_PAD_LINK_OK) { + g_printerr("KVS branch could not be linked to tee. 
Exiting.\n"); + goto CleanUp; + } + gst_object_unref(kvs_queue_pad); + + // Set up bus watch + bus = gst_element_get_bus(pipeline); + gst_bus_add_watch(bus, bus_call, main_loop); + gst_object_unref(bus); + + // Start the pipeline + gst_element_set_state(pipeline, GST_STATE_PLAYING); + printf("Pipeline started\n"); + + // Start the main loop + g_main_loop_run(main_loop); + + // Clean up the pipeline + gst_element_set_state(pipeline, GST_STATE_NULL); + if (tee_webrtc_pad != NULL) { + gst_element_release_request_pad(tee, tee_webrtc_pad); + gst_object_unref(tee_webrtc_pad); + } + if (tee_kvs_pad != NULL) { + gst_element_release_request_pad(tee, tee_kvs_pad); + gst_object_unref(tee_kvs_pad); + } + +CleanUp: + if (pSampleConfiguration != NULL) { + // Get signaling client metrics + retStatus = signalingClientGetMetrics(pSampleConfiguration->signalingClientHandle, &signalingClientMetrics); + if (retStatus == STATUS_SUCCESS) { + printf("Signaling client metrics retrieved successfully\n"); + } + + // Cleanup resources + if (IS_VALID_SIGNALING_CLIENT_HANDLE(pSampleConfiguration->signalingClientHandle)) { + signalingClientDeleteSync(pSampleConfiguration->signalingClientHandle); + } + + freeSampleConfiguration(&pSampleConfiguration); + } + + printf("Cleanup done\n"); + + if (pipeline != NULL) { + gst_object_unref(GST_OBJECT(pipeline)); + } + + if (main_loop != NULL) { + g_main_loop_unref(main_loop); + } + + deinitKvsWebRtc(); + + return (retStatus == STATUS_SUCCESS) ? 
EXIT_SUCCESS : EXIT_FAILURE; +} \ No newline at end of file diff --git a/webrtc-c/webrtc-and-directstream/screenshots/Screenshot-KVSStream.png b/webrtc-c/webrtc-and-directstream/screenshots/Screenshot-KVSStream.png new file mode 100644 index 000000000..50211b93c Binary files /dev/null and b/webrtc-c/webrtc-and-directstream/screenshots/Screenshot-KVSStream.png differ diff --git a/webrtc-c/webrtc-and-directstream/screenshots/Screenshot-Log-KVSStream-1.png b/webrtc-c/webrtc-and-directstream/screenshots/Screenshot-Log-KVSStream-1.png new file mode 100644 index 000000000..84a69a8f1 Binary files /dev/null and b/webrtc-c/webrtc-and-directstream/screenshots/Screenshot-Log-KVSStream-1.png differ diff --git a/webrtc-c/webrtc-and-directstream/screenshots/Screenshot-Log-KVSStream-2.png b/webrtc-c/webrtc-and-directstream/screenshots/Screenshot-Log-KVSStream-2.png new file mode 100644 index 000000000..1ea306575 Binary files /dev/null and b/webrtc-c/webrtc-and-directstream/screenshots/Screenshot-Log-KVSStream-2.png differ diff --git a/webrtc-c/webrtc-and-directstream/screenshots/Screenshot-Log-WebRTC.png b/webrtc-c/webrtc-and-directstream/screenshots/Screenshot-Log-WebRTC.png new file mode 100644 index 000000000..fc2a5e721 Binary files /dev/null and b/webrtc-c/webrtc-and-directstream/screenshots/Screenshot-Log-WebRTC.png differ diff --git a/webrtc-c/webrtc-and-directstream/screenshots/Screenshot-WebRTC.png b/webrtc-c/webrtc-and-directstream/screenshots/Screenshot-WebRTC.png new file mode 100644 index 000000000..fa2568206 Binary files /dev/null and b/webrtc-c/webrtc-and-directstream/screenshots/Screenshot-WebRTC.png differ