This article provides an introduction to the concepts and resources provided by the Avaya Client SDK to support integration of video features into your application.
Video components are connected via VideoSources and VideoSinks.
There are three main components for handling video: the video capturer (CameraVideoSource), the video channel (VideoChannel), and the video renderers (VideoRenderer2).
The first video component we need to create is the video capturer: CameraVideoSource. In most applications, a single instance is all that is needed. This object will capture video frames from the selected camera and output them to the selected VideoSink.
// Create the single capture source for the application; frames from the
// currently selected camera are delivered to whichever VideoSink is attached.
CameraVideoSource cameraSource = new CameraVideoSource();
The VideoChannel is responsible for transmitting the video to the remote user. In order to send the video from our camera, we need to connect the CameraVideoSource and the VideoChannel.
// get the VideoSink associated with the VideoChannel
VideoSink sink = videoInterface.getLocalVideoSink(channelId);
// connect the CameraVideoSource and the VideoChannel so that captured
// frames flow into the channel for transmission
cameraSource.setVideoSink(sink);
Now any video frames produced by "cameraSource" will be sent to the VideoChannel and eventually to the remote user.
There are typically two video renderers we want to create. One for the remote video and one for a local preview of our own video.
// Create a renderer for the video received from the remote user
VideoRenderer2 remoteRenderer = new VideoRenderer2();
// This is a very important step. Without this, nothing will be rendered
// See Microsoft's documentation for details on CompositionTarget.Rendering
CompositionTarget.Rendering += remoteRenderer.onRendering;
// You can optionally be notified of a change to the rendered frame size
remoteRenderer.FrameSizeChanged += handleRemoteFrameSizeChanged;
// Create another renderer for local preview
VideoRenderer2 previewRenderer = new VideoRenderer2();
CompositionTarget.Rendering += previewRenderer.onRendering;
previewRenderer.FrameSizeChanged += handleLocalFrameSizeChanged;
// The remote renderer needs to be connected to the VideoChannel
VideoSource source = videoInterface.getRemoteVideoSource(channelId);
source.setVideoSink(remoteRenderer);
// The local renderer needs to be connected to the CameraVideoSource
// However, the CameraVideoSource already has a sink connected (VideoChannel)
// The CameraVideoSource provides an additional method to allow two sinks to
// be attached.
cameraSource.setPreviewVideoSink(previewRenderer);
The CameraVideoSource instance provides a collection of cameras currently installed on your computer. Typically this list of cameras would be presented to the user to allow the selection of the desired camera. This collection of cameras will automatically update as cameras are added or removed from the computer.
// Pick a camera from the collection
// NOTE(review): First() throws InvalidOperationException when no camera is
// installed — production code should use FirstOrDefault() and handle null
CameraDevice selectedCamera = cameraSource.Cameras.First();
// cameraSource.Cameras is of type ReadOnlyObservableCollection
// which means we can listen for changes to the collection
((INotifyCollectionChanged)cameraSource.Cameras).CollectionChanged +=
(object sender, NotifyCollectionChangedEventArgs e) =>
{
MessageBox.Show("Camera collection changed");
};
Now that we have selected a camera to use, we are ready to start capture. Cameras support multiple capture formats. These formats can vary in resolution and frame rate. When starting a capture session, constraints can be placed on these formats in order to capture at the desired quality.
// CameraVideoSource will use the best available capture format within these
// constraints, in order: max width, max height, min framerate, max framerate
VideoCaptureFormat format = new VideoCaptureFormat(1280, 720, 15, 30);
// starting capture is an asynchronous process.
// The third parameter is a delegate that is called once the Start operation
// has finished.
cameraSource.Start(selectedCamera, format, (CameraVideoSourceResult result) =>
{
if (result != CameraVideoSourceResult.Success)
{
MessageBox.Show("cameraSource.Start failed");
}
});
You may decide after capture has begun that you want to switch cameras. You can do this by simply calling "cameraSource.Start" again. There is no need to stop the current capture session before starting again with a new camera. Also note that no changes are required to the video channel or video renderer. Those associations are made with the CameraVideoSource, not an individual camera.
Stopping the video capture is an easy process. You simply call "Stop".
// Stop the current capture session; connections to sinks remain in place
cameraSource.Stop();
At this point, you may wish to disassociate the cameraSource from the video channel and video renderer.
// detach the VideoChannel sink so captured frames are no longer transmitted
cameraSource.setVideoSink(null);
// detach the local preview renderer
cameraSource.setPreviewVideoSink(null);
Once you are completely done with an object, it is important that you dispose of it properly.
// dispose of preview renderer
// disassociate with any video source
cameraSource.setPreviewVideoSink(null);
// remove all event handlers
previewRenderer.FrameSizeChanged -= handleLocalFrameSizeChanged;
CompositionTarget.Rendering -= previewRenderer.onRendering;
// finally dispose the renderer itself
previewRenderer.Dispose();
previewRenderer = null;
// Dispose of remote renderer
// disassociate with any video source
videoInterface.getRemoteVideoSource(channelId).setVideoSink(null);
// remove all event handlers
remoteRenderer.FrameSizeChanged -= handleRemoteFrameSizeChanged;
CompositionTarget.Rendering -= remoteRenderer.onRendering;
// finally dispose the renderer itself
remoteRenderer.Dispose();
remoteRenderer = null;
/// <summary>
/// Example of wiring Avaya Client SDK video components together: a camera
/// capture source, a renderer for remote video and a renderer for local
/// preview, all associated with a single video channel.
/// </summary>
public class VideoExample : IDisposable
{
    private CameraVideoSource cameraSource = null;
    private CameraDevice selectedCamera = null;
    private VideoRenderer2 remoteRenderer = null;
    private VideoRenderer2 previewRenderer = null;
    // Retained so Dispose can disassociate the remote renderer from the
    // remote video source of the channel supplied at construction time.
    private readonly VideoInterface videoInterface;
    private readonly int channelId;

    /// <summary>
    /// Creates the capture source and both renderers and connects them to
    /// the local/remote video endpoints of the given channel.
    /// </summary>
    /// <param name="videoInterface">SDK interface used to look up the
    /// channel's local VideoSink and remote VideoSource.</param>
    /// <param name="channelId">Identifier of the video channel.</param>
    public VideoExample(VideoInterface videoInterface, int channelId)
    {
        this.videoInterface = videoInterface;
        this.channelId = channelId;
        // create capture source
        cameraSource = new CameraVideoSource();
        // select a camera to use
        // NOTE(review): First() throws when no camera is installed — callers
        // may prefer FirstOrDefault() plus a user-facing message.
        selectedCamera = cameraSource.Cameras.First();
        // listen for changes to the camera list
        ((INotifyCollectionChanged)cameraSource.Cameras).CollectionChanged +=
            (object sender, NotifyCollectionChangedEventArgs e) =>
            {
                MessageBox.Show("Camera collection changed");
            };
        // get VideoSink associated with the VideoChannel
        VideoSink sink = videoInterface.getLocalVideoSink(channelId);
        // associate CameraVideoSource and the VideoChannel
        cameraSource.setVideoSink(sink);
        // create a renderer for remote video
        remoteRenderer = new VideoRenderer2();
        remoteRenderer.FrameSizeChanged += handleRemoteFrameSizeChanged; // optional
        CompositionTarget.Rendering += remoteRenderer.onRendering;       // required
        // get VideoSource associated with the video channel
        // (encoded video received from the network)
        VideoSource source = videoInterface.getRemoteVideoSource(channelId);
        // associate the remote renderer with this video source
        source.setVideoSink(remoteRenderer);
        // create a renderer for local preview
        previewRenderer = new VideoRenderer2();
        previewRenderer.FrameSizeChanged += handleLocalFrameSizeChanged; // optional
        CompositionTarget.Rendering += previewRenderer.onRendering;      // required
        // associate the preview renderer with the capture source.
        // The CameraVideoSource provides this dedicated method because its
        // primary sink is already taken by the VideoChannel.
        cameraSource.setPreviewVideoSink(previewRenderer);
    }

    /// <summary>
    /// Starts (or restarts) capturing from the currently selected camera.
    /// </summary>
    public void StartCapture()
    {
        // CameraVideoSource will use the best available capture format within
        // these constraints: max width, max height, min framerate, max framerate
        VideoCaptureFormat format = new VideoCaptureFormat(1280, 720, 15, 30);
        // starting capture is an asynchronous process; the callback fires
        // once the Start operation has finished.
        cameraSource.Start(selectedCamera, format,
            (CameraVideoSourceResult result) =>
            {
                if (result != CameraVideoSourceResult.Success)
                {
                    MessageBox.Show("cameraSource.Start failed");
                }
            });
    }

    /// <summary>
    /// Switches capture to a different camera.
    /// </summary>
    /// <param name="camera">The camera to capture from.</param>
    public void SwitchCamera(CameraDevice camera)
    {
        // It is unnecessary to stop the CameraVideoSource when switching
        // cameras; just start capture again with the new CameraDevice.
        // No changes are required to the renderers or video channel —
        // those associations are with the CameraVideoSource itself.
        selectedCamera = camera;
        StartCapture();
    }

    /// <summary>
    /// Stops the current capture session. Sink associations remain intact.
    /// </summary>
    public void StopCapture()
    {
        cameraSource.Stop();
    }

    /// <summary>
    /// Disconnects sources/sinks, unsubscribes all event handlers and
    /// disposes the renderers and the capture source.
    /// </summary>
    public void Dispose()
    {
        // clean up preview renderer: disassociate, unsubscribe, dispose
        cameraSource.setPreviewVideoSink(null);
        previewRenderer.FrameSizeChanged -= handleLocalFrameSizeChanged;
        CompositionTarget.Rendering -= previewRenderer.onRendering;
        previewRenderer.Dispose();
        previewRenderer = null;
        // clean up remote renderer: it is attached to the channel's remote
        // video source, so it must be disconnected there (not on cameraSource)
        videoInterface.getRemoteVideoSource(channelId).setVideoSink(null);
        remoteRenderer.FrameSizeChanged -= handleRemoteFrameSizeChanged;
        CompositionTarget.Rendering -= remoteRenderer.onRendering;
        remoteRenderer.Dispose();
        remoteRenderer = null;
        // disconnect the capture source from the VideoChannel and dispose it
        cameraSource.setVideoSink(null);
        cameraSource.Dispose();
        cameraSource = null;
    }

    // respond to preview frame size change
    private void handleLocalFrameSizeChanged(object sender, FrameSizeChangedArgs e)
    {
    }

    // respond to remote frame size change
    private void handleRemoteFrameSizeChanged(object sender, FrameSizeChangedArgs e)
    {
    }
}