Commit 641b1d97 by Christoph

V1.985 + major videoinput update & optimization

* videoinput updated with optimization, scaling support, and AddCanvas support for C#
* support for copying video images to WebGL textures
* added a WebGL rendering example based on Mozilla's WebGL samples
* V1.985
* updated dependencies
parent 2fba5d08
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>WebGL Demo</title>
<link rel="stylesheet" href="../webgl.css" type="text/css">
</head>
<body>
<canvas id="glcanvas" width="640" height="480"></canvas>
</body>
<script src="../bundle/awrtc.js"></script>
<script src="gl-matrix.js"></script>
<script src="webgl-demo_changed.js"></script>
<script>
let canvas = document.querySelector("#glcanvas");
let nconfig = new awrtc.NetworkConfig();
let call = new awrtc.BrowserWebRtcCall(nconfig);
call.addEventListener((sender, args) => {
if(args.Type === awrtc.CallEventType.FrameUpdate)
{
let gl = canvas.getContext("webgl");
//globalTextureId, globalTextureWidth and globalTextureHeight are expected to be defined in webgl-demo_changed.js
if(args.Frame.Width != globalTextureWidth || args.Frame.Height != globalTextureHeight)
{
//resolution changed: rebind and reallocate the texture storage so the texSubImage2D inside ToTexture fits
gl.bindTexture(gl.TEXTURE_2D, globalTextureId);
const pixel = new Uint8Array(args.Frame.Width * args.Frame.Height * 3);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, args.Frame.Width, args.Frame.Height, 0, gl.RGB, gl.UNSIGNED_BYTE, pixel);
globalTextureWidth = args.Frame.Width;
globalTextureHeight = args.Frame.Height;
}
args.Frame.ToTexture(gl, globalTextureId);
}
});
//Because the system is designed for realtime graphics, we have to call the Update method regularly.
//Events are only triggered during this Update call!
let intervalId = setInterval(() => {
call.Update();
}, 50);
let config = new awrtc.MediaConfig();
config.Audio = false;
config.Video = true;
config.FrameUpdates = true;
config.IdealWidth = 640;
config.IdealHeight = 480;
config.IdealFps = 30;
console.log("requested config:" + JSON.stringify(config));
call.Configure(config);
</script>
</html>
Slightly changed WebGL example from Mozilla to test the frame copy from WebRTC -> VideoElement -> WebGL Texture.
Source:
https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/Tutorial/Using_textures_in_WebGL
https://github.com/mdn/webgl-examples/tree/gh-pages/tutorial/sample6
// Karma configuration
// Generated on Mon Jun 24 2019 19:59:32 GMT+1200 (New Zealand Standard Time)
module.exports = function(config) {
config.set({
// base path that will be used to resolve all patterns (eg. files, exclude)
basePath: '',
// frameworks to use
// available frameworks: https://npmjs.org/browse/keyword/karma-adapter
frameworks: ['jasmine'],
// list of files / patterns to load in the browser
files: [
'build/bundle/*.js'
],
// list of files / patterns to exclude
exclude: [
],
// preprocess matching files before serving them to the browser
// available preprocessors: https://npmjs.org/browse/keyword/karma-preprocessor
preprocessors: {
},
// test results reporter to use
// possible values: 'dots', 'progress'
// available reporters: https://npmjs.org/browse/keyword/karma-reporter
reporters: ['progress'],
// web server port
port: 9876,
// enable / disable colors in the output (reporters and logs)
colors: true,
// level of logging
// possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG
logLevel: config.LOG_INFO,
// enable / disable watching file and executing tests whenever any file changes
autoWatch: true,
// start these browsers
// available browser launchers: https://npmjs.org/browse/keyword/karma-launcher
browsers: ['FirefoxCustom'],
customLaunchers: {
FirefoxCustom: {
base: 'Firefox',
prefs: {
'media.navigator.permission.disabled': true,
'media.navigator.streams.fake' : true
}
}
},
// Continuous Integration mode
// if true, Karma captures browsers, runs the tests and exits
singleRun: false,
// Concurrency level
// how many browsers should be started simultaneously
concurrency: Infinity
})
}
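The FirefoxCustom launcher above enables fake media devices, so media tests can run without camera hardware or permission prompts. As a hypothetical illustration (not part of this commit), a Jasmine spec relying on those prefs might look like this:
describe("FakeMediaSmokeTest", () => {
    it("delivers a fake video track", async () => {
        //media.navigator.streams.fake makes this succeed without a real camera
        const stream = await navigator.mediaDevices.getUserMedia({ video: true });
        expect(stream.getVideoTracks().length).toBeGreaterThan(0);
        stream.getTracks().forEach(t => t.stop());
    });
});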
{
"name": "awrtc_browser",
"version": "0.984.4",
"version": "1.985.0",
"description": "",
"author": "because-why-not.com Limited",
"license": "BSD-3-Clause",
"dependencies": {},
"main": "build/awrtc/index.js",
"main": "build/bundle/awrtc.js",
"types": "build/awrtc/index.d.ts",
"scripts": {
"tsc": "tsc",
......@@ -14,11 +14,19 @@
"watch": "webpack --watch",
"clean": "shx rm -rf ./build/awrtc ./build/bundle"
},
"files": [
"build",
"src"
],
"repository": {
"type": "git",
"url": "https://github.com/because-why-not/awrtc_browser"
},
"devDependencies": {
"@types/jasmine": "^2.8.16",
"jasmine": "^2.99.0",
"jasmine-core": "^3.5.0",
"karma": "^4.4.1",
"karma": "^5.0.5",
"karma-chrome-launcher": "^2.2.0",
"karma-firefox-launcher": "^1.3.0",
"karma-jasmine": "^2.0.1",
......@@ -26,9 +34,9 @@
"source-map-loader": "^0.2.4",
"ts-loader": "^5.4.5",
"tsconfig-paths-webpack-plugin": "^3.2.0",
"typescript": "^3.8.2",
"typescript": "^3.8.3",
"uglify-js": "^2.8.29",
"webpack": "^4.41.6",
"webpack": "^4.43.0",
"webpack-cli": "^3.3.11",
"webrtc-adapter": "^6.4.8"
}
......
......@@ -256,7 +256,6 @@ class MinimalCall
//other.
export function BrowserWebRtcCall_minimal() {
awrtc.BrowserMediaStream.sUseLazyFrames = true;
let netConfig = new awrtc.NetworkConfig();
netConfig.IsConference = false;
netConfig.SignalingUrl = DefaultValues.Signaling;
......
......@@ -29,7 +29,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import * as awrtc from "../awrtc/index"
import {DefaultValues, GetRandomKey} from "./apphelpers"
import { DeviceApi, DeviceInfo } from "../awrtc/index";
import { DeviceApi, DeviceInfo, BrowserMediaStream } from "../awrtc/index";
//This file only contains badly maintained
//test apps. Use only for experimentation.
......@@ -387,26 +387,34 @@ class FpsCounter
lastRefresh = 0;
fps = 0;
counter = 0;
isNew = false;
public get Fps()
{
return Math.round(this.fps);
}
public get Counter()
public get IsNew() : boolean
{
return this.counter;
if(this.isNew){
this.isNew = false;
return true;
}
return false;
}
Update():void
{
this.counter++;
let diff = new Date().getTime() - this.lastRefresh;
if(diff > 1000)
let refresh_time = 2000;
if(diff > refresh_time)
{
this.fps = this.counter / (diff / 1000);
this.counter = 0;
this.lastRefresh = new Date().getTime();
this.isNew = true;
}
}
}
......@@ -415,7 +423,7 @@ class FpsCounter
//and accesses the resulting frame data directly
export function BrowserMediaNetwork_frameaccess() {
//BrowserMediaStream.DEFAULT_FRAMERATE = 60;
//awrtc.BrowserMediaStream.DEBUG_SHOW_ELEMENTS = true;
let address = GetRandomKey();
......@@ -427,8 +435,15 @@ export function BrowserMediaNetwork_frameaccess() {
let network2 = new awrtc.BrowserMediaNetwork(networkConfig);
let mediaConfig1 = new awrtc.MediaConfig();
mediaConfig1.Audio = true;
mediaConfig1.Audio = false;
mediaConfig1.Video = true;
/*
mediaConfig1.IdealWidth = 320;
mediaConfig1.IdealHeight = 240;
//fps seems to be ignored by browsers even if
//the camera specifically supports that setting
mediaConfig1.IdealFps = 15;
*/
let mediaConfig2 = new awrtc.MediaConfig();
mediaConfig2.Audio = false;
mediaConfig2.Video = false;
......@@ -436,6 +451,7 @@ export function BrowserMediaNetwork_frameaccess() {
let localFps = new FpsCounter();
let remoteFps = new FpsCounter();
let loopRate = new FpsCounter();
......@@ -466,15 +482,17 @@ export function BrowserMediaNetwork_frameaccess() {
setInterval(() => {
network1.Update();
loopRate.Update();
if(loopRate.IsNew)
console.log("Loop rate: " + loopRate.Fps);
let frame1: awrtc.IFrameData = null;
let frame2: awrtc.IFrameData = null;
frame1 = network1.TryGetFrame(awrtc.ConnectionId.INVALID);
if (frame1 != null)
{
localFps.Update();
if(localFps.Counter % 30 == 0)
if(localFps.IsNew)
console.log("local1 width" + frame1.Width + " height:" + frame1.Height + "fps: " + localFps.Fps + " data:" + frame1.Buffer[0]);
}
......@@ -515,7 +533,7 @@ export function BrowserMediaNetwork_frameaccess() {
if (frame2 != null)
{
remoteFps.Update();
if(remoteFps.Counter % 30 == 0)
if(remoteFps.IsNew)
console.log("remote2 width" + frame2.Width + " height:" + frame2.Height + "fps: " + remoteFps.Fps + " data:" + frame2.Buffer[0]);
}
}
......
......@@ -59,6 +59,15 @@ export class IFrameData {
}
public constructor() { }
public ToTexture(gl: WebGL2RenderingContext, texture: WebGLTexture) : boolean{
return false;
}
/*
public ToTexture2(gl: WebGL2RenderingContext) : WebGLTexture{
return null;
}
*/
}
//Container for the raw bytes of the current frame + height and width.
......@@ -96,6 +105,10 @@ export class RawFrame extends IFrameData{
* only create a lazy frame which will delay the creation of the RawFrame until the user actually tries
* to access any data.
* Thus if the game slows down or the user doesn't access any data the expensive copy is avoided.
*
* This comes with the downside of risking a change in Width / Height in the meantime. In theory the video could
* change resolution, causing the values of Width / Height to change over time before Buffer is accessed to create
* a copy that is safe to use. This should be fine as long as the frame is used at the time it is received.
*/
export class LazyFrame extends IFrameData{
......@@ -113,20 +126,42 @@ export class LazyFrame extends IFrameData{
return this.mRawFrame.Buffer;
}
/**Returns the expected width of the frame.
* Watch out: this might change in between frames!
*
*/
public get Width(): number {
if (this.mRawFrame == null)
{
return this.mFrameGenerator.VideoElement.videoWidth;
}else{
return this.mRawFrame.Width;
}
/*
this.GenerateFrame();
if (this.mRawFrame == null)
return -1;
return this.mRawFrame.Width;
*/
}
/**Returns the expected height of the frame.
* Watch out: this might change in between frames!
*
*/
public get Height(): number {
if (this.mRawFrame == null)
{
return this.mFrameGenerator.VideoElement.videoHeight;
}else{
return this.mRawFrame.Height;
}
/*
this.GenerateFrame();
if (this.mRawFrame == null)
return -1;
return this.mRawFrame.Height;
*/
}
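Given the warning above that Width / Height are read live from the video element until the raw frame has been generated, a safe consumption pattern is to access Buffer first and only then read the dimensions. A minimal sketch, assuming awrtc is imported as in the test apps:
function snapshotFrame(frame: awrtc.IFrameData) {
    //accessing Buffer forces the LazyFrame to generate its RawFrame copy
    const pixels = frame.Buffer;
    //from here on Width / Height are served from the RawFrame
    //and are guaranteed to match the pixel data above
    return { width: frame.Width, height: frame.Height, pixels: pixels };
}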
......@@ -135,6 +170,37 @@ export class LazyFrame extends IFrameData{
this.mFrameGenerator = frameGenerator;
}
/**Intended for use via the Unity plugin.
* Copies the image directly into a texture to avoid the overhead of a CPU-side copy.
*
* The given texture should have the correct size before calling this method.
*
* @param gl
* @param texture
*/
public ToTexture(gl: WebGL2RenderingContext, texture: WebGLTexture) : boolean{
gl.bindTexture(gl.TEXTURE_2D, texture);
/*
const level = 0;
const internalFormat = gl.RGBA;
const srcFormat = gl.RGBA;
const srcType = gl.UNSIGNED_BYTE;
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat, srcFormat, srcType, this.mFrameGenerator.VideoElement);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
*/
gl.texSubImage2D(gl.TEXTURE_2D, 0, 0, 0, gl.RGB, gl.UNSIGNED_BYTE, this.mFrameGenerator.VideoElement);
return true;
}
/*
public ToTexture2(gl: WebGL2RenderingContext) : WebGLTexture{
let tex = gl.createTexture()
this.ToTexture(gl, tex)
return;
}
*/
//Called before access of any frame data triggering the creation of the raw frame data
private GenerateFrame() {
......
......@@ -100,7 +100,6 @@ export class BrowserMediaNetwork extends WebRtcNetwork implements IMediaNetwork
let promise : Promise<MediaStream> = null;
promise = Media.SharedInstance.getUserMedia(config);
promise.then((stream) => { //user gave permission
//totally unrelated -> user gave access to devices. use this
......
......@@ -352,9 +352,14 @@ export class DeviceApi
{
deviceId = DeviceApi.GetDeviceId(config.VideoDeviceName);
SLog.L("using device " + config.VideoDeviceName);
if(deviceId !== null)
if(deviceId === "")
{
//SLog.L("using device id " + deviceId);
//Workaround for Chrome 81: if no camera access is allowed, Chrome returns the deviceId ""
//thus we can only request any video device and can't select a specific one
deviceId = null;
}else if(deviceId !== null)
{
//all good
}
else{
SLog.LE("Failed to find deviceId for label " + config.VideoDeviceName);
......
......@@ -34,26 +34,35 @@ export class Media{
return real_devices.concat(virtual_devices);
}
public static IsNameSet(videoDeviceName: string) : boolean{
if(videoDeviceName !== null && videoDeviceName !== "" )
{
return true;
}
return false;
}
public getUserMedia(config: MediaConfig): Promise<MediaStream>{
if(config.VideoDeviceName !== null
&& config.VideoDeviceName !== ""
if(config.Video && Media.IsNameSet(config.VideoDeviceName)
&& this.videoInput != null
&& this.videoInput.HasDevice(config.VideoDeviceName))
{
return new Promise<MediaStream>((resolve, reject) => {
try{
const res :MediaStream = this.videoInput.GetStream(config.VideoDeviceName);
resolve(res)
}catch(err)
let res = Promise.resolve().then(async ()=>{
let stream = this.videoInput.GetStream(config.VideoDeviceName);
if(config.Audio)
{
reject(err);
let constraints = {} as MediaStreamConstraints
constraints.audio = true;
let audio_stream = await DeviceApi.getBrowserUserMedia(constraints);
stream.addTrack(audio_stream.getTracks()[0])
}
});
return stream;
})
return res;
}
return DeviceApi.getAssetUserMedia(config);
......
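A usage sketch of the reworked getUserMedia path, assuming the Media / VideoInput API shown in this diff (Media and MediaConfig imported from awrtc/index): register a canvas as a virtual camera, then request it together with a microphone audio track.
async function startVirtualCamera(canvas: HTMLCanvasElement): Promise<MediaStream> {
    const media = new Media();
    //expose the canvas as a virtual video device named "virtual_cam"
    media.VideoInput.AddCanvasDevice(canvas, "virtual_cam", canvas.width, canvas.height, 30);
    const config = new MediaConfig();
    config.Audio = true; //the audio track is fetched from the browser and merged in
    config.Video = true;
    config.VideoDeviceName = "virtual_cam";
    //resolves to a stream with one canvas video track and one audio track
    return media.getUserMedia(config);
}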
......@@ -32,7 +32,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import {SLog, WebRtcNetwork, SignalingConfig, NetworkEvent, ConnectionId, LocalNetwork, WebsocketNetwork} from "../network/index"
import { MediaConfigurationState, NetworkConfig, MediaConfig } from "../media/index";
import { BrowserMediaStream, BrowserMediaNetwork, DeviceApi, BrowserWebRtcCall, Media } from "../media_browser/index";
import { BrowserMediaStream, BrowserMediaNetwork, DeviceApi, BrowserWebRtcCall, Media, VideoInputType } from "../media_browser/index";
var CAPI_InitMode = {
......@@ -425,9 +425,6 @@ export function CAPI_MediaNetwork_TryGetFrame(lIndex: number, lConnectionId: num
if (frame == null || frame.Buffer == null) {
return false;
} else {
//TODO: copy frame over
lWidthInt32Array[lWidthIntArrayIndex] = frame.Width;
lHeightInt32Array[lHeightIntArrayIndex] = frame.Height;
......@@ -438,6 +435,61 @@ export function CAPI_MediaNetwork_TryGetFrame(lIndex: number, lConnectionId: num
}
}
export function CAPI_MediaNetwork_TryGetFrame_ToTexture(lIndex: number, lConnectionId: number,
lWidth: number,
lHeight: number,
gl:WebGL2RenderingContext, texture:WebGLTexture): boolean
{
//console.log("CAPI_MediaNetwork_TryGetFrame_ToTexture");
let mediaNetwork = gCAPI_WebRtcNetwork_Instances[lIndex] as BrowserMediaNetwork;
let frame = mediaNetwork.TryGetFrame(new ConnectionId(lConnectionId));
if (frame == null ) {
return false;
} else if (frame.Width != lWidth || frame.Height != lHeight) {
SLog.LW("CAPI_MediaNetwork_TryGetFrame_ToTexture failed. Caller expected " + lWidth + "x" + lHeight + " but the frame is " + frame.Width + "x" + frame.Height);
return false;
}else {
frame.ToTexture(gl, texture);
return true;
}
}
/*
export function CAPI_MediaNetwork_TryGetFrame_ToTexture2(lIndex: number, lConnectionId: number,
lWidthInt32Array: Int32Array, lWidthIntArrayIndex: number,
lHeightInt32Array: Int32Array, lHeightIntArrayIndex: number,
gl:WebGL2RenderingContext): WebGLTexture
{
//console.log("CAPI_MediaNetwork_TryGetFrame_ToTexture");
let mediaNetwork = gCAPI_WebRtcNetwork_Instances[lIndex] as BrowserMediaNetwork;
let frame = mediaNetwork.TryGetFrame(new ConnectionId(lConnectionId));
if (frame == null) {
return false;
} else {
lWidthInt32Array[lWidthIntArrayIndex] = frame.Width;
lHeightInt32Array[lHeightIntArrayIndex] = frame.Height;
let texture = frame.ToTexture2(gl);
return texture;
}
}
*/
export function CAPI_MediaNetwork_TryGetFrame_Resolution(lIndex: number, lConnectionId: number,
lWidthInt32Array: Int32Array, lWidthIntArrayIndex: number,
lHeightInt32Array: Int32Array, lHeightIntArrayIndex: number): boolean
{
let mediaNetwork = gCAPI_WebRtcNetwork_Instances[lIndex] as BrowserMediaNetwork;
let frame = mediaNetwork.PeekFrame(new ConnectionId(lConnectionId));
if (frame == null) {
return false;
} else {
lWidthInt32Array[lWidthIntArrayIndex] = frame.Width;
lHeightInt32Array[lHeightIntArrayIndex] = frame.Height;
return true;
}
}
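Together these two entry points suggest a polling pattern on the plugin side: peek at the frame resolution, (re)allocate the texture if it changed, then copy. A sketch, assuming the CAPI added in this commit and the GetUnityContext helper further down:
const gl = GetUnityContext();
const texture = gl.createTexture();
const dims = new Int32Array(2);
let texWidth = 0;
let texHeight = 0;
function pollFrameToTexture(index: number, connectionId: number): boolean {
    //PeekFrame based: does not consume the buffered frame yet
    if (!CAPI_MediaNetwork_TryGetFrame_Resolution(index, connectionId, dims, 0, dims, 1))
        return false;
    if (dims[0] !== texWidth || dims[1] !== texHeight) {
        //reallocate GPU storage so the texSubImage2D inside ToTexture fits
        gl.bindTexture(gl.TEXTURE_2D, texture);
        gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, dims[0], dims[1], 0,
            gl.RGB, gl.UNSIGNED_BYTE, new Uint8Array(dims[0] * dims[1] * 3));
        texWidth = dims[0];
        texHeight = dims[1];
    }
    //consumes the frame and copies it into the texture
    return CAPI_MediaNetwork_TryGetFrame_ToTexture(index, connectionId, texWidth, texHeight, gl, texture);
}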
//Returns the frame buffer size or -1 if no frame is available
export function CAPI_MediaNetwork_TryGetFrameDataLength(lIndex: number, connectionId: number) : number {
let mediaNetwork = gCAPI_WebRtcNetwork_Instances[lIndex] as BrowserMediaNetwork;
......@@ -496,29 +548,11 @@ export function CAPI_DeviceApi_LastUpdate():number
{
return DeviceApi.LastUpdate;
}
/*
export function CAPI_DeviceApi_Devices_Length():number{
return Object.keys(DeviceApi.Devices).length;
}
export function CAPI_DeviceApi_Devices_Get(index:number):string{
let keys = Object.keys(DeviceApi.Devices);
if(keys.length > index)
{
let key = keys[index];
return DeviceApi.Devices[key].label;
}
else
{
SLog.LE("Requested device with index " + index + " does not exist.");
return "";
}
}
*/
export function CAPI_DeviceApi_Devices_Length():number{
export function CAPI_Media_GetVideoDevices_Length():number{
return Media.SharedInstance.GetVideoDevices().length;
}
export function CAPI_DeviceApi_Devices_Get(index:number):string{
export function CAPI_Media_GetVideoDevices(index:number):string{
const devs = Media.SharedInstance.GetVideoDevices();
if(devs.length > index)
{
......@@ -531,3 +565,47 @@ export function CAPI_DeviceApi_Devices_Get(index:number):string{
return "";
}
}
export function CAPI_VideoInput_AddCanvasDevice(query:string, name:string, width: number, height: number, fps: number): boolean{
let canvas = document.querySelector(query) as HTMLCanvasElement;
if(canvas){
console.debug("CAPI_VideoInput_AddCanvasDevice", {query, name, width, height, fps});
if(width <= 0 || height <= 0){
width = canvas.width;
height = canvas.height;
}
Media.SharedInstance.VideoInput.AddCanvasDevice(canvas, name, width, height, fps);
return true;
}
return false;
}
export function CAPI_VideoInput_AddDevice(name:string, width: number, height: number, fps: number){
Media.SharedInstance.VideoInput.AddDevice(name, width, height, fps);
}
export function CAPI_VideoInput_RemoveDevice(name:string){
Media.SharedInstance.VideoInput.RemoveDevice(name);
}
export function CAPI_VideoInput_UpdateFrame(name:string,
lBufferUint8Array: Uint8Array, lBufferUint8ArrayOffset: number, lBufferUint8ArrayLength: number,
width: number, height: number,
rotation: number, firstRowIsBottom: boolean) : boolean
{
let dataPtrClamped : Uint8ClampedArray = null;
if(lBufferUint8Array && lBufferUint8ArrayLength > 0){
dataPtrClamped = new Uint8ClampedArray(lBufferUint8Array.buffer, lBufferUint8ArrayOffset, lBufferUint8ArrayLength);
}
return Media.SharedInstance.VideoInput.UpdateFrame(name, dataPtrClamped, width, height, VideoInputType.ARGB, rotation, firstRowIsBottom);
}
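A sketch of how these functions are expected to be driven (normally from the C# side via the Unity plugin), written here in TypeScript against the signatures above; the selector "#mycanvas" and the device names are hypothetical:
//a canvas on the page can be exposed directly as a virtual device
//(width/height <= 0 falls back to the canvas size, see above)
CAPI_VideoInput_AddCanvasDevice("#mycanvas", "canvas_device", 0, 0, 30);
//alternatively, register a raw-memory device and feed it frame by frame
CAPI_VideoInput_AddDevice("unity_device", 640, 480, 30);
const frame = new Uint8Array(640 * 480 * 4); //ARGB: 4 bytes per pixel
//fill `frame` with pixel data, then push it:
CAPI_VideoInput_UpdateFrame("unity_device", frame, 0, frame.length,
    640, 480, 0 /*rotation*/, false /*firstRowIsBottom*/);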
//TODO: This needs a proper implementation
//so far this only works if Unity is the only canvas and it uses WebGL2
export function GetUnityCanvas() : HTMLCanvasElement
{
return document.querySelector("canvas");
}
export function GetUnityContext() : WebGL2RenderingContext
{
return GetUnityCanvas().getContext("webgl2");
}
......@@ -28,19 +28,6 @@ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import { Media } from "../media_browser/Media";
import { GetUnityCanvas } from "./CAPI";
export * from "./CAPI"
//add the canvas to video input for testing.
//done via timeout to avoid returning possible errors to unity loading routine
setTimeout(()=>{
console.debug("trying to add canvas to video input");
const canvas = document.querySelector("canvas");
if(canvas)
{
Media.SharedInstance.VideoInput.AddCanvasDevice("canvas", canvas);
console.debug("Canvas added. Make sure to turn off unity local video if streaming from a canvas. Copying images from the canvas to Unity will heavily slow down the app!");
}else{
console.error("Adding canvas failed. No canvas found");
}
}, 10);
......@@ -29,8 +29,8 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
//current setup needs to load everything as a module
import {DeviceApi, CAPI_DeviceApi_Update,
CAPI_DeviceApi_RequestUpdate, CAPI_DeviceApi_Devices_Length,
CAPI_DeviceApi_Devices_Get,
CAPI_DeviceApi_RequestUpdate, CAPI_Media_GetVideoDevices_Length,
CAPI_Media_GetVideoDevices,
MediaConfig,
Media} from "../awrtc/index"
......@@ -134,11 +134,11 @@ describe("DeviceApiTest", () => {
let update2complete = false;
let deviceCount = 0;
const devices_length_uninitialized = CAPI_DeviceApi_Devices_Length();
const devices_length_uninitialized = CAPI_Media_GetVideoDevices_Length();
expect(devices_length_uninitialized).toBe(0);
DeviceApi.AddOnChangedHandler(()=>{
let dev_length = CAPI_DeviceApi_Devices_Length();
let dev_length = CAPI_Media_GetVideoDevices_Length();
expect(dev_length).not.toBe(0);
expect(dev_length).toBe(Object.keys(DeviceApi.Devices).length);
......@@ -147,7 +147,7 @@ describe("DeviceApiTest", () => {
for(let k of keys)
{
let expectedVal = DeviceApi.Devices[k].label;
let actual = CAPI_DeviceApi_Devices_Get(counter);
let actual = CAPI_Media_GetVideoDevices(counter);
expect(actual).toBe(expectedVal);
counter++;
......@@ -237,7 +237,7 @@ describe("DeviceApiTest", () => {
expect(DeviceApi.GetVideoDevices().length).toBe(0);
await DeviceApi.UpdateAsync();
expect(DeviceApi.GetVideoDevices().length).toBeGreaterThan(0);
expect(DeviceApi.GetVideoDevices().length).toBe(CAPI_DeviceApi_Devices_Length());
expect(DeviceApi.GetVideoDevices().length).toBe(CAPI_Media_GetVideoDevices_Length());
done();
});
......
import { VideoInput, Media, DeviceApi, MediaConfig, CAPI_DeviceApi_Devices_Length, CAPI_DeviceApi_Devices_Get } from "../awrtc/index";
import { VideoInput, Media, DeviceApi, MediaConfig, CAPI_Media_GetVideoDevices_Length, CAPI_Media_GetVideoDevices, BrowserMediaStream, WaitForIncomingCallEventArgs } from "../awrtc/index";
import { MakeTestCanvas } from "VideoInputTest";
export function MediaTest_export()
......@@ -69,7 +69,7 @@ describe("MediaTest", () => {
config.Video = true;
const canvas = MakeTestCanvas();
media.VideoInput.AddCanvasDevice(name, canvas);
media.VideoInput.AddCanvasDevice(canvas, name, canvas.width, canvas.height, 30);
const streamCamera = await media.getUserMedia(config);
expect(streamCamera).not.toBeNull();
......@@ -91,19 +91,258 @@ describe("MediaTest", () => {
it("GetUserMedia_videoinput_and_audio", async () => {
const name = "test_canvas";
const media = new Media();
const config = new MediaConfig();
config.Audio = true;
config.Video = true;
const canvas = MakeTestCanvas();
media.VideoInput.AddCanvasDevice(canvas, name, canvas.width, canvas.height, 30);
config.VideoDeviceName = name;
let stream : MediaStream = null;
try{
stream = await media.getUserMedia(config);
}catch(err){
console.error(err);
fail(err);
}
expect(stream).not.toBeNull();
expect(stream.getAudioTracks().length).toBe(1);
expect(stream.getVideoTracks().length).toBe(1);
config.VideoDeviceName = "invalid name";
stream = null;
let error_result : string = null
try{
stream = await media.getUserMedia(config);
}catch(err){
error_result = err;
}
expect(error_result).not.toBeNull();
expect(stream).toBeNull();
}, 15000);
//CAPI needs to be changed to use Media only instead of the device API
it("MediaCapiVideoInput", async (done) => {
//empty normal device api
DeviceApi.Reset();
expect(CAPI_DeviceApi_Devices_Length()).toBe(0);
expect(CAPI_Media_GetVideoDevices_Length()).toBe(0);
const name = "test_canvas";
const canvas = MakeTestCanvas();
Media.SharedInstance.VideoInput.AddCanvasDevice(name, canvas);
expect(CAPI_DeviceApi_Devices_Length()).toBe(1);
expect(CAPI_DeviceApi_Devices_Get(0)).toBe(name);
Media.SharedInstance.VideoInput.AddCanvasDevice(canvas, name, canvas.width, canvas.height, 30);
expect(CAPI_Media_GetVideoDevices_Length()).toBe(1);
expect(CAPI_Media_GetVideoDevices(0)).toBe(name);
done();
});
});
describe("MediaStreamTest", () => {
beforeEach((done)=>{
let handler = ()=>{
DeviceApi.RemOnChangedHandler(handler);
done();
};
DeviceApi.AddOnChangedHandler(handler);
DeviceApi.Update();
Media.ResetSharedInstance();
});
class TestStreamContainer
{
public canvas: HTMLCanvasElement;
public stream : MediaStream;
public constructor()
{
let canvas = document.createElement("canvas");
canvas.width = 4;
canvas.height = 4;
let ctx = canvas.getContext("2d");
//make blue for debugging purposes
ctx.fillStyle = "blue";
ctx.fillRect(0, 0, canvas.width, canvas.height);
this.canvas = canvas;
this.stream = (canvas as any).captureStream() as MediaStream;
}
public MakeFrame(color : string){
let ctx = this.canvas.getContext("2d");
ctx.clearRect(0, 0, this.canvas.width, this.canvas.height)
//fill with the requested color for debugging purposes
ctx.fillStyle = color;
ctx.fillRect(0, 0, this.canvas.width, this.canvas.height);
}
}
function MakeTestStreamContainer()
{
return new TestStreamContainer();
}
//TODO: need a proper way to wait and check with async/await
function sleep(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}
async function WaitFor(){
}
it("buffer_and_trygetframe", async(done) => {
const testcontainer = MakeTestStreamContainer();
const stream = new BrowserMediaStream(testcontainer.stream);
//frames are not available at the start until fully loaded
let frame = stream.TryGetFrame();
expect(frame).toBeNull();
await sleep(100);
stream.Update();
//waited for the internals to get initialized. We should have a frame now
frame = stream.TryGetFrame();
expect(frame).not.toBeNull();
//and a buffer
let buffer = frame.Buffer;
expect(buffer).not.toBeNull();
//expected to be blue
let r = buffer[0];
let g = buffer[1];
let b = buffer[2];
let a = buffer[3];
expect(r).toBe(0);
expect(g).toBe(0);
expect(b).toBe(255);
expect(a).toBe(255);
//we removed the frame now. this should be null
frame = stream.TryGetFrame();
expect(frame).toBeNull();
//make a new frame with different color
testcontainer.MakeFrame("#FFFF00");
await sleep(100);
stream.Update();
//get new frame
frame = stream.TryGetFrame();
expect(frame).not.toBeNull();
buffer = frame.Buffer;
expect(buffer).not.toBeNull();
//should be different color now
r = buffer[0];
g = buffer[1];
b = buffer[2];
a = buffer[3];
expect(r).toBe(255);
expect(g).toBe(255);
expect(b).toBe(0);
expect(a).toBe(255);
//done
done();
});
function createTexture(gl: WebGL2RenderingContext) : WebGLTexture
{
const texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
// Because images have to be downloaded over the internet
// they might take a moment until they are ready.
// Until then put a single pixel in the texture so we can
// use it immediately. When the image has finished downloading
// we'll update the texture with the contents of the image.
const level = 0;
const internalFormat = gl.RGBA;
const width = 1;
const height = 1;
const border = 0;
const srcFormat = gl.RGBA;
const srcType = gl.UNSIGNED_BYTE;
const pixel = new Uint8Array([0, 0, 255, 255]); // opaque blue
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat,
width, height, border, srcFormat, srcType,
pixel);
return texture;
}
it("texture", async(done) => {
//blue test container to stream from
const testcontainer = MakeTestStreamContainer();
const stream = new BrowserMediaStream(testcontainer.stream);
//document.body.appendChild(testcontainer.canvas);
//waited for the internals to get initialized. We should have a frame now
await sleep(100);
stream.Update();
let frame = stream.PeekFrame()
expect(frame).not.toBeNull();
//create another canvas but with WebGL context
//this is where we copy the texture to
let canvas = document.createElement("canvas");
canvas.width = testcontainer.canvas.width;
canvas.height = testcontainer.canvas.height;
//document.body.appendChild(canvas);
let gl = canvas.getContext("webgl2");
//testing only. draw this one red
gl.clearColor(1,0,0,1);
gl.clear(gl.COLOR_BUFFER_BIT);
//create new texture and copy the image into it
let texture = createTexture(gl);
let res = frame.ToTexture(gl, texture);
expect(res).toBe(true);
//we attach our test texture to a frame buffer, then read from it to copy the data back from the GPU
//into an array dst_buffer
const dst_buffer = new Uint8Array(testcontainer.canvas.width * testcontainer.canvas.height * 4);
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
gl.readPixels(0, 0, testcontainer.canvas.width, testcontainer.canvas.height, gl.RGBA, gl.UNSIGNED_BYTE, dst_buffer);
//check if we have the expected blue color we use to setup the testcontainer canvas
let r = dst_buffer[0];
let g = dst_buffer[1];
let b = dst_buffer[2];
let a = dst_buffer[3];
expect(r).toBe(0);
expect(g).toBe(0);
expect(b).toBe(255);
expect(a).toBe(255);
//TODO: could compare whole src / dst buffer to check if something is cut off
//const compare_buffer = frame.Buffer;
done();
});
});