Commit fcb4755d by Christoph

Merge branch 'videoinput'

parents fa8e1b58 641b1d97
<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta charset="utf-8" />
<title></title>
<script src="./bundle/apps.js"></script>
</head>
<body>
<div id="videoinputapp1">
<h1>Callapp1:</h1>
URL to connect:
<p class="callapp_url">
</p>
<input type="checkbox" name="audio" class="callapp_send_audio" checked autocomplete="off"> Audio
<input type="checkbox" name="video" class="callapp_send_video" checked autocomplete="off"> Video
<input type= "text" class="callapp_address" autocomplete="off">
<button class="callapp_button"> Join </button>
<div class="callapp_local_video">local video</div>
<div class="callapp_remote_video">remote video</div>
</div>
<canvas id="canvas1"> </canvas>
<script>
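//Animate the canvas with a changing fill color so apps.videoinputapp (called below) gets a moving test image.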
var rgbToHex = function (rgb) {
var hex = Number(rgb).toString(16);
if (hex.length < 2) {
hex = "0" + hex;
}
return hex;
};
const canvas = document.querySelector("#canvas1");
const ctx = canvas.getContext("2d");
let counter = 0;
setInterval(()=>{
const color = "#FFFF" + rgbToHex(counter%255);
ctx.fillStyle = color;
ctx.fillRect(0, 0, canvas.width, canvas.height);
counter++;
}, 50);
apps.videoinputapp(document.querySelector("#videoinputapp1"), canvas);
//apps.callapp(document.querySelector("#callapp2"));
</script>
</body>
</html>
\ No newline at end of file
This source diff could not be displayed because it is too large.
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>WebGL Demo</title>
<link rel="stylesheet" href="../webgl.css" type="text/css">
</head>
<body>
<canvas id="glcanvas" width="640" height="480"></canvas>
</body>
<script src="../bundle/awrtc.js"></script>
<script src="gl-matrix.js"></script>
<script src="webgl-demo_changed.js"></script>
<script>
let canvas = document.querySelector("#glcanvas");
let nconfig = new awrtc.NetworkConfig();
let call = new awrtc.BrowserWebRtcCall(nconfig);
call.addEventListener((sender, args) => {
if(args.Type === awrtc.CallEventType.FrameUpdate)
{
let gl = canvas.getContext("webgl");
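//globalTextureId / globalTextureWidth / globalTextureHeight are presumably set up in webgl-demo_changed.js
//reallocate the texture storage whenever the incoming frame size changes; ToTexture below only overwrites the pixels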
if(args.Frame.Width != globalTextureWidth || args.Frame.Height != globalTextureHeight)
{
const pixel = new Uint8Array(args.Frame.Width * args.Frame.Height * 3 );
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, args.Frame.Width, args.Frame.Height, 0, gl.RGB, gl.UNSIGNED_BYTE, pixel);
globalTextureWidth = args.Frame.Width ;
globalTextureHeight = args.Frame.Height ;
}
args.Frame.ToTexture(gl, globalTextureId);
}
});
//As the system is designed for real-time graphics, we have to call the Update method. Events are only
//triggered during this Update call!
let intervalId = setInterval(() => {
call.Update();
}, 50);
let config = new awrtc.MediaConfig();
config.Audio = false;
config.Video = true;
config.FrameUpdates = true;
config.IdealWidth = 640;
config.IdealHeight = 480;
config.IdealFps = 30;
console.log("requested config:" + JSON.stringify(config));
call.Configure(config);
</script>
</html>
\ No newline at end of file
Slightly changed WebGL example from Mozilla to test the frame copy from WebRTC -> VideoElement -> WebGL texture.
Source:
https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/Tutorial/Using_textures_in_WebGL
https://github.com/mdn/webgl-examples/tree/gh-pages/tutorial/sample6
\ No newline at end of file
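For reference, the core of that copy path is small: once the WebRTC stream plays in an HTMLVideoElement, the element itself can be uploaded into a WebGL texture. A minimal plain-WebGL sketch of the idea (independent of awrtc; the element selectors and the render loop are illustrative assumptions):
const glCanvas = document.querySelector("#glcanvas");
const gl = glCanvas.getContext("webgl");
const video = document.querySelector("video"); // element currently playing the WebRTC stream
const texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
// NPOT-safe parameters so arbitrary video resolutions work
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
function upload() {
    if (video.readyState >= video.HAVE_CURRENT_DATA) {
        gl.bindTexture(gl.TEXTURE_2D, texture);
        // WebGL accepts an HTMLVideoElement directly as the pixel source
        gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, video);
    }
    requestAnimationFrame(upload);
}
requestAnimationFrame(upload);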
// Karma configuration
// Generated on Mon Jun 24 2019 19:59:32 GMT+1200 (New Zealand Standard Time)
module.exports = function(config) {
config.set({
// base path that will be used to resolve all patterns (eg. files, exclude)
basePath: '',
// frameworks to use
// available frameworks: https://npmjs.org/browse/keyword/karma-adapter
frameworks: ['jasmine'],
// list of files / patterns to load in the browser
files: [
'build/bundle/*.js'
],
// list of files / patterns to exclude
exclude: [
],
// preprocess matching files before serving them to the browser
// available preprocessors: https://npmjs.org/browse/keyword/karma-preprocessor
preprocessors: {
},
// test results reporter to use
// possible values: 'dots', 'progress'
// available reporters: https://npmjs.org/browse/keyword/karma-reporter
reporters: ['progress'],
// web server port
port: 9876,
// enable / disable colors in the output (reporters and logs)
colors: true,
// level of logging
// possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG
logLevel: config.LOG_INFO,
// enable / disable watching file and executing tests whenever any file changes
autoWatch: true,
// start these browsers
// available browser launchers: https://npmjs.org/browse/keyword/karma-launcher
browsers: ['FirefoxCustom'],
customLaunchers: {
FirefoxCustom: {
base: 'Firefox',
prefs: {
'media.navigator.permission.disabled': true,
'media.navigator.streams.fake' : true
}
}
},
// Continuous Integration mode
// if true, Karma captures browsers, runs the tests and exits
singleRun: false,
// Concurrency level
// how many browsers should be started simultaneously
concurrency: Infinity
})
}
{
"name": "awrtc_browser",
"version": "1.984.5",
"version": "0.985.0",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
......
{
"name": "awrtc_browser",
"version": "1.984.5",
"version": "1.985.0",
"description": "Compatible browser implementation to the Unity asset WebRTC Video Chat. Try examples in build folder",
"author": "because-why-not.com Limited",
"license": "BSD-3-Clause",
......
......@@ -29,7 +29,6 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import * as awrtc from "../awrtc/index"
import { ConnectionId } from "../awrtc/index";
/**
* Main (and most complicated) example for using BrowserWebRtcCall.
......@@ -357,7 +356,7 @@ export class CallApp
this.mUiLocalVideoParent.appendChild(video);
}
private Ui_OnRemoteVideo(video : HTMLVideoElement, id: ConnectionId){
private Ui_OnRemoteVideo(video : HTMLVideoElement, id: awrtc.ConnectionId){
this.mUiRemoteVideoParent.appendChild( document.createElement("br"));
this.mUiRemoteVideoParent.appendChild(new Text("connection " + id.id));
......
......@@ -256,7 +256,6 @@ class MinimalCall
//other.
export function BrowserWebRtcCall_minimal() {
awrtc.BrowserMediaStream.sUseLazyFrames = true;
let netConfig = new awrtc.NetworkConfig();
netConfig.IsConference = false;
netConfig.SignalingUrl = DefaultValues.Signaling;
......
......@@ -30,4 +30,5 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
export * from "./apphelpers"
export * from "./testapps"
export * from "./examples"
export * from "./callapp"
\ No newline at end of file
export * from "./callapp"
export * from "./videoinputapp"
\ No newline at end of file
......@@ -29,7 +29,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import * as awrtc from "../awrtc/index"
import {DefaultValues, GetRandomKey} from "./apphelpers"
import { DeviceApi, DeviceInfo } from "../awrtc/index";
import { DeviceApi, DeviceInfo, BrowserMediaStream } from "../awrtc/index";
//This file only contains badly maintained
//test apps. Use only for experimentation.
......@@ -387,26 +387,34 @@ class FpsCounter
lastRefresh = 0;
fps = 0;
counter = 0;
isNew = false;
public get Fps()
{
return Math.round(this.fps);
}
public get Counter()
public get IsNew() : boolean
{
return this.counter;
if(this.isNew){
this.isNew = false;
return true;
}
return false;
}
Update():void
{
this.counter++;
let diff = new Date().getTime() - this.lastRefresh;
if(diff > 1000)
let refresh_time = 2000;
if(diff > refresh_time)
{
this.fps = this.counter / (diff / 1000);
this.counter = 0;
this.lastRefresh = new Date().getTime();
this.isNew = true;
}
}
}
......@@ -415,7 +423,7 @@ class FpsCounter
//and accesses the resulting frame data directly
export function BrowserMediaNetwork_frameaccess() {
//BrowserMediaStream.DEFAULT_FRAMERATE = 60;
//awrtc.BrowserMediaStream.DEBUG_SHOW_ELEMENTS = true;
let address = GetRandomKey();
......@@ -427,8 +435,15 @@ export function BrowserMediaNetwork_frameaccess() {
let network2 = new awrtc.BrowserMediaNetwork(networkConfig);
let mediaConfig1 = new awrtc.MediaConfig();
mediaConfig1.Audio = true;
mediaConfig1.Audio = false;
mediaConfig1.Video = true;
/*
mediaConfig1.IdealWidth = 320;
mediaConfig1.IdealHeight = 240;
//fps seems to be ignored by browsers even if
//the camera specifically supports that setting
mediaConfig1.IdealFps = 15;
*/
let mediaConfig2 = new awrtc.MediaConfig();
mediaConfig2.Audio = false;
mediaConfig2.Video = false;
......@@ -436,6 +451,7 @@ export function BrowserMediaNetwork_frameaccess() {
let localFps = new FpsCounter();
let remoteFps = new FpsCounter();
let loopRate = new FpsCounter();
......@@ -466,15 +482,17 @@ export function BrowserMediaNetwork_frameaccess() {
setInterval(() => {
network1.Update();
loopRate.Update();
if(loopRate.IsNew)
console.log("Loop rate: " + loopRate.Fps);
let frame1: awrtc.IFrameData = null;
let frame2: awrtc.IFrameData = null;
frame1 = network1.TryGetFrame(awrtc.ConnectionId.INVALID);
if (frame1 != null)
{
localFps.Update();
if(localFps.Counter % 30 == 0)
if(localFps.IsNew)
console.log("local1 width" + frame1.Width + " height:" + frame1.Height + "fps: " + localFps.Fps + " data:" + frame1.Buffer[0]);
}
......@@ -515,7 +533,7 @@ export function BrowserMediaNetwork_frameaccess() {
if (frame2 != null)
{
remoteFps.Update();
if(remoteFps.Counter % 30 == 0)
if(remoteFps.IsNew)
console.log("remote2 width" + frame2.Width + " height:" + frame2.Height + "fps: " + remoteFps.Fps + " data:" + frame2.Buffer[0]);
}
}
......
/*
Copyright (c) 2019, because-why-not.com Limited
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
//obsolete. not needed for unity build anymore
//special entry point only needed for backwards compatibility
//it will merge awrtc namespace into window so old code still works
//that accesses objects directly instead of using the global awrtc object
//the index will include all external modules
import * as awrtc from "./index"
//we merge awrtc into the global namespace
Object.assign(window, awrtc);
//for less risky global access
(window as any).awrtc = awrtc;
......@@ -42,4 +42,5 @@ export * from "./media/index"
//it could as well be built and deployed separately
export * from "./media_browser/index"
export * from "./unity/index"
console.debug("loading awrtc modules completed");
\ No newline at end of file
console.debug("loading awrtc modules completed!");
......@@ -59,6 +59,15 @@ export class IFrameData {
}
public constructor() { }
public ToTexture(gl: WebGL2RenderingContext, texture: WebGLTexture) : boolean{
return false;
}
/*
public ToTexture2(gl: WebGL2RenderingContext) : WebGLTexture{
return null;
}
*/
}
//Container for the raw bytes of the current frame + height and width.
......@@ -96,6 +105,10 @@ export class RawFrame extends IFrameData{
* only create a lazy frame which will delay the creation of the RawFrame until the user actually tries
* to access any data.
* Thus if the game slows down or the user doesn't access any data the expensive copy is avoided.
*
* This comes with the downside of risking a change in Width / Height at the moment: in theory the video could
* change its resolution, causing the values of Width / Height to change over time before Buffer is accessed to create
* a copy that will be safe to use. This should be ok as long as the frame is used at the time it is received.
*/
export class LazyFrame extends IFrameData{
......@@ -113,20 +126,42 @@ export class LazyFrame extends IFrameData{
return this.mRawFrame.Buffer;
}
/**Returns the expected width of the frame.
* Watch out: this might change in between frames!
*
*/
public get Width(): number {
if (this.mRawFrame == null)
{
return this.mFrameGenerator.VideoElement.videoWidth;
}else{
return this.mRawFrame.Width;
}
/*
this.GenerateFrame();
if (this.mRawFrame == null)
return -1;
return this.mRawFrame.Width;
*/
}
/**Returns the expected height of the frame.
* Watch out: this might change in between frames!
*
*/
public get Height(): number {
if (this.mRawFrame == null)
{
return this.mFrameGenerator.VideoElement.videoHeight;
}else{
return this.mRawFrame.Height;
}
/*
this.GenerateFrame();
if (this.mRawFrame == null)
return -1;
return this.mRawFrame.Height;
*/
}
......@@ -135,6 +170,37 @@ export class LazyFrame extends IFrameData{
this.mFrameGenerator = frameGenerator;
}
/**Intended for use via the Unity plugin.
* Will copy the image directly into a texture to avoid the overhead of a CPU-side copy.
*
* The given texture should have the correct size before calling this method.
*
* @param gl
* @param texture
*/
public ToTexture(gl: WebGL2RenderingContext, texture: WebGLTexture) : boolean{
gl.bindTexture(gl.TEXTURE_2D, texture);
/*
const level = 0;
const internalFormat = gl.RGBA;
const srcFormat = gl.RGBA;
const srcType = gl.UNSIGNED_BYTE;
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat, srcFormat, srcType, this.mFrameGenerator.VideoElement);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
*/
gl.texSubImage2D(gl.TEXTURE_2D, 0, 0, 0, gl.RGB, gl.UNSIGNED_BYTE, this.mFrameGenerator.VideoElement);
return true;
}
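/* Typical usage (mirrors the WebGL demo in this commit): allocate the texture storage once via
 * gl.texImage2D at Frame.Width x Frame.Height, then call Frame.ToTexture(gl, texture) on every
 * FrameUpdate event to refresh its contents. */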
/*
public ToTexture2(gl: WebGL2RenderingContext) : WebGLTexture{
let tex = gl.createTexture()
this.ToTexture(gl, tex)
return;
}
*/
//Called before any frame data is accessed, triggering the creation of the raw frame data
private GenerateFrame() {
......
......@@ -36,6 +36,7 @@ import { IFrameData } from "../media/RawFrame";
import { MediaPeer } from "./MediaPeer";
import { BrowserMediaStream } from "./BrowserMediaStream";
import { DeviceApi } from "./DeviceApi";
import { Media } from "./Media";
/**Avoid using this class directly whenever possible. Use BrowserWebRtcCall instead.
......@@ -60,6 +61,7 @@ import { DeviceApi } from "./DeviceApi";
*/
export class BrowserMediaNetwork extends WebRtcNetwork implements IMediaNetwork {
//media configuration set by the user
private mMediaConfig: MediaConfig = null;
//keeps track of audio / video tracks based on local devices
......@@ -69,6 +71,7 @@ export class BrowserMediaNetwork extends WebRtcNetwork implements IMediaNetwork
private mConfigurationError: string = null;
private mMediaEvents: Queue<MediaEvent> = new Queue<MediaEvent>();
constructor(config: NetworkConfig) {
super(BrowserMediaNetwork.BuildSignalingConfig(config.SignalingUrl),
......@@ -91,103 +94,12 @@ export class BrowserMediaNetwork extends WebRtcNetwork implements IMediaNetwork
if (config.Audio || config.Video) {
//ugly part starts -> call get user media data (no typescript support)
//different browsers have different calls...
//check getSupportedConstraints()???
//see https://w3c.github.io/mediacapture-main/getusermedia.html#constrainable-interface
//set default ideal to very common low 320x240 to avoid overloading weak computers
var constraints = {
audio: config.Audio
} as any;
let width = {} as any;
let height = {} as any;
let video = {} as any;
let fps = {} as any;
if (config.MinWidth != -1)
width.min = config.MinWidth;
if (config.MaxWidth != -1)
width.max = config.MaxWidth;
if (config.IdealWidth != -1)
width.ideal = config.IdealWidth;
if (config.MinHeight != -1)
height.min = config.MinHeight;
if (config.MaxHeight != -1)
height.max = config.MaxHeight;
if (config.IdealHeight != -1)
height.ideal = config.IdealHeight;
if (config.MinFps != -1)
fps.min = config.MinFps;
if (config.MaxFps != -1)
fps.max = config.MaxFps;
if (config.IdealFps != -1)
fps.ideal = config.IdealFps;
//user requested specific device? get it now to properly add it to the
//constraints later
let deviceId:string = null;
if(config.Video && config.VideoDeviceName && config.VideoDeviceName !== "")
{
deviceId = DeviceApi.GetDeviceId(config.VideoDeviceName);
SLog.L("using device " + config.VideoDeviceName);
if(deviceId !== null && deviceId !== "")
{
//SLog.L("using device id " + deviceId);
}
else{
deviceId = null;
SLog.LW("Failed to find deviceId for label " + config.VideoDeviceName + "! Using default device instead");
}
}
//watch out: unity changed behaviour and will now
//give 0 / 1 instead of false/true
//using === won't work
if(config.Video == false)
SLog.L("calling GetUserMedia. Media config: " + JSON.stringify(config));
if(DeviceApi.IsUserMediaAvailable())
{
//video is off
video = false;
}else {
if(Object.keys(width).length > 0){
video.width = width;
}
if(Object.keys(height).length > 0){
video.height = height;
}
if(Object.keys(fps).length > 0){
video.frameRate = fps;
}
if(deviceId !== null){
video.deviceId = {"exact":deviceId};
}
let promise : Promise<MediaStream> = null;
promise = Media.SharedInstance.getUserMedia(config);
//if we didn't add anything we need to set it to true
//at least (I assume?)
if(Object.keys(video).length == 0){
video = true;
}
}
constraints.video = video;
SLog.L("calling GetUserMedia. Media constraints: " + JSON.stringify(constraints));
if(navigator && navigator.mediaDevices)
{
let promise = navigator.mediaDevices.getUserMedia(constraints);
promise.then((stream) => { //user gave permission
//totally unrelated -> user gave access to devices. use this
......@@ -196,6 +108,7 @@ export class BrowserMediaNetwork extends WebRtcNetwork implements IMediaNetwork
//call worked -> setup a frame buffer that deals with the rest
this.mLocalStream = new BrowserMediaStream(stream as MediaStream);
//console.debug("Local tracks: ", stream.getTracks());
this.mLocalStream.InternalStreamAdded = (stream)=>{
this.EnqueueMediaEvent(MediaEventType.StreamAdded, ConnectionId.INVALID, this.mLocalStream.VideoElement);
};
......
......@@ -28,6 +28,8 @@ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import { SLog } from "../network/index";
import { MediaConfig } from "media/MediaConfig";
import { VideoInput } from "./VideoInput";
export class DeviceInfo
{
......@@ -79,7 +81,11 @@ export class DeviceApi
{
let index = DeviceApi.sUpdateEvents.indexOf(evt);
if(index >= 0)
{
DeviceApi.sUpdateEvents.splice(index, 1);
}else{
SLog.LW("Tried to remove an unknown event handler in DeviceApi.RemOnChangedHandler");
}
}
private static TriggerChangedEvent()
......@@ -166,6 +172,15 @@ export class DeviceApi
{
return DeviceApi.sDeviceInfo;
}
public static GetVideoDevices(): string[]{
const devices = DeviceApi.Devices;
const keys = Object.keys(devices);
const labels = keys.map((x)=>{return devices[x].label});
return labels;
}
public static Reset()
{
DeviceApi.sUpdateEvents = [];
......@@ -196,7 +211,7 @@ export class DeviceApi
DeviceApi.Update();
}
static ENUM_FAILED = "Can't access mediaDevices or enumerateDevices";
/**Updates the device list based on the currently granted access.
* Devices are numbered if their names aren't known.
*/
......@@ -210,9 +225,28 @@ export class DeviceApi
.then(DeviceApi.InternalOnEnum)
.catch(DeviceApi.InternalOnErrorCatch);
}else{
DeviceApi.InternalOnErrorString("Can't access mediaDevices or enumerateDevices");
DeviceApi.InternalOnErrorString(DeviceApi.ENUM_FAILED);
}
}
public static async UpdateAsync():Promise<void>
{
return new Promise((resolve, fail)=>{
DeviceApi.sLastError = null;
if(DeviceApi.IsApiAvailable() == false)
{
DeviceApi.InternalOnErrorString(DeviceApi.ENUM_FAILED);
fail(DeviceApi.ENUM_FAILED);
}
resolve();
}).then(()=>{
DeviceApi.sIsPending = true;
return navigator.mediaDevices.enumerateDevices()
.then(DeviceApi.InternalOnEnum)
.catch(DeviceApi.InternalOnErrorCatch);
});
}
/**Checks if the API is available in the browser.
* false - browser doesn't support this API
* true - browser supports the API (might still refuse to give
......@@ -255,4 +289,127 @@ export class DeviceApi
}
return null;
}
public static IsUserMediaAvailable()
{
if(navigator && navigator.mediaDevices)
return true;
return false;
}
public static ToConstraints(config: MediaConfig): MediaStreamConstraints
{
//ugly part starts -> call get user media data (no typescript support)
//different browsers have different calls...
//check getSupportedConstraints()???
//see https://w3c.github.io/mediacapture-main/getusermedia.html#constrainable-interface
//set default ideal to very common low 320x240 to avoid overloading weak computers
var constraints = {
audio: config.Audio
} as any;
let width = {} as any;
let height = {} as any;
let video = {} as any;
let fps = {} as any;
if (config.MinWidth != -1)
width.min = config.MinWidth;
if (config.MaxWidth != -1)
width.max = config.MaxWidth;
if (config.IdealWidth != -1)
width.ideal = config.IdealWidth;
if (config.MinHeight != -1)
height.min = config.MinHeight;
if (config.MaxHeight != -1)
height.max = config.MaxHeight;
if (config.IdealHeight != -1)
height.ideal = config.IdealHeight;
if (config.MinFps != -1)
fps.min = config.MinFps;
if (config.MaxFps != -1)
fps.max = config.MaxFps;
if (config.IdealFps != -1)
fps.ideal = config.IdealFps;
//user requested specific device? get it now to properly add it to the
//constraints later
let deviceId:string = null;
if(config.Video && config.VideoDeviceName && config.VideoDeviceName !== "")
{
deviceId = DeviceApi.GetDeviceId(config.VideoDeviceName);
SLog.L("using device " + config.VideoDeviceName);
if(deviceId === "")
{
//Workaround for Chrome 81: If no camera access is allowed chrome returns the deviceId ""
//thus we can only request any video device. We can't select a specific one
deviceId = null;
}else if(deviceId !== null)
{
//all good
}
else{
SLog.LE("Failed to find deviceId for label " + config.VideoDeviceName);
throw new Error("Unknown device " + config.VideoDeviceName);
}
}
//watch out: unity changed behaviour and will now
//give 0 / 1 instead of false/true
//using === won't work
if(config.Video == false)
{
//video is off
video = false;
}else {
if(Object.keys(width).length > 0){
video.width = width;
}
if(Object.keys(height).length > 0){
video.height = height;
}
if(Object.keys(fps).length > 0){
video.frameRate = fps;
}
if(deviceId !== null){
video.deviceId = {"exact":deviceId};
}
//if we didn't add anything we need to set it to true
//at least (I assume?)
if(Object.keys(video).length == 0){
video = true;
}
}
constraints.video = video;
return constraints;
}
public static getBrowserUserMedia(constraints?: MediaStreamConstraints): Promise<MediaStream>{
return navigator.mediaDevices.getUserMedia(constraints);
}
public static getAssetUserMedia(config: MediaConfig): Promise<MediaStream>{
return new Promise((resolve)=>{
const res = DeviceApi.ToConstraints(config);
resolve(res);
}).then((constraints)=>{
return DeviceApi.getBrowserUserMedia(constraints as MediaStreamConstraints);
});
}
}
\ No newline at end of file
import { DeviceApi } from "./DeviceApi";
import { VideoInput } from "./VideoInput";
import { MediaConfig } from "media/MediaConfig";
export class Media{
//experimental. Will be used instead of the device api to create streams
private static sSharedInstance :Media = new Media();
/**
* Singleton used for now as the browser version is still missing a proper factory.
* Might be removed later.
*/
public static get SharedInstance(){
return this.sSharedInstance;
}
public static ResetSharedInstance(){
this.sSharedInstance = new Media();
}
private videoInput: VideoInput = null;
public get VideoInput() : VideoInput{
if(this.videoInput === null)
this.videoInput = new VideoInput();
return this.videoInput;
}
public constructor(){
}
public GetVideoDevices(): string[] {
const real_devices = DeviceApi.GetVideoDevices();
const virtual_devices : string[] = this.VideoInput.GetDeviceNames();
return real_devices.concat(virtual_devices);
}
public static IsNameSet(videoDeviceName: string) : boolean{
if(videoDeviceName !== null && videoDeviceName !== "" )
{
return true;
}
return false;
}
public getUserMedia(config: MediaConfig): Promise<MediaStream>{
if(config.Video && Media.IsNameSet(config.VideoDeviceName)
&& this.videoInput != null
&& this.videoInput.HasDevice(config.VideoDeviceName))
{
let res = Promise.resolve().then(async ()=>{
let stream = this.videoInput.GetStream(config.VideoDeviceName);
if(config.Audio)
{
let constraints = {} as MediaStreamConstraints
constraints.audio = true;
let audio_stream = await DeviceApi.getBrowserUserMedia(constraints);
stream.addTrack(audio_stream.getTracks()[0])
}
return stream;
})
return res;
}
return DeviceApi.getAssetUserMedia(config);
}
}
\ No newline at end of file
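A hedged usage sketch tying Media, VideoInput and MediaConfig together: register a canvas as a virtual video device, then request it by name so getUserMedia resolves to the virtual stream instead of a real camera. The import path, device name and canvas selector are illustrative assumptions:
import { Media, MediaConfig } from "../awrtc/index";

async function useCanvasAsCamera(): Promise<MediaStream> {
    const canvas = document.querySelector("canvas") as HTMLCanvasElement;
    //register the canvas as a virtual device (name, width, height, fps are caller-chosen)
    Media.SharedInstance.VideoInput.AddCanvasDevice(canvas, "canvas_device", canvas.width, canvas.height, 30);

    const config = new MediaConfig();
    config.Audio = false;
    config.Video = true;
    config.VideoDeviceName = "canvas_device"; //matches the virtual device registered above
    //Media.getUserMedia picks the virtual device because VideoInput knows this name
    return Media.SharedInstance.getUserMedia(config);
}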
......@@ -32,3 +32,5 @@ export * from './BrowserWebRtcCall'
export * from './BrowserMediaStream'
export * from './MediaPeer'
export * from './DeviceApi'
export * from './VideoInput'
export * from './Media'
......@@ -213,22 +213,42 @@ export class SLog {
SLog.LogError(msg, tag);
}
public static Log(msg: any, tag?:string): void {
if(!tag)
tag = "";
if(SLog.sLogLevel >= SLogLevel.Info)
console.log(msg, tag);
{
if(tag)
{
console.log(msg, tag);
}else{
console.log(msg);
}
}
}
public static LogWarning(msg: any, tag?:string): void {
if(!tag)
tag = "";
if(SLog.sLogLevel >= SLogLevel.Warnings)
console.warn(msg, tag);
{
if(tag)
{
console.warn(msg, tag);
}else{
console.warn(msg);
}
}
}
public static LogError(msg: any, tag?:string) {
if(!tag)
tag = "";
if(SLog.sLogLevel >= SLogLevel.Errors)
console.error(msg, tag);
{
if(tag)
{
console.error(msg, tag);
}else{
console.error(msg);
}
}
}
}
\ No newline at end of file
......@@ -31,7 +31,10 @@
"./media_browser/BrowserMediaNetwork.ts",
"./media_browser/BrowserWebRtcCall.ts",
"./media_browser/BrowserMediaStream.ts",
"./media_browser/DeviceApi.ts",
"./media_browser/MediaPeer.ts",
"./media_browser/VideoInput.ts",
"./media_browser/Media.ts",
"./media_browser/index.ts",
"./unity/CAPI.ts",
"./unity/index.ts",
......
......@@ -32,7 +32,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import {SLog, WebRtcNetwork, SignalingConfig, NetworkEvent, ConnectionId, LocalNetwork, WebsocketNetwork} from "../network/index"
import { MediaConfigurationState, NetworkConfig, MediaConfig } from "../media/index";
import { BrowserMediaStream, BrowserMediaNetwork, DeviceApi, BrowserWebRtcCall } from "../media_browser/index";
import { BrowserMediaStream, BrowserMediaNetwork, DeviceApi, BrowserWebRtcCall, Media, VideoInputType } from "../media_browser/index";
var CAPI_InitMode = {
......@@ -425,9 +425,6 @@ export function CAPI_MediaNetwork_TryGetFrame(lIndex: number, lConnectionId: num
if (frame == null || frame.Buffer == null) {
return false;
} else {
//TODO: copy frame over
lWidthInt32Array[lWidthIntArrayIndex] = frame.Width;
lHeightInt32Array[lHeightIntArrayIndex] = frame.Height;
......@@ -438,6 +435,61 @@ export function CAPI_MediaNetwork_TryGetFrame(lIndex: number, lConnectionId: num
}
}
export function CAPI_MediaNetwork_TryGetFrame_ToTexture(lIndex: number, lConnectionId: number,
lWidth: number,
lHeight: number,
gl:WebGL2RenderingContext, texture:WebGLTexture): boolean
{
//console.log("CAPI_MediaNetwork_TryGetFrame_ToTexture");
let mediaNetwork = gCAPI_WebRtcNetwork_Instances[lIndex] as BrowserMediaNetwork;
let frame = mediaNetwork.TryGetFrame(new ConnectionId(lConnectionId));
if (frame == null ) {
return false;
} else if (frame.Width != lWidth || frame.Height != lHeight) {
SLog.LW("CAPI_MediaNetwork_TryGetFrame_ToTexture failed. Width height expected: " + frame.Width + "x" + frame.Height + " but received " + lWidth + "x" + lHeight);
return false;
}else {
frame.ToTexture(gl, texture);
return true;
}
}
/*
export function CAPI_MediaNetwork_TryGetFrame_ToTexture2(lIndex: number, lConnectionId: number,
lWidthInt32Array: Int32Array, lWidthIntArrayIndex: number,
lHeightInt32Array: Int32Array, lHeightIntArrayIndex: number,
gl:WebGL2RenderingContext): WebGLTexture
{
//console.log("CAPI_MediaNetwork_TryGetFrame_ToTexture");
let mediaNetwork = gCAPI_WebRtcNetwork_Instances[lIndex] as BrowserMediaNetwork;
let frame = mediaNetwork.TryGetFrame(new ConnectionId(lConnectionId));
if (frame == null) {
return false;
} else {
lWidthInt32Array[lWidthIntArrayIndex] = frame.Width;
lHeightInt32Array[lHeightIntArrayIndex] = frame.Height;
let texture = frame.ToTexture2(gl);
return texture;
}
}
*/
export function CAPI_MediaNetwork_TryGetFrame_Resolution(lIndex: number, lConnectionId: number,
lWidthInt32Array: Int32Array, lWidthIntArrayIndex: number,
lHeightInt32Array: Int32Array, lHeightIntArrayIndex: number): boolean
{
let mediaNetwork = gCAPI_WebRtcNetwork_Instances[lIndex] as BrowserMediaNetwork;
let frame = mediaNetwork.PeekFrame(new ConnectionId(lConnectionId));
if (frame == null) {
return false;
} else {
lWidthInt32Array[lWidthIntArrayIndex] = frame.Width;
lHeightInt32Array[lHeightIntArrayIndex] = frame.Height;
return true;
}
}
//Returns the frame buffer size or -1 if no frame is available
export function CAPI_MediaNetwork_TryGetFrameDataLength(lIndex: number, connectionId: number) : number {
let mediaNetwork = gCAPI_WebRtcNetwork_Instances[lIndex] as BrowserMediaNetwork;
......@@ -497,19 +549,63 @@ export function CAPI_DeviceApi_LastUpdate():number
return DeviceApi.LastUpdate;
}
export function CAPI_DeviceApi_Devices_Length():number{
return Object.keys(DeviceApi.Devices).length;
export function CAPI_Media_GetVideoDevices_Length():number{
return Media.SharedInstance.GetVideoDevices().length;
}
export function CAPI_DeviceApi_Devices_Get(index:number):string{
let keys = Object.keys(DeviceApi.Devices);
if(keys.length > index)
export function CAPI_Media_GetVideoDevices(index:number):string{
const devs = Media.SharedInstance.GetVideoDevices();
if(devs.length > index)
{
let key = keys[index];
return DeviceApi.Devices[key].label;
return devs[index];
}
else
{
SLog.LE("Requested device with index " + index + " does not exist.");
//it needs to be "" to behave the same as the C++ API. std::string can't be null
return "";
}
}
\ No newline at end of file
}
export function CAPI_VideoInput_AddCanvasDevice(query:string, name:string, width: number, height: number, fps: number): boolean{
let canvas = document.querySelector(query) as HTMLCanvasElement;
if(canvas){
console.debug("CAPI_VideoInput_AddCanvasDevice", {query, name, width, height, fps});
if(width <= 0 || height <= 0){
width = canvas.width;
height = canvas.height;
}
Media.SharedInstance.VideoInput.AddCanvasDevice(canvas as HTMLCanvasElement, name, width, height, fps);//, width, height, fps);
return true;
}
return false;
}
export function CAPI_VideoInput_AddDevice(name:string, width: number, height: number, fps: number){
Media.SharedInstance.VideoInput.AddDevice(name, width, height, fps);
}
export function CAPI_VideoInput_RemoveDevice(name:string){
Media.SharedInstance.VideoInput.RemoveDevice(name);
}
export function CAPI_VideoInput_UpdateFrame(name:string,
lBufferUint8Array: Uint8Array, lBufferUint8ArrayOffset: number, lBufferUint8ArrayLength: number,
width: number, height: number,
rotation: number, firstRowIsBottom: boolean) : boolean
{
let dataPtrClamped : Uint8ClampedArray = null;
if(lBufferUint8Array && lBufferUint8ArrayLength > 0){
dataPtrClamped = new Uint8ClampedArray(lBufferUint8Array.buffer, lBufferUint8ArrayOffset, lBufferUint8ArrayLength);
}
return Media.SharedInstance.VideoInput.UpdateFrame(name, dataPtrClamped, width, height, VideoInputType.ARGB, rotation, firstRowIsBottom);
}
//TODO: This needs a proper implementation
//so far only works if unity is the only canvas and uses webgl2
export function GetUnityCanvas() : HTMLCanvasElement
{
return document.querySelector("canvas");
}
export function GetUnityContext() : WebGL2RenderingContext
{
return GetUnityCanvas().getContext("webgl2");
}
......@@ -27,4 +27,7 @@ CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import { Media } from "../media_browser/Media";
import { GetUnityCanvas } from "./CAPI";
export * from "./CAPI"
......@@ -29,8 +29,10 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
//current setup needs to load everything as a module
import {DeviceApi, CAPI_DeviceApi_Update,
CAPI_DeviceApi_RequestUpdate, CAPI_DeviceApi_Devices_Length,
CAPI_DeviceApi_Devices_Get} from "../awrtc/index"
CAPI_DeviceApi_RequestUpdate, CAPI_Media_GetVideoDevices_Length,
CAPI_Media_GetVideoDevices,
MediaConfig,
Media} from "../awrtc/index"
export function DeviceApiTest_export()
{
......@@ -132,11 +134,11 @@ describe("DeviceApiTest", () => {
let update2complete = false;
let deviceCount = 0;
const devices_length_unitialized = CAPI_DeviceApi_Devices_Length();
const devices_length_unitialized = CAPI_Media_GetVideoDevices_Length();
expect(devices_length_unitialized).toBe(0);
DeviceApi.AddOnChangedHandler(()=>{
let dev_length = CAPI_DeviceApi_Devices_Length();
let dev_length = CAPI_Media_GetVideoDevices_Length();
expect(dev_length).not.toBe(0);
expect(dev_length).toBe(Object.keys(DeviceApi.Devices).length);
......@@ -145,7 +147,7 @@ describe("DeviceApiTest", () => {
for(let k of keys)
{
let expectedVal = DeviceApi.Devices[k].label;
let actual = CAPI_DeviceApi_Devices_Get(counter);
let actual = CAPI_Media_GetVideoDevices(counter);
expect(actual).toBe(expectedVal);
counter++;
......@@ -153,8 +155,114 @@ describe("DeviceApiTest", () => {
done();
});
CAPI_DeviceApi_Update();
});
it("isMediaAvailable", () => {
const res = DeviceApi.IsUserMediaAvailable();
expect(res).toBe(true);
});
it("getUserMedia", async () => {
let stream = await DeviceApi.getBrowserUserMedia({audio:true});
expect(stream).not.toBeNull();
expect(stream.getVideoTracks().length).toBe(0);
expect(stream.getAudioTracks().length).toBe(1);
stream = await DeviceApi.getBrowserUserMedia({video:true});
expect(stream).not.toBeNull();
expect(stream.getAudioTracks().length).toBe(0);
expect(stream.getVideoTracks().length).toBe(1);
});
it("getAssetMedia", async () => {
let config = new MediaConfig();
config.Audio = true;
config.Video = false;
let stream = await DeviceApi.getAssetUserMedia(config);
expect(stream).not.toBeNull();
expect(stream.getVideoTracks().length).toBe(0);
expect(stream.getAudioTracks().length).toBe(1);
config = new MediaConfig();
config.Audio = false;
config.Video = true;
stream = await DeviceApi.getAssetUserMedia(config);
expect(stream).not.toBeNull();
expect(stream.getAudioTracks().length).toBe(0);
expect(stream.getVideoTracks().length).toBe(1);
});
it("getAssetMedia_invalid", async () => {
let config = new MediaConfig();
config.Audio = false;
config.Video = true;
config.VideoDeviceName = "invalid name"
let error = null;
let stream :MediaStream = null;
console.log("Expecting error message: Failed to find deviceId for label invalid name");
try
{
stream = await DeviceApi.getAssetUserMedia(config);
}catch(err){
error = err;
}
expect(stream).toBeNull();
expect(error).toBeTruthy();
});
//check for a specific bug causing promise catch not to trigger correctly
//due to error in ToConstraints
it("getAssetMedia_invalid_promise", (done) => {
let config = new MediaConfig();
config.Audio = false;
config.Video = true;
config.VideoDeviceName = "invalid name"
let result: Promise<MediaStream> = null;
result = DeviceApi.getAssetUserMedia(config);
result.then(()=>{
fail("getAssetUserMedia returned but was expected to fail");
}).catch((error)=>{
expect(error).toBeTruthy();
done();
})
});
it("UpdateAsync", async (done) => {
expect(DeviceApi.GetVideoDevices().length).toBe(0);
await DeviceApi.UpdateAsync();
expect(DeviceApi.GetVideoDevices().length).toBeGreaterThan(0);
expect(DeviceApi.GetVideoDevices().length).toBe(CAPI_Media_GetVideoDevices_Length());
done();
});
/*
it("Devices", async () => {
DeviceApi.RequestUpdate
let config = new MediaConfig();
config.Audio = false;
config.Video = true;
config.VideoDeviceName = "invalid name"
let error = null;
let stream :MediaStream = null;
console.log("Expecting error message: Failed to find deviceId for label invalid name");
try
{
stream = await DeviceApi.getAssetUserMedia(config);
}catch(err){
error = err;
}
expect(stream).toBeNull();
expect(error).toBeTruthy();
});
*/
});
......@@ -34,3 +34,6 @@ export * from "./CallTest"
export * from "./MediaNetworkTest"
export * from "./BrowserApiTest"
export * from "./DeviceApiTest"
export * from "./VideoInputTest"
export * from "./MediaTest"
......@@ -21,6 +21,8 @@
"LocalNetworkTest.ts",
"MediaNetworkTest.ts",
"DeviceApiTest.ts",
"BrowserApiTest.ts"
"VideoInputTest.ts",
"BrowserApiTest.ts",
"MediaTest.ts"
]
}