Feature: update the renderer's Pixel Streaming to the latest version

pull/1939/head
ChenX 2 years ago
parent f66e1f9f5e
commit 8936edf28a

@ -5,7 +5,7 @@ export class WebRtcPlayer
pcClient: any;
dcClient: any;
tnClient: any;
sdpConstraints: { offerToReceiveAudio: number; offerToReceiveVideo: number; voiceActivityDetection: boolean; };
dataChannelOptions: { ordered: boolean; };
onVideoInitialised: any;
video: HTMLVideoElement;
@ -23,144 +23,398 @@ export class WebRtcPlayer
send: (data: any) => void;
getStats: (onStats: any) => void;
aggregateStats: (checkInterval: any) => void;
forceTURN: boolean;
forceMaxBundle: boolean;
startVideoMuted: any;
autoPlayAudio: any;
useMic: boolean;
preferSFU: boolean;
latencyTestTimings: { TestStartTimeMs: any; UEReceiptTimeMs: any; UEEncodeMs: any; UECaptureToSendMs: any; UETransmissionTimeMs: any; BrowserReceiptTimeMs: any; FrameDisplayDeltaTimeMs: any; Reset: () => void; SetUETimings: (UETimings: any) => void; SetFrameDisplayDeltaTime: (DeltaTimeMs: any) => void; OnAllLatencyTimingsReady: (Timings: any) => void; };
createWebRtcVideo: () => HTMLVideoElement;
availableVideoStreams: Map<any, any>;
setVideoEnabled: (enabled: any) => void;
startLatencyTest: (onTestStarted: any) => void;
receiveOffer: (offer: any) => void;
onWebRtcAnswer: any;
constructor(parOptions?: {
startVideoMuted?: boolean,
autoPlayAudio?: boolean,
peerConnectionOptions?: {
iceServers?: any[],
sdpSemantics: string,
offerExtmapAllowMixed: boolean,
bundlePolicy: string,
},
type: string,
})
{
parOptions = parOptions || {
"type": "config",
"peerConnectionOptions": {
"iceServers": [
{
"urls": [
"stun:stun.l.google.com:19302"
]
}
],
"sdpSemantics": "unified-plan",
"offerExtmapAllowMixed": false,
"bundlePolicy": "balanced"
}
};
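// Note: the default above mirrors the "config" message a Pixel Streaming signalling server
// typically sends on connect: a public Google STUN server for NAT traversal, unified-plan
// SDP semantics, and the "balanced" bundle policy.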
const self = this;
const urlParams = new URLSearchParams(window.location.search);
//**********************
//Config setup
//**********************
this.cfg = parOptions.peerConnectionOptions || {};
this.cfg.sdpSemantics = 'unified-plan';
// this.cfg.rtcAudioJitterBufferMaxPackets = 10;
// this.cfg.rtcAudioJitterBufferFastAccelerate = true;
// this.cfg.rtcAudioJitterBufferMinDelayMs = 0;
// If this is true in Chrome 89+ SDP is sent that is incompatible with UE Pixel Streaming 4.26 and below.
// However 4.27 Pixel Streaming does not need this set to false as it supports `offerExtmapAllowMixed`.
// tl;dr: keep the line below enabled for older versions of Pixel Streaming used with Chrome 89+.
this.cfg.offerExtmapAllowMixed = false;
this.forceTURN = urlParams.has('ForceTURN');
if (this.forceTURN)
{
console.log("Forcing TURN usage by setting ICE Transport Policy in peer connection config.");
this.cfg.iceTransportPolicy = "relay";
}
this.cfg.bundlePolicy = "balanced";
this.forceMaxBundle = urlParams.has('ForceMaxBundle');
if (this.forceMaxBundle)
{
this.cfg.bundlePolicy = "max-bundle";
}
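// "max-bundle" forces all audio, video and data onto a single underlying transport (one ICE
// candidate pair), whereas the default "balanced" policy negotiates one transport per media type.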
//**********************
//Variables
//**********************
this.pcClient = null;
this.dcClient = null;
this.tnClient = null;
this.sdpConstraints = {
offerToReceiveAudio: 1, //Note: if you don't need audio you can get improved latency by turning this off.
offerToReceiveVideo: 1,
voiceActivityDetection: false
};
// See https://www.w3.org/TR/webrtc/#dom-rtcdatachannelinit for values (this is needed for Firefox to be consistent with Chrome.)
this.dataChannelOptions = { ordered: true };
// This is useful if the video/audio needs to autoplay (without user input), as browsers do not allow non-muted autoplay of sound sources without user interaction.
this.startVideoMuted = typeof parOptions.startVideoMuted !== 'undefined' ? parOptions.startVideoMuted : false;
this.autoPlayAudio = typeof parOptions.autoPlayAudio !== 'undefined' ? parOptions.autoPlayAudio : true;
// To enable mic in browser use SSL/localhost and have ?useMic in the query string.
this.useMic = urlParams.has('useMic');
if (!this.useMic)
{
console.log("Microphone access is not enabled. Pass ?useMic in the url to enable it.");
}
// When ?useMic check for SSL or localhost
let isLocalhostConnection = location.hostname === "localhost" || location.hostname === "127.0.0.1";
let isHttpsConnection = location.protocol === 'https:';
if (this.useMic && !isLocalhostConnection && !isHttpsConnection)
{
this.useMic = false;
console.error("Microphone access in the browser will not work if you are not on HTTPS or localhost. Disabling mic access.");
console.error("For testing you can enable HTTP microphone access Chrome by visiting chrome://flags/ and enabling 'unsafely-treat-insecure-origin-as-secure'");
}
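// getUserMedia is only exposed in secure contexts (HTTPS or localhost), hence the check above.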
// Prefer SFU or P2P connection
this.preferSFU = urlParams.has('preferSFU');
console.log(this.preferSFU ?
"The browser will signal it would prefer an SFU connection. Remove ?preferSFU from the url to signal for P2P usage." :
"The browser will signal for a P2P connection. Pass ?preferSFU in the url to signal for SFU usage.");
// Latency tester
this.latencyTestTimings =
{
TestStartTimeMs: null,
UEReceiptTimeMs: null,
UEEncodeMs: null,
UECaptureToSendMs: null,
UETransmissionTimeMs: null,
BrowserReceiptTimeMs: null,
FrameDisplayDeltaTimeMs: null,
Reset: function ()
{
this.TestStartTimeMs = null;
this.UEReceiptTimeMs = null;
this.UEEncodeMs = null;
this.UECaptureToSendMs = null;
this.UETransmissionTimeMs = null;
this.BrowserReceiptTimeMs = null;
this.FrameDisplayDeltaTimeMs = null;
},
SetUETimings: function (UETimings)
{
this.UEReceiptTimeMs = UETimings.ReceiptTimeMs;
this.UEEncodeMs = UETimings.EncodeMs;
this.UECaptureToSendMs = UETimings.CaptureToSendMs;
this.UETransmissionTimeMs = UETimings.TransmissionTimeMs;
this.BrowserReceiptTimeMs = Date.now();
this.OnAllLatencyTimingsReady(this);
},
SetFrameDisplayDeltaTime: function (DeltaTimeMs)
{
if (this.FrameDisplayDeltaTimeMs == null)
{
this.FrameDisplayDeltaTimeMs = Math.round(DeltaTimeMs);
this.OnAllLatencyTimingsReady(this);
}
},
OnAllLatencyTimingsReady: function (Timings) { }
};
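// How the pieces fit together: startLatencyTest() below records TestStartTimeMs, the caller then
// sends a latency-test request to UE over the data channel, UE's reply lands in SetUETimings(),
// the frame display delta arrives from the stats loop via SetFrameDisplayDeltaTime(), and
// OnAllLatencyTimingsReady() is the hook callers override to consume the finished timings.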
//**********************
//Functions
//**********************
//Create Video element and expose that as a parameter
this.createWebRtcVideo = function ()
{
var video = document.createElement('video');
video.id = "streamingVideo";
video.playsInline = true;
//@ts-ignore
video.disablepictureinpicture = true;
video.muted = self.startVideoMuted;
video.addEventListener('loadedmetadata', function (e)
{
if (self.onVideoInitialised)
{
self.onVideoInitialised();
}
}, true);
// Check if request video frame callback is supported
if ('requestVideoFrameCallback' in HTMLVideoElement.prototype)
{
// The API is supported!
const onVideoFrameReady = (now, metadata) =>
{
if (metadata.receiveTime && metadata.expectedDisplayTime)
{
const receiveToCompositeMs = metadata.presentationTime - metadata.receiveTime;
self.aggregatedStats.receiveToCompositeMs = receiveToCompositeMs;
}
// Re-register the callback to be notified about the next frame.
//@ts-ignore
video.requestVideoFrameCallback(onVideoFrameReady);
};
// Initially register the callback to be notified about the first frame.
//@ts-ignore
video.requestVideoFrameCallback(onVideoFrameReady);
}
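// The receiveToCompositeMs recorded above is folded into the aggregated stats and forwarded to
// latencyTestTimings.SetFrameDisplayDeltaTime (see generateAggregatedStatsFunction below).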
return video;
};
this.video = this.createWebRtcVideo();
this.availableVideoStreams = new Map();
const onsignalingstatechange = function (state)
{
// console.info('signaling state change:', state);
console.info('Signaling state change. |', state.srcElement.signalingState, "|");
};
const oniceconnectionstatechange = function (state)
{
// console.info('ice connection state change:', state);
console.info('Browser ICE connection |', state.srcElement.iceConnectionState, '|');
};
const onicegatheringstatechange = function (state)
{
// console.info('ice gathering state change:', state);
console.info('Browser ICE gathering |', state.srcElement.iceGatheringState, '|');
};
const handleOnTrack = function (e)
{
// console.log('handleOnTrack', e.streams);
if (e.track)
{
console.log('Got track. | Kind=' + e.track.kind + ' | Id=' + e.track.id + ' | readyState=' + e.track.readyState + ' |');
}
if (e.track.kind == "audio")
{
handleOnAudioTrack(e.streams[0]);
return;
}
else (e.track.kind == "video");
{
// console.log('setting video stream from ontrack');
for (const s of e.streams)
{
if (!self.availableVideoStreams.has(s.id))
{
self.availableVideoStreams.set(s.id, s);
}
}
self.video.srcObject = e.streams[0];
// All tracks are added "muted" by WebRTC/browser and become unmuted when media is being sent
e.track.onunmute = () =>
{
if (self.video.srcObject !== e.streams[0]) // setting this directly caused an error???
self.video.srcObject = e.streams[0];
};
}
};
const handleOnAudioTrack = function (audioMediaStream)
{
// do nothing; the video element already has the same media stream as this audio track (they are linked)
if (self.video.srcObject === audioMediaStream)
{
return;
}
// video element has some other media stream that is not associated with this audio track
else if (self.video.srcObject && self.video.srcObject !== audioMediaStream)
{
// create a new audio element
let audioElem = document.createElement("Audio") as HTMLAudioElement;
//@ts-ignore
audioElem.srcObject = audioMediaStream;
// there is no way to autoplay audio (even muted), so we defer audio until first click
if (!self.autoPlayAudio)
{
let clickToPlayAudio = function ()
{
audioElem.play();
self.video.removeEventListener("click", clickToPlayAudio);
};
self.video.addEventListener("click", clickToPlayAudio);
}
// we assume the user has clicked somewhere on the page and autoplaying audio will work
else
audioElem.play();
console.log("%c[OK]", "background: green; color: black", 'Created new audio element to play seperate audio stream.');
}
};
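// Rationale: a video element can play only one srcObject, so when UE delivers audio on a
// MediaStream other than the one attached to the video element, a dedicated audio element is
// the only way to hear that second stream.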
const onDataChannel = function (dataChannelEvent)
{
// This is the primary data channel code path when we are "receiving"
console.log("Data channel created for us by browser as we are a receiving peer.");
self.dcClient = dataChannelEvent.channel;
setupDataChannelCallbacks(self.dcClient);
};
const createDataChannel = function (pc, label, options)
{
// This is the primary data channel code path when we are "offering"
let datachannel = pc.createDataChannel(label, options);
console.log(`Created datachannel (${label})`);
setupDataChannelCallbacks(datachannel);
return datachannel;
};
const setupDataChannelCallbacks = function (datachannel)
{
try
{
// console.log(`Created datachannel (${label})`);
// Inform browser we would like binary data as an ArrayBuffer (FF chooses Blob by default!)
datachannel.binaryType = "arraybuffer";
datachannel.onopen = function (e)
{
// console.log(`data channel (${label}) connect`);
console.log("Data channel onopen");
if (self.onDataChannelConnected)
{
self.onDataChannelConnected();
}
};
datachannel.onclose = function (e)
{
// console.log(`data channel (${label}) closed`);
console.log("Data channel onclose", e);
};
datachannel.onmessage = function (e)
{
// console.log(`Got message (${label})`, e.data); // RTC status / output quality
if (self.onDataChannelMessage)
{
self.onDataChannelMessage(e.data);
}
};
datachannel.onerror = function (e)
{
console.error("Data channel error", e);
};
return datachannel;
} catch (e)
{
// console.warn('No data channel', e);
console.warn('No data channel', e);
return null;
}
};
const onicecandidate = function (e)
{
// console.log('ICE candidate', e);
let candidate = e.candidate;
if (candidate && candidate.candidate)
{
console.log("%c[Browser ICE candidate]", "background: violet; color: black", "| Type=", candidate.type, "| Protocol=", candidate.protocol, "| Address=", candidate.address, "| Port=", candidate.port, "|");
self.onWebRtcCandidate(candidate);
}
};
const handleCreateOffer = function (pc)
{
pc.createOffer(self.sdpConstraints).then(function (offer)
{
offer.sdp = offer.sdp.replace("useinbandfec=1", "useinbandfec=1;stereo=1;maxaveragebitrate=128000");
// Munging is where we modify the SDP string to set parameters that are not exposed through the browser's WebRTC API
mungeSDPOffer(offer);
// Set our munged SDP on the local peer connection so it is "set" and will be sent across
pc.setLocalDescription(offer);
if (self.onWebRtcOffer)
{
// (andriy): increase start bitrate from 300 kbps to 20 mbps and max bitrate from 2.5 mbps to 100 mbps
// (100 mbps means we don't restrict encoder at all)
// after we `setLocalDescription` because other browsers are not happy to see google-specific config
offer.sdp = offer.sdp.replace(/(a=fmtp:\d+ .*level-asymmetry-allowed=.*)\r\n/gm, "$1;x-google-start-bitrate=10000;x-google-max-bitrate=20000\r\n");
self.onWebRtcOffer(offer);
}
},
function () { console.warn("Couldn't create offer"); });
};
const mungeSDPOffer = function (offer)
{
// if (pc.SetBitrate)
// console.log("Hurray! there's RTCPeerConnection.SetBitrate function");
// turn off video-timing sdp sent from browser
//offer.sdp = offer.sdp.replace("http://www.webrtc.org/experiments/rtp-hdrext/playout-delay", "");
// this indicates we support stereo (Chrome needs this)
offer.sdp = offer.sdp.replace('useinbandfec=1', 'useinbandfec=1;stereo=1;sprop-maxcapturerate=48000');
};
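// For illustration only (the Opus payload type and parameter order vary by browser), the munged
// fmtp line in the offer ends up looking roughly like:
// a=fmtp:111 minptime=10;useinbandfec=1;stereo=1;sprop-maxcapturerate=48000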
const setupPeerConnection = function (pc)
{
//Setup peerConnection events
pc.onsignalingstatechange = onsignalingstatechange;
pc.oniceconnectionstatechange = oniceconnectionstatechange;
@ -168,19 +422,20 @@ export class WebRtcPlayer
pc.ontrack = handleOnTrack;
pc.onicecandidate = onicecandidate;
pc.ondatachannel = onDataChannel;
};
const generateAggregatedStatsFunction = function ()
{
if (!self.aggregatedStats)
self.aggregatedStats = {};
return function (stats)
{
//console.log('Printing Stats');
let newStat = {} as any;
// console.log('----------------------------- Stats start -----------------------------');
stats.forEach(stat =>
{
// console.log(JSON.stringify(stat, undefined, 4));
@ -250,7 +505,13 @@ export class WebRtcPlayer
}
});
//console.log(JSON.stringify(newStat));
if (self.aggregatedStats.receiveToCompositeMs)
{
newStat.receiveToCompositeMs = self.aggregatedStats.receiveToCompositeMs;
self.latencyTestTimings.SetFrameDisplayDeltaTime(self.aggregatedStats.receiveToCompositeMs);
}
self.aggregatedStats = newStat;
if (self.onAggregatedStats)
@ -258,51 +519,197 @@ export class WebRtcPlayer
};
};
const setupTransceiversAsync = async function (pc)
{
let hasTransceivers = pc.getTransceivers().length > 0;
// Setup a transceiver for getting UE video
pc.addTransceiver("video", { direction: "recvonly" });
// Setup a transceiver for sending mic audio to UE and receiving audio from UE
if (!self.useMic)
{
pc.addTransceiver("audio", { direction: "recvonly" });
}
else
{
let audioSendOptions = self.useMic ?
{
autoGainControl: false,
channelCount: 1,
echoCancellation: false,
latency: 0,
noiseSuppression: false,
sampleRate: 48000,
volume: 1.0
} : false;
// Note using mic on android chrome requires SSL or chrome://flags/ "unsafely-treat-insecure-origin-as-secure"
const stream = await navigator.mediaDevices.getUserMedia({ video: false, audio: audioSendOptions });
if (stream)
{
if (hasTransceivers)
{
for (let transceiver of pc.getTransceivers())
{
if (transceiver && transceiver.receiver && transceiver.receiver.track && transceiver.receiver.track.kind === "audio")
{
for (const track of stream.getTracks())
{
if (track.kind && track.kind == "audio")
{
transceiver.sender.replaceTrack(track);
transceiver.direction = "sendrecv";
}
}
}
}
}
else
{
for (const track of stream.getTracks())
{
if (track.kind && track.kind == "audio")
{
pc.addTransceiver(track, { direction: "sendrecv" });
}
}
}
}
else
{
pc.addTransceiver("audio", { direction: "recvonly" });
}
}
};
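// Note: when the remote side (e.g. an SFU) created the transceivers first, the existing audio
// transceiver is reused via replaceTrack and flipped to "sendrecv" rather than adding a new
// one, which avoids appending an extra audio m-line to the SDP.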
//**********************
//Public functions
//**********************
this.setVideoEnabled = function (enabled)
{
//@ts-ignore
self.video.srcObject.getTracks().forEach(track => track.enabled = enabled);
};
this.startLatencyTest = function (onTestStarted)
{
// Can't start latency test without a video element
if (!self.video)
{
return;
}
self.latencyTestTimings.Reset();
self.latencyTestTimings.TestStartTimeMs = Date.now();
onTestStarted(self.latencyTestTimings.TestStartTimeMs);
};
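// After onTestStarted fires, the caller is expected to send the latency-test request to UE over
// the data channel; UE's reply is then routed into latencyTestTimings.SetUETimings above.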
//This is called when receiving new ice candidates individually instead of as part of the offer
//This is currently not used but would be called externally from this class
this.handleCandidateFromServer = function (iceCandidate)
{
// console.log("ICE candidate: ", iceCandidate);
let candidate = new RTCIceCandidate(iceCandidate);
console.log("%c[Unreal ICE candidate]", "background: pink; color: black", "| Type=", candidate.type, "| Protocol=", candidate.protocol, "| Address=", candidate.address, "| Port=", candidate.port, "|");
// if forcing TURN, reject any candidate that is not a relay candidate
if (self.forceTURN)
{
// if no relay address is found, assume this is not a TURN candidate and drop it
if (candidate.candidate.indexOf("relay") < 0)
{
console.warn("Dropping candidate because it was not TURN relay.", "| Type=", candidate.type, "| Protocol=", candidate.protocol, "| Address=", candidate.address, "| Port=", candidate.port, "|");
return;
}
}
self.pcClient.addIceCandidate(candidate).catch(function (e)
{
// console.log('ICE candidate successfully added');
console.error("Failed to add ICE candidate", e);
});
};
//Called externally to create an offer for the server
this.createOffer = function ()
{
if (self.pcClient)
{
// console.log("Closing existing PeerConnection");
console.log("Closing existing PeerConnection");
self.pcClient.close();
self.pcClient = null;
}
self.cfg.offerExtmapAllowMixed = false;
self.pcClient = new RTCPeerConnection(self.cfg);
setupPeerConnection(self.pcClient);
setupTransceiversAsync(self.pcClient).finally(function ()
{
self.dcClient = createDataChannel(self.pcClient, 'cirrus', self.dataChannelOptions);
handleCreateOffer(self.pcClient);
});
};
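// Ordering matters here: the transceivers are added first so the offer contains the audio/video
// m-lines, and the data channel is created before createOffer so the offer also carries the
// application (data channel) m-line.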
//Called externally when an offer is received from the server
this.receiveOffer = function (offer)
{
var offerDesc = new RTCSessionDescription(offer);
if (!self.pcClient)
{
console.log("Creating a new PeerConnection in the browser.");
self.pcClient = new RTCPeerConnection(self.cfg);
setupPeerConnection(self.pcClient);
// Put things here that happen post transceiver setup
self.pcClient.setRemoteDescription(offerDesc)
.then(() =>
{
setupTransceiversAsync(self.pcClient).finally(function ()
{
self.pcClient.createAnswer()
.then(answer => self.pcClient.setLocalDescription(answer))
.then(() =>
{
if (self.onWebRtcAnswer)
{
self.onWebRtcAnswer(self.pcClient.currentLocalDescription);
}
})
.then(() =>
{
let receivers = self.pcClient.getReceivers();
for (let receiver of receivers)
{
receiver.playoutDelayHint = 0;
}
})
.catch((error) => console.error("createAnswer() failed:", error));
});
});
}
};
//Called externally when an answer is received from the server
this.receiveAnswer = function (answer)
{
// console.log(`Received answer:\n${answer}`);
var answerDesc = new RTCSessionDescription(answer);
self.pcClient.setRemoteDescription(answerDesc);
let receivers = self.pcClient.getReceivers();
for (let receiver of receivers)
{
receiver.playoutDelayHint = 0;
}
};
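// playoutDelayHint is a non-standard, Chromium-only receiver hint; 0 requests minimal playout
// buffering in exchange for lower latency. Browsers that do not implement it ignore the property.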
this.close = function ()
{
if (self.pcClient)
{
// console.log("Closing existing peerClient");
console.log("Closing existing peerClient");
self.pcClient.close();
self.pcClient = null;
}
@ -311,7 +718,7 @@ export class WebRtcPlayer
};
//Sends data across the datachannel
this.send = function (data)
{
if (self.dcClient && self.dcClient.readyState == 'open')
{
@ -320,7 +727,7 @@ export class WebRtcPlayer
}
};
this.getStats = function (onStats)
{
if (self.pcClient && onStats)
{
@ -331,7 +738,7 @@ export class WebRtcPlayer
}
};
this.aggregateStats = function (checkInterval)
{
let calcAggregatedStats = generateAggregatedStatsFunction();
let printAggregatedStats = () => { self.getStats(calcAggregatedStats); };

@ -30,6 +30,8 @@ export class WebRtcRenderer
if (isLink)
{
//Do not send this, otherwise the connection fails (strange issue)
// this.ws.Send(JSON.stringify({ type: "playerConnected", dataChannel: true, sfu: false }));
}
else if (this.webRtcPlayerObj)
{
@ -70,21 +72,18 @@ export class WebRtcRenderer
if (!this.ws.IsLink || this.webRtcPlayerObj)
return;
this.webRtcPlayerObj = new WebRtcPlayer();
//Offer created successfully
this.webRtcPlayerObj.onWebRtcOffer = (offer: { sdp: string; }) =>
{
if (this.ws && this.ws.IsLink)
{
offer.sdp = offer.sdp.replace("a=extmap-allow-mixed\r\n", "");
let offerStr = JSON.stringify(offer);
this.ws.Send(offerStr);
}
};
this.webRtcPlayerObj.onWebRtcCandidate = (candidate) =>
{
// console.log("onWebRtcCandidate");
@ -108,15 +107,140 @@ export class WebRtcRenderer
{
if (this.ws.IsLink)
{
// console.log('WebRTC connected, waiting for video');
// showTextOverlay('WebRTC connected, waiting for video');
// requestQualityControl(); // the code below requests quality control (we must request quality-control ownership, otherwise the picture is lost on the second connection)
if (!this._qualityController)
this.sendInputData(new Uint8Array([MessageType.RequestQualityControl]).buffer);
}
};
//Data received
this.webRtcPlayerObj.onDataChannelMessage = (data) =>
{
// console.log("onDataChannelMessage");
return; // If we do not return here, the second connection fails
let view = new Uint8Array(data);
if (view[0] === ToClientMessageType.QualityControlOwnership)
{
let ownership = view[1] === 0 ? false : true;
console.log("Received quality controller message, will control quality: " + ownership);
this._qualityController = ownership;
// If we own the quality control, we can't relinquish it. We only lose
// quality control when another peer asks for it
// if (qualityControlOwnershipCheckBox !== null)
// {
// qualityControlOwnershipCheckBox.disabled = ownership;
// qualityControlOwnershipCheckBox.checked = ownership;
// }
} else if (view[0] === ToClientMessageType.Response)
{
let response = new TextDecoder("utf-16").decode(data.slice(1));
for (let listener of responseEventListeners.values())
{
listener(response);
}
} else if (view[0] === ToClientMessageType.Command)
{
let commandAsString = new TextDecoder("utf-16").decode(data.slice(1));
console.log(commandAsString);
let command = JSON.parse(commandAsString);
if (command.command === 'onScreenKeyboard')
{
// showOnScreenKeyboard(command);
}
} else if (view[0] === ToClientMessageType.FreezeFrame)
{
processFreezeFrameMessage(view);
} else if (view[0] === ToClientMessageType.UnfreezeFrame)
{
// invalidateFreezeFrameOverlay(); // invalidate the freeze-frame overlay
} else if (view[0] === ToClientMessageType.VideoEncoderAvgQP)
{
// VideoEncoderQP = new TextDecoder("utf-16").decode(data.slice(1));
//console.log(`received VideoEncoderAvgQP ${VideoEncoderQP}`);
} else if (view[0] == ToClientMessageType.LatencyTest)
{
let latencyTimingsAsString = new TextDecoder("utf-16").decode(data.slice(1));
console.log("Got latency timings from UE.");
console.log(latencyTimingsAsString);
let latencyTimingsFromUE = JSON.parse(latencyTimingsAsString);
if (this.webRtcPlayerObj)
{
this.webRtcPlayerObj.latencyTestTimings.SetUETimings(latencyTimingsFromUE);
}
} else if (view[0] == ToClientMessageType.InitialSettings)
{
let settingsString = new TextDecoder("utf-16").decode(data.slice(1));
let settingsJSON = JSON.parse(settingsString);
if (settingsJSON.PixelStreaming)
{
// let allowConsoleCommands = settingsJSON.PixelStreaming.AllowPixelStreamingCommands;
// if (allowConsoleCommands === false)
// {
// document.getElementById('encoder-min-qp-text').disabled = true;
// document.getElementById('encoder-max-qp-text').disabled = true;
// document.getElementById("webrtc-fps-text").disabled = true;
// document.getElementById("webrtc-min-bitrate-text").disabled = true;
// document.getElementById("webrtc-max-bitrate-text").disabled = true;
// document.getElementById("show-fps-button").disabled = true;
// document.getElementById("encoder-params-submit").disabled = true;
// document.getElementById("webrtc-params-submit").disabled = true;
// if (!document.getElementById("warning-elem-webrtc"))
// {
// let warningElem1 = document.createElement("p");
// warningElem1.innerText = "(Disabled by -AllowPixelStreamingCommands=false)";
// warningElem1.classList = "subtitle-text";
// warningElem1.id = "warning-elem-webrtc";
// document.getElementById("webRTCSettingsHeader").appendChild(warningElem1);
// }
// if (!document.getElementById("warning-elem-encoder"))
// {
// let warningElem2 = document.createElement("p");
// warningElem2.innerText = "(Disabled by -AllowPixelStreamingCommands=false)";
// warningElem2.classList = "subtitle-text";
// warningElem2.id = "warning-elem-encoder";
// document.getElementById("encoderSettingsHeader").appendChild(warningElem2);
// }
// console.warn("-AllowPixelStreamingCommands=false, sending console commands from browser to UE is disabled, including toggling FPS and changing encoder settings from the browser.");
// }
// let disableLatencyTest = settingsJSON.PixelStreaming.DisableLatencyTest;
// if (disableLatencyTest)
// {
// document.getElementById("test-latency-button").disabled = true;
// document.getElementById("test-latency-button").title = "Disabled by -PixelStreamingDisableLatencyTester=true";
// console.warn("-PixelStreamingDisableLatencyTester=true, requesting latency report from the the browser to UE is disabled.");
// }
}
if (settingsJSON.Encoder)
{
// document.getElementById('encoder-min-qp-text').value = settingsJSON.Encoder.MinQP;
// document.getElementById('encoder-max-qp-text').value = settingsJSON.Encoder.MaxQP;
}
if (settingsJSON.WebRTC)
{
// document.getElementById("webrtc-fps-text").value = settingsJSON.WebRTC.FPS;
// // reminder bitrates are sent in bps but displayed in kbps
// document.getElementById("webrtc-min-bitrate-text").value = settingsJSON.WebRTC.MinBitrate / 1000;
// document.getElementById("webrtc-max-bitrate-text").value = settingsJSON.WebRTC.MaxBitrate / 1000;
}
} else if (view[0] == ToClientMessageType.FileExtension)
{
// processFileExtension(view);
} else if (view[0] == ToClientMessageType.FileMimeType)
{
// processFileMimeType(view);
} else if (view[0] == ToClientMessageType.FileContents)
{
// processFileContents(view);
} else
{
console.error(`unrecognized data received, packet ID ${view[0]}`);
}
};
this.webRtcPlayerObj.video.style.position = "absolute";
@ -131,6 +255,7 @@ export class WebRtcRenderer
this.webRtcPlayerObj.video.play();
app.SendCameraPosToRenderer();
let d = { type: "ResSize", w: app.Viewer.Width, h: app.Viewer.Height };
AppendUserInfo(d);
app.WebSocket.Send(JSON.stringify(d));
@ -148,12 +273,12 @@ export class WebRtcRenderer
{
this.rm1();
this.rm1 = undefined;
}
if (this.rm2)
{
this.rm2();
this.rm2 = undefined;
}
if (this.webRtcPlayerObj)
{
@ -165,4 +290,156 @@ export class WebRtcRenderer
this.InitWebSocket();
});
}
private _qualityController: boolean = false;
private sendInputData(data: ArrayBufferLike)
{
if (this.webRtcPlayerObj)
{
// resetAfkWarningTimer();
this.webRtcPlayerObj.send(data);
}
}
}
// Must be kept in sync with PixelStreamingProtocol::EToUE4Msg C++ enum.
const MessageType = {
/**********************************************************************/
/*
* Control Messages. Range = 0..49.
*/
IFrameRequest: 0,
RequestQualityControl: 1,
FpsRequest: 2,
AverageBitrateRequest: 3,
StartStreaming: 4,
StopStreaming: 5,
LatencyTest: 6,
RequestInitialSettings: 7,
/**********************************************************************/
/*
* Input Messages. Range = 50..89.
*/
// Generic Input Messages. Range = 50..59.
UIInteraction: 50,
Command: 51,
// Keyboard Input Message. Range = 60..69.
KeyDown: 60,
KeyUp: 61,
KeyPress: 62,
// Mouse Input Messages. Range = 70..79.
MouseEnter: 70,
MouseLeave: 71,
MouseDown: 72,
MouseUp: 73,
MouseMove: 74,
MouseWheel: 75,
// Touch Input Messages. Range = 80..89.
TouchStart: 80,
TouchEnd: 81,
TouchMove: 82,
// Gamepad Input Messages. Range = 90..99
GamepadButtonPressed: 90,
GamepadButtonReleased: 91,
GamepadAnalog: 92
/**************************************************************************/
};
// Must be kept in sync with PixelStreamingProtocol::EToPlayerMsg C++ enum.
const ToClientMessageType = {
QualityControlOwnership: 0,
Response: 1,
Command: 2,
FreezeFrame: 3,
UnfreezeFrame: 4,
VideoEncoderAvgQP: 5,
LatencyTest: 6,
InitialSettings: 7,
FileExtension: 8,
FileMimeType: 9,
FileContents: 10
};
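// A minimal sketch (assumed helper, not part of this commit) of how a ToUE4 message for
// WebRtcPlayer.send() is framed: one message-type byte, then a little-endian uint16 string
// length, then the string as UTF-16 code units, mirroring the UTF-16 decoding done in
// onDataChannelMessage above. The function name and shape are illustrative only.
function emitUIInteraction(player: { send: (data: any) => void }, descriptor: object)
{
let payload = JSON.stringify(descriptor);
let data = new DataView(new ArrayBuffer(1 + 2 + 2 * payload.length));
data.setUint8(0, MessageType.UIInteraction); // message type (first byte of every packet)
data.setUint16(1, payload.length, true); // string length in code units, little-endian
for (let i = 0; i < payload.length; i++)
{
data.setUint16(3 + i * 2, payload.charCodeAt(i), true); // UTF-16 code unit
}
player.send(data.buffer);
}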
let responseEventListeners = new Map();
function addResponseEventListener(name, listener)
{
responseEventListeners.set(name, listener);
}
function removeResponseEventListener(name)
{
responseEventListeners.delete(name);
}
// A freeze frame is a still JPEG image shown instead of the video.
let freezeFrame = {
receiving: false,
size: 0,
jpeg: undefined,
height: 0,
width: 0,
valid: false
};
function processFreezeFrameMessage(view)
{
// Reset freeze frame if we got a freeze frame message and we are not "receiving" yet.
if (!freezeFrame.receiving)
{
freezeFrame.receiving = true;
freezeFrame.valid = false;
freezeFrame.size = 0;
freezeFrame.jpeg = undefined;
}
// Extract total size of freeze frame (across all chunks)
freezeFrame.size = (new DataView(view.slice(1, 5).buffer)).getInt32(0, true);
// Get the jpeg part of the payload
let jpegBytes = view.slice(1 + 4);
// Append to existing jpeg that holds the freeze frame
if (freezeFrame.jpeg)
{
let jpeg = new Uint8Array(freezeFrame.jpeg.length + jpegBytes.length);
jpeg.set(freezeFrame.jpeg, 0);
jpeg.set(jpegBytes, freezeFrame.jpeg.length);
freezeFrame.jpeg = jpeg;
}
// No existing freeze frame jpeg, make one
else
{
freezeFrame.jpeg = jpegBytes;
freezeFrame.receiving = true;
console.log(`received first chunk of freeze frame: ${freezeFrame.jpeg.length}/${freezeFrame.size}`);
}
// Uncomment for debug
//console.log(`Received freeze frame chunk: ${freezeFrame.jpeg.length}/${freezeFrame.size}`);
// Finished receiving freeze frame, we can show it now
if (freezeFrame.jpeg.length === freezeFrame.size)
{
freezeFrame.receiving = false;
freezeFrame.valid = true;
console.log(`received complete freeze frame ${freezeFrame.size}`);
// showFreezeFrame(); // show the freeze-frame overlay
}
// We received more data than the freeze frame payload message indicated (this is an error)
else if (freezeFrame.jpeg.length > freezeFrame.size)
{
console.error(`received bigger freeze frame than advertised: ${freezeFrame.jpeg.length}/${freezeFrame.size}`);
freezeFrame.jpeg = undefined;
freezeFrame.receiving = false;
}
}
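// A minimal sketch (assumed, not part of this commit) of the showFreezeFrame() referenced above:
// wrap the accumulated JPEG bytes in a Blob and show them in an absolutely positioned <img>
// overlay until the stream unfreezes. The element id and styling are illustrative only.
function showFreezeFrame()
{
if (!freezeFrame.valid || !freezeFrame.jpeg)
{
return;
}
let blob = new Blob([freezeFrame.jpeg], { type: "image/jpeg" });
let img = document.createElement("img");
img.id = "freezeFrameOverlay";
img.style.position = "absolute";
img.src = URL.createObjectURL(blob);
// Release the object URL once the image has decoded
img.onload = () => URL.revokeObjectURL(img.src);
document.body.appendChild(img);
}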
