!2227 Feature: sync the renderer version

pull/2192/MERGE
ChenX 1 year ago
parent 0659dba768
commit c91c48a755

@@ -99,6 +99,7 @@
"@blueprintjs/core": "^3.54.0",
"@blueprintjs/popover2": "^0.12.9",
"@blueprintjs/table": "^3.10.0",
"@epicgames-ps/lib-pixelstreamingfrontend-ue5.2": "^0.5.1",
"@jscad/modeling": "^2.11.0",
"blueimp-md5": "^2.19.0",
"detect-browser": "^5.3.0",

@@ -6,7 +6,7 @@ export class CMD_Renderer implements Command
{
async exec()
{
- if (app.WebRtcRenderer.webRtcPlayerObj)
+ if (app.WebRtcRenderer.pixelStreaming)
app.WebRtcRenderer.EndRenderer();
else if (app.WebRtcRenderer)
{

@@ -11,7 +11,7 @@ export async function ParseMaterialImages(url: string): Promise<Map<string, HTMLImageElement>>
{
let name = ParseUrlName(url);
- let importUrl = GenerateCdnUrl(`/Paks/paks_cooked/ue_resource/Content${encodeURI(url)}/Mesh_${encodeURI(name)}.uimp`); // URL of the first import reference
+ let importUrl = GenerateCdnUrl(`/Paks/paks_cooked2/ue_resource/Content${encodeURI(url)}/Mesh_${encodeURI(name)}.uimp`); // URL of the first import reference
let res = await fetch(importUrl);
let map = new Map<string, HTMLImageElement>();
@@ -32,7 +32,7 @@ export function ParseFBXUrl(url: string): string
export function ParseBoxUrl(url: string): string
{
let name = ParseUrlName(url);
- return GenerateCdnUrl(`/Paks/paks_cooked/ue_resource/Content${url}/Mesh_${name}.ubox`, true);
+ return GenerateCdnUrl(`/Paks/paks_cooked2/ue_resource/Content${url}/Mesh_${name}.ubox`, true);
}
export async function ConverMaterial2(m: MeshPhongMaterial, url: string)
@@ -61,7 +61,7 @@ export async function ConverMaterial2(m: MeshPhongMaterial, url: string)
try
{
- let dataString = await (await fetch(GenerateCdnUrl(`/Paks/paks_cooked/ue_resource/Content${encodeURI(url)}/${name}.json`))).text();
+ let dataString = await (await fetch(GenerateCdnUrl(`/Paks/paks_cooked2/ue_resource/Content${encodeURI(url)}/${name}.json`))).text();
let data = JSON.parse(dataString);
let mtl = new MeshPhysicalMaterial2({
@@ -218,7 +218,7 @@ async function ParseMaterialImage(importData: string, forceName: string, map: Ma
async function GetImportData(url: string): Promise<string>
{
- let allurl = GenerateCdnUrl(`/Paks/paks_cooked/ue_resource/Content${encodeURI(url)}.uimp`);//?
+ let allurl = GenerateCdnUrl(`/Paks/paks_cooked2/ue_resource/Content${encodeURI(url)}.uimp`);//?
let res = await fetch(allurl);
if (res.status !== 200) return "";
let text: string = await res.text();

@@ -3,8 +3,8 @@ import hotkeys from 'hotkeys-js-ext';
import { MathUtils, Matrix4, Object3D, PerspectiveCamera, Vector3 } from 'three';
import { begin, end } from 'xaop';
import { HardwareCuttingReactor } from '../Add-on/BoardCutting/HardwareCuttingReactor';
- import { DwgDxfImport } from '../Add-on/DXFLoad';
import { DrillingReactor } from '../Add-on/DrawDrilling/DrillingReactor';
+ import { DwgDxfImport } from '../Add-on/DXFLoad';
import { AppendUserInfo } from '../Add-on/ExportData';
import { ImportJiajuFile } from '../Add-on/JiaJu/Import/JiaJuImport';
import { ImportKJLData } from '../Add-on/KJL/Import/KJLImport';
@@ -23,7 +23,7 @@ import { FontLoader } from '../DatabaseServices/Text/FontLoader';
import { AutoSaveServer } from '../Editor/AutoSave';
import { BoardMoveTool } from '../Editor/BoardMoveTool';
import { CameraControls } from '../Editor/CameraControls';
- import { CommandWrap, commandMachine } from '../Editor/CommandMachine';
+ import { commandMachine, CommandWrap } from '../Editor/CommandMachine';
import { CommandState } from '../Editor/CommandState';
import { Editor } from '../Editor/Editor';
import { Gesture } from '../Editor/Gesture';
@@ -120,29 +120,28 @@ export class ApplicationService
this.SendCameraPosToRenderer();
});
- setTimeout(() =>
- {
-     this.WebSocket.Connect();
-     // renderer
-     end(this.WebSocket, this.WebSocket.OnMessageEvent, (msg) =>
-     {
-         if (msg.data === "render")
-         {
-             this.WebRtcRenderer.canUse = true;
-         }
-     });
-     end(this.WebSocket, this.WebSocket.OnLinkEvent, (isLink) =>
-     {
-         if (!isLink)
-             this.WebRtcRenderer.canUse = false;
-     });
- }, 5000);
+ // renderer
+ end(this.WebSocket, this.WebSocket.OnMessageEvent, (msg) =>
+ {
+     if (msg.data === "render")
+     {
+         this.WebRtcRenderer.canUse = true;
+     }
+ });
+ end(this.WebSocket, this.WebSocket.OnLinkEvent, (isLink) =>
+ {
+     if (!isLink)
+         this.WebRtcRenderer.canUse = false;
+ });
+ setTimeout(() =>
+ {
+     this.WebSocket.Connect();
+ }, 5000);
end(this.Viewer, this.Viewer.OnSize, () =>
{
- if (this.WebRtcRenderer.webRtcPlayerObj)
+ if (this.WebRtcRenderer.pixelStreaming)
{
let d = { type: "ResSize", w: this.Viewer.Width, h: this.Viewer.Height };
AppendUserInfo(d);
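The `end` hooks in this hunk come from xaop. Judging purely from this usage (not from xaop's documentation), `end(target, method, hook)` runs `hook` after every call to `method` on `target`, with the same arguments; the patch registers the hooks immediately and only delays the Connect call. A minimal sketch of the pattern (the Socket class is illustrative):

import { end } from 'xaop';

class Socket
{
    OnMessageEvent(msg: { data: string; }) { /* original handler body */ }
}

const sock = new Socket();
// Runs after each OnMessageEvent call, e.g. to flip a readiness flag.
end(sock, sock.OnMessageEvent, (msg: { data: string; }) =>
{
    if (msg.data === "render")
        console.log("renderer stream available");
});
sock.OnMessageEvent({ data: "render" });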

@@ -1,796 +0,0 @@
// Copyright Epic Games, Inc. All Rights Reserved.
export function WebRtcPlayer(parOptions?)
{
parOptions = parOptions || {
"type": "config",
"peerConnectionOptions": {
"iceServers": [
{
"urls": [
"stun:stun.l.google.com:19302"
]
}
],
"sdpSemantics": "unified-plan",
"offerExtmapAllowMixed": false,
"bundlePolicy": "balanced"
}
};
var self = this;
const urlParams = new URLSearchParams(window.location.search);
//**********************
//Config setup
//**********************
this.cfg = typeof parOptions.peerConnectionOptions !== 'undefined' ? parOptions.peerConnectionOptions : {};
this.cfg.sdpSemantics = 'unified-plan';
// If this is true in Chrome 89+, SDP is sent that is incompatible with UE Pixel Streaming 4.26 and below.
// However, 4.27 Pixel Streaming does not need this set to false as it supports `offerExtmapAllowMixed`.
// tl;dr: uncomment this line for older versions of Pixel Streaming that need Chrome 89+.
this.cfg.offerExtmapAllowMixed = false;
this.forceTURN = urlParams.has('ForceTURN');
if (this.forceTURN)
{
console.log("Forcing TURN usage by setting ICE Transport Policy in peer connection config.");
this.cfg.iceTransportPolicy = "relay";
}
this.cfg.bundlePolicy = "balanced";
this.forceMaxBundle = urlParams.has('ForceMaxBundle');
if (this.forceMaxBundle)
{
this.cfg.bundlePolicy = "max-bundle";
}
//**********************
//Variables
//**********************
this.pcClient = null;
this.dcClient = null;
this.tnClient = null;
this.sfu = false;
this.sdpConstraints = {
offerToReceiveAudio: 1, //Note: if you don't need audio you can get improved latency by turning this off.
offerToReceiveVideo: 1,
voiceActivityDetection: false
};
// See https://www.w3.org/TR/webrtc/#dom-rtcdatachannelinit for values (this is needed for Firefox to be consistent with Chrome.)
this.dataChannelOptions = { ordered: true };
// This is useful if the video/audio needs to autoplay (without user input), as browsers do not allow non-muted autoplay of sound sources without user interaction.
this.startVideoMuted = typeof parOptions.startVideoMuted !== 'undefined' ? parOptions.startVideoMuted : false;
this.autoPlayAudio = typeof parOptions.autoPlayAudio !== 'undefined' ? parOptions.autoPlayAudio : true;
// To force mono playback of WebRTC audio
this.forceMonoAudio = urlParams.has('ForceMonoAudio');
if (this.forceMonoAudio)
{
console.log("Will attempt to force mono audio by munging the sdp in the browser.");
}
// To enable the mic in the browser, use SSL/localhost and pass ?useMic in the query string.
this.useMic = urlParams.has('useMic');
if (!this.useMic)
{
console.log("Microphone access is not enabled. Pass ?useMic in the url to enable it.");
}
// When ?useMic is set, check for SSL or localhost
let isLocalhostConnection = location.hostname === "localhost" || location.hostname === "127.0.0.1";
let isHttpsConnection = location.protocol === 'https:';
if (this.useMic && !isLocalhostConnection && !isHttpsConnection)
{
this.useMic = false;
console.error("Microphone access in the browser will not work if you are not on HTTPS or localhost. Disabling mic access.");
console.error("For testing you can enable HTTP microphone access Chrome by visiting chrome://flags/ and enabling 'unsafely-treat-insecure-origin-as-secure'");
}
// Prefer SFU or P2P connection
this.preferSFU = urlParams.has('preferSFU');
console.log(this.preferSFU ?
"The browser will signal it would prefer an SFU connection. Remove ?preferSFU from the url to signal for P2P usage." :
"The browser will signal for a P2P connection. Pass ?preferSFU in the url to signal for SFU usage.");
// Latency tester
this.latencyTestTimings =
{
TestStartTimeMs: null,
UEReceiptTimeMs: null,
UEEncodeMs: null,
UECaptureToSendMs: null,
UETransmissionTimeMs: null,
BrowserReceiptTimeMs: null,
FrameDisplayDeltaTimeMs: null,
Reset: function ()
{
this.TestStartTimeMs = null;
this.UEReceiptTimeMs = null;
this.UEEncodeMs = null;
this.UECaptureToSendMs = null;
this.UETransmissionTimeMs = null;
this.BrowserReceiptTimeMs = null;
this.FrameDisplayDeltaTimeMs = null;
},
SetUETimings: function (UETimings)
{
this.UEReceiptTimeMs = UETimings.ReceiptTimeMs;
this.UEEncodeMs = UETimings.EncodeMs;
this.UECaptureToSendMs = UETimings.CaptureToSendMs;
this.UETransmissionTimeMs = UETimings.TransmissionTimeMs;
this.BrowserReceiptTimeMs = Date.now();
this.OnAllLatencyTimingsReady(this);
},
SetFrameDisplayDeltaTime: function (DeltaTimeMs)
{
if (this.FrameDisplayDeltaTimeMs == null)
{
this.FrameDisplayDeltaTimeMs = Math.round(DeltaTimeMs);
this.OnAllLatencyTimingsReady(this);
}
},
OnAllLatencyTimingsReady: function (Timings) { }
};
//**********************
//Functions
//**********************
//Create Video element and expose that as a parameter
this.createWebRtcVideo = function ()
{
var video = document.createElement('video');
video.id = "streamingVideo";
video.playsInline = true;
video.disablePictureInPicture = true;
video.muted = self.startVideoMuted;
video.addEventListener('loadedmetadata', function (e)
{
if (self.onVideoInitialised)
{
self.onVideoInitialised();
}
}, true);
video.addEventListener('pause', function (e)
{
video.play();
});
// Check if request video frame callback is supported
if ('requestVideoFrameCallback' in HTMLVideoElement.prototype)
{
// The API is supported!
const onVideoFrameReady = (now, metadata) =>
{
if (metadata.receiveTime && metadata.expectedDisplayTime)
{
const receiveToCompositeMs = metadata.presentationTime - metadata.receiveTime;
// self.aggregatedStats.receiveToCompositeMs = receiveToCompositeMs;
}
// Re-register the callback to be notified about the next frame.
video.requestVideoFrameCallback(onVideoFrameReady);
};
// Initially register the callback to be notified about the first frame.
video.requestVideoFrameCallback(onVideoFrameReady);
}
return video;
};
this.createWebRtcAudio = function ()
{
var audio = document.createElement('audio');
audio.id = 'streamingAudio';
return audio;
};
this.video = this.createWebRtcVideo();
this.audio = this.createWebRtcAudio();
this.availableVideoStreams = new Map();
const onsignalingstatechange = function (state)
{
console.info('Signaling state change. |', state.srcElement.signalingState, "|");
};
const oniceconnectionstatechange = function (state)
{
console.info('Browser ICE connection |', state.srcElement.iceConnectionState, '|');
};
const onicegatheringstatechange = function (state)
{
console.info('Browser ICE gathering |', state.srcElement.iceGatheringState, '|');
};
const handleOnTrack = function (e)
{
if (e.track)
{
console.log('Got track. | Kind=' + e.track.kind + ' | Id=' + e.track.id + ' | readyState=' + e.track.readyState + ' |');
}
if (e.track.kind == "audio")
{
handleOnAudioTrack(e.streams[0]);
return;
}
else if (e.track.kind == "video")
{
for (const s of e.streams)
{
if (!self.availableVideoStreams.has(s.id))
{
self.availableVideoStreams.set(s.id, s);
}
}
self.video.srcObject = e.streams[0];
// All tracks are added "muted" by WebRTC/browser and become unmuted when media is being sent
e.track.onunmute = () =>
{
self.video.srcObject = e.streams[0];
self.onNewVideoTrack(e.streams);
};
}
};
const handleOnAudioTrack = function (audioMediaStream)
{
// do nothing: the video has the same media stream as the audio track we have here (they are linked)
if (self.video.srcObject == audioMediaStream)
{
return;
}
// video element has some other media stream that is not associated with this audio track
else if (self.video.srcObject && self.video.srcObject !== audioMediaStream)
{
self.audio.srcObject = audioMediaStream;
}
};
const onDataChannel = function (dataChannelEvent)
{
// This is the primary data channel code path when we are "receiving"
console.log("Data channel created for us by browser as we are a receiving peer.");
self.dcClient = dataChannelEvent.channel;
setupDataChannelCallbacks(self.dcClient);
};
const createDataChannel = function (pc, label, options)
{
// This is the primary data channel code path when we are "offering"
let datachannel = pc.createDataChannel(label, options);
console.log(`Created datachannel (${label})`);
setupDataChannelCallbacks(datachannel);
return datachannel;
};
const setupDataChannelCallbacks = function (datachannel)
{
try
{
// Inform browser we would like binary data as an ArrayBuffer (FF chooses Blob by default!)
datachannel.binaryType = "arraybuffer";
datachannel.addEventListener('open', e =>
{
console.log(`Data channel connected: ${datachannel.label}(${datachannel.id})`);
if (self.onDataChannelConnected)
{
self.onDataChannelConnected();
}
});
datachannel.addEventListener('close', e =>
{
console.log(`Data channel disconnected: ${datachannel.label}(${datachannel.id}`, e);
});
datachannel.addEventListener('message', e =>
{
if (self.onDataChannelMessage)
{
self.onDataChannelMessage(e.data);
}
});
datachannel.addEventListener('error', e =>
{
console.error(`Data channel error: ${datachannel.label}(${datachannel.id}`, e);
});
return datachannel;
} catch (e)
{
console.warn('Datachannel setup caused an exception: ', e);
return null;
}
};
const onicecandidate = function (e)
{
let candidate = e.candidate;
if (candidate && candidate.candidate)
{
console.log("%c[Browser ICE candidate]", "background: violet; color: black", "| Type=", candidate.type, "| Protocol=", candidate.protocol, "| Address=", candidate.address, "| Port=", candidate.port, "|");
self.onWebRtcCandidate(candidate);
}
};
const handleCreateOffer = function (pc)
{
pc.createOffer(self.sdpConstraints).then(function (offer)
{
// Munging is where we modify the SDP string to set parameters that are not exposed through the browser's WebRTC API
mungeSDP(offer);
// Set our munged SDP on the local peer connection so it is "set" and will be sent across
pc.setLocalDescription(offer);
if (self.onWebRtcOffer)
{
self.onWebRtcOffer(offer);
}
},
function () { console.warn("Couldn't create offer"); });
};
const mungeSDP = function (offer)
{
let audioSDP = '';
// set max bitrate to highest bitrate Opus supports
audioSDP += 'maxaveragebitrate=510000;';
if (self.useMic)
{
// set the max capture rate to 48khz (so we can send high quality audio from mic)
audioSDP += 'sprop-maxcapturerate=48000;';
}
// Force mono or stereo based on whether ?ForceMonoAudio was passed or not
audioSDP += self.forceMonoAudio ? 'sprop-stereo=0;stereo=0;' : 'sprop-stereo=1;stereo=1;';
// enable in-band forward error correction for opus audio
audioSDP += 'useinbandfec=1';
// We use the line 'useinbandfec=1' (which Opus uses) to set our Opus specific audio parameters.
offer.sdp = offer.sdp.replace('useinbandfec=1', audioSDP);
};
const setupPeerConnection = function (pc)
{
//Setup peerConnection events
pc.onsignalingstatechange = onsignalingstatechange;
pc.oniceconnectionstatechange = oniceconnectionstatechange;
pc.onicegatheringstatechange = onicegatheringstatechange;
pc.ontrack = handleOnTrack;
pc.onicecandidate = onicecandidate;
pc.ondatachannel = onDataChannel;
};
const generateAggregatedStatsFunction = function ()
{
if (!self.aggregatedStats)
self.aggregatedStats = {};
return function (stats)
{
let newStat = {} as any;
// store each type of codec we can get stats on
newStat.codecs = {};
stats.forEach(stat =>
{
// Get the inbound-rtp for video
if (stat.type === 'inbound-rtp'
&& !stat.isRemote
&& (stat.mediaType === 'video' || stat.id.toLowerCase().includes('video')))
{
newStat.timestamp = stat.timestamp;
newStat.bytesReceived = stat.bytesReceived;
newStat.framesDecoded = stat.framesDecoded;
newStat.packetsLost = stat.packetsLost;
newStat.bytesReceivedStart = self.aggregatedStats && self.aggregatedStats.bytesReceivedStart ? self.aggregatedStats.bytesReceivedStart : stat.bytesReceived;
newStat.framesDecodedStart = self.aggregatedStats && self.aggregatedStats.framesDecodedStart ? self.aggregatedStats.framesDecodedStart : stat.framesDecoded;
newStat.timestampStart = self.aggregatedStats && self.aggregatedStats.timestampStart ? self.aggregatedStats.timestampStart : stat.timestamp;
if (self.aggregatedStats && self.aggregatedStats.timestamp)
{
// Get the mimetype of the video codec being used
if (stat.codecId && self.aggregatedStats.codecs && self.aggregatedStats.codecs.hasOwnProperty(stat.codecId))
{
newStat.videoCodec = self.aggregatedStats.codecs[stat.codecId];
}
if (self.aggregatedStats.bytesReceived)
{
// bitrate = bits received since last time / number of ms since last time
//This is automatically in kbits (where k=1000) since time is in ms and stat we want is in seconds (so a '* 1000' then a '/ 1000' would negate each other)
newStat.bitrate = 8 * (newStat.bytesReceived - self.aggregatedStats.bytesReceived) / (newStat.timestamp - self.aggregatedStats.timestamp);
newStat.bitrate = Math.floor(newStat.bitrate);
newStat.lowBitrate = self.aggregatedStats.lowBitrate && self.aggregatedStats.lowBitrate < newStat.bitrate ? self.aggregatedStats.lowBitrate : newStat.bitrate;
newStat.highBitrate = self.aggregatedStats.highBitrate && self.aggregatedStats.highBitrate > newStat.bitrate ? self.aggregatedStats.highBitrate : newStat.bitrate;
}
if (self.aggregatedStats.bytesReceivedStart)
{
newStat.avgBitrate = 8 * (newStat.bytesReceived - self.aggregatedStats.bytesReceivedStart) / (newStat.timestamp - self.aggregatedStats.timestampStart);
newStat.avgBitrate = Math.floor(newStat.avgBitrate);
}
if (self.aggregatedStats.framesDecoded)
{
// framerate = frames decoded since last time / number of seconds since last time
newStat.framerate = (newStat.framesDecoded - self.aggregatedStats.framesDecoded) / ((newStat.timestamp - self.aggregatedStats.timestamp) / 1000);
newStat.framerate = Math.floor(newStat.framerate);
newStat.lowFramerate = self.aggregatedStats.lowFramerate && self.aggregatedStats.lowFramerate < newStat.framerate ? self.aggregatedStats.lowFramerate : newStat.framerate;
newStat.highFramerate = self.aggregatedStats.highFramerate && self.aggregatedStats.highFramerate > newStat.framerate ? self.aggregatedStats.highFramerate : newStat.framerate;
}
if (self.aggregatedStats.framesDecodedStart)
{
newStat.avgframerate = (newStat.framesDecoded - self.aggregatedStats.framesDecodedStart) / ((newStat.timestamp - self.aggregatedStats.timestampStart) / 1000);
newStat.avgframerate = Math.floor(newStat.avgframerate);
}
}
}
// Get inbound-rtp for audio
if (stat.type === 'inbound-rtp'
&& !stat.isRemote
&& (stat.mediaType === 'audio' || stat.id.toLowerCase().includes('audio')))
{
// Get audio bytes received
if (stat.bytesReceived)
{
newStat.audioBytesReceived = stat.bytesReceived;
}
// As we loop back through we may wish to compute some stats based on a delta of the previous time we recorded the stat
if (self.aggregatedStats && self.aggregatedStats.timestamp)
{
// Get the mimetype of the audio codec being used
if (stat.codecId && self.aggregatedStats.codecs && self.aggregatedStats.codecs.hasOwnProperty(stat.codecId))
{
newStat.audioCodec = self.aggregatedStats.codecs[stat.codecId];
}
// Determine audio bitrate delta over the time period
if (self.aggregatedStats.audioBytesReceived)
{
newStat.audioBitrate = 8 * (newStat.audioBytesReceived - self.aggregatedStats.audioBytesReceived) / (stat.timestamp - self.aggregatedStats.timestamp);
newStat.audioBitrate = Math.floor(newStat.audioBitrate);
}
}
}
//Read video track stats
if (stat.type === 'track' && (stat.trackIdentifier === 'video_label' || stat.kind === 'video'))
{
newStat.framesDropped = stat.framesDropped;
newStat.framesReceived = stat.framesReceived;
newStat.framesDroppedPercentage = stat.framesDropped / stat.framesReceived * 100;
newStat.frameHeight = stat.frameHeight;
newStat.frameWidth = stat.frameWidth;
newStat.frameHeightStart = self.aggregatedStats && self.aggregatedStats.frameHeightStart ? self.aggregatedStats.frameHeightStart : stat.frameHeight;
newStat.frameWidthStart = self.aggregatedStats && self.aggregatedStats.frameWidthStart ? self.aggregatedStats.frameWidthStart : stat.frameWidth;
}
if (stat.type === 'candidate-pair' && stat.hasOwnProperty('currentRoundTripTime') && stat.currentRoundTripTime != 0)
{
newStat.currentRoundTripTime = stat.currentRoundTripTime;
}
// Store mimetype of each codec
if (newStat.hasOwnProperty('codecs') && stat.type === 'codec' && stat.mimeType && stat.id)
{
const codecId = stat.id;
const codecType = stat.mimeType.replace("video/", "").replace("audio/", "");
newStat.codecs[codecId] = codecType;
}
});
if (self.aggregatedStats.receiveToCompositeMs)
{
newStat.receiveToCompositeMs = self.aggregatedStats.receiveToCompositeMs;
self.latencyTestTimings.SetFrameDisplayDeltaTime(self.aggregatedStats.receiveToCompositeMs);
}
self.aggregatedStats = newStat;
if (self.onAggregatedStats)
self.onAggregatedStats(newStat);
};
};
const setupTransceiversAsync = async function (pc)
{
let hasTransceivers = pc.getTransceivers().length > 0;
// Setup a transceiver for getting UE video
pc.addTransceiver("video", { direction: "recvonly" });
// Setup a transceiver for sending mic audio to UE and receiving audio from UE
if (!self.useMic)
{
pc.addTransceiver("audio", { direction: "recvonly" });
}
else
{
let audioSendOptions = self.useMic ?
{
autoGainControl: false,
channelCount: 1,
echoCancellation: false,
latency: 0,
noiseSuppression: false,
sampleRate: 48000,
sampleSize: 16,
volume: 1.0
} : false;
// Note using mic on android chrome requires SSL or chrome://flags/ "unsafely-treat-insecure-origin-as-secure"
const stream = await navigator.mediaDevices.getUserMedia({ video: false, audio: audioSendOptions });
if (stream)
{
if (hasTransceivers)
{
for (let transceiver of pc.getTransceivers())
{
if (transceiver && transceiver.receiver && transceiver.receiver.track && transceiver.receiver.track.kind === "audio")
{
for (const track of stream.getTracks())
{
if (track.kind && track.kind == "audio")
{
transceiver.sender.replaceTrack(track);
transceiver.direction = "sendrecv";
}
}
}
}
}
else
{
for (const track of stream.getTracks())
{
if (track.kind && track.kind == "audio")
{
pc.addTransceiver(track, { direction: "sendrecv" });
}
}
}
}
else
{
pc.addTransceiver("audio", { direction: "recvonly" });
}
}
};
//**********************
//Public functions
//**********************
this.setVideoEnabled = function (enabled)
{
self.video.srcObject.getTracks().forEach(track => track.enabled = enabled);
};
this.startLatencyTest = function (onTestStarted)
{
// Can't start latency test without a video element
if (!self.video)
{
return;
}
self.latencyTestTimings.Reset();
self.latencyTestTimings.TestStartTimeMs = Date.now();
onTestStarted(self.latencyTestTimings.TestStartTimeMs);
};
//This is called when receiving new ICE candidates individually instead of as part of the offer
this.handleCandidateFromServer = function (iceCandidate)
{
let candidate = new RTCIceCandidate(iceCandidate);
console.log("%c[Unreal ICE candidate]", "background: pink; color: black", "| Type=", candidate.type, "| Protocol=", candidate.protocol, "| Address=", candidate.address, "| Port=", candidate.port, "|");
// if forcing TURN, reject any candidates not relay
if (self.forceTURN)
{
// if no relay address is found, assume there is no TURN server
if (candidate.candidate.indexOf("relay") < 0)
{
console.warn("Dropping candidate because it was not TURN relay.", "| Type=", candidate.type, "| Protocol=", candidate.protocol, "| Address=", candidate.address, "| Port=", candidate.port, "|");
return;
}
}
self.pcClient.addIceCandidate(candidate).catch(function (e)
{
console.error("Failed to add ICE candidate", e);
});
};
//Called externally to create an offer for the server
this.createOffer = function ()
{
if (self.pcClient)
{
console.log("Closing existing PeerConnection");
self.pcClient.close();
self.pcClient = null;
}
self.pcClient = new RTCPeerConnection(self.cfg);
setupPeerConnection(self.pcClient);
setupTransceiversAsync(self.pcClient).finally(function ()
{
self.dcClient = createDataChannel(self.pcClient, 'cirrus', self.dataChannelOptions);
handleCreateOffer(self.pcClient);
});
};
//Called externally when an offer is received from the server
this.receiveOffer = function (offer)
{
if (offer.sfu)
{
this.sfu = true;
delete offer.sfu;
}
if (!self.pcClient)
{
console.log("Creating a new PeerConnection in the browser.");
self.pcClient = new RTCPeerConnection(self.cfg);
setupPeerConnection(self.pcClient);
// Put things here that happen post transceiver setup
self.pcClient.setRemoteDescription(offer)
.then(() =>
{
setupTransceiversAsync(self.pcClient).finally(function ()
{
self.pcClient.createAnswer()
.then(answer =>
{
mungeSDP(answer);
return self.pcClient.setLocalDescription(answer);
})
.then(() =>
{
if (self.onWebRtcAnswer)
{
self.onWebRtcAnswer(self.pcClient.currentLocalDescription);
}
})
.then(() =>
{
let receivers = self.pcClient.getReceivers();
for (let receiver of receivers)
{
receiver.playoutDelayHint = 0;
}
})
.catch((error) => console.error("createAnswer() failed:", error));
});
});
}
};
//Called externally when an answer is received from the server
this.receiveAnswer = function (answer)
{
self.pcClient.setRemoteDescription(answer);
};
this.receiveSFUPeerDataChannelRequest = function (channelData)
{
const sendOptions = {
ordered: true,
negotiated: true,
id: channelData.sendStreamId
};
const unidirectional = channelData.sendStreamId != channelData.recvStreamId;
const sendDataChannel = self.pcClient.createDataChannel(unidirectional ? 'send-datachannel' : 'datachannel', sendOptions);
setupDataChannelCallbacks(sendDataChannel);
if (unidirectional)
{
const recvOptions = {
ordered: true,
negotiated: true,
id: channelData.recvStreamId
};
const recvDataChannel = self.pcClient.createDataChannel('recv-datachannel', recvOptions);
// when recv data channel is "open" we want to let SFU know so it can tell streamer
recvDataChannel.addEventListener('open', e =>
{
if (self.onSFURecvDataChannelReady)
{
self.onSFURecvDataChannelReady();
}
});
setupDataChannelCallbacks(recvDataChannel);
}
this.dcClient = sendDataChannel;
};
this.close = function ()
{
if (self.pcClient)
{
console.log("Closing existing peerClient");
self.pcClient.close();
self.pcClient = null;
}
if (self.aggregateStatsIntervalId)
{
clearInterval(self.aggregateStatsIntervalId);
}
};
//Sends data across the datachannel
this.send = function (data)
{
if (self.dcClient && self.dcClient.readyState == 'open')
{
//console.log('Sending data on dataconnection', self.dcClient)
self.dcClient.send(data);
}
};
this.getStats = function (onStats)
{
if (self.pcClient && onStats)
{
self.pcClient.getStats(null).then((stats) =>
{
onStats(stats);
});
}
};
this.aggregateStats = function (checkInterval)
{
let calcAggregatedStats = generateAggregatedStatsFunction();
let printAggregatedStats = () => { self.getStats(calcAggregatedStats); };
self.aggregateStatsIntervalId = setInterval(printAggregatedStats, checkInterval);
};
}
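One piece of the deleted stats helper worth spelling out: WebRTC stats timestamps are in milliseconds, so `8 * Δbytes / Δms` already yields kbit/s (k = 1000) with no extra scaling, exactly as the inline comment claims. A standalone check of that arithmetic (names are illustrative):

// bits/ms = (bits / 1000) / (s / 1000) = kbit/s, so no unit conversion is needed.
function bitrateKbps(bytesNow: number, bytesPrev: number, tNowMs: number, tPrevMs: number): number
{
    return Math.floor(8 * (bytesNow - bytesPrev) / (tNowMs - tPrevMs));
}

console.log(bitrateKbps(500_000, 250_000, 2000, 1000)); // 2000 kbit/s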

@@ -1,15 +1,14 @@
+ import { Config, Logger, PixelStreaming } from '@epicgames-ps/lib-pixelstreamingfrontend-ue5.2';
import { observable } from "mobx";
import { AppendUserInfo } from "../Add-on/ExportData";
import { app } from "../ApplicationServices/Application";
import { Sleep } from "../Common/Sleep";
- import { WebRtcPlayer } from "./WebRtcPlayer";
// WebRTC renderer connection tool
export class WebRtcRenderer
{
private ws: WebSocket;
- webRtcPlayerObj: any;
+ pixelStreaming: PixelStreaming;
@observable canUse = false;
constructor()
{
@@ -17,98 +16,53 @@ export class WebRtcRenderer
private async Connect()
{
- return new Promise((res, rej) =>
- {
-     let ws = new WebSocket("ws://localhost:13421");
-     this.ws = ws;
-     //@ts-ignore
-     ws.attemptStreamReconnection = true;
-     //@ts-ignore
-     ws.onmessagebinary = function (event)
-     {
-         if (!event || !event.data) { return; }
-         event.data.text().then(function (messageString)
-         {
-             // send the new stringified event back into `onmessage`
-             //@ts-ignore
-             ws.onmessage({ data: messageString });
-         }).catch(function (error)
-         {
-             console.error(`Failed to parse binary blob from websocket, reason: ${error}`);
-         });
-     };
-     ws.onmessage = (event) =>
-     {
-         // Check if the websocket message is binary; if so, stringify it.
-         if (event.data && event.data instanceof Blob)
-         {
-             //@ts-ignore
-             ws.onmessagebinary(event);
-             return;
-         }
-         let msg = JSON.parse(event.data);
-         if (msg.type === 'config')
-         {
-             console.log("%c[Inbound SS (config)]", "background: lightblue; color: black", msg);
-             this.webRtcPlayerObj = setupWebRtcPlayer(msg, ws);
-             res(true);
-         }
-         else if (msg.type === 'playerCount')
-         {
-             console.log("%c[Inbound SS (playerCount)]", "background: lightblue; color: black", msg);
-         }
-         else if (msg.type === 'offer')
-         {
-             console.log("%c[Inbound SS (offer)]", "background: lightblue; color: black", msg);
-             // if (!UrlParamsCheck('offerToReceive'))
-             {
-                 // onWebRtcOffer(msg);
-                 if (this.webRtcPlayerObj)
-                     this.webRtcPlayerObj.receiveOffer(msg);
-             }
-         }
-         else if (msg.type === 'answer')
-         {
-             console.log("%c[Inbound SS (answer)]", "background: lightblue; color: black", msg);
-             this.webRtcPlayerObj.receiveAnswer(msg);
-         }
-         else if (msg.type === 'iceCandidate')
-         {
-             // onWebRtcIce(msg.candidate);
-             if (this.webRtcPlayerObj)
-                 this.webRtcPlayerObj.handleCandidateFromServer(msg.candidate);
-         }
-         else if (msg.type === 'warning' && msg.warning)
-         {
-             console.warn(msg.warning);
-         }
-         else if (msg.type === 'peerDataChannels')
-         {
-             // onWebRtcSFUPeerDatachannels(msg);
-             this.webRtcPlayerObj.receiveSFUPeerDataChannelRequest(msg);
-         }
-         else
-         {
-             console.error("Invalid SS message type", msg.type);
-         }
-     };
-     ws.onerror = function (event)
-     {
-         console.log(`WS error: ${JSON.stringify(event)}`);
-     };
-     ws.onclose = (event) =>
-     {
-         res(false);
-         this.ws = undefined;
-         this.EndRenderer();
-     };
- });
+ Logger.Log = () => { };
+ Logger.Error = () => { };
+ Logger.Info = () => { };
+ Logger.SetLoggerVerbosity(-1);
+ const config = new Config({
+     initialSettings: {
+         AutoPlayVideo: true,
+         AutoConnect: true,
+         ss: 'ws://localhost:13421',
+         StartVideoMuted: true,
+         HoveringMouse: false
+     }
+ });
+ let pixContainer = document.getElementById("PixContainer");
+ if (!pixContainer)
+ {
+     let pixParent = document.createElement("div");
+     pixParent.id = "pixParent";
+     pixParent.style.position = "absolute";
+     pixParent.style.left = "0px";
+     pixParent.style.top = "0px";
+     pixParent.style.width = "100%";
+     pixParent.style.height = "100%";
+     pixParent.style.pointerEvents = "none";
+     app.Viewer.canvasContainer.insertBefore(pixParent, app.Viewer.PreViewer.Renderer.domElement);
+     pixContainer = document.createElement("div");
+     pixContainer.id = "PixContainer";
+     pixParent.append(pixContainer);
+ }
+ // Create a PixelStreaming instance and attach the video element to an existing parent div
+ const pixelStreaming = new PixelStreaming(config, { videoElementParent: pixContainer });
+ this.pixelStreaming = pixelStreaming;
+ document.getElementById("streamingVideo").style.pointerEvents = "none";
+ pixelStreaming.addEventListener("playStreamRejected", () =>
+ {
+     console.log("ok");
+ });
+ pixelStreaming.addEventListener("webRtcDisconnected", this.EndRenderer);
+ pixelStreaming.addEventListener("dataChannelError", this.EndRenderer);
+ pixelStreaming.addEventListener("dataChannelClose", this.EndRenderer);
}
@@ -120,183 +74,32 @@ export class WebRtcRenderer
let startData = { type: "StartStream" };
AppendUserInfo(startData);
app.WebSocket.Send(JSON.stringify(startData));
- await Sleep(50);
- await this.Connect();
- if (!this.webRtcPlayerObj) return;
- this.webRtcPlayerObj.video.style.position = "absolute";
- this.webRtcPlayerObj.video.style.pointerEvents = "none";
- this.webRtcPlayerObj.video.style.width = "100%";
- this.webRtcPlayerObj.video.style.height = "100%";
- this.webRtcPlayerObj.video.style.left = "0px";
- this.webRtcPlayerObj.video.style.top = "0px";
- app.Viewer.canvasContainer.insertBefore(this.webRtcPlayerObj.video, app.Viewer.PreViewer.Renderer.domElement);
- await new Promise((res, rej) =>
- {
-     this.webRtcPlayerObj.onVideoInitialised = () =>
-     {
-         res(true);
-     };
- });
- playStream(this.webRtcPlayerObj);
app.SendCameraPosToRenderer();
let d = { type: "ResSize", w: app.Viewer.Width, h: app.Viewer.Height };
AppendUserInfo(d);
app.WebSocket.Send(JSON.stringify(d));
+ await Sleep(50);
+ await this.Connect();
app.Viewer.DisableRenderer = true;
}
// just like closeStream
EndRenderer()
{
app.Viewer.DisableRenderer = false;
- this.ws?.close();
- if (this.webRtcPlayerObj)
- {
-     this.webRtcPlayerObj.video.remove();
-     this.webRtcPlayerObj.close();
-     this.webRtcPlayerObj = undefined;
- }
+ if (this.pixelStreaming)
+ {
+     this.pixelStreaming.removeEventListener("webRtcDisconnected", this.EndRenderer);
+     this.pixelStreaming.removeEventListener("dataChannelError", this.EndRenderer);
+     this.pixelStreaming.removeEventListener("dataChannelClose", this.EndRenderer);
+     this.pixelStreaming.disconnect();
+     this.pixelStreaming = undefined;
+ }
+ let pixContainer = document.getElementById("pixParent");
+ if (pixContainer)
+     pixContainer.remove();
}
}
- const WS_OPEN_STATE = 1;
- function setupWebRtcPlayer(config, ws)
- {
-     let webRtcPlayerObj = new WebRtcPlayer(config);
-     webRtcPlayerObj.onWebRtcOffer = function (offer)
-     {
-         if (ws && ws.readyState === WS_OPEN_STATE)
-         {
-             let offerStr = JSON.stringify(offer);
-             console.log("%c[Outbound SS message (offer)]", "background: lightgreen; color: black", offer);
-             ws.send(offerStr);
-         }
-     };
-     webRtcPlayerObj.onWebRtcCandidate = function (candidate)
-     {
-         if (ws && ws.readyState === WS_OPEN_STATE)
-         {
-             ws.send(JSON.stringify({
-                 type: 'iceCandidate',
-                 candidate: candidate
-             }));
-         }
-     };
-     webRtcPlayerObj.onWebRtcAnswer = function (answer)
-     {
-         if (ws && ws.readyState === WS_OPEN_STATE)
-         {
-             let answerStr = JSON.stringify(answer);
-             console.log("%c[Outbound SS message (answer)]", "background: lightgreen; color: black", answer);
-             ws.send(answerStr);
-             if (webRtcPlayerObj.sfu)
-             {
-                 // Send data channel setup request to the SFU
-                 const requestMsg = { type: "dataChannelRequest" };
-                 console.log("%c[Outbound SS message (dataChannelRequest)]", "background: lightgreen; color: black", requestMsg);
-                 ws.send(JSON.stringify(requestMsg));
-             }
-         }
-     };
-     webRtcPlayerObj.onSFURecvDataChannelReady = function ()
-     {
-         if (webRtcPlayerObj.sfu)
-         {
-             // Send the SFU a message to let it know the browser data channels are ready
-             const requestMsg = { type: "peerDataChannelsReady" };
-             console.log("%c[Outbound SS message (peerDataChannelsReady)]", "background: lightgreen; color: black", requestMsg);
-             ws.send(JSON.stringify(requestMsg));
-         }
-     };
-     webRtcPlayerObj.onVideoInitialised = function ()
-     {
-         if (ws && ws.readyState === WS_OPEN_STATE)
-         {
-             // the video is ready
-         }
-     };
-     webRtcPlayerObj.onNewVideoTrack = function (streams)
-     {
-         if (webRtcPlayerObj.video && webRtcPlayerObj.video.srcObject && webRtcPlayerObj.onVideoInitialised)
-         {
-             webRtcPlayerObj.onVideoInitialised();
-         }
-     };
-     webRtcPlayerObj.onDataChannelMessage = function (data)
-     {
-     };
-     // the data channel is connected
-     webRtcPlayerObj.onDataChannelConnected = () =>
-     {
-         // requestQualityControl(); // the code below requests quality control (we must request quality-control authority, otherwise the picture is lost on the second connection)
-         if (!webRtcPlayerObj._qualityController)
-             webRtcPlayerObj.send(new Uint8Array([1]).buffer);
-     };
-     return webRtcPlayerObj;
- }
- function playStream(webRtcPlayerObj)
- {
-     if (webRtcPlayerObj && webRtcPlayerObj.video)
-     {
-         if (webRtcPlayerObj.audio.srcObject)
-         {
-             // Video and audio are separate tracks
-             webRtcPlayerObj.audio.play().then(() =>
-             {
-                 // audio play has succeeded, start playing video
-                 playVideo(webRtcPlayerObj);
-             }).catch((onRejectedReason) =>
-             {
-                 console.error(onRejectedReason);
-                 console.log("Browser does not support autoplaying audio without interaction - to resolve this we are going to show the play button overlay.");
-             });
-         }
-         else
-         {
-             // Video and audio are combined in the video element
-             playVideo(webRtcPlayerObj);
-         }
-     }
- }
- function playVideo(webRtcPlayerObj)
- {
-     webRtcPlayerObj.video.play().catch((onRejectedReason) =>
-     {
-         if (webRtcPlayerObj.audio.srcObject)
-         {
-             try
-             {
-                 webRtcPlayerObj.audio.stop();
-             } catch (error)
-             {
-                 console.log("??");
-             }
-         }
-         console.error(onRejectedReason);
-         console.log("Browser does not support autoplaying video without interaction - to resolve this we are going to show the play button overlay.");
-     });
- }

@@ -332,7 +332,7 @@ export default class ResourceStore
{
let url = "/Data/MAT_INST基础材质库" + mtlPath;
- let dataString = await (await fetch(GenerateCdnUrl(`/Paks/paks_cooked/ue_resource/Content${encodeURI(url)}.json`))).text();
+ let dataString = await (await fetch(GenerateCdnUrl(`/Paks/paks_cooked2/ue_resource/Content${encodeURI(url)}.json`))).text();
let data = JSON.parse(dataString);
mtl = new PhysicalMaterialRecord();
