
fix: 分辨率 (resolution)

shuisheng · 1 year ago
commit 210a41cf92

+ 5 - 5
src/hooks/use-websocket.ts

@@ -53,7 +53,7 @@ import {
 
 import { useForwardAll } from './webrtc/forwardAll';
 import { useForwardBilibili } from './webrtc/forwardBilibili';
-import { useForwardDouyu } from './webrtc/forwardDouyu';
+import { useForwardHuya } from './webrtc/forwardHuya';
 import { useWebRtcRemoteDesk } from './webrtc/remoteDesk';
 
 export const useWebsocket = () => {
@@ -69,7 +69,7 @@ export const useWebsocket = () => {
   const { updateWebRtcSrsConfig, webRtcSrs } = useWebRtcSrs();
   const { updateForwardBilibiliConfig, forwardBilibili } = useForwardBilibili();
   const { updateForwardAllConfig, forwardAll } = useForwardAll();
-  const { updateForwardDouyuConfig, forwardDouyu } = useForwardDouyu();
+  const { updateForwardHuyaConfig, forwardHuya } = useForwardHuya();
   const { updateWebRtcTencentcloudCssConfig, webRtcTencentcloudCss } =
     useWebRtcTencentcloudCss();
   const { updateWebRtcLiveConfig, webRtcLive } = useWebRtcLive();
@@ -208,17 +208,17 @@ export const useWebsocket = () => {
         receiver: 'srs',
       });
     } else if (type === LiveRoomTypeEnum.forward_huya) {
-      updateForwardDouyuConfig({
+      updateForwardHuyaConfig({
         isPk: false,
         roomId: roomId.value,
         canvasVideoStream: canvasVideoStream.value,
       });
-      forwardDouyu.newWebRtc({
+      forwardHuya.newWebRtc({
         sender: mySocketId.value,
         receiver: 'srs',
         videoEl: createNullVideo(),
       });
-      forwardDouyu.sendOffer({
+      forwardHuya.sendOffer({
         sender: mySocketId.value,
         receiver: 'srs',
       });

+ 9 - 9
src/hooks/webrtc/forwardDouyu.ts → src/hooks/webrtc/forwardHuya.ts

@@ -9,7 +9,7 @@ import { useUserStore } from '@/store/user';
 import { LiveRoomTypeEnum } from '@/types/ILiveRoom';
 import { WebRTCClass } from '@/utils/network/webRTC';
 
-export const useForwardDouyu = () => {
+export const useForwardHuya = () => {
   const userStore = useUserStore();
   const networkStore = useNetworkStore();
 
@@ -21,13 +21,13 @@ export const useForwardDouyu = () => {
   const roomId = ref('');
   const canvasVideoStream = ref<MediaStream>();
 
-  function updateForwardDouyuConfig(data: { isPk; roomId; canvasVideoStream }) {
+  function updateForwardHuyaConfig(data: { isPk; roomId; canvasVideoStream }) {
     isPk.value = data.isPk;
     roomId.value = data.roomId;
     canvasVideoStream.value = data.canvasVideoStream;
   }
 
-  const forwardDouyu = {
+  const forwardHuya = {
     newWebRtc: (data: {
       sender: string;
       receiver: string;
@@ -54,7 +54,7 @@ export const useForwardDouyu = () => {
       sender: string;
       receiver: string;
     }) => {
-      console.log('开始ForwardDouyu的sendOffer', {
+      console.log('开始ForwardHuya的sendOffer', {
         sender,
         receiver,
       });
@@ -66,7 +66,7 @@ export const useForwardDouyu = () => {
           canvasVideoStream.value?.getTracks().forEach((track) => {
             if (canvasVideoStream.value) {
               console.log(
-                'ForwardDouyu的canvasVideoStream插入track',
+                'ForwardHuya的canvasVideoStream插入track',
                 track.kind,
                 track
               );
@@ -75,8 +75,8 @@ export const useForwardDouyu = () => {
           });
           const offerSdp = await rtc.createOffer();
           if (!offerSdp) {
-            console.error('ForwardDouyu的offerSdp为空');
-            window.$message.error('ForwardDouyu的offerSdp为空');
+            console.error('ForwardHuya的offerSdp为空');
+            window.$message.error('ForwardHuya的offerSdp为空');
             return;
           }
           await rtc.setLocalDescription(offerSdp!);
@@ -112,10 +112,10 @@ export const useForwardDouyu = () => {
           console.error('rtc不存在');
         }
       } catch (error) {
-        console.error('ForwardDouyu的sendOffer错误');
+        console.error('ForwardHuya的sendOffer错误');
       }
     },
   };
 
-  return { updateForwardDouyuConfig, forwardDouyu };
+  return { updateForwardHuyaConfig, forwardHuya };
 };
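
The sendOffer flow in the renamed hook, reduced to plain WebRTC calls (a minimal sketch assuming a bare RTCPeerConnection; the project's WebRTCClass wrapper may differ):

// Minimal offer flow: attach the canvas tracks, then create and set the local offer.
async function sendCanvasOffer(pc: RTCPeerConnection, canvasStream: MediaStream) {
  // Every track of the composited canvas stream goes onto the outgoing connection.
  canvasStream.getTracks().forEach((track) => pc.addTrack(track, canvasStream));
  const offer = await pc.createOffer();
  if (!offer.sdp) {
    // Mirrors the empty-SDP guard in forwardHuya.sendOffer.
    console.error('offer SDP is empty');
    return;
  }
  await pc.setLocalDescription(offer);
  // The offer would then be relayed to the SRS side over the signaling socket.
}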

+ 0 - 1
src/locales/en/layout.ts

@@ -41,7 +41,6 @@ export default nameSpaceWrap('layout', {
   srsLive: 'SRS Live',
   forwardBilibili: 'forward Bilibili',
   forwardHuya: 'forward Huya',
-  forwardDouyu: 'forward Douyu',
   forwardAll: 'forward All',
   webrtcLive: 'WebRTC Live',
   webrtcMeeting: 'WebRTC Meeting',

+ 1 - 2
src/locales/zh/layout.ts

@@ -41,8 +41,7 @@ export default nameSpaceWrap('layout', {
   srsLive: 'SRS直播(推荐)',
   forwardBilibili: '转推b站(beta)',
   forwardHuya: '转推虎牙(beta)',
-  forwardDouyu: '转推斗鱼(beta)',
-  forwardAll: '转推b站和斗鱼(beta)',
+  forwardAll: '转推b站和虎牙(beta)',
   webrtcLive: 'WebRTC直播(低延迟)',
   webrtcMeeting: 'WebRTC会议(低延迟)',
   msrLive: 'Msr直播(b站实现)',

+ 23 - 10
src/views/push/index.vue

@@ -1123,9 +1123,10 @@ function autoCreateVideo(data: {
   stream?: MediaStream;
   id: string;
   rect?: { left: number; top: number };
+  scaleInfo?: Record<number, { scaleX: number; scaleY: number }>;
   muted?: boolean;
 }) {
-  const { file, id, rect, muted } = data;
+  const { file, id, rect, scaleInfo, muted } = data;
   let stream = data.stream;
   let videoEl: HTMLVideoElement;
   if (file) {
@@ -1161,12 +1162,11 @@ function autoCreateVideo(data: {
         videoEl.width = width;
         videoEl.height = height;
         if (canvasDom) {
-          const old = appStore.allTrack.find((item) => item.id === id);
           fabricCanvas.value?.remove(canvasDom);
           canvasDom = markRaw(
             new fabric.Image(videoEl, {
-              top: (old?.rect?.top || 0) / window.devicePixelRatio,
-              left: (old?.rect?.left || 0) / window.devicePixelRatio,
+              top: (rect?.top || 0) / window.devicePixelRatio,
+              left: (rect?.left || 0) / window.devicePixelRatio,
               width,
               height,
             })
@@ -1174,8 +1174,8 @@ function autoCreateVideo(data: {
         } else {
           canvasDom = markRaw(
             new fabric.Image(videoEl, {
-              top: rect?.top || 0,
-              left: rect?.left || 0,
+              top: (rect?.top || 0) / window.devicePixelRatio,
+              left: (rect?.left || 0) / window.devicePixelRatio,
               width,
               height,
             })
@@ -1190,7 +1190,7 @@ function autoCreateVideo(data: {
         });
         handleMoving({ canvasDom, id });
         handleScaling({ canvasDom, id });
-        canvasDom.scale(ratio / window.devicePixelRatio);
+        canvasDom.scale(scaleInfo?.[window.devicePixelRatio]?.scaleX || 1);
         fabricCanvas.value!.add(canvasDom);
         resolve({ canvasDom, scale: ratio, videoEl, stream: stream! });
       }
@@ -1435,13 +1435,11 @@ async function handleCache() {
           id: obj.id,
           muted: item.muted,
           rect: item.rect,
+          scaleInfo: item.scaleInfo,
         });
         if (obj.volume !== undefined) {
           videoEl.volume = obj.volume / 100;
         }
-        handleMoving({ canvasDom, id: obj.id });
-        handleScaling({ canvasDom, id: obj.id });
-        canvasDom.scale(item.scaleInfo[window.devicePixelRatio].scaleX);
         obj.videoEl = videoEl;
         obj.canvasDom = canvasDom;
         obj.stream = stream;
@@ -1740,11 +1738,14 @@ async function addMediaOk(val: AppRootState['allTrack'][0]) {
       hidden: false,
       muted: false,
       scaleInfo: {},
+      rect: { left: 0, top: 0 },
     };
 
     const { canvasDom, videoEl, scale } = await autoCreateVideo({
       stream: event,
       id: videoTrack.id,
+      rect: videoTrack.rect,
+      scaleInfo: videoTrack.scaleInfo,
     });
     setScaleInfo({ canvasDom, track: videoTrack, scale });
     videoTrack.videoEl = videoEl;
@@ -1803,10 +1804,13 @@ async function addMediaOk(val: AppRootState['allTrack'][0]) {
       hidden: false,
       muted: false,
       scaleInfo: {},
+      rect: { left: 0, top: 0 },
     };
     const { canvasDom, videoEl, scale } = await autoCreateVideo({
       stream: event,
       id: videoTrack.id,
+      rect: videoTrack.rect,
+      scaleInfo: videoTrack.scaleInfo,
     });
     setScaleInfo({ canvasDom, track: videoTrack, scale });
     videoTrack.videoEl = videoEl;
@@ -1834,10 +1838,13 @@ async function addMediaOk(val: AppRootState['allTrack'][0]) {
       hidden: false,
       muted: false,
       scaleInfo: {},
+      rect: { left: 0, top: 0 },
     };
     const { canvasDom, videoEl, scale } = await autoCreateVideo({
       stream: event,
       id: videoTrack.id,
+      rect: videoTrack.rect,
+      scaleInfo: videoTrack.scaleInfo,
     });
     setScaleInfo({ canvasDom, track: videoTrack, scale });
     videoTrack.videoEl = videoEl;
@@ -1851,9 +1858,12 @@ async function addMediaOk(val: AppRootState['allTrack'][0]) {
     const event = val.stream;
     if (!event) return;
     const videoTrack = val;
+    videoTrack.rect = { left: 0, top: 0 };
     const { canvasDom, videoEl, scale } = await autoCreateVideo({
       stream: event,
       id: videoTrack.id,
+      rect: videoTrack.rect,
+      scaleInfo: videoTrack.scaleInfo,
     });
     setScaleInfo({ canvasDom, track: videoTrack, scale });
     videoTrack.videoEl = videoEl;
@@ -2099,6 +2109,7 @@ async function addMediaOk(val: AppRootState['allTrack'][0]) {
       hidden: false,
       muted: false,
       scaleInfo: {},
+      rect: { left: 0, top: 0 },
     };
     if (fabricCanvas.value) {
       if (!val.mediaInfo) return;
@@ -2109,6 +2120,8 @@ async function addMediaOk(val: AppRootState['allTrack'][0]) {
         file,
         id: mediaVideoTrack.id,
         muted: mediaVideoTrack.muted,
+        rect: mediaVideoTrack.rect,
+        scaleInfo: mediaVideoTrack.scaleInfo,
       });
       setScaleInfo({ canvasDom, track: mediaVideoTrack, scale });
       mediaVideoTrack.videoEl = videoEl;
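
The push/index.vue changes are the heart of the resolution fix: a video object's cached position (rect, stored in physical pixels) and its per-devicePixelRatio scale (scaleInfo) are now passed into autoCreateVideo and applied there, instead of being re-derived from the store or re-applied afterwards in handleCache. A minimal sketch of that normalization, assuming fabric.js v5 (the helper placeVideoOnCanvas and its parameter shapes are illustrative):

import { fabric } from 'fabric';

type ScaleInfo = Record<number, { scaleX: number; scaleY: number }>;

function placeVideoOnCanvas(
  canvas: fabric.Canvas,
  videoEl: HTMLVideoElement,
  rect: { left: number; top: number } | undefined,
  scaleInfo: ScaleInfo | undefined
) {
  const dpr = window.devicePixelRatio;
  const img = new fabric.Image(videoEl, {
    // Cached positions are in physical pixels, so divide by devicePixelRatio
    // to land at the same logical spot on any display.
    top: (rect?.top ?? 0) / dpr,
    left: (rect?.left ?? 0) / dpr,
    width: videoEl.videoWidth,
    height: videoEl.videoHeight,
  });
  // Scale is cached per devicePixelRatio; fall back to 1 when no entry exists.
  img.scale(scaleInfo?.[dpr]?.scaleX ?? 1);
  canvas.add(img);
}

Because autoCreateVideo now restores both rect and scale itself, handleCache no longer re-attaches the moving/scaling handlers or re-applies the cached scale after the fact.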