Date: Mon, 23 Sep 2024 13:03:05 +0800
Subject: [PATCH 4/6] =?UTF-8?q?=E5=AE=8C=E6=88=90mp4?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
main.js | 4 +-
package.json | 2 +-
src/page/Meeting/index.tsx | 284 +++++++++++++++++++------------------
3 files changed, 152 insertions(+), 138 deletions(-)
diff --git a/main.js b/main.js
index c4d0bb7..1b5d59a 100644
--- a/main.js
+++ b/main.js
@@ -278,8 +278,8 @@ app.on('ready', () => {
return app.getPath('userData');
});
// 用户数据目录路径
- global.userDataPath = app.getPath('userData'); // 全局变量
- console.log('User Data Path:', global.userDataPath);
+ const userDataPath = app.getPath('userData'); // 全局变量
+ console.log('User Data Path:', userDataPath);
// 检查并下载 ffmpeg
checkAndDownloadFFmpeg(userDataPath)
.then(() => {
diff --git a/package.json b/package.json
index e74a0f8..969c00e 100644
--- a/package.json
+++ b/package.json
@@ -1,7 +1,7 @@
{
"name": "WGShare.Metting",
"private": true,
- "version": "0.1.14",
+ "version": "0.3.0",
"main": "main.js",
"authors": "yj",
"description": "智汇享",
diff --git a/src/page/Meeting/index.tsx b/src/page/Meeting/index.tsx
index 83908db..f2d7fa9 100644
--- a/src/page/Meeting/index.tsx
+++ b/src/page/Meeting/index.tsx
@@ -20,10 +20,11 @@ import SharedFilesModel from '@/components/SharedFilesModel';
import StupWizard from '@/components/StupWizard';
import EquipmentManagement from '@/components/EquipmentManagement';
import UserVideo from '@/components/UserVideo';
-import { role } from '@/config/role';
-import path from 'path';
+import { role } from '@/config/role';
+const { ipcRenderer } = require('electron');
+import * as path from 'path';
const { confirm } = Modal;
-const { ipcRenderer } = require('electron');
+
const { exec } = require('child_process');
const fs = require('fs').promises;
dayjs.extend(durationPlugin);
@@ -547,39 +548,38 @@ const Meeting: React.FC = () => {
});
const reader = new FileReader() as any;
reader.onload = async () => {
- // const setting = await JSON.parse(storage.getItem('setting') as string)
- // const buffer = Buffer.from(reader.result);
- // await fs.writeFile(`${setting.recordingFilesPath}会议录制_${state.roomName}_${state.channelId}_${+new Date()}.mp4`, buffer, {});
-
- // 获取当前日期并格式化
- const date = new Date();
- const year = date.getFullYear();
- const month = date.getMonth() + 1; // JavaScript月份从0开始
- const day = date.getDate();
- const hours = date.getHours();
- const minutes = date.getMinutes();
- const formattedDate = `${year}年${month}月${day}日${hours}时${minutes}分`;
+ try {
+ const userDataPath = await ipcRenderer.invoke('get-user-data-path');
+
+ // 获取当前日期并格式化
+ const date = new Date();
+ const year = date.getFullYear();
+ const month = date.getMonth() + 1; // JavaScript月份从0开始
+ const day = date.getDate();
+ const hours = date.getHours();
+ const minutes = date.getMinutes();
+ const formattedDate = `${year}年${month}月${day}日${hours}时${minutes}分`;
const setting = await JSON.parse(storage.getItem('setting') as string)
const buffer = Buffer.from(reader.result);
- const mp4Path = `${setting.recordingFilesPath}会议录制_${state.roomName}_${state.channelId}_${formattedDate}_beforehanlder.mp4`;
+ const mp4Path=`${setting.recordingFilesPath}会议录制_${state.roomName}_${state.channelId}_${formattedDate}_beforehanlder.mp4`;
await fs.writeFile(mp4Path, buffer);
- const userDataPath = await ipcRenderer.invoke('get-user-data-path');
- // 获取应用程序安装路径
+ // 获取应用程序安装路径
const ffmpegPath = path.join(userDataPath, "ffmpeg.exe");
+
+ const inputFilePath = mp4Path; // 输入文件路径
+ const outputFilePath = mp4Path.replace('_beforehanlder',''); // 输出文件路径
+ const command = `${ffmpegPath} -i "${inputFilePath}" -vcodec copy -acodec copy "${outputFilePath}"`;
+
+ exec(command, (error, stdout, stderr) => {
+ if (error) {
+ console.error('Error executing ffmpeg command:', error);
+ return;
+ }
- const inputFilePath = mp4Path; // 输入文件路径
- const outputFilePath = mp4Path.replace('_beforehanlder', ''); // 输出文件路径
- const command = `${ffmpegPath} -i "${inputFilePath}" -vcodec copy -acodec copy "${outputFilePath}"`;
-
- exec(command, (error, stdout, stderr) => {
- if (error) {
- console.error('Error executing ffmpeg command:', error);
- return;
- }
- // 删除输入文件
+ // 删除输入文件
fs.unlink(inputFilePath, (err) => {
if (err) {
console.error('Error deleting input file:', err);
@@ -589,27 +589,32 @@ const Meeting: React.FC = () => {
});
+ confirm({
+ title: '提示',
+ icon: ,
+ content: `录制成功!文件已保存至:${setting.recordingFilesPath}`,
+ centered: true,
+ okText: '打开文件夹',
+ cancelText: '关闭',
+ async onOk() {
+ await fs.access(setting.recordingFilesPath, fs.constants.F_OK);
+ if (process.platform === 'win32') {
+ exec(`explorer "${setting.recordingFilesPath}"`);
+ } else if (process.platform === 'darwin') {
+ exec(`open "${setting.recordingFilesPath}"`);
+ }
+ },
+ onCancel() {
+ }
+ })
- confirm({
- title: '提示',
- icon: ,
- content: `录制成功!文件已保存至:${setting.recordingFilesPath}`,
- centered: true,
- okText: '打开文件夹',
- cancelText: '关闭',
- async onOk() {
- await fs.access(setting.recordingFilesPath, fs.constants.F_OK);
- if (process.platform === 'win32') {
- exec(`explorer "${setting.recordingFilesPath}"`);
- } else if (process.platform === 'darwin') {
- exec(`open "${setting.recordingFilesPath}"`);
- }
- },
- onCancel() {
- }
- })
- })
- };
+ });
+ } catch (err) {
+ console.error('处理录制时出错:', err);
+ }
+
+ }
+
reader.readAsArrayBuffer(blob);
}
};
@@ -627,32 +632,32 @@ const Meeting: React.FC = () => {
return () => clearTimeout(timer);
}, [isClicked]);
- // useEffect(() => {
- // const elements = document.querySelectorAll('.intersectionObserver-view');
- // if (elements.length && currentVideoId) {
- // elements.forEach(element => {
- // observer?.unobserve(element);
- // });
- // const observerObject = new IntersectionObserver(async (entries: IntersectionObserverEntry[], _observer: IntersectionObserver) => {
- // entries.forEach(async (entry) => {
- // if (entry.target.id !== user.uid) {
- // await agora.muteRemoteVideoStreamEx(Number(entry.target.id), !entry.isIntersecting)
- // }
- // });
- // await agora.muteRemoteVideoStreamEx(Number(currentVideoId), false)
- // }, { threshold: 0, root: document.getElementById('videoView') });
- // setObserver(observerObject)
- // elements.forEach(element => {
- // observerObject.observe(element);
- // });
- // }
- // return () => {
- // elements.forEach(element => {
- // observer?.unobserve(element);
- // });
- // observer?.disconnect();
- // }
- // }, [roomUserList, currentVideoId]);
+ useEffect(() => {
+ const elements = document.querySelectorAll('.intersectionObserver-view');
+ if (elements.length && currentVideoId) {
+ elements.forEach(element => {
+ observer?.unobserve(element);
+ });
+ const observerObject = new IntersectionObserver(async (entries: IntersectionObserverEntry[], _observer: IntersectionObserver) => {
+ entries.forEach(async (entry) => {
+ if (entry.target.id !== user.uid) {
+ await agora.muteRemoteVideoStreamEx(Number(entry.target.id), !entry.isIntersecting)
+ }
+ });
+ await agora.muteRemoteVideoStreamEx(Number(currentVideoId), false)
+ }, { threshold: 0, root: document.getElementById('videoView') });
+ setObserver(observerObject)
+ elements.forEach(element => {
+ observerObject.observe(element);
+ });
+ }
+ return () => {
+ elements.forEach(element => {
+ observer?.unobserve(element);
+ });
+ observer?.disconnect();
+ }
+ }, [roomUserList, currentVideoId]);
// 声网初始化
const agoraInit = async () => {
@@ -1060,69 +1065,76 @@ const Meeting: React.FC = () => {
}
})
break;
- case '录制':
- const setting = await JSON.parse(storage.getItem('setting') as string);
- try {
- await fs.access(setting.recordingFilesPath, fs.constants.F_OK);
- footerListTemplate[itemIndex][rowIndex].title = '录制中';
- footerListTemplate[itemIndex][rowIndex].active = true;
- setFooterList(footerListTemplate);
-
- window.electron.getSources().then(async (sources: any) => {
- const screenId = sources[0].id;
- const stream = await navigator.mediaDevices.getUserMedia({
- audio: {
- mandatory: {
- chromeMediaSource: 'desktop',
- chromeMediaSourceId: screenId,
- }
- } as any,
- video: {
- mandatory: {
- chromeMediaSource: 'desktop',
- chromeMediaSourceId: screenId,
- }
- } as any
- });
- // 获取所有音频输入设备
- const devices = await navigator.mediaDevices.enumerateDevices();
- const audioInputDevices = devices.filter(device => device.kind === 'audioinput' &&
- device.deviceId !== 'default' &&
- device.deviceId !== 'communications');
- // 使用Web Audio API来捕获系统声音和麦克风声音,将它们合并到同一个MediaStream中。
- const audioCtx = new (window.AudioContext || (window as any).webkitAudioContext)();
- const systemSoundSource = audioCtx.createMediaStreamSource(stream);
- const systemSoundDestination = audioCtx.createMediaStreamDestination();
- systemSoundSource.connect(systemSoundDestination);
- // 录制所有音频输入设备
- audioInputDevices.forEach(async device => {
- const micStream = await navigator.mediaDevices.getUserMedia({ audio: { deviceId: { exact: device.deviceId } } });
- setMediaStream(micStream);
- const micSoundSource = audioCtx.createMediaStreamSource(micStream);
- micSoundSource.connect(systemSoundDestination);
- })
- // 合并音频流与视频流
- const combinedSource = new MediaStream([...stream.getVideoTracks(), ...systemSoundDestination.stream.getAudioTracks()]);
- // 开始录制
- const mediaRecorder = new MediaRecorder(combinedSource, {
- mimeType: 'video/webm;codecs=vp9,opus',
- videoBitsPerSecond: 1.5e6,
- });
- setRecorder(mediaRecorder);
+ case '录制':
+ const setting = await JSON.parse(storage.getItem('setting') as string);
+ try {
+ await fs.access(setting.recordingFilesPath, fs.constants.F_OK);
+ footerListTemplate[itemIndex][rowIndex].title = '录制中';
+ footerListTemplate[itemIndex][rowIndex].active = true;
+ setFooterList(footerListTemplate);
+
+ window.electron.getSources().then(async (sources: any) => {
+ const screenId = sources[0].id;
+
+ const stream = await navigator.mediaDevices.getUserMedia({
+ audio: {
+ mandatory: {
+ chromeMediaSource: 'desktop',
+ chromeMediaSourceId: screenId,
+ }
+ },
+ video: {
+ mandatory: {
+ chromeMediaSource: 'desktop',
+ chromeMediaSourceId: screenId,
+ }
+ }
});
- } catch (error: any) {
- if (error.code === 'ENOENT') {
- message.error({
- content: 文件夹不存在 {
- stupWizardRef.current.changeModal(3);
- }}>前往设置
- });
- return;
- } else {
- message.error(error);
- }
- }
+ // 获取所有音频输入设备
+ const devices = await navigator.mediaDevices.enumerateDevices();
+ const audioInputDevices = devices.filter(device => device.kind === 'audioinput' &&
+ device.deviceId !== 'default' &&
+ device.deviceId !== 'communications' );
+
+ // 使用Web Audio API来捕获系统声音和麦克风声音,将它们合并到同一个MediaStream中。
+ const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
+ const systemSoundSource = audioCtx.createMediaStreamSource(stream);
+ const systemSoundDestination = audioCtx.createMediaStreamDestination();
+ systemSoundSource.connect(systemSoundDestination);
+
+ // 录制所有音频输入设备
+ audioInputDevices.forEach( async device=>{
+ const micStream = await navigator.mediaDevices.getUserMedia({ audio: { deviceId: { exact: device.deviceId } }});
+ const micSoundSource = audioCtx.createMediaStreamSource(micStream);
+ micSoundSource.connect(systemSoundDestination);
+ })
+
+ // 合并音频流与视频流
+ const combinedSource = new MediaStream([...stream.getVideoTracks(), ...systemSoundDestination.stream.getAudioTracks()]);
+
+ // 开始录制
+ const recorder = new MediaRecorder(combinedSource, {
+ mimeType: 'video/webm;codecs=vp9,opus',
+ videoBitsPerSecond: 1.5e6,
+ });
+
+ setMediaStream(combinedSource);
+ setRecorder(recorder);
+ });
+ } catch (error: any) {
+ if (error.code === 'ENOENT') {
+ message.error({
+ content: 文件夹不存在 {
+ stupWizardRef.current.changeModal(3);
+ }}>前往设置
+ });
+ return;
+ } else {
+ message.error(error);
+ }
+ }
+
break;
case '录制中':
footerListTemplate[itemIndex][rowIndex].title = '录制'
@@ -1329,6 +1341,8 @@ const Meeting: React.FC = () => {
}
})
}
+
+
// 开关麦克风
const postOpenMicrApi = async (enableMicr: boolean, uid: string, isAll: boolean, isMessage: boolean = false): Promise => {
if (isAll) {
From c9362fc14d9f14d9175939286f4cbab8125777dc Mon Sep 17 00:00:00 2001
From: youngq
Date: Mon, 23 Sep 2024 13:32:30 +0800
Subject: [PATCH 5/6] Comment out IntersectionObserver useEffect; bump version to 0.3.1
---
package.json | 2 +-
src/page/Meeting/index.tsx | 52 +++++++++++++++++++-------------------
2 files changed, 27 insertions(+), 27 deletions(-)
diff --git a/package.json b/package.json
index 969c00e..1fa5a8b 100644
--- a/package.json
+++ b/package.json
@@ -1,7 +1,7 @@
{
"name": "WGShare.Metting",
"private": true,
- "version": "0.3.0",
+ "version": "0.3.1",
"main": "main.js",
"authors": "yj",
"description": "智汇享",
diff --git a/src/page/Meeting/index.tsx b/src/page/Meeting/index.tsx
index f2d7fa9..6e49828 100644
--- a/src/page/Meeting/index.tsx
+++ b/src/page/Meeting/index.tsx
@@ -632,32 +632,32 @@ const Meeting: React.FC = () => {
return () => clearTimeout(timer);
}, [isClicked]);
- useEffect(() => {
- const elements = document.querySelectorAll('.intersectionObserver-view');
- if (elements.length && currentVideoId) {
- elements.forEach(element => {
- observer?.unobserve(element);
- });
- const observerObject = new IntersectionObserver(async (entries: IntersectionObserverEntry[], _observer: IntersectionObserver) => {
- entries.forEach(async (entry) => {
- if (entry.target.id !== user.uid) {
- await agora.muteRemoteVideoStreamEx(Number(entry.target.id), !entry.isIntersecting)
- }
- });
- await agora.muteRemoteVideoStreamEx(Number(currentVideoId), false)
- }, { threshold: 0, root: document.getElementById('videoView') });
- setObserver(observerObject)
- elements.forEach(element => {
- observerObject.observe(element);
- });
- }
- return () => {
- elements.forEach(element => {
- observer?.unobserve(element);
- });
- observer?.disconnect();
- }
- }, [roomUserList, currentVideoId]);
+ // useEffect(() => {
+ // const elements = document.querySelectorAll('.intersectionObserver-view');
+ // if (elements.length && currentVideoId) {
+ // elements.forEach(element => {
+ // observer?.unobserve(element);
+ // });
+ // const observerObject = new IntersectionObserver(async (entries: IntersectionObserverEntry[], _observer: IntersectionObserver) => {
+ // entries.forEach(async (entry) => {
+ // if (entry.target.id !== user.uid) {
+ // await agora.muteRemoteVideoStreamEx(Number(entry.target.id), !entry.isIntersecting)
+ // }
+ // });
+ // await agora.muteRemoteVideoStreamEx(Number(currentVideoId), false)
+ // }, { threshold: 0, root: document.getElementById('videoView') });
+ // setObserver(observerObject)
+ // elements.forEach(element => {
+ // observerObject.observe(element);
+ // });
+ // }
+ // return () => {
+ // elements.forEach(element => {
+ // observer?.unobserve(element);
+ // });
+ // observer?.disconnect();
+ // }
+ // }, [roomUserList, currentVideoId]);
// 声网初始化
const agoraInit = async () => {
From b86c4230bb86c417f0da6bcf5d8c2d9e9074454e Mon Sep 17 00:00:00 2001
From: youngq
Date: Mon, 23 Sep 2024 13:37:53 +0800
Subject: [PATCH 6/6] Revert recording-case refactor; add explicit types to ffmpeg exec/unlink callbacks
---
src/page/Meeting/index.tsx | 137 ++++++++++++++++++-------------------
1 file changed, 65 insertions(+), 72 deletions(-)
diff --git a/src/page/Meeting/index.tsx b/src/page/Meeting/index.tsx
index 6e49828..11f36ed 100644
--- a/src/page/Meeting/index.tsx
+++ b/src/page/Meeting/index.tsx
@@ -573,14 +573,14 @@ const Meeting: React.FC = () => {
const outputFilePath = mp4Path.replace('_beforehanlder',''); // 输出文件路径
const command = `${ffmpegPath} -i "${inputFilePath}" -vcodec copy -acodec copy "${outputFilePath}"`;
- exec(command, (error, stdout, stderr) => {
+ exec(command, (error:any, stdout:any, stderr:any) => {
if (error) {
console.error('Error executing ffmpeg command:', error);
return;
}
// 删除输入文件
- fs.unlink(inputFilePath, (err) => {
+ fs.unlink(inputFilePath, (err:any) => {
if (err) {
console.error('Error deleting input file:', err);
} else {
@@ -1065,77 +1065,70 @@ const Meeting: React.FC = () => {
}
})
break;
- case '录制':
- const setting = await JSON.parse(storage.getItem('setting') as string);
- try {
- await fs.access(setting.recordingFilesPath, fs.constants.F_OK);
- footerListTemplate[itemIndex][rowIndex].title = '录制中';
- footerListTemplate[itemIndex][rowIndex].active = true;
- setFooterList(footerListTemplate);
-
- window.electron.getSources().then(async (sources: any) => {
- const screenId = sources[0].id;
-
- const stream = await navigator.mediaDevices.getUserMedia({
- audio: {
- mandatory: {
- chromeMediaSource: 'desktop',
- chromeMediaSourceId: screenId,
- }
- },
- video: {
- mandatory: {
- chromeMediaSource: 'desktop',
- chromeMediaSourceId: screenId,
- }
+ case '录制':
+ const setting = await JSON.parse(storage.getItem('setting') as string);
+ try {
+ await fs.access(setting.recordingFilesPath, fs.constants.F_OK);
+ footerListTemplate[itemIndex][rowIndex].title = '录制中';
+ footerListTemplate[itemIndex][rowIndex].active = true;
+ setFooterList(footerListTemplate);
+
+ window.electron.getSources().then(async (sources: any) => {
+ const screenId = sources[0].id;
+ const stream = await navigator.mediaDevices.getUserMedia({
+ audio: {
+ mandatory: {
+ chromeMediaSource: 'desktop',
+ chromeMediaSourceId: screenId,
+ }
+ } as any,
+ video: {
+ mandatory: {
+ chromeMediaSource: 'desktop',
+ chromeMediaSourceId: screenId,
+ }
+ } as any
+ });
+ // 获取所有音频输入设备
+ const devices = await navigator.mediaDevices.enumerateDevices();
+ const audioInputDevices = devices.filter(device => device.kind === 'audioinput' &&
+ device.deviceId !== 'default' &&
+ device.deviceId !== 'communications');
+ // 使用Web Audio API来捕获系统声音和麦克风声音,将它们合并到同一个MediaStream中。
+ const audioCtx = new (window.AudioContext || (window as any).webkitAudioContext)();
+ const systemSoundSource = audioCtx.createMediaStreamSource(stream);
+ const systemSoundDestination = audioCtx.createMediaStreamDestination();
+ systemSoundSource.connect(systemSoundDestination);
+ // 录制所有音频输入设备
+ audioInputDevices.forEach(async device => {
+ const micStream = await navigator.mediaDevices.getUserMedia({ audio: { deviceId: { exact: device.deviceId } } });
+ setMediaStream(micStream);
+ const micSoundSource = audioCtx.createMediaStreamSource(micStream);
+ micSoundSource.connect(systemSoundDestination);
+ })
+ // 合并音频流与视频流
+ const combinedSource = new MediaStream([...stream.getVideoTracks(), ...systemSoundDestination.stream.getAudioTracks()]);
+ // 开始录制
+ const mediaRecorder = new MediaRecorder(combinedSource, {
+ mimeType: 'video/webm;codecs=vp9,opus',
+ videoBitsPerSecond: 1.5e6,
+ });
+ setRecorder(mediaRecorder);
+ });
+ } catch (error: any) {
+ if (error.code === 'ENOENT') {
+ message.error({
+ content: 文件夹不存在 {
+ stupWizardRef.current.changeModal(3);
+ }}>前往设置
+ });
+ return;
+ } else {
+ message.error(error);
}
- });
-
- // 获取所有音频输入设备
- const devices = await navigator.mediaDevices.enumerateDevices();
- const audioInputDevices = devices.filter(device => device.kind === 'audioinput' &&
- device.deviceId !== 'default' &&
- device.deviceId !== 'communications' );
-
- // 使用Web Audio API来捕获系统声音和麦克风声音,将它们合并到同一个MediaStream中。
- const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
- const systemSoundSource = audioCtx.createMediaStreamSource(stream);
- const systemSoundDestination = audioCtx.createMediaStreamDestination();
- systemSoundSource.connect(systemSoundDestination);
-
- // 录制所有音频输入设备
- audioInputDevices.forEach( async device=>{
- const micStream = await navigator.mediaDevices.getUserMedia({ audio: { deviceId: { exact: device.deviceId } }});
- const micSoundSource = audioCtx.createMediaStreamSource(micStream);
- micSoundSource.connect(systemSoundDestination);
- })
-
- // 合并音频流与视频流
- const combinedSource = new MediaStream([...stream.getVideoTracks(), ...systemSoundDestination.stream.getAudioTracks()]);
-
- // 开始录制
- const recorder = new MediaRecorder(combinedSource, {
- mimeType: 'video/webm;codecs=vp9,opus',
- videoBitsPerSecond: 1.5e6,
- });
-
- setMediaStream(combinedSource);
- setRecorder(recorder);
- });
- } catch (error: any) {
- if (error.code === 'ENOENT') {
- message.error({
- content: 文件夹不存在 {
- stupWizardRef.current.changeModal(3);
- }}>前往设置
- });
- return;
- } else {
- message.error(error);
- }
- }
-
- break;
+ }
+
+ break;
case '录制中':
footerListTemplate[itemIndex][rowIndex].title = '录制'
footerListTemplate[itemIndex][rowIndex].active = false