diff --git a/main.js b/main.js
index e7f221a..1b5d59a 100644
--- a/main.js
+++ b/main.js
@@ -12,6 +12,7 @@ const {
} = require('electron');
const path = require('node:path')
const fs = require('fs');
+const https = require('https');
const { autoUpdater, CancellationToken } = require('electron-updater');
const cancellationToken = new CancellationToken()
app.allowRendererProcessReuse = false;
@@ -271,6 +272,25 @@ app.on('ready', () => {
const y = Math.round((display.workArea.height - mainWindow.getSize()[1]) / 2);
mainWindow.setPosition(x, y);
});
+
+ // 监听渲染进程请求应用数据目录
+ ipcMain.handle('get-user-data-path', () => {
+ return app.getPath('userData');
+ });
+ // 用户数据目录路径
+ const userDataPath = app.getPath('userData'); // 全局变量
+ console.log('User Data Path:', userDataPath);
+ // 检查并下载 ffmpeg
+ checkAndDownloadFFmpeg(userDataPath)
+ .then(() => {
+ console.log('FFmpeg is ready for use.');
+ // 在这里执行任何依赖于 ffmpeg 的操作
+ })
+ .catch(error => {
+ console.error('Failed to ensure ffmpeg is available:', error);
+ app.quit();
+ });
+
}
});
// 检测更新,在你想要检查更新的时候执行,renderer事件触发后的操作自行编写
@@ -339,3 +359,42 @@ function cancleDownloadUpdate() {
 function quitAndInstall() {
   autoUpdater.quitAndInstall();
 }
+
+
+// Download `url` to `dest`. Resolves once the data is fully flushed to
+// disk; rejects (and removes the partial file) on request, HTTP-status
+// or write-stream errors. NOTE: redirects are not followed.
+function downloadFile(url, dest) {
+  return new Promise((resolve, reject) => {
+    const file = fs.createWriteStream(dest);
+    // Remove the partial download before rejecting.
+    const fail = (err) => file.close(() => fs.unlink(dest, () => reject(err)));
+    https.get(url, function (response) {
+      // Guard against saving an HTML error page as ffmpeg.exe.
+      if (response.statusCode !== 200) {
+        fail(new Error(`Download failed: HTTP ${response.statusCode}`));
+        return;
+      }
+      response.pipe(file);
+      file.on('finish', function () {
+        file.close(resolve);
+      });
+    }).on('error', fail);
+    file.on('error', fail);
+  });
+}
+
+// Ensure ffmpeg.exe is present under `appPath`, downloading it on first
+// run. Resolves when the binary is available, rejects on download failure.
+function checkAndDownloadFFmpeg(appPath) {
+  const ffmpegPath = path.join(appPath, 'ffmpeg.exe');
+  if (fs.existsSync(ffmpegPath)) {
+    console.log(`ffmpeg.exe found at ${ffmpegPath}.`);
+    return Promise.resolve();
+  }
+  console.log(`ffmpeg.exe not found at ${ffmpegPath}, downloading...`);
+  return downloadFile('https://meeting-api.23544.com/meeting/update/ffmpeg.exe', ffmpegPath)
+    .then(() => {
+      console.log('ffmpeg.exe downloaded successfully.');
+    });
+}
diff --git a/package.json b/package.json
index e74a0f8..969c00e 100644
--- a/package.json
+++ b/package.json
@@ -1,7 +1,7 @@
{
"name": "WGShare.Metting",
"private": true,
- "version": "0.1.14",
+ "version": "0.3.0",
"main": "main.js",
"authors": "yj",
"description": "智汇享",
diff --git a/src/page/Meeting/index.tsx b/src/page/Meeting/index.tsx
index fed1d7c..f2d7fa9 100644
--- a/src/page/Meeting/index.tsx
+++ b/src/page/Meeting/index.tsx
@@ -20,9 +20,11 @@ import SharedFilesModel from '@/components/SharedFilesModel';
import StupWizard from '@/components/StupWizard';
import EquipmentManagement from '@/components/EquipmentManagement';
import UserVideo from '@/components/UserVideo';
-import { role } from '@/config/role';
-import { fixWebmDuration } from "webm-duration-fix-buffer";
+import { role } from '@/config/role';
+const { ipcRenderer } = require('electron');
+import * as path from 'path';
const { confirm } = Modal;
+
const { exec } = require('child_process');
const fs = require('fs').promises;
dayjs.extend(durationPlugin);
@@ -540,32 +542,79 @@ const Meeting: React.FC = () => {
useEffect(() => {
if (recorder) {
recorder.start();
- recorder.ondataavailable = async (event: any) => {
- const blob = await fixWebmDuration(event.data);
+ recorder.ondataavailable = (event: any) => {
+ const blob = new Blob([event.data], {
+ type: 'video/webm',
+ });
const reader = new FileReader() as any;
reader.onload = async () => {
+ try {
+ const userDataPath = await ipcRenderer.invoke('get-user-data-path');
+
+ // 获取当前日期并格式化
+ const date = new Date();
+ const year = date.getFullYear();
+ const month = date.getMonth() + 1; // JavaScript月份从0开始
+ const day = date.getDate();
+ const hours = date.getHours();
+ const minutes = date.getMinutes();
+ const formattedDate = `${year}年${month}月${day}日${hours}时${minutes}分`;
+
+
const setting = await JSON.parse(storage.getItem('setting') as string)
const buffer = Buffer.from(reader.result);
- await fs.writeFile(`${setting.recordingFilesPath}会议录制_${state.roomName}_${state.channelId}_${+new Date()}.webm`, buffer, {});
- confirm({
- title: '提示',
- icon: ,
- content: `录制成功!文件已保存至:${setting.recordingFilesPath}`,
- centered: true,
- okText: '打开文件夹',
- cancelText: '关闭',
- async onOk() {
- await fs.access(setting.recordingFilesPath, fs.constants.F_OK);
- if (process.platform === 'win32') {
- exec(`explorer "${setting.recordingFilesPath}"`);
- } else if (process.platform === 'darwin') {
- exec(`open "${setting.recordingFilesPath}"`);
+ const mp4Path=`${setting.recordingFilesPath}会议录制_${state.roomName}_${state.channelId}_${formattedDate}_beforehandler.mp4`;
+ await fs.writeFile(mp4Path, buffer);
+
+ // 获取应用程序安装路径
+ const ffmpegPath = path.join(userDataPath, "ffmpeg.exe");
+
+ const inputFilePath = mp4Path; // 输入文件路径
+ const outputFilePath = mp4Path.replace('_beforehandler',''); // 输出文件路径
+ const command = `"${ffmpegPath}" -i "${inputFilePath}" -vcodec copy -acodec copy "${outputFilePath}"`;
+
+ exec(command, (error, stdout, stderr) => {
+ if (error) {
+ console.error('Error executing ffmpeg command:', error);
+ return;
}
- },
- onCancel() {
- }
- })
- };
+
+ // 删除输入文件
+ fs.unlink(inputFilePath)
+ .then(() => {
+ console.log('Input file deleted successfully.');
+ })
+ .catch((err) => {
+ console.error('Error deleting input file:', err);
+ });
+
+
+ confirm({
+ title: '提示',
+ icon: ,
+ content: `录制成功!文件已保存至:${setting.recordingFilesPath}`,
+ centered: true,
+ okText: '打开文件夹',
+ cancelText: '关闭',
+ async onOk() {
+ await fs.access(setting.recordingFilesPath, fs.constants.F_OK);
+ if (process.platform === 'win32') {
+ exec(`explorer "${setting.recordingFilesPath}"`);
+ } else if (process.platform === 'darwin') {
+ exec(`open "${setting.recordingFilesPath}"`);
+ }
+ },
+ onCancel() {
+ }
+ })
+
+ });
+ } catch (err) {
+ console.error('处理录制时出错:', err);
+ }
+
+ }
+
reader.readAsArrayBuffer(blob);
}
};
@@ -583,32 +632,32 @@ const Meeting: React.FC = () => {
return () => clearTimeout(timer);
}, [isClicked]);
- // useEffect(() => {
- // const elements = document.querySelectorAll('.intersectionObserver-view');
- // if (elements.length && currentVideoId) {
- // elements.forEach(element => {
- // observer?.unobserve(element);
- // });
- // const observerObject = new IntersectionObserver(async (entries: IntersectionObserverEntry[], _observer: IntersectionObserver) => {
- // entries.forEach(async (entry) => {
- // if (entry.target.id !== user.uid) {
- // await agora.muteRemoteVideoStreamEx(Number(entry.target.id), !entry.isIntersecting)
- // }
- // });
- // await agora.muteRemoteVideoStreamEx(Number(currentVideoId), false)
- // }, { threshold: 0, root: document.getElementById('videoView') });
- // setObserver(observerObject)
- // elements.forEach(element => {
- // observerObject.observe(element);
- // });
- // }
- // return () => {
- // elements.forEach(element => {
- // observer?.unobserve(element);
- // });
- // observer?.disconnect();
- // }
- // }, [roomUserList, currentVideoId]);
+ useEffect(() => {
+ const elements = document.querySelectorAll('.intersectionObserver-view');
+ if (elements.length && currentVideoId) {
+ elements.forEach(element => {
+ observer?.unobserve(element);
+ });
+ const observerObject = new IntersectionObserver(async (entries: IntersectionObserverEntry[], _observer: IntersectionObserver) => {
+ entries.forEach(async (entry) => {
+ if (entry.target.id !== user.uid) {
+ await agora.muteRemoteVideoStreamEx(Number(entry.target.id), !entry.isIntersecting)
+ }
+ });
+ await agora.muteRemoteVideoStreamEx(Number(currentVideoId), false)
+ }, { threshold: 0, root: document.getElementById('videoView') });
+ setObserver(observerObject)
+ elements.forEach(element => {
+ observerObject.observe(element);
+ });
+ }
+ return () => {
+ elements.forEach(element => {
+ observer?.unobserve(element);
+ });
+ observer?.disconnect();
+ }
+ }, [roomUserList, currentVideoId]);
// 声网初始化
const agoraInit = async () => {
@@ -1016,69 +1065,76 @@ const Meeting: React.FC = () => {
}
})
break;
- case '录制':
- const setting = await JSON.parse(storage.getItem('setting') as string);
- try {
- await fs.access(setting.recordingFilesPath, fs.constants.F_OK);
- footerListTemplate[itemIndex][rowIndex].title = '录制中';
- footerListTemplate[itemIndex][rowIndex].active = true;
- setFooterList(footerListTemplate);
-
- window.electron.getSources().then(async (sources: any) => {
- const screenId = sources[0].id;
- const stream = await navigator.mediaDevices.getUserMedia({
- audio: {
- mandatory: {
- chromeMediaSource: 'desktop',
- chromeMediaSourceId: screenId,
- }
- } as any,
- video: {
- mandatory: {
- chromeMediaSource: 'desktop',
- chromeMediaSourceId: screenId,
- }
- } as any
- });
- // 获取所有音频输入设备
- const devices = await navigator.mediaDevices.enumerateDevices();
- const audioInputDevices = devices.filter(device => device.kind === 'audioinput' &&
- device.deviceId !== 'default' &&
- device.deviceId !== 'communications');
- // 使用Web Audio API来捕获系统声音和麦克风声音,将它们合并到同一个MediaStream中。
- const audioCtx = new (window.AudioContext || (window as any).webkitAudioContext)();
- const systemSoundSource = audioCtx.createMediaStreamSource(stream);
- const systemSoundDestination = audioCtx.createMediaStreamDestination();
- systemSoundSource.connect(systemSoundDestination);
- // 录制所有音频输入设备
- audioInputDevices.forEach(async device => {
- const micStream = await navigator.mediaDevices.getUserMedia({ audio: { deviceId: { exact: device.deviceId } } });
- setMediaStream(micStream);
- const micSoundSource = audioCtx.createMediaStreamSource(micStream);
- micSoundSource.connect(systemSoundDestination);
- })
- // 合并音频流与视频流
- const combinedSource = new MediaStream([...stream.getVideoTracks(), ...systemSoundDestination.stream.getAudioTracks()]);
- // 开始录制
- const mediaRecorder = new MediaRecorder(combinedSource, {
- mimeType: 'video/webm;codecs=vp9,opus',
- videoBitsPerSecond: 1.5e6,
- });
- setRecorder(mediaRecorder);
+ case '录制':
+ const setting = await JSON.parse(storage.getItem('setting') as string);
+ try {
+ await fs.access(setting.recordingFilesPath, fs.constants.F_OK);
+ footerListTemplate[itemIndex][rowIndex].title = '录制中';
+ footerListTemplate[itemIndex][rowIndex].active = true;
+ setFooterList(footerListTemplate);
+
+ window.electron.getSources().then(async (sources: any) => {
+ const screenId = sources[0].id;
+
+ const stream = await navigator.mediaDevices.getUserMedia({
+ audio: {
+ mandatory: {
+ chromeMediaSource: 'desktop',
+ chromeMediaSourceId: screenId,
+ }
+ },
+ video: {
+ mandatory: {
+ chromeMediaSource: 'desktop',
+ chromeMediaSourceId: screenId,
+ }
+ }
});
- } catch (error: any) {
- if (error.code === 'ENOENT') {
- message.error({
- content:
文件夹不存在 {
- stupWizardRef.current.changeModal(3);
- }}>前往设置
- });
- return;
- } else {
- message.error(error);
- }
- }
+ // 获取所有音频输入设备
+ const devices = await navigator.mediaDevices.enumerateDevices();
+ const audioInputDevices = devices.filter(device => device.kind === 'audioinput' &&
+ device.deviceId !== 'default' &&
+ device.deviceId !== 'communications' );
+
+ // 使用Web Audio API来捕获系统声音和麦克风声音,将它们合并到同一个MediaStream中。
+ const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
+ const systemSoundSource = audioCtx.createMediaStreamSource(stream);
+ const systemSoundDestination = audioCtx.createMediaStreamDestination();
+ systemSoundSource.connect(systemSoundDestination);
+
+ // 录制所有音频输入设备
+ audioInputDevices.forEach( async device=>{
+ const micStream = await navigator.mediaDevices.getUserMedia({ audio: { deviceId: { exact: device.deviceId } }});
+ const micSoundSource = audioCtx.createMediaStreamSource(micStream);
+ micSoundSource.connect(systemSoundDestination);
+ })
+
+ // 合并音频流与视频流
+ const combinedSource = new MediaStream([...stream.getVideoTracks(), ...systemSoundDestination.stream.getAudioTracks()]);
+
+ // 开始录制
+ const recorder = new MediaRecorder(combinedSource, {
+ mimeType: 'video/webm;codecs=vp9,opus',
+ videoBitsPerSecond: 1.5e6,
+ });
+
+ setMediaStream(combinedSource);
+ setRecorder(recorder);
+ });
+ } catch (error: any) {
+ if (error.code === 'ENOENT') {
+ message.error({
+ content: 文件夹不存在 {
+ stupWizardRef.current.changeModal(3);
+ }}>前往设置
+ });
+ return;
+ } else {
+ message.error(error);
+ }
+ }
+
break;
case '录制中':
footerListTemplate[itemIndex][rowIndex].title = '录制'
@@ -1285,6 +1341,8 @@ const Meeting: React.FC = () => {
}
})
}
+
+
// 开关麦克风
const postOpenMicrApi = async (enableMicr: boolean, uid: string, isAll: boolean, isMessage: boolean = false): Promise => {
if (isAll) {