diff --git a/spawner/README.md b/spawner/README.md
new file mode 100644
index 0000000..462dc86
--- /dev/null
+++ b/spawner/README.md
@@ -0,0 +1,76 @@
+# Google Meet Recording Bot
+
+A bot that automates Google Meet tasks: it joins a meeting and records its WebRTC streams. This project also documents how to convert the WebRTC stream into an MPEG2 stream using Unreal Media Server and save it locally with FFmpeg.
+
+## Features
+
+- Automates joining Google Meet sessions.
+- Captures WebRTC streams.
+- Converts WebRTC streams to MPEG2.
+- Saves the converted video locally using FFmpeg.
+
+## Requirements
+
+- Node.js
+- Selenium
+- Unreal Media Server
+- FFmpeg
+
+## Setup
+
+1. Clone the repository:
+   ```bash
+   git clone https://github.com/Prithviraj2003/meet-record-bot.git
+   cd meet-record-bot
+   ```
+2. Install the required dependencies:
+   ```bash
+   npm install
+   ```
+
+## How to Use
+
+1. Run the bot:
+   ```bash
+   npm run dev
+   ```
+2. Automated tasks:
+   - The bot joins the Google Meet session whose link is hardcoded in `src/index.ts` (see "Changing the Meeting Link" below).
+   - It publishes the captured WebRTC stream to Unreal Media Server as configured.
+
+## Convert WebRTC Stream to MPEG2 Stream
+
+Follow these steps to convert the WebRTC stream into an MPEG2 stream using Unreal Media Server and save the video locally.
+
+### Step 1: Unreal Media Server Setup
+1. [Download](http://umediaserver.net/umediaserver/download.html) and install Unreal Media Server.
+2. Configure Unreal Media Server as per the tutorial: [WebRTC to FFmpeg Guide](http://umediaserver.net/umediaserver/webrtctoffmpeg.htm).
+3. Use the following values during configuration (they must match the bot; see "Matching the Bot to the Server Configuration" below):
+   - Alias: `webrtctest`
+   - WebRTC password: `12345`
+   - IP address and port for MPEG2 broadcasting: `127.0.0.1:2000`
+4. Start Unreal Media Server and make sure it is running correctly.
+
+### Step 2: Save Video Locally with FFmpeg
+1. [Download](https://www.ffmpeg.org/download.html) and install FFmpeg on your local machine.
+2. Open a terminal and navigate to the directory where you want to store the video.
+3. Run the following command to save the video stream locally:
+   ```bash
+   ffmpeg -i udp://127.0.0.1:2000 -c:v libx264 Desktop\ffmpeg\outputfinal.mp4
+   ```
+   Replace `Desktop\ffmpeg\outputfinal.mp4` with your desired output file path.
+
+### Step 3: Verify the Video
+1. Check the output file at the specified location.
+2. Ensure the video plays correctly (see "Inspecting the MPEG2 Stream" below if it does not).
+
+## Stay Updated
+Follow us on [X](https://x.com/Prithviraj81646) (Twitter) for the latest updates, features, and announcements about this project.
+
+## License
+This project is licensed under the MIT License.
+
+## Contributing
+Contributions are welcome! Please open an issue or submit a pull request.
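+## Changing the Meeting Link
+
+The meeting link is currently hardcoded in `src/index.ts` rather than read from configuration. To record a different meeting, edit the `driver.get(...)` call inside `openMeet` (the URL below is a placeholder for your own meeting code):
+
+```ts
+// openMeet() in src/index.ts
+await driver.get("https://meet.google.com/your-meeting-code");
+```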
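+## Matching the Bot to the Server Configuration
+
+The alias and WebRTC password from Step 1 are not arbitrary: the bot's signaling code in `src/index.ts` uses them verbatim, and it assumes the server's WebSocket signaling endpoint is reachable on port `5119` of the same machine. If you change any of these values on the server, update the code to match. For reference, these are the relevant lines from the injected script:
+
+```ts
+// The WebSocket path ends with the server alias...
+const socket = new WebSocket(
+  "ws://127.0.0.1:5119/webrtc_publish/singleport/tcp/webrtctest"
+);
+// ...and the SDP offer is prefixed with the WebRTC password.
+socket.send("12345|-|-|" + JSON.stringify(peerConnection.localDescription));
+```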
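+## Inspecting the MPEG2 Stream
+
+If the recording from Step 2 comes out empty, it helps to confirm that Unreal Media Server is actually broadcasting before involving the bot. The commands below are a minimal sketch that assumes the `127.0.0.1:2000` broadcast address from Step 1:
+
+```bash
+# Print codec and stream details of the incoming MPEG2-TS broadcast without saving anything
+ffprobe udp://127.0.0.1:2000
+
+# Record the stream without re-encoding (lighter than the libx264 command in Step 2)
+ffmpeg -i udp://127.0.0.1:2000 -c copy output.ts
+```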
\ No newline at end of file
diff --git a/spawner/src/index.ts b/spawner/src/index.ts
index 819f46e..a6a9800 100644
--- a/spawner/src/index.ts
+++ b/spawner/src/index.ts
@@ -1,147 +1,357 @@
-import { Builder, Browser, By, until, WebDriver } from 'selenium-webdriver'
-import { Options } from 'selenium-webdriver/chrome'
+import { Builder, Browser, By, until, WebDriver } from "selenium-webdriver";
+import { Options } from "selenium-webdriver/chrome";
 
 async function openMeet(driver: WebDriver) {
-    try {
-        await driver.get('https://meet.google.com/ybi-eurv-efr');
-        const popupButton = await driver.wait(until.elementLocated(By.xpath('//span[contains(text(), "Got it")]')), 10000);
-        await popupButton.click()
-        const nameInput = await driver.wait(until.elementLocated(By.xpath('//input[@placeholder="Your name"]')), 10000);
-        await nameInput.clear();
-        await nameInput.click();
-        await nameInput.sendKeys('value', "Meeting bot");
-        await driver.sleep(1000)
-        const buttonInput = await driver.wait(until.elementLocated(By.xpath('//span[contains(text(), "Ask to join")]')), 10000);
-        buttonInput.click();
-    } finally {
+  try {
+    await driver.get("https://meet.google.com/ybi-eurv-efr");
+    const nameInput = await driver.wait(
+      until.elementLocated(By.xpath('//input[@placeholder="Your name"]')),
+      15000
+    );
+    await driver.sleep(2000);
+    await nameInput.sendKeys("Meeting bot");
+    const buttonInput = await driver.wait(
+      until.elementLocated(By.xpath('//span[contains(text(), "Ask to join")]')),
+      15000
+    );
+    await buttonInput.click();
+  } catch (e) {
+    console.log(e);
   }
 }
 
 async function getDriver() {
-    const options = new Options({})
-    options.addArguments("--disable-blink-features=AutomationControlled");
-    options.addArguments("--use-fake-ui-for-media-stream");
-    options.addArguments("--window-size=1080,720")
-    options.addArguments('--auto-select-desktop-capture-source=[RECORD]');
-    options.addArguments('--auto-select-desktop-capture-source=[RECORD]');
-    options.addArguments('--enable-usermedia-screen-capturing');
-    options.addArguments('--auto-select-tab-capture-source-by-title="Meet"')
-    options.addArguments('--allow-running-insecure-content');
-
-    // --allow-file-access-from-files--use-fake-device-for-media-stream--allow-running-insecure-content--allow-file-access-from-files--use-fake-device-for-media-stream--allow-running-insecure-content
-
-
-    let driver = await new Builder().forBrowser(Browser.CHROME).setChromeOptions(options).build()
-    return driver;
+  const options = new Options({});
+  options.addArguments("--disable-blink-features=AutomationControlled");
+  options.addArguments("--use-fake-ui-for-media-stream");
+  options.addArguments("--window-size=1920,1200");
+  options.addArguments("--auto-select-desktop-capture-source=[RECORD]");
+  options.addArguments("--enable-usermedia-screen-capturing");
+  options.addArguments('--auto-select-tab-capture-source-by-title="Meet"');
+  options.addArguments("--allow-running-insecure-content");
+
+  let driver = await new Builder()
+    .forBrowser(Browser.CHROME)
+    .setChromeOptions(options)
+    .build();
+  return driver;
 }
 
 async function startScreenshare(driver: WebDriver) {
-    console.log("startScreensharecalled")
-    const response = await driver.executeScript(`
-
-    function wait(delayInMS) {
-        return new Promise((resolve) => setTimeout(resolve, delayInMS));
-    }
-
-
-    function startRecording(stream, lengthInMS) {
-        let recorder = new MediaRecorder(stream);
-        let data = [];
-
-        recorder.ondataavailable = (event) => data.push(event.data);
-        recorder.start();
-
-        let stopped = new Promise((resolve, reject) => {
-            recorder.onstop = resolve;
-            recorder.onerror = (event) => reject(event.name);
-        });
-
-        let recorded = wait(lengthInMS).then(() => {
-            if (recorder.state === "recording") {
-                recorder.stop();
-            }
-        });
-
-        return Promise.all([stopped, recorded]).then(() => data);
-    }
-
-    console.log("before mediadevices")
-    window.navigator.mediaDevices.getDisplayMedia({
-        video: {
-            displaySurface: "browser"
-        },
-        audio: true,
-        preferCurrentTab: true
-    }).then(async screenStream => {
-        const audioContext = new AudioContext();
-        const screenAudioStream = audioContext.createMediaStreamSource(screenStream)
-        const audioEl1 = document.querySelectorAll("audio")[0];
-        const audioEl2 = document.querySelectorAll("audio")[1];
-        const audioEl3 = document.querySelectorAll("audio")[2];
-        const audioElStream1 = audioContext.createMediaStreamSource(audioEl1.srcObject)
-        const audioElStream2 = audioContext.createMediaStreamSource(audioEl3.srcObject)
-        const audioElStream3 = audioContext.createMediaStreamSource(audioEl2.srcObject)
-
-        const dest = audioContext.createMediaStreamDestination();
-
-        screenAudioStream.connect(dest)
-        audioElStream1.connect(dest)
-        audioElStream2.connect(dest)
-        audioElStream3.connect(dest)
-
-        // window.setInterval(() => {
-        //     document.querySelectorAll("audio").forEach(audioEl => {
-        //         if (!audioEl.getAttribute("added")) {
-        //             console.log("adding new audio");
-        //             const audioEl = document.querySelector("audio");
-        //             const audioElStream = audioContext.createMediaStreamSource(audioEl.srcObject)
-        //             audioEl.setAttribute("added", true);
-        //             audioElStream.connect(dest)
-        //         }
-        //     })
-
-        // }, 2500);
-
-        // Combine screen and audio streams
-        const combinedStream = new MediaStream([
-            ...screenStream.getVideoTracks(),
-            ...dest.stream.getAudioTracks()
-        ]);
-
-        console.log("before start recording")
-        const recordedChunks = await startRecording(combinedStream, 60000);
-        console.log("after start recording")
-
-        let recordedBlob = new Blob(recordedChunks, { type: "video/webm" });
-
-        // Create download for video with audio
-        const recording = document.createElement("video");
-        recording.src = URL.createObjectURL(recordedBlob);
-
-        const downloadButton = document.createElement("a");
-        downloadButton.href = recording.src;
-        downloadButton.download = "RecordedScreenWithAudio.webm";
-        downloadButton.click();
-
-        console.log("after download button click")
-
-        // Clean up streams
-        screenStream.getTracks().forEach(track => track.stop());
-        audioStream.getTracks().forEach(track => track.stop());
-    })
-
-    `)
-
-    console.log(response)
-    driver.sleep(1000000)
+  console.log("startScreenshare called");
+  await driver.sleep(2000);
+  try {
+    const script = `
+const videoProfile = "profile-level-id=6400";
+const ipAddress = "127.0.0.1";
+const useSingleWebRTCPort = true;
+function setCodec(sdp, type, codec, clockRate) {
+  var sdpLines = sdp.split("\\r\\n");
+
+  var mLineIndex = null;
+  for (var i = 0; i < sdpLines.length; i++) {
+    if (sdpLines[i].search("m=" + type) !== -1) {
+      mLineIndex = i;
+      break;
+    }
+  }
+
+  if (mLineIndex === null) return sdp;
+
+  var codecPayload = null;
+  var re = new RegExp(":(\\d+) " + codec + "/" + clockRate);
+  function extractPayloadType(sdpLine, pattern) {
+    var result = sdpLine.match(pattern);
+    return result && result.length == 2 ?
result[1] : null; + } + for (var i = mLineIndex; i < sdpLines.length; i++) { + if (sdpLines[i].search(codec + "/" + clockRate) !== -1) { + codecPayload = extractPayloadType(sdpLines[i], re); + if ( + codecPayload && + CodecProfileMatches(codec, sdpLines, mLineIndex, codecPayload) + ) { + sdpLines[mLineIndex] = setDefaultCodec( + sdpLines[mLineIndex], + codecPayload + ); + break; + } + } + } + + if (codecPayload === null) return sdp; + + var rtmpmap = "a=rtpmap:"; + var rtcp = "a=rtcp-fb:"; + var fmptp = "a=fmtp:"; + var rtmpmapThis = "a=rtpmap:" + codecPayload; + var rtcpThis = "a=rtcp-fb:" + codecPayload; + var fmptpThis = "a=fmtp:" + codecPayload; + var bAddAll = false; + var resSDPLines = new Array(); + + for (var i = 0; i < sdpLines.length; i++) { + if (i <= mLineIndex) { + resSDPLines.push(sdpLines[i]); + } else { + if (sdpLines[i].search("m=") === 0) bAddAll = true; + + var bNotToAdd = + (sdpLines[i].search(rtmpmap) === 0 && + sdpLines[i].search(rtmpmapThis) !== 0) || + (sdpLines[i].search(rtcp) === 0 && + sdpLines[i].search(rtcpThis) !== 0) || + (sdpLines[i].search(fmptp) === 0 && + sdpLines[i].search(fmptpThis) !== 0); + + if (bAddAll || !bNotToAdd) resSDPLines.push(sdpLines[i]); + } + } + + sdp = resSDPLines.join("\\r\\n"); + return sdp; +} +function CodecProfileMatches(codec, sdpLines, mLineIndex, codecPayload) { + if (codec != "H264") return true; + + for (var i = mLineIndex; i < sdpLines.length; i++) { + if ( + sdpLines[i].search("a=fmtp:" + codecPayload) === 0 && + sdpLines[i].search(videoProfile) !== -1 + ) + return true; + } + + return false; +} +function setDefaultCodec(mLine, payload) { + var elements = mLine.split(" "); + var newLine = new Array(); + var index = 0; + for (var i = 0; i < elements.length; i++) { + if (index === 3) { + newLine[index++] = payload; + break; + } + if (elements[i] !== payload) newLine[index++] = elements[i]; + } + return newLine.join(" "); +} +function setMediaBitrates(sdp) { + sdp = setMediaBitrate(sdp, "video", 1000); + + sdp = setMediaBitrate(sdp, "audio", 50); + + return sdp; +} +function setMediaBitrate(sdp, media, bitrate) { + var modifier = "b=AS:"; + + var lines = sdp.split("\\r\\n"); + var line = -1; + for (var i = 0; i < lines.length; i++) { + if (lines[i].indexOf("m=" + media) === 0) { + line = i; + break; + } + } + + if (line === -1) return sdp; + + // Pass the m line + line++; + + // Skip i and c lines + while (lines[line].indexOf("i=") === 0 || lines[line].indexOf("c=") === 0) + line++; + + // If we are on a b line, replace it + if (lines[line].indexOf("b") === 0) { + lines[line] = modifier + bitrate; + return lines.join("\\r\\n"); + } + + // Add a new b line + var newLines = lines.slice(0, line); + newLines.push(modifier + bitrate); + newLines = newLines.concat(lines.slice(line, lines.length)); + return newLines.join("\\r\\n"); +} +const modifyOffer = (offer) => { + offer.sdp = setMediaBitrates(offer.sdp); + offer.sdp = setCodec(offer.sdp, "audio", "opus", 48000); + offer.sdp = setCodec(offer.sdp, "video", "H264", 90000); + + offer.sdp = offer.sdp.replace("a=sendrecv", "a=sendonly"); + offer.sdp = offer.sdp.replace("a=sendrecv", "a=sendonly"); + + //Fix for a=extmap-allow-mixed - Unreal Media Server doesn't support it in SDP + offer.sdp = offer.sdp.replace("a=extmap-allow-mixed\\r\\n", ""); + offer.sdp = offer.sdp.replace("a=extmap-allow-mixed", ""); + return offer; +}; + +// Helper function for delays +function wait(delayInMS) { + return new Promise((resolve) => setTimeout(resolve, delayInMS)); +} + +// Set up WebSocket signaling 
+const socket = new WebSocket(
+  "ws://127.0.0.1:5119/webrtc_publish/singleport/tcp/webrtctest"
+);
+console.log(socket);
+const peerConnection = new RTCPeerConnection({
+  iceServers: [{ urls: "stun:stun.l.google.com:19302" }],
+});
+
+// Add event listener for ICE candidates
+peerConnection.onicecandidate = (event) => {
+  // if (event.candidate) {
+  //   socket.send(JSON.stringify(event));
+  // }
+};
+
+// Handle incoming WebSocket messages
+socket.onmessage = async (event) => {
+  var response = event.data;
+  var strArr = response.split("|-|-|");
+  if (strArr.length == 1) {
+    stop();
+    alert(response);
+  } else {
+    var serverSDP = JSON.parse(strArr[0]);
+    var serverEndpoint = JSON.parse(strArr[1]);
+
+    serverEndpoint.candidate = EnsureValidCandidate(serverEndpoint.candidate);
+
+    serverSDP.sdp = setMediaBitrates(serverSDP.sdp);
+
+    peerConnection.setRemoteDescription(new RTCSessionDescription(serverSDP));
+    var candidate = new RTCIceCandidate({
+      sdpMLineIndex: serverEndpoint.sdpMLineIndex,
+      candidate: serverEndpoint.candidate,
+    });
+    peerConnection.addIceCandidate(candidate);
+  }
+
+  if (socket != null) {
+    socket.close();
+  }
+  function EnsureValidCandidate(candidate) {
+    if (
+      candidate.search(ipAddress) !== -1 ||
+      !useSingleWebRTCPort ||
+      ipAddress == "127.0.0.1" ||
+      !ValidateIPaddress(ipAddress)
+    ) {
+      return candidate;
+    }
+
+    //In case the server is behind the NAT router, replace private IP with public IP in the candidate
+    var candLines = candidate.split(" ");
+    var ipIndex = 4;
+    for (var i = 0; i < candLines.length; i++) {
+      if (candLines[i] === "typ") {
+        ipIndex = i - 2;
+        break;
+      }
+    }
+
+    candLines[ipIndex] = ipAddress;
+    candidate = candLines.join(" ");
+    return candidate;
+  }
+
+  function ValidateIPaddress(ipaddr) {
+    if (
+      /^(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$/.test(
+        ipaddr
+      )
+    ) {
+      return true;
+    }
+
+    return false;
+  }
+  // const message = JSON.parse(event.data);
+  // if (message.type === "answer") {
+  //   const remoteDesc = new RTCSessionDescription(message);
+  //   await peerConnection.setRemoteDescription(remoteDesc);
+  // } else if (message.type === "ice-candidate") {
+  //   try {
+  //     await peerConnection.addIceCandidate(message.candidate);
+  //   } catch (e) {
+  //     console.error("Error adding received ICE candidate", e);
+  //   }
+  // }
+};
+
+console.log("before mediadevices");
+window.navigator.mediaDevices
+  .getDisplayMedia({
+    video: {
+      displaySurface: "browser", // Capture browser window
+    },
+    audio: true,
+    preferCurrentTab: true,
+  })
+  .then(async (screenStream) => {
+    const audioContext = new AudioContext();
+    const screenAudioStream =
+      audioContext.createMediaStreamSource(screenStream);
+    // Note: the next lines assume the Meet page currently exposes three <audio> elements.
+    const audioEl1 = document.querySelectorAll("audio")[0];
+    const audioEl2 = document.querySelectorAll("audio")[1];
+    const audioEl3 = document.querySelectorAll("audio")[2];
+    const audioElStream1 = audioContext.createMediaStreamSource(
+      audioEl1.srcObject
+    );
+    const audioElStream2 = audioContext.createMediaStreamSource(
+      audioEl3.srcObject
+    );
+    const audioElStream3 = audioContext.createMediaStreamSource(
+      audioEl2.srcObject
+    );
+
+    const dest = audioContext.createMediaStreamDestination();
+
+    screenAudioStream.connect(dest);
+    audioElStream1.connect(dest);
+    audioElStream2.connect(dest);
+    audioElStream3.connect(dest);
+    // Combine screen and audio streams
+    const combinedStream = new MediaStream([
+      ...screenStream.getVideoTracks(),
+      ...dest.stream.getAudioTracks(),
+    ]);
+    // peerConnection.addStream(screenStream);
+    combinedStream.getTracks().forEach((track) => {
+      peerConnection.addTrack(track, combinedStream);
+    });
+    // Create and send an SDP offer
+    let offer = await peerConnection.createOffer();
+    offer = modifyOffer(offer);
+    await peerConnection.setLocalDescription(offer);
+    socket.send("12345|-|-|" + JSON.stringify(peerConnection.localDescription));
+  });
+console.log("Screen sharing setup complete.");
+wait(10000);
+`;
+
+    await driver.executeScript(script);
+  } catch (error) {
+    console.log(error);
+  }
+
+  await driver.sleep(1000000);
 }
 
 async function main() {
-    const driver = await getDriver();
-    await openMeet(driver);
-    await new Promise(x => setTimeout(x, 20000));
-    // wait until admin lets u join
-    await startScreenshare(driver);
+  const driver = await getDriver();
+  await openMeet(driver);
+  await startScreenshare(driver);
 }
 
 main();