This project uses a custom SDK that simplifies WebRTC integration. It provides video calls, real-time chat over a data channel, and screen sharing via MediaProjection.
| Feature | Status | 
|---|---|
| Video Call | ✅ | 
| Chat | ✅ | 
| Screen Sharing | ✅ | 
| File Transfer | ❌ | 
 
 
Set up Firebase for the project:

- Create a Firebase Project:
  - Go to the Firebase Console.
  - Click Add project and follow the steps to create your Firebase project.
- Create a Firestore Database:
  - In the Firebase project overview, navigate to Build > Firestore Database.
  - Click Create database and follow the prompts to set up Firestore in either test or production mode.
- Download `google-services.json`:
  - Go to Project settings (click the gear icon).
  - Under Your apps, select your Android app and download the `google-services.json` configuration file.
- Place `google-services.json` in your project:
  - Copy the downloaded `google-services.json` file.
  - Place it in the `app` directory of your project (the Gradle plugin that reads this file is sketched after this list).
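`google-services.json` is only picked up if the Google Services Gradle plugin is applied. If your checkout does not already configure it, the standard Firebase setup looks roughly like this (plugin version is illustrative, not taken from this project):

```kotlin
// Project-level build.gradle.kts — registers the plugin (version is illustrative)
plugins {
    id("com.google.gms.google-services") version "4.4.2" apply false
}

// app/build.gradle.kts — applies the plugin that reads app/google-services.json
plugins {
    id("com.android.application")
    id("org.jetbrains.kotlin.android")
    id("com.google.gms.google-services")
}
```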
- Creating a Room:
  - Launch the app and select the "connect" or "connect with projection" option.
  - A unique number or word will be assigned to the created room.
- Joining a Room:
  - To join a room created by another user, enter the number or word assigned to that room and click "connect" or "connect with projection".
  - When both users connect to the same room using the same number or word, the video call begins.
With these simple steps, you can set up and enjoy video calls. Share the room number or word with other users to start a call together.
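If you prefer to show users the rooms that already exist instead of having them type a number or word, the `getRoomList()` and `connect()` methods of the `WebRtcClient` interface (shown below) can be combined. A minimal sketch; the `RoomListScreen` composable itself is hypothetical:

```kotlin
@Composable
fun RoomListScreen() {
    val context = LocalContext.current
    val webrtcClient = remember { WebRtcClientFactory.create(context) }
    var rooms by remember { mutableStateOf<List<String>>(emptyList()) }

    LaunchedEffect(Unit) {
        // getRoomList() is a suspend function returning a Flow of room IDs
        webrtcClient.getRoomList().collect { list -> rooms = list ?: emptyList() }
    }

    LazyColumn {
        items(rooms) { roomId ->
            Button(onClick = { webrtcClient.connect(roomId) }) { // join the selected room
                Text(roomId)
            }
        }
    }
}
```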
The SDK is exposed through a single `WebRtcClient` interface:

```kotlin
interface WebRtcClient {
    fun connect(roomID: String)                     // create or join the room with this ID
    fun getEvent(): Flow<WebRtcEvent>
    suspend fun getRoomList(): Flow<List<String>?>  // rooms currently available to join
    fun sendMessage(message: String)                // send a chat message over the data channel
    fun sendInputEvent()
    fun getMessages(): Flow<Message>                // incoming data-channel chat messages
    fun disconnect()
    fun toggleVoice()
    fun toggleVideo()
    fun getLocalSurface(): SurfaceViewRenderer      // renderer for my video
    fun getRemoteSurface(): SurfaceViewRenderer     // renderer for the other peer's video
}
```
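The data-channel chat listed in the feature table is driven by `sendMessage()` and `getMessages()`. A minimal sketch; the `ChatBox` composable is hypothetical, and how `Message` is rendered depends on its actual fields:

```kotlin
@Composable
fun ChatBox(webrtcClient: WebRtcClient) {
    val messages = remember { mutableStateListOf<Message>() }

    LaunchedEffect(Unit) {
        // Collect incoming chat messages as they arrive over the data channel
        webrtcClient.getMessages().collect { message -> messages.add(message) }
    }

    Column {
        messages.forEach { message ->
            Text(message.toString()) // render however Message is structured in your app
        }
        Button(onClick = { webrtcClient.sendMessage("hello") }) { // send a chat message
            Text("Send")
        }
    }
}
```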
A basic video call screen only needs the two renderers and a connect action:

```kotlin
@Composable
fun VideoCallScreen() {
    val context = LocalContext.current
    val webrtcClient = remember { WebRtcClientFactory.create(context) } // 1. create client
    AndroidView(
        factory = {
            webrtcClient.getRemoteSurface() // 2. Initialize the UI to receive the other person's video
        }
    )
    AndroidView(
        factory = {
            webrtcClient.getLocalSurface() // 3. Initialize the UI to receive my video
        }
    )
    Button(
        onClick = {
            webrtcClient.connect("input roomId") // 4. try to connect
        },
    ) {
        Text("Connect")
    }
}
```
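Once a call is up, the remaining `WebRtcClient` methods cover the usual in-call controls. A small illustration; the `CallControls` composable is hypothetical:

```kotlin
@Composable
fun CallControls(webrtcClient: WebRtcClient) {
    Row {
        Button(onClick = { webrtcClient.toggleVoice() }) { Text("Mic") }    // mute / unmute
        Button(onClick = { webrtcClient.toggleVideo() }) { Text("Camera") } // camera on / off
        Button(onClick = { webrtcClient.disconnect() }) { Text("Leave") }   // leave the room
    }
}
```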
Screen sharing uses the same client, but it is obtained from `WebRtcCaptureService` after the MediaProjection permission flow:

```kotlin
@Composable
fun MediaProjectionScreen() {
    val context = LocalContext.current
    var webrtcClient by remember { mutableStateOf<WebRtcClient?>(null) }
    val mediaProjectionLauncher = rememberLauncherForActivityResult(
        contract = ActivityResultContracts.StartActivityForResult(),
        onResult = { result ->
            if (result.resultCode == RESULT_OK) {
                result.data?.let { intent ->
                    WebRtcCaptureService.startService(context, intent) // 2. start capture service
                }
            }
        }
    )
    LaunchedEffect(Unit) {
        val mediaProjectionManager =
            context.getSystemService(Context.MEDIA_PROJECTION_SERVICE) as MediaProjectionManager
        val captureIntent = mediaProjectionManager.createScreenCaptureIntent()
        mediaProjectionLauncher.launch(captureIntent) // 1. get media projection
        WebRtcCaptureService.getClient().collect { client -> webrtcClient = client } // 3. after the process of getting the media projection, initialize your client
    }
    webrtcClient?.getRemoteSurface()?.let { remote ->
        AndroidView(
            factory = { remote }
        )
    }
    webrtcClient?.getLocalSurface()?.let { local ->
        AndroidView(
            factory = { local }
        )
    }
    Button(
        onClick = {
            webrtcClient?.connect("input roomId") // 4. try to connect
        },
    ) {
        Text("Connect")
    }
}
```
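The screens above assume camera and microphone permissions are already granted; on a real device you would typically request them before calling `connect()`. A generic Compose permission request, not something the SDK provides (the `PermissionGate` composable is hypothetical):

```kotlin
@Composable
fun PermissionGate(onGranted: () -> Unit) {
    val permissionLauncher = rememberLauncherForActivityResult(
        contract = ActivityResultContracts.RequestMultiplePermissions(),
        onResult = { results ->
            if (results.values.all { it }) onGranted() // continue only if everything was granted
        }
    )
    LaunchedEffect(Unit) {
        permissionLauncher.launch(
            arrayOf(Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO)
        )
    }
}
```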
