diff --git a/.gitmodules b/.gitmodules
index 6c7a308b..6bd7ad0c 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,9 +1,6 @@
[submodule "packages/web-client-sdk"]
path = packages/web-client-sdk
url = https://github.com/fishjam-cloud/web-client-sdk.git
-[submodule "packages/mobile-client-sdk"]
- path = packages/mobile-client-sdk
- url = https://github.com/fishjam-cloud/mobile-client-sdk.git
[submodule "packages/js-server-sdk"]
path = packages/js-server-sdk
url = https://github.com/fishjam-cloud/js-server-sdk.git
diff --git a/docs/explanation/public-livestreams.mdx b/docs/explanation/public-livestreams.mdx
index bb990e11..0fb00ba3 100644
--- a/docs/explanation/public-livestreams.mdx
+++ b/docs/explanation/public-livestreams.mdx
@@ -139,21 +139,23 @@ Once you've created a viewer token, you can connect to a room using the Fishjam
const viewerToken = '';
// ---cut---
- import {
- LivestreamViewer,
- useLivestreamViewer,
- } from '@fishjam-cloud/react-native-client/livestream';
+ import { useLivestreamViewer, RTCView } from '@fishjam-cloud/mobile-client';
- // ...
+ // TODO: FCE-2487 remove once MediaStream is updated
+ interface MediaStreamWithURL extends MediaStream {
+ toURL(): string;
+ }
- const { connect, whepClientRef } = useLivestreamViewer();
+ // Inside your component:
+ const { connect, stream } = useLivestreamViewer();
// ...
await connect({ token: viewerToken });
- // Use `LivestreamViewer` to render the stream
-
+ // Render the stream
+ const streamURL = stream ? (stream as MediaStreamWithURL).toURL() : null;
+ {streamURL && <RTCView streamURL={streamURL} />}
```
@@ -203,21 +205,23 @@ Once you've created a room of type `livestream` with the `public` flag enabled,
const roomId = '';
// ---cut---
- import {
- LivestreamViewer,
- useLivestreamViewer,
- } from '@fishjam-cloud/react-native-client/livestream';
+ import { useLivestreamViewer, RTCView } from '@fishjam-cloud/mobile-client';
- // ...
+ // TODO: FCE-2487 remove once MediaStream is updated
+ interface MediaStreamWithURL extends MediaStream {
+ toURL(): string;
+ }
- const { connect, whepClientRef } = useLivestreamViewer();
+ // Inside your component:
+ const { connect, stream } = useLivestreamViewer();
// ...
await connect({ streamId: roomId });
- // Use `LivestreamViewer` to render the stream
-
+ // Render the stream
+ const streamURL = stream ? (stream as MediaStreamWithURL).toURL() : null;
+ {streamURL && <RTCView streamURL={streamURL} />}
```
diff --git a/docs/how-to/backend/server-setup.mdx b/docs/how-to/backend/server-setup.mdx
index 77a75a88..84903f39 100644
--- a/docs/how-to/backend/server-setup.mdx
+++ b/docs/how-to/backend/server-setup.mdx
@@ -164,8 +164,7 @@ At any time you can terminate user's access by deleting the peer.
#### Metadata
-When creating a peer, you can also assign metadata to that peer, which can be read later with the [mobile SDK](../../how-to/react-native/metadata)
-or [web SDK](../../how-to/react/metadata). This metadata can be only set when creating the peer and can't be updated later.
+When creating a peer, you can also assign metadata to that peer, which can be read later with the [client SDK](../../how-to/client/metadata). This metadata can only be set when creating the peer and can't be updated later.
diff --git a/docs/how-to/client/_category_.json b/docs/how-to/client/_category_.json
new file mode 100644
index 00000000..e93ff99d
--- /dev/null
+++ b/docs/how-to/client/_category_.json
@@ -0,0 +1,9 @@
+{
+ "label": "Web & Mobile",
+ "position": 1,
+ "link": {
+ "type": "generated-index",
+ "description": "Learn how to integrate Fishjam into your web and mobile applications."
+ }
+}
+
diff --git a/docs/how-to/client/background-streaming.mdx b/docs/how-to/client/background-streaming.mdx
new file mode 100644
index 00000000..890f9072
--- /dev/null
+++ b/docs/how-to/client/background-streaming.mdx
@@ -0,0 +1,128 @@
+---
+sidebar_position: 13
+sidebar_label: "Background calls π±"
+---
+
+import Tabs from "@theme/Tabs";
+import TabItem from "@theme/TabItem";
+
+# Background calls Mobile
+
+:::note
+This guide is exclusively for **Mobile** (React Native) applications.
+:::
+
+Both Android and iOS support calls running in the background, but they use different approaches:
+
+- **Android**: Uses foreground services to keep the app active in the background
+- **iOS**: Uses CallKit integration to maintain VoIP calls in the background
+
+Below is the configuration required to make it work:
+
+
+
+
+
+You need to modify the `app.json` file and add our plugin:
+
+```json
+{
+ "expo": {
+ ...
+ "plugins": {
+ ...
+ [
+ "@fishjam-cloud/mobile-client",
+ {
+ "android": {
+ "enableForegroundService": true
+ },
+ "ios": {
+ "enableVoIPBackgroundMode": true
+ }
+ }
+ ],
+ ...
+ ]
+ }
+}
+```
+
+
+
+
+**Android Configuration**
+
+You need to add the following service to `AndroidManifest.xml`:
+
+```xml title='AndroidManifest.xml'
+
+ ...
+
+ ...
+
+
+
+```
+
+**iOS Configuration**
+
+You need to add VoIP background mode in `Info.plist`:
+
+```xml title='Info.plist'
+<key>UIBackgroundModes</key>
+<array>
+  <string>voip</string>
+</array>
+```
+
+
+
+
+## Usage
+
+
+
+
+
+You can use [`useForegroundService`](../../api/mobile/variables/useForegroundService) hook to handle how foreground service behaves on Android.
+
+:::important[Permissions]
+
+If you want to use [`enableCamera`](../../api/mobile/type-aliases/ForegroundServiceConfig#enablecamera) or [`enableMicrophone`](../../api/mobile/type-aliases/ForegroundServiceConfig#enablemicrophone),
+the user must first grant permission for that resource. [`useForegroundService`](../../api/mobile/variables/useForegroundService) will check whether the permission has been
+granted and only then allow the service to start.
+
+:::
+
+```tsx
+import {
+  useCamera,
+  useForegroundService,
+  useMicrophone,
+} from "@fishjam-cloud/mobile-client";
+
+const { isCameraOn } = useCamera();
+const { isMicrophoneOn } = useMicrophone();
+
+// Keep the foreground service in sync with the current device state.
+// The fields follow ForegroundServiceConfig (see the API reference).
+useForegroundService({
+  enableCamera: isCameraOn,
+  enableMicrophone: isMicrophoneOn,
+});
+```
+
+
+
+
+On iOS, background calls are achieved through CallKit integration. To enable background streaming on iOS:
+
+1. Enable VoIP background mode by setting `enableVoIPBackgroundMode: true` in the plugin configuration or adding the VoIP background mode to your `Info.plist`
+2. The SDK will automatically handle CallKit integration for maintaining background audio/video sessions
+
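+For reference, here is the relevant part of the Expo plugin configuration (the same plugin entry as in the configuration section above, trimmed to the `ios` options):
+
+```json
+{
+  "expo": {
+    "plugins": [
+      [
+        "@fishjam-cloud/mobile-client",
+        {
+          "ios": {
+            "enableVoIPBackgroundMode": true
+          }
+        }
+      ]
+    ]
+  }
+}
+```
+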
+:::note
+CallKit integration is handled automatically by the SDK when VoIP background mode is enabled. The call will appear in the iOS call history and can be managed through the native phone interface.
+:::
+
+
+
+
+## See Also
+
+For an enhanced user experience when your app is in the background, consider enabling [Picture in Picture](./picture-in-picture), which allows users to see video content in a floating window while using other apps.
+
diff --git a/docs/how-to/client/connecting.mdx b/docs/how-to/client/connecting.mdx
new file mode 100644
index 00000000..94b90659
--- /dev/null
+++ b/docs/how-to/client/connecting.mdx
@@ -0,0 +1,173 @@
+---
+sidebar_position: 3
+---
+
+import Tabs from "@theme/Tabs";
+import TabItem from "@theme/TabItem";
+
+# Connecting
+
+This article will guide you through the process of connecting to a Fishjam room.
+
+## Getting URL and token
+
+In order to connect, you need to obtain a **Peer Token** (the token that will authenticate the peer in
+your Room).
+
+
+
+
+
+Once you create your account on [Fishjam](https://fishjam.io), you will have access to the Sandbox environment as part of the Mini Jar plan.
+While using the Sandbox environment, [you can use the Sandbox API](../features/sandbox-api-testing) to generate peer tokens for testing or development purposes.
+This is basically a service that will create a Room, add your app as
+the Room's Peer, and return the token required to use that Room.
+
+
+
+
+```ts
+import { useSandbox } from "@fishjam-cloud/react-client";
+const roomName = "room";
+const peerName = "user";
+// ---cut---
+
+// The `useSandbox` hook gets the fishjamId from FishjamProvider
+// It will work ONLY with the FISHJAM_ID of the Sandbox environment
+const { getSandboxPeerToken } = useSandbox();
+const peerToken = await getSandboxPeerToken(roomName, peerName);
+```
+
+
+
+
+```ts
+import { useSandbox } from "@fishjam-cloud/mobile-client";
+const roomName = "room";
+const peerName = "user";
+// ---cut---
+
+// The `useSandbox` hook gets the fishjamId from FishjamProvider
+// It will work ONLY with the FISHJAM_ID of the Sandbox environment
+const { getSandboxPeerToken } = useSandbox();
+const peerToken = await getSandboxPeerToken(roomName, peerName);
+```
+
+
+
+
+
+
+
+For the production app, you need to implement your own backend service that will provide the user with a **Peer Token**. To do that,
+follow our [server setup instructions](../backend/server-setup).
+
+
+
+
+## Connecting
+
+Use the [`useConnection`](../../api/web/functions/useConnection) hook to get
+the [`joinRoom`](../../api/web/functions/useConnection#joinroom) function.
+
+
+
+
+```tsx
+const PEER_TOKEN = "some-peer-token";
+// ---cut-before---
+import { useConnection, useSandbox } from "@fishjam-cloud/react-client";
+import React, { useCallback } from "react";
+
+export function JoinRoomButton() {
+ const { joinRoom } = useConnection(); // [!code highlight]
+ // get the peer token from sandbox or your backend
+ const { getSandboxPeerToken } = useSandbox();
+
+ const onJoinRoomPress = useCallback(async () => {
+ // [!code highlight:5]
+ const peerToken = await getSandboxPeerToken("Room", "User");
+ await joinRoom({ peerToken });
+ }, [joinRoom]);
+
+  return <button onClick={onJoinRoomPress}>Join room</button>;
+}
+```
+
+
+
+
+```tsx
+import React, { useCallback } from "react";
+import { Button } from "react-native";
+import { useConnection, useSandbox } from "@fishjam-cloud/mobile-client";
+
+export function JoinRoomButton() {
+ const { joinRoom } = useConnection(); // [!code highlight]
+ // fishjamId is provided through FishjamProvider
+ const { getSandboxPeerToken } = useSandbox();
+
+ const onPressJoin = useCallback(async () => {
+ // in production environment, get the peerToken from your backend
+ const peerToken = await getSandboxPeerToken("Room", "User");
+
+ await joinRoom({ peerToken }); // [!code highlight]
+ }, [joinRoom, getSandboxPeerToken]);
+
+  return <Button onPress={onPressJoin} title="Join Room" />;
+}
+```
+
+
+
+
+## Disconnecting
+
+To close the connection, use the [`leaveRoom`](../../api/web/functions/useConnection#leaveroom) method
+from the [`useConnection`](../../api/web/functions/useConnection) hook.
+
+
+
+
+```tsx
+import { useConnection } from "@fishjam-cloud/react-client";
+import React, { useCallback } from "react";
+
+export function LeaveRoomButton() {
+ const { leaveRoom } = useConnection(); // [!code highlight]
+
+  return <button onClick={leaveRoom}>Leave room</button>;
+}
+```
+
+
+
+
+```tsx
+import React, { useCallback } from "react";
+import { Button } from "react-native";
+import { useConnection } from "@fishjam-cloud/mobile-client";
+
+export function LeaveRoomButton() {
+ const { leaveRoom } = useConnection(); // [!code highlight]
+
+ const onPressLeave = useCallback(async () => {
+ await leaveRoom(); // [!code highlight]
+ }, [leaveRoom]);
+
+  return <Button onPress={onPressLeave} title="Leave Room" />;
+}
+```
+
+
+
+
+## Next Steps
+
+Now that you're connected to a room, you can explore additional features:
+
+- [Start Streaming](./start-streaming) - Enable your camera and microphone
+- [List Other Peers](./list-other-peers) - Display video from other participants
+- [Picture in Picture](./picture-in-picture) - Allow users to watch video in a floating window (Mobile)
+- [Background Streaming](./background-streaming) - Keep calls active when the app is backgrounded (Mobile)
+
diff --git a/docs/how-to/react/custom-sources.mdx b/docs/how-to/client/custom-sources.mdx
similarity index 95%
rename from docs/how-to/react/custom-sources.mdx
rename to docs/how-to/client/custom-sources.mdx
index 14e7fde0..435eed5c 100644
--- a/docs/how-to/react/custom-sources.mdx
+++ b/docs/how-to/client/custom-sources.mdx
@@ -1,15 +1,20 @@
---
sidebar_position: 8
+sidebar_label: "Custom sources π"
---
import Tabs from "@theme/Tabs";
import TabItem from "@theme/TabItem";
-# Custom sources
+# Custom sources Web
+
+:::note
+This guide is exclusively for **Web** (React) applications.
+:::
:::important
-If you only wish to send camera, microphone or screen share output through Fishjam, then you most likely should refer to the documentation in [Streaming media](../../how-to/react/start-streaming) and [Managing devices](../../how-to/react/managing-devices) instead of this page.
+If you only wish to send camera, microphone or screen share output through Fishjam, then you most likely should refer to the documentation in [Streaming media](./start-streaming) and [Managing devices](./managing-devices) instead of this page.
:::
@@ -173,3 +178,4 @@ If you have a `
+
diff --git a/docs/how-to/react/debug-logging.mdx b/docs/how-to/client/debug-logging.mdx
similarity index 91%
rename from docs/how-to/react/debug-logging.mdx
rename to docs/how-to/client/debug-logging.mdx
index 6f62159f..16bcd9a0 100644
--- a/docs/how-to/react/debug-logging.mdx
+++ b/docs/how-to/client/debug-logging.mdx
@@ -1,8 +1,13 @@
---
sidebar_position: 9
+sidebar_label: "Debug logging π"
---
-# Debug logging
+# Debug logging Web
+
+:::note
+This guide is exclusively for **Web** (React) applications.
+:::
The Fishjam SDK includes a built-in debugging mode to help developers troubleshoot connectivity and media issues during development. This feature controls the verbosity of the SDK's internal logging mechanisms.
@@ -64,3 +69,4 @@ When enabled, you may see logs similar to:
[FISHJAM] Couldn't get camera permission: NotAllowedError ...
[FISHJAM] ICE connection: disconnected
```
+
diff --git a/docs/how-to/client/installation.mdx b/docs/how-to/client/installation.mdx
new file mode 100644
index 00000000..2ab6749c
--- /dev/null
+++ b/docs/how-to/client/installation.mdx
@@ -0,0 +1,213 @@
+---
+sidebar_position: 1
+---
+
+import Tabs from "@theme/Tabs";
+import TabItem from "@theme/TabItem";
+
+# Installation
+
+
+
+
+## 1. Install the package
+
+```bash npm2yarn
+npm install @fishjam-cloud/react-client
+```
+
+## 2. Setup Fishjam context
+
+Wrap your app in our [`FishjamProvider`](../../api/web/functions/FishjamProvider) component. Get your Fishjam ID from [Fishjam Dashboard](https://fishjam.io/app) and pass it to the provider.
+
+```tsx
+const App = () => {
+  return <div>Hello world</div>;
+};
+
+// ---cut---
+import React from "react";
+import ReactDOM from "react-dom/client";
+// import App from "./App";
+import { FishjamProvider } from "@fishjam-cloud/react-client";
+
+// Check https://fishjam.io/app/ for your Fishjam ID
+const FISHJAM_ID = "your-fishjam-id";
+
+ReactDOM.createRoot(document.getElementById("root")!).render(
+ // [!code highlight:5]
+  <React.StrictMode>
+    <FishjamProvider fishjamId={FISHJAM_ID}>
+      <App />
+    </FishjamProvider>
+  </React.StrictMode>,
+);
+```
+
+:::tip
+
+It's possible to have many independent Fishjam contexts in one app.
+Just render many [`FishjamProvider`](../../api/web/functions/FishjamProvider) components and make sure they don't overlap.
+
+:::
+
+
+
+
+## Optional: Create a New App
+
+
+ Follow these steps to create a new mobile app
+
+If you don't have an existing project, you can create a new Expo app using a template
+
+```bash
+npx create-expo-app@latest my-video-app
+```
+
+As the next step, you have to generate native files with the `expo prebuild` command:
+
+```bash
+npx expo prebuild
+```
+
+You can also follow more detailed [Expo instructions](https://docs.expo.dev/get-started/introduction/).
+
+
+
+## Step 1: Install the Package
+
+Install `@fishjam-cloud/mobile-client` with your preferred package manager.
+
+```bash npm2yarn
+npm install @fishjam-cloud/mobile-client
+```
+
+## Step 2: Configure App Permissions
+
+Your app needs to have permissions configured in order to use the microphone and camera.
+
+### Android
+
+Permissions below are required to stream audio and video with Fishjam on Android.
+
+- `android.permission.CAMERA`
+- `android.permission.RECORD_AUDIO`
+- `android.permission.MODIFY_AUDIO_SETTINGS`
+- `android.permission.ACCESS_NETWORK_STATE`
+- `android.permission.ACCESS_WIFI_STATE`
+
+
+
+
+
+Add required permissions to the `app.json` file.
+
+```json title='app.json'
+{
+ "expo": {
+ ...
+ "android": {
+ ...
+ "permissions": [
+ "android.permission.CAMERA",
+ "android.permission.RECORD_AUDIO",
+ "android.permission.MODIFY_AUDIO_SETTINGS",
+ "android.permission.ACCESS_NETWORK_STATE",
+ "android.permission.ACCESS_WIFI_STATE"
+ ]
+ }
+ }
+}
+```
+
+
+
+
+Add required permissions to the `AndroidManifest.xml` file.
+
+```xml title='AndroidManifest.xml'
+<manifest xmlns:android="http://schemas.android.com/apk/res/android">
+  ...
+  <uses-permission android:name="android.permission.CAMERA" />
+  <uses-permission android:name="android.permission.RECORD_AUDIO" />
+  <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
+  <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
+  <uses-permission android:name="android.permission.ACCESS_WIFI_STATE" />
+  ...
+</manifest>
+```
+
+
+
+
+### iOS
+
+
+
+
+
+You don't have to make any changes to run the app on iOS.
+To update the default content of the permission alert, you can add these settings to `app.json`:
+
+```json title='app.json'
+{
+ "expo": {
+ ...
+ "ios": {
+ ...
+ "infoPlist": {
+ "NSCameraUsageDescription": "Allow $(PRODUCT_NAME) to access your camera.",
+ "NSMicrophoneUsageDescription": "Allow $(PRODUCT_NAME) to access your microphone."
+ }
+ },
+ }
+}
+```
+
+
+
+
+Ensure `Info.plist` contains camera and microphone usage description entries:
+
+```xml title='Info.plist'
+ <key>NSCameraUsageDescription</key>
+ <string>Allow $(PRODUCT_NAME) to access your camera.</string>
+ <key>NSMicrophoneUsageDescription</key>
+ <string>Allow $(PRODUCT_NAME) to access your microphone.</string>
+```
+
+
+
+
+## Step 3: Setup Fishjam Context
+
+Wrap your app in the [`FishjamProvider`](../../api/mobile/functions/FishjamProvider) component:
+
+```tsx
+import React from "react";
+import { FishjamProvider } from "@fishjam-cloud/mobile-client";
+
+// Check https://fishjam.io/app/ for your Fishjam ID
+const FISHJAM_ID = "your-fishjam-id";
+
+export default function App() {
+ return (
+    <FishjamProvider fishjamId={FISHJAM_ID}>
+      {/* Your app components */}
+    </FishjamProvider>
+ );
+}
+```
+
+## Camera and Microphone Permissions
+
+:::info
+You don't need to explicitly request permissions; they're requested automatically when your app needs them.
+:::
+
+The SDK automatically requests camera and microphone permissions when you call `initializeDevices()`. The system permission dialog will be shown to the user if permissions haven't been granted yet.
+
+If you need manual control over permissions, you can use the [expo-permissions](https://docs.expo.dev/versions/latest/sdk/permissions/) or [react-native-permissions](https://github.com/zoontek/react-native-permissions) libraries.
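+
+For example, a minimal sketch with `react-native-permissions` (the helper name is illustrative):
+
+```tsx
+import { Platform } from "react-native";
+import { PERMISSIONS, RESULTS, request } from "react-native-permissions";
+
+// Requests camera access up front instead of waiting for the SDK to ask
+export async function ensureCameraPermission(): Promise<boolean> {
+  const permission =
+    Platform.OS === "ios" ? PERMISSIONS.IOS.CAMERA : PERMISSIONS.ANDROID.CAMERA;
+  const result = await request(permission);
+  return result === RESULTS.GRANTED;
+}
+```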
+
+
+
+
diff --git a/docs/how-to/client/list-other-peers.mdx b/docs/how-to/client/list-other-peers.mdx
new file mode 100644
index 00000000..5f8576e4
--- /dev/null
+++ b/docs/how-to/client/list-other-peers.mdx
@@ -0,0 +1,98 @@
+---
+sidebar_position: 5
+---
+
+import Tabs from "@theme/Tabs";
+import TabItem from "@theme/TabItem";
+
+# Display media of other peers
+
+To access data and media of other peers, use the [`usePeers`](../../api/web/functions/usePeers) hook.
+It returns two properties, `remotePeers` and `localPeer`.
+They contain all the tracks of other peers and all the tracks of the local user, respectively.
+
+## Example of playing other peers' available media
+
+
+
+
+```tsx
+import React, { FC } from "react";
+
+const VideoRenderer: FC<{ stream?: MediaStream | null }> = (_) => <video />;
+
+const AudioPlayer: FC<{ stream?: MediaStream | null }> = (_) => <audio />;
+
+// ---cut---
+import { usePeers } from "@fishjam-cloud/react-client";
+
+export function Component() {
+ const { remotePeers } = usePeers();
+
+ return (
+    <div>
+      {remotePeers.map((peer) => (
+        // remember to import your VideoRenderer component
+        <div key={peer.id}>
+          <VideoRenderer stream={peer.cameraTrack?.stream} />
+          <AudioPlayer stream={peer.microphoneTrack?.stream} />
+        </div>
+      ))}
+    </div>
+ );
+}
+```
+
+
+
+
+```tsx
+import React from "react";
+import { View, Text } from "react-native";
+import { usePeers, RTCView } from "@fishjam-cloud/mobile-client";
+
+// TODO: FCE-2487 remove once MediaStream is updated
+interface MediaStreamWithURL extends MediaStream {
+ toURL(): string;
+}
+
+function VideoPlayer({ stream }: { stream: MediaStream | null }) {
+ const streamURL = stream ? (stream as MediaStreamWithURL).toURL() : null;
+  if (!streamURL) return <Text>No video</Text>;
+  return <RTCView streamURL={streamURL} />;
+}
+
+export function ShowAllPeers() {
+ const { remotePeers, localPeer } = usePeers(); // [!code highlight]
+
+ return (
+    <View>
+      {/* Local camera */}
+      {localPeer?.cameraTrack?.stream && ( // [!code highlight]
+        <VideoPlayer stream={localPeer.cameraTrack.stream} />
+      )}
+
+      {/* Remote cameras */}
+      {remotePeers.map((peer) => ( // [!code highlight]
+        <View key={peer.id}>
+          {peer.cameraTrack?.stream && (
+            <VideoPlayer stream={peer.cameraTrack.stream} />
+          )}
+        </View>
+      ))}
+    </View>
+ );
+}
+```
+
+:::tip[Enable Picture in Picture]
+
+To allow users to continue watching video in a floating window when they background your app, use the `RTCPIPView` component. See the [Picture in Picture guide](./picture-in-picture) for more details.
+
+:::
+
+
+
+
diff --git a/docs/how-to/client/managing-devices.mdx b/docs/how-to/client/managing-devices.mdx
new file mode 100644
index 00000000..4046fa08
--- /dev/null
+++ b/docs/how-to/client/managing-devices.mdx
@@ -0,0 +1,238 @@
+---
+sidebar_position: 4
+sidebar_label: "Managing devices"
+---
+
+import Tabs from "@theme/Tabs";
+import TabItem from "@theme/TabItem";
+
+# Managing devices
+
+The Fishjam SDK provides functions for dynamically controlling media device streams. This includes selecting desired cameras and microphones, turning them on and off, as well as muting and unmuting microphones.
+
+## Selecting Camera and Microphone
+
+To select the desired camera or microphone, use the `selectCamera` and `selectMicrophone` functions.
+Lists of the available devices are available via the `cameraDevices` and `microphoneDevices` properties.
+
+
+
+
+To select the desired camera or microphone, use [`selectCamera()`](../../api/web/functions/useCamera#selectcamera) and [`selectMicrophone()`](../../api/web/functions/useMicrophone#selectmicrophone) functions.
+Lists of the available devices are available via the [`cameraDevices`](../../api/web/functions/useCamera#cameradevices) and [`microphoneDevices`](../../api/web/functions/useMicrophone#microphonedevices) properties.
+
+#### Usage Example
+
+```tsx
+import React from "react";
+import { useCamera } from "@fishjam-cloud/react-client";
+
+export function CameraControl() {
+ const { cameraDevices, selectCamera } = useCamera();
+
+ return (
+    <div>
+      {cameraDevices.map(({ deviceId, label }) => (
+        <button key={deviceId} onClick={() => selectCamera(deviceId)}>
+          {label}
+        </button>
+      ))}
+    </div>
+ );
+}
+```
+
+
+
+
+To select the desired camera, use the [`selectCamera`](../../api/mobile/functions/useCamera#selectcamera) method.
+The list of available camera devices is exposed via the [`cameraDevices`](../../api/mobile/functions/useCamera#cameradevices) property.
+
+```tsx
+import React, { useCallback, useState } from "react";
+import { Button } from "react-native";
+import { useCamera } from "@fishjam-cloud/mobile-client";
+
+export function FlipButton() {
+ const { cameraDevices, selectCamera } = useCamera(); // [!code highlight]
+ const [currentIndex, setCurrentIndex] = useState(0);
+
+ const onPressFlipCamera = useCallback(() => {
+ if (cameraDevices.length === 0) return;
+
+ // Cycle through available cameras
+ const nextIndex = (currentIndex + 1) % cameraDevices.length;
+ const nextCamera = cameraDevices[nextIndex];
+ if (nextCamera) {
+ selectCamera(nextCamera.deviceId); // [!code highlight]
+ setCurrentIndex(nextIndex);
+ }
+ }, [cameraDevices, currentIndex, selectCamera]);
+
+  return <Button title="Flip camera" onPress={onPressFlipCamera} />;
+}
+```
+
+
+
+
+## Turning Camera On and Off
+
+
+
+
+Use the [`toggleCamera()`](../../api/web/functions/useCamera#togglecamera) method to control the physical operational state of the camera.
+
+- **Turning the camera off**: This action stops the camera device, disables the media stream, and pauses streaming. The webcam indicator light will turn off.
+- **Turning the camera on**: This action starts the camera and resumes streaming, allowing other participants to see video after a brief initialization period.
+
+#### Usage Example
+
+```tsx
+import React from "react";
+import { useCamera } from "@fishjam-cloud/react-client";
+
+export function CameraControl() {
+ const { toggleCamera } = useCamera();
+
+  return <button onClick={toggleCamera}>Toggle camera</button>;
+}
+```
+
+
+
+
+You can use [`toggleCamera`](../../api/mobile/functions/useCamera#togglecamera) to toggle the camera state, or use [`startCamera`](../../api/mobile/functions/useCamera#startcamera) and [`stopCamera`](../../api/mobile/functions/useCamera#stopcamera) for more explicit control.
+
+#### Using toggleCamera
+
+```tsx
+import { Button } from "react-native";
+import React from "react";
+import { useCamera } from "@fishjam-cloud/mobile-client";
+
+export function ToggleCameraButton() {
+ const { isCameraOn, toggleCamera } = useCamera(); // [!code highlight]
+
+  return (
+    <Button
+      title={isCameraOn ? "Turn camera off" : "Turn camera on"}
+      onPress={toggleCamera}
+    />
+  );
+}
+```
+
+#### Using startCamera/stopCamera
+
+```tsx
+import { Button, View } from "react-native";
+import React from "react";
+import { useCamera } from "@fishjam-cloud/mobile-client";
+
+export function CameraControls() {
+ const { startCamera, stopCamera, isCameraOn } = useCamera();
+
+ return (
+    <View>
+      <Button onPress={() => startCamera()} title="Start Camera" disabled={isCameraOn} />
+      <Button onPress={() => stopCamera()} title="Stop Camera" disabled={!isCameraOn} />
+    </View>
+ );
+}
+```
+
+
+
+
+## Turning Microphone On and Off
+
+
+
+
+Use the [`toggleMicrophone()`](../../api/web/functions/useMicrophone#togglemicrophone) method to toggle the microphone's physical operational state. The function interacts with a physical device, so it might take a noticeable amount of time.
+
+**Turning the microphone off**: This action turns the microphone off, disables the media stream, and pauses any audio transmission.
+
+**Turning the microphone on**: This action turns the microphone on and resumes audio streaming.
+
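+#### Usage Example
+
+A minimal sketch mirroring the camera example above (the button markup is illustrative):
+
+```tsx
+import React from "react";
+import { useMicrophone } from "@fishjam-cloud/react-client";
+
+export function MicrophoneToggle() {
+  const { toggleMicrophone } = useMicrophone();
+
+  return <button onClick={toggleMicrophone}>Toggle microphone</button>;
+}
+```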
+
+
+
+You can use [`toggleMicrophone`](../../api/mobile/functions/useMicrophone#togglemicrophone) to toggle the microphone state, or use [`startMicrophone`](../../api/mobile/functions/useMicrophone#startmicrophone) and [`stopMicrophone`](../../api/mobile/functions/useMicrophone#stopmicrophone) for more explicit control.
+
+#### Using toggleMicrophone
+
+```tsx
+import { Button } from "react-native";
+import React from "react";
+import { useMicrophone } from "@fishjam-cloud/mobile-client";
+
+export function ToggleMicrophoneButton() {
+ const { isMicrophoneOn, toggleMicrophone } = useMicrophone(); // [!code highlight]
+
+  return (
+    <Button
+      title={isMicrophoneOn ? "Turn microphone off" : "Turn microphone on"}
+      onPress={toggleMicrophone}
+    />
+  );
+}
+```
+
+#### Using startMicrophone/stopMicrophone
+
+```tsx
+import { Button, View } from "react-native";
+import React from "react";
+import { useMicrophone } from "@fishjam-cloud/mobile-client";
+
+export function MicrophoneControls() {
+ const { startMicrophone, stopMicrophone, isMicrophoneOn } = useMicrophone();
+
+ return (
+    <View>
+      <Button onPress={() => startMicrophone()} title="Start Microphone" disabled={isMicrophoneOn} />
+      <Button onPress={() => stopMicrophone()} title="Stop Microphone" disabled={!isMicrophoneOn} />
+    </View>
+ );
+}
+```
+
+
+
+
+## Muting and Unmuting Microphone (Web only)
+
+:::note
+This feature is only available on Web. On mobile, use `toggleMicrophone` or `stopMicrophone` to disable audio transmission.
+:::
+
+Use [`toggleMicrophoneMute()`](../../api/web/functions/useMicrophone#togglemicrophonemute) to manage the audio stream's operational status without affecting the microphone's hardware state.
+
+Muting and unmuting are faster than turning the microphone off and on, though a muted device still uses resources. This matters because muting and unmuting happen frequently during a meeting, and unmuting must be fast enough to capture the first word of a sentence.
+
+- **Muting the microphone**: This action disables the media stream and stops audio transmission while keeping the microphone active.
+- **Unmuting the microphone**: This action enables the media stream, allowing immediate transmission of sounds.
+
+#### Usage Example
+
+```tsx
+import React from "react";
+import { useMicrophone } from "@fishjam-cloud/react-client";
+
+export function MicrophoneControl() {
+ const { toggleMicrophone, toggleMicrophoneMute } = useMicrophone();
+
+  return (
+    <div>
+      <button onClick={toggleMicrophone}>Toggle microphone</button>
+      <button onClick={toggleMicrophoneMute}>Toggle mute</button>
+    </div>
+  );
+}
+```
diff --git a/docs/how-to/client/metadata.mdx b/docs/how-to/client/metadata.mdx
new file mode 100644
index 00000000..0f96782b
--- /dev/null
+++ b/docs/how-to/client/metadata.mdx
@@ -0,0 +1,212 @@
+---
+sidebar_position: 6
+title: "Metadata"
+description: "How to use metadata"
+---
+
+import Tabs from "@theme/Tabs";
+import TabItem from "@theme/TabItem";
+
+Alongside audio and video, it is possible to send additional metadata with each peer. Metadata is just
+JSON that can contain arbitrary information. Its most common use is sending a user name associated with a peer.
+However, it can also be used to send the peer's camera type, application information, etc.
+
+:::info
+
+You can also set metadata on [the server side, when adding a user to the room](../backend/server-setup#metadata). That metadata persists for the peer's lifetime and is useful for attaching information that
+can't be overwritten by the peer, such as real user names or basic permission info.
+
+:::
+
+## Setting metadata when joining the room
+
+The `joinRoom` method from the `useConnection` hook has a `peerMetadata` parameter that can be used to set the peer's metadata.
+
+
+
+
+```tsx
+const PEER_TOKEN = "some-peer-token";
+// ---cut---
+import { useConnection } from "@fishjam-cloud/react-client";
+import React, { useCallback } from "react";
+
+type PeerMetadata = {
+ displayName: string;
+};
+
+export function JoinRoomButton() {
+ const { joinRoom } = useConnection(); // [!code highlight]
+
+ const onJoinRoomPress = useCallback(async () => {
+ await joinRoom({
+ peerToken: PEER_TOKEN,
+ peerMetadata: { displayName: "John Wick" }, // [!code highlight]
+ });
+ }, [joinRoom]);
+
+  return <button onClick={onJoinRoomPress}>Join room</button>;
+}
+```
+
+
+
+
+```tsx
+const PEER_TOKEN = "some-peer-token";
+// ---cut---
+import React, { useCallback } from "react";
+import { Button } from "react-native";
+import { useConnection } from "@fishjam-cloud/mobile-client";
+
+type PeerMetadata = {
+ displayName: string;
+};
+
+export function JoinRoomButton() {
+ const { joinRoom } = useConnection();
+
+ const onPressJoin = useCallback(async () => {
+ // Note: fishjamId is passed to FishjamProvider, not joinRoom
+ await joinRoom({
+ peerToken: PEER_TOKEN,
+ peerMetadata: { displayName: "John Wick" }, // [!code highlight]
+ });
+ }, [joinRoom]);
+
+  return <Button onPress={onPressJoin} title="Join Room" />;
+}
+```
+
+
+
+
+## Updating metadata during connection
+
+Once you've joined the room, you can update your peer metadata with the `updatePeerMetadata` method of the `useUpdatePeerMetadata` hook:
+
+
+
+
+```tsx
+import { useUpdatePeerMetadata } from "@fishjam-cloud/react-client";
+import React, { useCallback } from "react";
+
+type PeerMetadata = {
+ displayName: string;
+};
+
+export function JoinRoomButton() {
+ const { updatePeerMetadata } = useUpdatePeerMetadata(); // [!code highlight]
+
+ const onPressUpdateName = useCallback(async () => {
+ await updatePeerMetadata({ displayName: "Thomas A. Anderson" }); // [!code highlight]
+ }, [updatePeerMetadata]);
+
+  return <button onClick={onPressUpdateName}>Change name</button>;
+}
+```
+
+
+
+
+```tsx
+import React, { useCallback } from "react";
+import { Button } from "react-native";
+import { useUpdatePeerMetadata } from "@fishjam-cloud/mobile-client";
+
+type PeerMetadata = {
+ displayName: string;
+};
+
+export function UpdateNameButton() {
+ const { updatePeerMetadata } = useUpdatePeerMetadata(); // [!code highlight]
+
+ const onPressUpdateName = useCallback(async () => {
+ await updatePeerMetadata({ displayName: "Thomas A. Anderson" }); // [!code highlight]
+ }, [updatePeerMetadata]);
+
+  return <Button onPress={onPressUpdateName} title="Change name" />;
+}
+```
+
+
+
+
+## Reading metadata
+
+Peer metadata is available as the `metadata` property for each peer. Therefore, when you list your peers with the `usePeers` hook, you can read
+the metadata associated with them.
+Note that the `metadata.peer` property contains only the metadata set by the client SDK (as in the examples above).
+The metadata set on the server side is available as `metadata.server`.
+Learn more about server metadata [here](../backend/server-setup#metadata).
+
+
+
+
+```tsx
+import React from "react";
+import { usePeers } from "@fishjam-cloud/react-client";
+
+type PeerMetadata = {
+ displayName: string;
+};
+
+type ServerMetadata = {
+ realName: string;
+};
+
+export function ListAllNames() {
+ const { remotePeers } = usePeers(); // [!code highlight]
+
+  return (
+    <ul>
+      {remotePeers.map((peer) => (
+        // [!code highlight:4]
+        <li key={peer.id}>
+          Display name: {peer.metadata?.peer?.displayName || "Unknown"}, real name: {peer.metadata?.server?.realName || "Unknown"}
+        </li>
+      ))}
+    </ul>
+  );
+}
+```
+
+
+
+
+```tsx
+import React from "react";
+import { Text, View } from "react-native";
+import { usePeers } from "@fishjam-cloud/mobile-client";
+
+type PeerMetadata = {
+ displayName: string;
+};
+
+type ServerMetadata = {
+ realName: string;
+};
+
+export function ListAllNames() {
+ const { remotePeers } = usePeers(); // [!code highlight]
+
+ return (
+
+ {remotePeers.map((peer) => (
+ // [!code highlight:4]
+
+ Display name: {peer.metadata?.peer?.displayName || "Unknown"}
+ Real name: {peer.metadata?.server?.realName || "Unknown"}
+
+ ))}
+
+ );
+}
+```
+
+
+
+
diff --git a/docs/how-to/react-native/picture-in-picture.mdx b/docs/how-to/client/picture-in-picture.mdx
similarity index 50%
rename from docs/how-to/react-native/picture-in-picture.mdx
rename to docs/how-to/client/picture-in-picture.mdx
index 44bbfdf8..2987cb7e 100644
--- a/docs/how-to/react-native/picture-in-picture.mdx
+++ b/docs/how-to/client/picture-in-picture.mdx
@@ -1,11 +1,16 @@
---
-sidebar_position: 7
+sidebar_position: 11
+sidebar_label: "Picture in Picture π±"
---
import Tabs from "@theme/Tabs";
import TabItem from "@theme/TabItem";
-# Picture in Picture
+# Picture in Picture Mobile
+
+:::note
+This guide is exclusively for **Mobile** (React Native) applications.
+:::
Picture in Picture (PiP) allows your app to display video content in a small window that floats above other apps when the user backgrounds your application. This is especially useful for video calls and livestreaming where users want to multitask while staying connected.
@@ -34,7 +39,7 @@ You need to modify your `app.json` file and add our plugin with Picture in Pictu
...
"plugins": [
[
- "@fishjam-cloud/react-native-client",
+ "@fishjam-cloud/mobile-client",
{
"android": {
"supportsPictureInPicture": true
@@ -97,7 +102,7 @@ You need to modify your `app.json` file and add our plugin with Picture in Pictu
...
"plugins": [
[
- "@fishjam-cloud/react-native-client",
+ "@fishjam-cloud/mobile-client",
{
"android": {
"supportsPictureInPicture": true
@@ -156,18 +161,40 @@ Add the `audio` background mode to your `Info.plist`:
### Basic Usage
-The [`PipContainerView`](../../api/mobile/interfaces/PipContainerViewProps) component automatically manages Picture in Picture functionality. Simply wrap your video call UI with this component:
+The `RTCPIPView` component displays video content that can be shown in Picture in Picture mode. Use `startPIP` and `stopPIP` to control PIP manually:
```tsx
-import React from "react";
-import { View } from "react-native";
-import { PipContainerView } from "@fishjam-cloud/react-native-client";
+import React, { useRef } from "react";
+import { View, Button } from "react-native";
+import { RTCPIPView, startPIP, stopPIP, useCamera } from "@fishjam-cloud/mobile-client";
+
+// TODO: FCE-2487 remove once MediaStream is updated
+interface MediaStreamWithURL extends MediaStream {
+ toURL(): string;
+}
export function VideoCallScreen() {
+  const pipViewRef = useRef<React.ElementRef<typeof RTCPIPView>>(null);
+ const { cameraStream } = useCamera();
+ const streamURL = cameraStream ? (cameraStream as MediaStreamWithURL).toURL() : null;
+
return (
-
- {/* Your video call UI */}
-
+    <View>
+      <Button title="Start PiP" onPress={() => startPIP(pipViewRef as any)} />
+      <Button title="Stop PiP" onPress={() => stopPIP(pipViewRef as any)} />
+      {streamURL && (
+        <RTCPIPView ref={pipViewRef} streamURL={streamURL} />
+      )}
+    </View>
}
```
@@ -176,24 +203,38 @@ By default, Picture in Picture will start automatically when the app goes to the
### Configuration Options
-You can customize the Picture in Picture behavior using props on the [`PipContainerView`](../../api/mobile/interfaces/PipContainerViewProps):
+You can customize the Picture in Picture behavior using the `pip` prop on `RTCPIPView`:
```tsx
-import React from "react";
+import React, { useRef } from "react";
import { View } from "react-native";
-import { PipContainerView } from "@fishjam-cloud/react-native-client";
+import { RTCPIPView, useCamera } from "@fishjam-cloud/mobile-client";
+
+// TODO: FCE-2487 remove once MediaStream is updated
+interface MediaStreamWithURL extends MediaStream {
+ toURL(): string;
+}
export function VideoCallScreen() {
+  const pipViewRef = useRef<React.ElementRef<typeof RTCPIPView>>(null);
+ const { cameraStream } = useCamera();
+ const streamURL = cameraStream ? (cameraStream as MediaStreamWithURL).toURL() : null;
+
return (
-
- {/* Your video call UI */}
-
+    <View>
+      {streamURL && (
+        <RTCPIPView
+          ref={pipViewRef}
+          streamURL={streamURL}
+          pip={{
+            enabled: true,
+            startAutomatically: true,
+            stopAutomatically: true,
+          }}
+        />
+      )}
+    </View>
);
}
```
@@ -204,42 +245,51 @@ export function VideoCallScreen() {
**stopAutomatically**: When `true`, Picture in Picture stops automatically when the app returns to the foreground. Default: `true`
-**allowsCameraInBackground**: **(iOS only)** When `true`, allows the camera to continue running while in Picture in Picture mode. Requires iOS 16.0 or later. Default: `false`
-
-**primaryPlaceholderText**: Text displayed in the left view when the local camera is unavailable. Default: `"No camera"`
-
-**secondaryPlaceholderText**: Text displayed in the right view when no remote speaker is active. Default: `"No active speaker"`
+**enabled**: When `true`, enables Picture in Picture functionality for this view.
### Manual Control
-For more control over when Picture in Picture starts and stops, you can use a ref to manually trigger these actions:
+For more control over when Picture in Picture starts and stops, use the `startPIP` and `stopPIP` functions with a ref:
```tsx
import React, { useRef } from "react";
import { Button, View } from "react-native";
-import {
- PipContainerView,
- PipContainerViewRef,
-} from "@fishjam-cloud/react-native-client";
+import { RTCPIPView, startPIP, stopPIP, useCamera } from "@fishjam-cloud/mobile-client";
+
+// TODO: FCE-2487 remove once MediaStream is updated
+interface MediaStreamWithURL extends MediaStream {
+ toURL(): string;
+}
export function VideoCallScreen() {
- const pipRef = useRef(null);
+  const pipViewRef = useRef<React.ElementRef<typeof RTCPIPView>>(null);
+ const { cameraStream } = useCamera();
+ const streamURL = cameraStream ? (cameraStream as MediaStreamWithURL).toURL() : null;
- const handleStartPip = async () => {
- await pipRef.current?.startPictureInPicture();
+ const handleStartPip = () => {
+ startPIP(pipViewRef as any);
};
- const handleStopPip = async () => {
- await pipRef.current?.stopPictureInPicture();
+ const handleStopPip = () => {
+ stopPIP(pipViewRef as any);
};
return (
-
- {/* Your video call UI */}
-
+    <View>
+      <Button title="Start PiP" onPress={handleStartPip} />
+      <Button title="Stop PiP" onPress={handleStopPip} />
+      {streamURL && (
+        <RTCPIPView ref={pipViewRef} streamURL={streamURL} />
+      )}
+    </View>
);
}
@@ -250,59 +300,66 @@ export function VideoCallScreen() {
Here's a complete example showing Picture in Picture with a video call:
```tsx
-import React from "react";
-import { FlatList, StyleSheet, View } from "react-native";
-import {
- PipContainerView,
- usePeers,
- VideoRendererView,
-} from "@fishjam-cloud/react-native-client";
+import React, { useRef } from "react";
+import { FlatList, StyleSheet, View, Text, Button } from "react-native";
+import { RTCPIPView, RTCView, startPIP, stopPIP, usePeers } from "@fishjam-cloud/mobile-client";
+
+// TODO: FCE-2487 remove once MediaStream is updated
+interface MediaStreamWithURL extends MediaStream {
+ toURL(): string;
+}
+
+function VideoPlayer({ stream }: { stream: MediaStream | null }) {
+ const streamURL = stream ? (stream as MediaStreamWithURL).toURL() : null;
+  if (!streamURL) return <Text>No video</Text>;
+  return <RTCView streamURL={streamURL} />;
+}
export function VideoCallScreen() {
+  const pipViewRef = useRef<React.ElementRef<typeof RTCPIPView>>(null);
const { localPeer, remotePeers } = usePeers();
+ const firstRemotePeer = remotePeers[0];
+ const remoteStreamURL = firstRemotePeer?.cameraTrack?.stream
+ ? (firstRemotePeer.cameraTrack.stream as MediaStreamWithURL).toURL()
+ : null;
return (
-
-
- {/* Render local video */}
- {localPeer?.tracks.map((track) => {
- if (track.type === "Video") {
- return (
-
- );
- }
- })}
-
- {/* Render remote videos */}
- peer.id}
- renderItem={({ item: peer }) => (
-
- {peer.tracks.map((track) => {
- if (track.type === "Video") {
- return (
-
- );
- }
- })}
-
- )}
+    <View style={{ flex: 1 }}>
+      <Button title="Start PiP" onPress={() => startPIP(pipViewRef as any)} />
+      <Button title="Stop PiP" onPress={() => stopPIP(pipViewRef as any)} />
+
+      {/* Render local video */}
+      {localPeer?.cameraTrack?.stream && (
+        <VideoPlayer stream={localPeer.cameraTrack.stream} />
+      )}
+
+      {/* Render first remote peer with PiP support */}
+      {remoteStreamURL && (
+        <RTCPIPView ref={pipViewRef} streamURL={remoteStreamURL} />
-
-
+      )}
+
+      {/* Render remaining remote videos */}
+      <FlatList
+        data={remotePeers.slice(1)}
+        keyExtractor={(peer) => peer.id}
+        renderItem={({ item: peer }) => (
+          <View>
+            {peer.cameraTrack?.stream && (
+              <VideoPlayer stream={peer.cameraTrack.stream} />
+            )}
+          </View>
+        )}
+      />
+    </View>
);
}
@@ -346,7 +403,7 @@ Example configuration combining both features:
"expo": {
"plugins": [
[
- "@fishjam-cloud/react-native-client",
+ "@fishjam-cloud/mobile-client",
{
"android": {
"enableForegroundService": true,
@@ -368,26 +425,48 @@ Example configuration combining both features:
### Basic Usage
+Use `RTCPIPView` to display the livestream with Picture in Picture support:
+
```tsx
-import React from "react";
-import { View, StyleSheet } from "react-native";
-import {
- LivestreamViewer,
- useLivestreamViewer,
-} from "@fishjam-cloud/react-native-client/livestream";
+import React, { useRef, useEffect } from "react";
+import { View, StyleSheet, Text } from "react-native";
+import { RTCPIPView, useLivestreamViewer, useSandbox } from "@fishjam-cloud/mobile-client";
+
+// TODO: FCE-2487 remove once MediaStream is updated
+interface MediaStreamWithURL extends MediaStream {
+ toURL(): string;
+}
export function LivestreamScreen() {
- const { whepClientRef } = useLivestreamViewer();
+  const pipViewRef = useRef<React.ElementRef<typeof RTCPIPView>>(null);
+ const { connect, stream } = useLivestreamViewer();
+ const { getSandboxViewerToken } = useSandbox();
+ const streamURL = stream ? (stream as MediaStreamWithURL).toURL() : null;
+
+ useEffect(() => {
+ const connectToStream = async () => {
+ const token = await getSandboxViewerToken("room-name");
+ await connect({ token });
+ };
+ connectToStream();
+ }, []);
return (
-
+ {streamURL ? (
+      <RTCPIPView ref={pipViewRef} streamURL={streamURL} />
+ ) : (
+      <Text>Connecting to stream...</Text>
+ )}
);
}
@@ -404,70 +483,48 @@ const styles = StyleSheet.create({
### Configuration Options
-```tsx
-import React from "react";
-import { View, StyleSheet } from "react-native";
-import {
- LivestreamViewer,
- useLivestreamViewer,
-} from "@fishjam-cloud/react-native-client/livestream";
-
-export function LivestreamScreen() {
- const { whepClientRef } = useLivestreamViewer();
-
- return (
-
-
-
- );
-}
-
-const styles = StyleSheet.create({
- container: {
- flex: 1,
- },
- viewer: {
- flex: 1,
- },
-});
+Configure Picture in Picture behavior using the `pip` prop:
+
+```jsx
+<RTCPIPView
+  ref={pipViewRef}
+  streamURL={streamURL}
+  pip={{
+    enabled: true,
+    startAutomatically: true,
+    stopAutomatically: true,
+  }}
+/>
```
#### Configuration Properties
-**pipEnabled**: Enable or disable Picture in Picture functionality. Default: `true`
+**enabled**: Enable or disable Picture in Picture functionality.
-**autoStartPip**: When `true`, Picture in Picture starts automatically when the app goes to the background. Default: `false`
-
-**autoStopPip**: **(iOS only)** When `true`, Picture in Picture stops automatically when the app returns to the foreground. On Android, PiP always stops when returning to foreground. Default: `false`
+**startAutomatically**: When `true`, Picture in Picture starts automatically when the app goes to the background. Default: `true`
-**pipSize**: An object with `width` and `height` properties to set the aspect ratio of the Picture in Picture window
+**stopAutomatically**: When `true`, Picture in Picture stops automatically when the app returns to the foreground. Default: `true`
### Complete Example
Here's a complete example showing how to connect to a livestream and display it with Picture in Picture:
```tsx
-import React, { useEffect } from "react";
-import { View, StyleSheet } from "react-native";
-import {
- LivestreamViewer,
- useLivestreamViewer,
-} from "@fishjam-cloud/react-native-client/livestream";
-import { useSandbox } from "@fishjam-cloud/react-native-client";
+import React, { useEffect, useRef } from "react";
+import { View, StyleSheet, Text, Button } from "react-native";
+import { RTCPIPView, useLivestreamViewer, useSandbox, startPIP, stopPIP } from "@fishjam-cloud/mobile-client";
-export function LivestreamViewerScreen() {
- const { getSandboxViewerToken } = useSandbox({
- fishjamId: "your-fishjam-id",
- });
+// TODO: FCE-2487 remove once MediaStream is updated
+interface MediaStreamWithURL extends MediaStream {
+ toURL(): string;
+}
- const { connect, disconnect, whepClientRef } = useLivestreamViewer();
+export function LivestreamViewerScreen() {
+  const pipViewRef = useRef<React.ElementRef<typeof RTCPIPView>>(null);
+ const { getSandboxViewerToken } = useSandbox();
+ const { connect, disconnect, stream } = useLivestreamViewer();
+ const streamURL = stream ? (stream as MediaStreamWithURL).toURL() : null;
useEffect(() => {
const connectToStream = async () => {
@@ -488,14 +545,22 @@ export function LivestreamViewerScreen() {
return (
-
+    <View style={styles.container}>
+      <Button title="Start PiP" onPress={() => startPIP(pipViewRef as any)} />
+      <Button title="Stop PiP" onPress={() => stopPIP(pipViewRef as any)} />
+      {streamURL ? (
+        <RTCPIPView ref={pipViewRef} streamURL={streamURL} style={styles.viewer} />
+      ) : (
+        <Text style={styles.loading}>Connecting to stream...</Text>
+      )}
+    </View>
);
}
@@ -508,6 +573,11 @@ const styles = StyleSheet.create({
viewer: {
flex: 1,
},
+ loading: {
+ color: "#fff",
+ textAlign: "center",
+ marginTop: 20,
+ },
});
```
@@ -523,3 +593,4 @@ Picture in Picture requires the `audio` background mode. Uses the native AVPictu
+
diff --git a/docs/how-to/react-native/reconnection-handling.mdx b/docs/how-to/client/reconnection-handling.mdx
similarity index 80%
rename from docs/how-to/react-native/reconnection-handling.mdx
rename to docs/how-to/client/reconnection-handling.mdx
index 3189b6de..0749543c 100644
--- a/docs/how-to/react-native/reconnection-handling.mdx
+++ b/docs/how-to/client/reconnection-handling.mdx
@@ -1,8 +1,13 @@
---
-sidebar_position: 7
+sidebar_position: 10
+sidebar_label: "Reconnect π±"
---
-# Reconnect
+# Reconnect Mobile
+
+:::note
+This guide is exclusively for **Mobile** (React Native) applications.
+:::
If your connection is lost while you are connected to a room, the app will automatically handle the reconnection process.
You can monitor these events by utilizing the [`useConnection`](../../api/mobile/functions/useConnection) hook.
@@ -14,7 +19,7 @@ import { useEffect, useRef } from "react";
import {
ReconnectionStatus,
useConnection,
-} from "@fishjam-cloud/react-native-client";
+} from "@fishjam-cloud/mobile-client";
export function useLogConnectionStatus() {
const prevStatus = useRef("idle");
@@ -36,3 +41,4 @@ export function useLogConnectionStatus() {
}, [reconnectionStatus]);
}
```
+
diff --git a/docs/how-to/react-native/screensharing.mdx b/docs/how-to/client/screensharing.mdx
similarity index 79%
rename from docs/how-to/react-native/screensharing.mdx
rename to docs/how-to/client/screensharing.mdx
index a20ed9ff..4e1aaf52 100644
--- a/docs/how-to/react-native/screensharing.mdx
+++ b/docs/how-to/client/screensharing.mdx
@@ -1,11 +1,16 @@
---
-sidebar_position: 5
+sidebar_position: 12
+sidebar_label: "Screen sharing π±"
---
import Tabs from "@theme/Tabs";
import TabItem from "@theme/TabItem";
-# Screen sharing
+# Screen sharing Mobile
+
+:::note
+This guide is exclusively for **Mobile** (React Native) applications.
+:::
Our SDK also allow to stream content of mobile device screen.
@@ -21,10 +26,7 @@ To enable screen sharing on iOS, you need to follow the steps below.
:::tip[Background streaming during screen sharing]
-If you want to continue screen sharing when the app goes to the background, you need to:
-
-1. Enable VoIP background mode by setting `enableVoIPBackgroundMode: true` in the plugin configuration or adding the VoIP background mode to your `Info.plist`
-2. Use the [`useCallKitService`](../../api/mobile/variables/useCallKitService) hook in your component to manage the CallKit session
+If you want to continue screen sharing when the app goes to the background, you need to enable VoIP background mode by setting `enableVoIPBackgroundMode: true` in the plugin configuration or adding the VoIP background mode to your `Info.plist`.
See the [background calls documentation](./background-streaming) for detailed instructions and code examples.
@@ -42,7 +44,7 @@ You need to modify `app.json` file and add our plugin:
...
"plugins": [
[
- "@fishjam-cloud/react-native-client",
+ "@fishjam-cloud/mobile-client",
{
"ios": {
"enableScreensharing": true,
@@ -115,15 +117,12 @@ Configuring screen sharing on iOS is a little complicated.
1. Open your `.xcworkspace` in Xcode
1. Create new Broadcast Upload Extension. Select `File β New β Target... β Broadcast Upload Extension β Next`. Choose the name for the new target, select Swift language and deselect "Include UI Extension".
- 
1. Configure app group. Go to "Signing & Capabilities" tab, click "+ Capability" button in upper left corner and select "App Groups".
- 
Then in the "App Groups" add a new group or select existing. Usually group name has format `group.`. Verify that both app and extension targets have app group and dev team set correctly.
-1. A new folder with app extension should appear on the left with contents like this:
- 
+1. A new folder with app extension should appear on the left.
Replace `SampleHandler.swift` with `FishjamBroadcastHandler.swift` and this code:
@@ -213,10 +212,7 @@ Configuring screen sharing on iOS is a little complicated.
:::tip[Background streaming during screen sharing]
-If you want to continue screen sharing when the app goes to the background, you need to:
-
-1. Enable VoIP background mode by setting `enableVoIPBackgroundMode: true` in the plugin configuration or adding the VoIP background mode to your `Info.plist`
-2. Use the [`useCallKitService`](../../api/mobile/variables/useCallKitService) hook in your component to manage the CallKit session
+If you want to continue screen sharing when the app goes to the background, you need to enable VoIP background mode by setting `enableVoIPBackgroundMode: true` in the plugin configuration or adding the VoIP background mode to your `Info.plist`.
See the [background calls documentation](./background-streaming) for detailed instructions and code examples.
@@ -228,7 +224,7 @@ You can use [`useScreenShare`](../../api/mobile/functions/useScreenShare) hook t
:::tip[Permissions]
-Permission request is handled for you as soon as you call [`toggleScreenShare`](../../api/mobile/functions/useScreenShare#togglescreenshare).
+The permission request is handled for you as soon as you call [`startStreaming`](../../api/mobile/functions/useScreenShare#startstreaming).
:::
@@ -238,27 +234,31 @@ On Android API level >= 24, you must use a foreground service when screen sharin
:::
-You can enable/disable screen sharing with [`toggleScreenShare`](../../api/mobile/functions/useScreenShare#togglescreenshare) method.
-And check current state with [`isScreenShareOn`](../../api/mobile/functions/useScreenShare#isscreenshareon) property.
+You can enable or disable screen sharing with the [`startStreaming`](../../api/mobile/functions/useScreenShare#startstreaming) and [`stopStreaming`](../../api/mobile/functions/useScreenShare#stopstreaming) methods,
+and check the current state by checking whether [`stream`](../../api/mobile/functions/useScreenShare#stream) exists.
```tsx
import React, { useCallback } from "react";
import { Button } from "react-native";
-import { useScreenShare } from "@fishjam-cloud/react-native-client";
+import { useScreenShare } from "@fishjam-cloud/mobile-client";
export function ScreenShareButton() {
- const { toggleScreenShare, isScreenShareOn } = useScreenShare(); // [!code highlight]
+ const { startStreaming, stopStreaming, stream } = useScreenShare(); // [!code highlight]
- const onPressToggle = useCallback(
- () => toggleScreenShare(), // [!code highlight]
- [toggleScreenShare],
- );
+ const onPressToggle = useCallback(() => {
+ if (stream) {
+ stopStreaming(); // [!code highlight]
+ } else {
+ startStreaming(); // [!code highlight]
+ }
+ }, [stream, startStreaming, stopStreaming]);
return (
  <Button onPress={onPressToggle} title="Toggle screen share" />
);
}
```
+
diff --git a/docs/how-to/client/start-streaming.mdx b/docs/how-to/client/start-streaming.mdx
new file mode 100644
index 00000000..115a676a
--- /dev/null
+++ b/docs/how-to/client/start-streaming.mdx
@@ -0,0 +1,142 @@
+---
+sidebar_position: 2
+---
+
+import Tabs from "@theme/Tabs";
+import TabItem from "@theme/TabItem";
+
+# Streaming media
+
+This guide covers the basics of initializing and using camera and microphone devices. For more advanced device management (selecting specific devices, device switching, muting, etc.), see the [Managing devices](./managing-devices) guide.
+
+## Initialize access to your devices
+
+
+
+
+Fishjam provides an API to browse and manage media devices you can use.
+To ask the browser for permission to list the available devices,
+call the [`initializeDevices`](../../api/web/functions/useInitializeDevices#initializedevices)
+function from the [`useInitializeDevices`](../../api/web/functions/useInitializeDevices) hook.
+
+You can choose whether to initialize both camera and microphone devices or just one of them by passing [`InitializeDevicesSettings`](../../api/web/type-aliases/InitializeDevicesSettings)
+as an argument. By default, both camera and microphone are initialized.
+
+The [`initializeDevices`](../../api/web/functions/useInitializeDevices#initializedevices) function returns a [`Promise`](../../api/web/type-aliases/InitializeDevicesResult) that resolves with the result of the initialization.
+
+```ts
+import React, { useEffect } from "react";
+import { useInitializeDevices } from "@fishjam-cloud/react-client";
+
+export function useExample() {
+ const { initializeDevices } = useInitializeDevices();
+
+ useEffect(() => {
+ initializeDevices().then((result) => {
+ // optionally handle the result
+ console.log(result);
+ });
+ }, [initializeDevices]);
+}
+```
+
+:::note
+The [`useInitializeDevices`](../../api/web/functions/useInitializeDevices) hook gives you the convenience of asking the user for all permissions at once.
+
+It is not the only way to enable the device. You can just toggle the device using [`useCamera`](../../api/web/functions/useCamera) or [`useMicrophone`](../../api/web/functions/useMicrophone) hooks.
+:::
+
+
+
+
+On mobile, you should use `initializeDevices()` when you first want to stream. This gives your app access to all available devices and automatically requests camera and microphone permissions. The SDK will show the system permission dialog if permissions haven't been granted yet.
+
+Once devices are initialized, you can manage their state using the methods described in the [Managing devices](./managing-devices) guide.
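+
+As a rough sketch, assuming the mobile SDK exposes `initializeDevices` through a `useInitializeDevices` hook like the web SDK does (check the mobile API reference for the exact import):
+
+```tsx
+import { useEffect } from "react";
+// Assumption: the hook name mirrors the web SDK
+import { useInitializeDevices } from "@fishjam-cloud/mobile-client";
+
+export function useInitDevicesOnMount() {
+  const { initializeDevices } = useInitializeDevices();
+
+  useEffect(() => {
+    // Triggers the system camera/microphone permission prompts if needed
+    initializeDevices();
+  }, [initializeDevices]);
+}
+```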
+
+
+
+
+## Device API
+
+
+
+
+To manage users' camera and microphone devices, use the respective [`useCamera`](../../api/web/functions/useCamera) and [`useMicrophone`](../../api/web/functions/useMicrophone) hooks. Both of them have a similar API. To keep things simple, we will just use the camera hook.
+
+```tsx
+import React, { useEffect, useRef } from "react";
+import { useCamera } from "@fishjam-cloud/react-client";
+
+export function ExampleCameraPreview() {
+ const videoRef = useRef(null);
+
+ const { activeCamera, selectCamera, cameraStream, cameraDevices } =
+ useCamera(); // [!code highlight]
+
+ useEffect(() => {
+ if (!videoRef.current) return;
+ videoRef.current.srcObject = cameraStream ?? null;
+ }, [cameraStream]);
+
+ return (
+    <div>
+      <p>Active camera: {activeCamera?.label ?? "None"}</p>
+      {cameraStream && <video ref={videoRef} autoPlay muted />} // [!code highlight]
+    </div>
+ );
+}
+```
+
+
+
+
+To manage users' camera and microphone devices, use the respective [`useCamera`](../../api/mobile/functions/useCamera) and [`useMicrophone`](../../api/mobile/functions/useMicrophone) hooks. Both of them have similar API. To keep things simple, we will just use the camera hook.
+
+You can preview your camera stream using the `RTCView` component.
+
+```tsx
+import React, { useEffect } from "react";
+import { View, Text } from "react-native";
+import { useCamera, RTCView } from "@fishjam-cloud/mobile-client";
+
+// TODO: FCE-2487 remove once MediaStream is updated
+interface MediaStreamWithURL extends MediaStream {
+ toURL(): string;
+}
+
+export function CameraPreview() {
+ const { startCamera, cameraStream } = useCamera(); // [!code highlight]
+
+ useEffect(() => {
+ startCamera(); // [!code highlight]
+ }, [startCamera]);
+
+ const streamURL = cameraStream
+ ? (cameraStream as MediaStreamWithURL).toURL()
+ : null;
+
+ return (
+    <View>
+      {streamURL ? (
+        <RTCView streamURL={streamURL} />
+      ) : (
+        <Text>No camera stream</Text>
+      )}
+    </View>
+ );
+}
+```
+
+
+
diff --git a/docs/how-to/react/stream-middleware.mdx b/docs/how-to/client/stream-middleware.mdx
similarity index 93%
rename from docs/how-to/react/stream-middleware.mdx
rename to docs/how-to/client/stream-middleware.mdx
index baa2019c..4645a30e 100644
--- a/docs/how-to/react/stream-middleware.mdx
+++ b/docs/how-to/client/stream-middleware.mdx
@@ -1,8 +1,13 @@
---
-sidebar_position: 6
+sidebar_position: 7
+sidebar_label: "Stream middleware π"
---
-# Stream middleware
+# Stream middleware Web
+
+:::note
+This guide is exclusively for **Web** (React) applications.
+:::
Stream middleware in Fishjam allows you to intercept and manipulate media tracks before they are sent to the Fishjam server.
This feature is powerful for applying effects, custom encodings, or any other transformations to the media stream.
@@ -96,3 +101,4 @@ export function CameraWithBlurEffect() {
```
This example provides a button to toggle the blur effect on and off. The `BlurProcessor` handles the actual processing logic and is assumed to be implemented elsewhere.
+
diff --git a/docs/how-to/features/audio-only-calls.mdx b/docs/how-to/features/audio-only-calls.mdx
index 16ffab7a..17fca18d 100644
--- a/docs/how-to/features/audio-only-calls.mdx
+++ b/docs/how-to/features/audio-only-calls.mdx
@@ -64,7 +64,7 @@ Set `roomType` to `audio_only` when creating a room:
-Now, you can connect peers normally to the room as described in our [React Native](../../how-to/react-native/connecting) and [React](../../how-to/react/connecting) docs.
+Now, you can connect peers normally to the room as described in our [Web & Mobile connecting guide](../../how-to/client/connecting).
### Livestreaming
diff --git a/docs/how-to/features/sandbox-api-testing.mdx b/docs/how-to/features/sandbox-api-testing.mdx
index a9b5ac71..7f7b37e5 100644
--- a/docs/how-to/features/sandbox-api-testing.mdx
+++ b/docs/how-to/features/sandbox-api-testing.mdx
@@ -36,11 +36,12 @@ The Sandbox API is a development tool that lets you create rooms and peers for t
const roomName = "testRoom";
const peerName = "testUser";
// ---cut---
- import { useSandbox } from '@fishjam-cloud/react-native-client';
+ import { useSandbox } from '@fishjam-cloud/mobile-client';
// ...
- const { getSandboxPeerToken } = useSandbox({ fishjamId: FISHJAM_ID });
+ // fishjamId is provided through FishjamProvider
+ const { getSandboxPeerToken } = useSandbox();
const peerToken = await getSandboxPeerToken(roomName, peerName);
```
@@ -115,11 +116,12 @@ Below are examples on how to use tokens from the Sandbox API in your frontend ap
const RoomView = (props: {peerToken: string}) => null;
// ---cut---
- import { useSandbox } from "@fishjam-cloud/react-native-client";
+ import { useSandbox } from "@fishjam-cloud/mobile-client";
export default function TestScreen() {
const [peerToken, setPeerToken] = useState(null);
- const { getSandboxPeerToken } = useSandbox({ fishjamId: FISHJAM_ID });
+ // fishjamId is provided through FishjamProvider
+ const { getSandboxPeerToken } = useSandbox();
useEffect(() => {
const fetchPeerToken = async () => {
diff --git a/docs/how-to/react-native/_category_.json b/docs/how-to/react-native/_category_.json
deleted file mode 100644
index 1fd79841..00000000
--- a/docs/how-to/react-native/_category_.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "label": "React Native",
- "position": 1
-}
diff --git a/docs/how-to/react-native/_components/configure-permissions.mdx b/docs/how-to/react-native/_components/configure-permissions.mdx
deleted file mode 100644
index bd5debcc..00000000
--- a/docs/how-to/react-native/_components/configure-permissions.mdx
+++ /dev/null
@@ -1,91 +0,0 @@
-import Tabs from "@theme/Tabs";
-import TabItem from "@theme/TabItem";
-
-Your app needs to have permissions configured in order to use the microphone and camera.
-
-### Android
-
-Permissions below are required to stream audio and video with Fishjam on Android.
-
-- `android.permission.CAMERA`
-- `android.permission.RECORD_AUDIO`
-- `android.permission.MODIFY_AUDIO_SETTINGS`
-
-
-
-
-
-Add required permissions to the `app.json` file.
-
-```json title='app.json'
-{
- "expo": {
- ...
- "android": {
- ...
- "permissions": [
- "android.permission.CAMERA",
- "android.permission.RECORD_AUDIO",
- "android.permission.MODIFY_AUDIO_SETTINGS"
- ]
- }
- }
-}
-```
-
-
-
-
-Add required permissions to the `AndroidManifest.xml` file.
-
-```xml title='AndroidManifest.xml'
-<manifest>
-  ...
-  <uses-permission android:name="android.permission.CAMERA" />
-  <uses-permission android:name="android.permission.RECORD_AUDIO" />
-  <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
-  ...
-</manifest>
-```
-
-
-
-
-### iOS
-
-
-
-
-
-You don't have to make any changes to run app on iOS.
-To update default content of permission alert, you can add these settings to `app.json`:
-
-```json title='app.json'
-{
- "expo": {
- ...
- "ios": {
- ...
- "infoPlist": {
- "NSCameraUsageDescription": "Allow $(PRODUCT_NAME) to access your camera.",
- "NSMicrophoneUsageDescription": "Allow $(PRODUCT_NAME) to access your microphone."
- }
- },
- }
-}
-```
-
-
-
-
-Ensure `Info.plist` contains camera and microphone usage description entries:
-
-```xml title='Info.plist'
-<key>NSCameraUsageDescription</key>
-<string>Allow $(PRODUCT_NAME) to access your camera.</string>
-<key>NSMicrophoneUsageDescription</key>
-<string>Allow $(PRODUCT_NAME) to access your microphone.</string>
-```
-
-
-
diff --git a/docs/how-to/react-native/_components/install-package.mdx b/docs/how-to/react-native/_components/install-package.mdx
deleted file mode 100644
index 356da871..00000000
--- a/docs/how-to/react-native/_components/install-package.mdx
+++ /dev/null
@@ -1,27 +0,0 @@
-import Tabs from "@theme/Tabs";
-import TabItem from "@theme/TabItem";
-
-
-
-
-```bash npm2yarn
-npm install @fishjam-cloud/react-native-client
-```
-
-
-
-
-
-### Install Expo dependencies
-
-Follow instructions from official [Expo documentation](https://docs.expo.dev/bare/installing-expo-modules/).
-
-### Install Fishjam
-
- ```sh
- npx expo install @fishjam-cloud/react-native-client
- ```
-
-
-
-
diff --git a/docs/how-to/react-native/background-streaming.mdx b/docs/how-to/react-native/background-streaming.mdx
deleted file mode 100644
index 9b43c516..00000000
--- a/docs/how-to/react-native/background-streaming.mdx
+++ /dev/null
@@ -1,207 +0,0 @@
----
-sidebar_position: 6
----
-
-import Tabs from "@theme/Tabs";
-import TabItem from "@theme/TabItem";
-
-# Background calls
-
-Both Android and iOS support calls running in the background, but they use different approaches:
-
-- **Android**: Uses foreground services to keep the app active in the background
-- **iOS**: Uses CallKit integration to maintain VoIP calls in the background
-
-Below is configuration required to make it work:
-
-
-
-
-
-You need to modify `app.json` file and add our plugin:
-
-```json
-{
- "expo": {
- ...
- "plugins": {
- ...
- [
- "@fishjam-cloud/react-native-client",
- {
- "android": {
- "enableForegroundService": true
- },
- "ios": {
- "enableVoIPBackgroundMode": true
- }
- }
- ],
- ...
- }
- }
-}
-```
-
-
-
-
-**Android Configuration**
-
-You need to add the following service to `AndroidManifest.xml`:
-
-```xml title='AndroidManifest.xml'
-
- ...
-
- ...
-
-
-
-```
-
-**iOS Configuration**
-
-You need to add VoIP background mode in `Info.plist`:
-
-```xml title='Info.plist'
-<key>UIBackgroundModes</key>
-<array>
-  <string>voip</string>
-</array>
-```
-
-
-
-
-## Usage
-
-
-
-
-
-You can use [`useForegroundService`](../../api/mobile/variables/useForegroundService) hook to handle how foreground service behaves on Android.
-
-:::important[Permissions]
-
-If you want to use [`enableCamera`](../../api/mobile/type-aliases/ForegroundServiceConfig#enablecamera) or [`enableMicrophone`](../../api/mobile/type-aliases/ForegroundServiceConfig#enablemicrophone),
-user must first grant permission for this resource. [`useForegroundService`](../../api/mobile/variables/useForegroundService) will check if permission is
-granted and only then allow to start a service.
-
-:::
-
-```tsx
-import {
- useForegroundService,
- useCamera,
- useMicrophone,
-} from "@fishjam-cloud/react-native-client";
-
-const { isCameraOn } = useCamera();
-const { isMicrophoneOn } = useMicrophone();
-
-useForegroundService({
- channelId: "io.fishjam.example.fishjamchat.foregroundservice.channel",
- channelName: "Fishjam Chat Notifications",
- notificationTitle: "Your video call is ongoing",
- notificationContent: "Tap to return to the call.",
- enableCamera: isCameraOn,
- enableMicrophone: isMicrophoneOn,
-});
-```
-
-
-
-
-On iOS, background calls are achieved through CallKit integration. You can use the CallKit hooks to manage VoIP calls that continue running in the background.
-
-### Manual CallKit Management
-
-Use the [`useCallKit`](../../api/mobile/variables/useCallKit) hook for fine-grained control over CallKit sessions:
-
-```tsx
-import { useCallKit } from "@fishjam-cloud/react-native-client";
-
-const { startCallKitSession, endCallKitSession } = useCallKit();
-
-// Start CallKit session when joining a room
-const handleJoinRoom = async () => {
- await startCallKitSession({
- displayName: "John Doe",
- isVideo: true,
- });
- // ... join room logic
-};
-
-// End CallKit session when leaving
-const handleLeaveRoom = async () => {
- await endCallKitSession();
- // ... leave room logic
-};
-```
-
-### Automatic CallKit Management
-
-Use the [`useCallKitService`](../../api/mobile/variables/useCallKitService) hook for automatic session lifecycle management:
-
-```tsx
-import React from "react";
-import { useCallKitService } from "@fishjam-cloud/react-native-client";
-import { View } from "react-native";
-
-function CallScreen({ username }: { username: string }) {
- // CallKit session automatically starts when component mounts
- // and ends when component unmounts
- useCallKitService({
- displayName: username,
- isVideo: true,
- });
-
- return ...;
-}
-```
-
-### Listening to CallKit Events
-
-Use the [`useCallKitEvent`](../../api/mobile/variables/useCallKitEvent) hook to respond to user interactions with the native CallKit interface:
-
-```tsx
-import {
- useCallKitEvent,
- useCamera,
- useMicrophone,
- useConnection,
-} from "@fishjam-cloud/react-native-client";
-
-const { toggleCamera } = useCamera();
-const { startMicrophone, stopMicrophone } = useMicrophone();
-const { leaveRoom } = useConnection();
-
-// Listen for mute toggle from CallKit UI
-useCallKitEvent("muted", (isMuted: boolean) => {
- if (isMuted) {
- stopMicrophone();
- } else {
- startMicrophone();
- }
-});
-
-// Listen for hold state changes
-useCallKitEvent("held", (isOnHold: boolean) => {
- console.log("Call hold state:", isOnHold);
- // Handle hold state in your app
-});
-
-// Listen for call end from CallKit UI
-useCallKitEvent("ended", () => {
- // Handle call termination
- leaveRoom();
-});
-```
-
-
-
-
-## See Also
-
-For an enhanced user experience when your app is in the background, consider enabling [Picture in Picture](./picture-in-picture), which allows users to see video content in a floating window while using other apps.
diff --git a/docs/how-to/react-native/connecting.mdx b/docs/how-to/react-native/connecting.mdx
deleted file mode 100644
index e8d6481a..00000000
--- a/docs/how-to/react-native/connecting.mdx
+++ /dev/null
@@ -1,101 +0,0 @@
----
-sidebar_position: 2
----
-
-import Tabs from "@theme/Tabs";
-import TabItem from "@theme/TabItem";
-
-# Connecting
-
-This article will guide you through the process of connecting to a Fishjam room.
-
-## Getting URL and token
-
-In order to connect, you need to obtain a **Peer Token** (the token that will authenticate the peer in
-your Room).
-
-
-
-
-
-Once you create your account on [Fishjam](https://fishjam.io), you will have access to the Sandbox environment as part of the Mini Jar plan.
-While using the Sandbox environment, [you can use the Sandbox API](../../how-to/features/sandbox-api-testing) to generate peer tokens for testing or development purposes.
-This is basically a service that will create a Room, add your app as
-the Room's Peer, and return the token required to use that Room.
-
-```ts
-import { useSandbox } from "@fishjam-cloud/react-native-client";
-const SANDBOX_FISHJAM_ID = "...";
-const roomName = "room";
-const peerName = "user";
-// ---cut---
-
-// The `useSandbox` hook will work ONLY with the FISHJAM_ID of the Sandbox environment
-const { getSandboxPeerToken } = useSandbox({ fishjamId: SANDBOX_FISHJAM_ID });
-const peerToken = await getSandboxPeerToken(roomName, peerName);
-```
-
-
-
-
-For the production app, you need to implement your own backend service that will provide the user with a **Peer Token**. To do that,
-follow our [server setup instructions](../../how-to/backend/server-setup).
-
-
-
-
-## Connecting
-
-In order to connect, call [`joinRoom`](../../api/mobile/functions/useConnection#joinroom) method with the `peerToken` and the fishjam ID:
-
-```tsx
-import React, { useCallback } from "react";
-import { Button } from "react-native";
-import { useConnection, useSandbox } from "@fishjam-cloud/react-native-client";
-
-// Check https://fishjam.io/app/ for your Fishjam ID
-const FISHJAM_ID = "...";
-
-export function JoinRoomButton() {
- const { joinRoom } = useConnection(); // [!code highlight]
- const { getSandboxPeerToken } = useSandbox({ fishjamId: FISHJAM_ID });
-
- const onPressJoin = useCallback(async () => {
- // in production environment, get the peerToken from your backend
- const peerToken = await getSandboxPeerToken("Room", "User");
-
- await joinRoom({ fishjamId: FISHJAM_ID, peerToken }); // [!code highlight]
- }, [joinRoom]);
-
- return ;
-}
-```
-
-## Disconnecting
-
-In order to close the connection, you have to call [`leaveRoom`](../../api/mobile/functions/useConnection#leaveroom) method.
-
-```tsx
-import React, { useCallback } from "react";
-import { Button } from "react-native";
-import { useConnection } from "@fishjam-cloud/react-native-client";
-
-export function LeaveRoomButton() {
- const { leaveRoom } = useConnection(); // [!code highlight]
-
- const onPressLeave = useCallback(async () => {
- await leaveRoom(); // [!code highlight]
- }, [leaveRoom]);
-
- return ;
-}
-```
-
-## Next Steps
-
-Now that you're connected to a room, you can explore additional features:
-
-- [Start Streaming](./start-streaming) - Enable your camera and microphone
-- [List Other Peers](./list-other-peers) - Display video from other participants
-- [Picture in Picture](./picture-in-picture) - Allow users to watch video in a floating window
-- [Background Streaming](./background-streaming) - Keep calls active when the app is backgrounded
diff --git a/docs/how-to/react-native/custom-video-sources/_category_.json b/docs/how-to/react-native/custom-video-sources/_category_.json
deleted file mode 100644
index c7673c88..00000000
--- a/docs/how-to/react-native/custom-video-sources/_category_.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "label": "Custom Video Sources",
- "position": 9
-}
diff --git a/docs/how-to/react-native/custom-video-sources/index.mdx b/docs/how-to/react-native/custom-video-sources/index.mdx
deleted file mode 100644
index f2c364ca..00000000
--- a/docs/how-to/react-native/custom-video-sources/index.mdx
+++ /dev/null
@@ -1,14 +0,0 @@
----
-sidebar_position: 10
-title: "Custom Video Sources"
-description: "Guides for implementing custom video sources in Fishjam React Native SDK"
----
-
-# Custom Video Sources
-
-This section contains guides for implementing custom video sources in the Fishjam React Native SDK.
-
-## Available Guides
-
-- [Custom Source](../../../how-to/react-native/custom-video-sources/overview) - Learn how to create a basic custom video source
-- [Vision Camera](../../../how-to/react-native/custom-video-sources/vision-camera) - Learn how to implement Vision Camera as a custom video source
diff --git a/docs/how-to/react-native/custom-video-sources/overview.mdx b/docs/how-to/react-native/custom-video-sources/overview.mdx
deleted file mode 100644
index 45e0858e..00000000
--- a/docs/how-to/react-native/custom-video-sources/overview.mdx
+++ /dev/null
@@ -1,94 +0,0 @@
----
-sidebar_position: 1
-title: "Overview"
-description: "Guide to creating a custom video source in Fishjam React Native SDK"
----
-
-import Tabs from "@theme/Tabs";
-import TabItem from "@theme/TabItem";
-import ConfigurePermissions from "../_components/configure-permissions.mdx";
-
-# Custom Video Source
-
-The Fishjam React Native SDK allows you to stream content from any native video source. This is useful if you have an existing camera setup or need to preprocess video frames.
-
-:::tip
-
-Check out our [**working example**](https://github.com/fishjam-cloud/mobile-client-sdk/tree/3473afd51ff3a4dc5849463d1ff8a4028e428739/examples/vision-camera) implementing [VisionCamera](https://github.com/mrousavy/react-native-vision-camera)
-
-:::
-
-## Overview
-
-To create a custom video source, follow these steps:
-
-1. **Implement the CustomSource Interface**:
- - On iOS and Android, implement the `CustomSource` interface. This interface provides metadata for the video track and a delegate/consumer to send frames.
-
-1. **Extract and Send Frames**:
- - Extract frames from your video source.
- - Pass these frames to the delegate/consumer (`CMSampleBuffer` on iOS and `ImageProxy` on Android).
-
-1. **Add Your Custom Source**:
- - Use `RNFishjamProxy` to register your custom video source to the SDK.
-
-:::warning
-
-It is your responsibility to implement the JS layer. It will depend on your react-native setup. If you use expo, we recommend creating a [Local Expo Module](https://docs.expo.dev/modules/get-started/).
-
-:::
-
-
-
-
-
-```swift
-import FishjamCloudClient
-
-class CustomSourceExample: CustomSource {
- var delegate: CustomSourceDelegate?
-
- let isScreenShare = false
- let metadata = ["type":"camera"].toMetadata()
- let videoParameters = VideoParameters.presetFHD43
-
- func send(frameBuffer: CMSampleBuffer) {
- delegate?.customSource(self, didOutputSampleBuffer: frameBuffer, rotation: .ninety)
- }
-}
-```
-
-```swift
-let source = CustomSourceExample()
-try await RNFishjamProxy.add(customSource: source)
-```
-
-
-
-
-```kotlin
-class CustomSourceExample: CustomSource {
- override val isScreenShare = false
- override val metadata: Metadata = mapOf("type" to "camera")
- override val videoParameters = VideoParameters.presetFHD43
-
- var consumer: CustomSourceConsumer? = null
- private set
-
- override fun initialize(consumer: CustomSourceConsumer) {
- this.consumer = consumer
- }
-
- fun sendImageFrame(imageFrame: ImageProxy) {
- consumer?.onImageProxyCaptured(imageFrame)
- }
-}
-```
-
-```kotlin
-val source = CustomSourceExample()
-RNFishjamClient.createCustomSource(source)
-```
-
-
-
diff --git a/docs/how-to/react-native/custom-video-sources/vision-camera.mdx b/docs/how-to/react-native/custom-video-sources/vision-camera.mdx
deleted file mode 100644
index 6ebed8e0..00000000
--- a/docs/how-to/react-native/custom-video-sources/vision-camera.mdx
+++ /dev/null
@@ -1,183 +0,0 @@
----
-sidebar_position: 2
-title: "React Native Vision Camera Example"
-description: "Guide to creating a custom video source in Fishjam React Native SDK"
----
-
-import Tabs from "@theme/Tabs";
-import TabItem from "@theme/TabItem";
-import ConfigurePermissions from "../_components/configure-permissions.mdx";
-
-# RN Vision Camera Example
-
-This example demonstrates how to implement a CustomSource using [VisionCamera](https://github.com/mrousavy/react-native-vision-camera) to stream content directly from your device's camera to the Fishjam SDK.
-
-:::tip
-
-Check out our [**complete example implementation**](https://github.com/fishjam-cloud/mobile-client-sdk/tree/3473afd51ff3a4dc5849463d1ff8a4028e428739/examples/vision-camera) using [VisionCamera](https://github.com/mrousavy/react-native-vision-camera)
-
-:::
-
-## Overview
-
-There are two main components to implement:
-
-1. **Create a Frame Processor Plugin**:
- - This plugin extracts frames from Vision Camera and passes them to the Fishjam SDK.
- - For more details on frame processor plugins, check out the Vision Camera documentation [here](https://react-native-vision-camera.com/docs/guides/frame-processors-plugins-overview).
-
-2. **Create a CustomSource**:
- - This component sends the camera frames to Fishjam.
- - Check out the [CustomSource overview](./overview.mdx) to learn more about this concept.
-
-## How does it work?
-
-The `FrameProcessorPlugin` and `CustomSource` work together to process and transmit video frames from the Vision Camera to the Fishjam SDK. Here's a clearer breakdown of their roles:
-
-1. **FrameProcessorPlugin**:
- - Extracts frames from the Vision Camera.
- - Processes each frame and prepares it for transmission.
- - Passes the processed frames to the `CustomSource`.
-
-2. **CustomSource**:
- - Receives frames from the `FrameProcessorPlugin`.
- - Transmits these frames to the Fishjam SDK.
- - Ensures frames are in the correct format for the SDK.
-
-### Diagram
-
-Below is a diagram illustrating the flow of frames from the Vision Camera to the Fishjam SDK:
-
-
-
-This diagram shows the flow of data from the Vision Camera through the `FrameProcessorPlugin` to the `CustomSource`, and finally to the Fishjam SDK.
-
-### Examples
-
-Here are examples illustrating how to implement the above flow for iOS and Android.
-
-
-
-
-
-Follow these steps to implement Vision Camera as a custom source on iOS:
-
-1. Create a CustomSource class that implements the required protocol:
-
-```swift
-import FishjamCloudClient
-
-class WebrtcVisionCameraCustomSource: CustomSource {
- var delegate: CustomSourceDelegate?
-
- let isScreenShare = false
- let metadata = ["type":"camera"].toMetadata()
- let videoParameters = VideoParameters.presetFHD43
-}
-```
-
-2. Create a FrameProcessorPlugin that will extract frames from Vision Camera and pass them to Fishjam SDK:
-
-```swift
-import VisionCamera
-
-public class WebrtcFrameProcessorPlugin: FrameProcessorPlugin {
- static var currentSource: WebrtcVisionCameraCustomSource?
-
- public override func callback(_ frame: Frame, withArguments arguments: [AnyHashable : Any]?) -> Any {
- if let customSource = WebrtcFrameProcessorPlugin.currentSource {
- customSource.delegate?.customSource(customSource, didOutputSampleBuffer: frame.buffer, rotation: .ninety)
- }
- return frame
- }
-}
-```
-
-3. Register the FrameProcessorPlugin with Vision Camera:
- - Follow the [official documentation on registering plugins](https://react-native-vision-camera.com/docs/guides/frame-processors-plugins-ios).
-
-4. Register the CustomSource with Fishjam SDK to create a new track:
-
-```swift
-let source = WebrtcVisionCameraCustomSource()
-
-WebrtcFrameProcessorPlugin.currentSource = source
-
-try await RNFishjamProxy.add(customSource: source)
-```
-
-
-
-
-Follow these steps to implement Vision Camera as a custom source on Android:
-
-1. Create a CustomSource class that implements the required interface:
-
-```kotlin
-import com.fishjamcloud.client.models.CustomSource
-import com.fishjamcloud.client.models.CustomSourceConsumer
-import com.fishjamcloud.client.models.VideoParameters
-import com.fishjamcloud.client.models.Metadata
-
-class WebrtcVisionCameraCustomSource: CustomSource {
- override val isScreenShare = false
- override val metadata: Metadata = mapOf("type" to "camera")
- override val videoParameters = VideoParameters.presetFHD43
-
- var consumer: CustomSourceConsumer? = null
- private set
-
- override fun initialize(consumer: CustomSourceConsumer) {
- this.consumer = consumer
- }
-}
-```
-
-2. Create a FrameProcessorPlugin that will extract frames from Vision Camera and pass them to Fishjam SDK:
-
-```kotlin
-import com.mrousavy.camera.frameprocessors.Frame
-import com.mrousavy.camera.frameprocessors.FrameProcessorPlugin
-import com.mrousavy.camera.frameprocessors.VisionCameraProxy
-
-class WebrtcFrameProcessorPlugin(proxy: VisionCameraProxy, options: Map?): FrameProcessorPlugin() {
- companion object {
- var currentSource: WebrtcVisionCameraCustomSource? = null
- }
-
- override fun callback(frame: Frame, arguments: Map?): Frame {
- currentSource?.consumer?.onImageProxyCaptured(frame.imageProxy)
- return frame
- }
-}
-```
-
-3. Register the FrameProcessorPlugin with Vision Camera:
- - Follow the official documentation on registering plugins [here](https://react-native-vision-camera.com/docs/guides/frame-processors-plugins-android).
-
-4. Register the CustomSource with Fishjam SDK to enable creating tracks using the new source:
-
-```kotlin
-val source = WebrtcVisionCameraCustomSource()
-
-WebrtcFrameProcessorPlugin.currentSource = source
-
-RNFishjamClient.createCustomSource(source)
-```
-
-
-
-
-
-#### Usage
-
-Depending on your React Native setup, create an interface for Javascript to interact with this code. If you're using Expo, we recommend using [Expo Modules](https://docs.expo.dev/modules/overview/). If you're using a bare React Native setup, we recommend using [Turbo Modules](https://reactnative.dev/docs/turbo-native-modules-introduction).
diff --git a/docs/how-to/react-native/installation.mdx b/docs/how-to/react-native/installation.mdx
deleted file mode 100644
index 3ff03257..00000000
--- a/docs/how-to/react-native/installation.mdx
+++ /dev/null
@@ -1,105 +0,0 @@
----
-sidebar_position: 1
----
-
-import Tabs from "@theme/Tabs";
-import TabItem from "@theme/TabItem";
-import InstallPackage from "./_components/install-package.mdx";
-import ConfigurePermissions from "./_components/configure-permissions.mdx";
-
-# Installation
-
-## Optional: Create a New App
-
-
- Follow these steps to create a new mobile app
-
-If you don't have an existing project, you can create a new Expo app using a template
-
-```bash
-npx create-expo-app@latest my-video-app
-```
-
-As the next step, you have to generate native files with the `expo prebuild` command:
-
-```bash
-npx expo prebuild
-```
-
-You can also follow more detailed [Expo instructions](https://docs.expo.dev/get-started/introduction/).
-
-
-
-## Step 1: Install the Package
-
-Install `@fishjam-cloud/react-native-client` with your preferred package manager.
-
-
-
-## Step 2: Configure App Permissions
-
-
-
-## Optional: Request Camera and Microphone Permissions
-
-:::info
-You don't need to explicitly request permissions as they're automatically asked for when your app needs them.
-:::
-
-If you want more control, you can use the `useCameraPermissions` and `useMicrophonePermissions` hooks to manage permissions manually. Both hooks return an array with three elements:
-
-1. `permission`: The current permission status
-2. `requestPermission`: A function to request permission
-3. `getPermission`: A function to get the current permission status
-
-Here's an example of how to use both hooks:
-
-```tsx
-import {
- useCameraPermissions,
- useMicrophonePermissions,
-} from "@fishjam-cloud/react-native-client";
-import { useEffect } from "react";
-
-const [cameraPermission, requestCameraPermission, getCameraPermission] =
- useCameraPermissions();
-
-const [
- microphonePermission,
- requestMicrophonePermission,
- getMicrophonePermission,
-] = useMicrophonePermissions();
-
-useEffect(() => {
- requestCameraPermission();
- requestMicrophonePermission();
-}, []);
-```
-
-**Permission Response**
-
-The `permission` object has the following properties:
-
-- `canAskAgain`: Indicates if the user can be asked again for this permission. If `false`, it is recommended to direct the user to Settings app to enable/disable the permission.
-- `expires`: When the permission expires.
-- `granted`: Indicates if the permission is granted.
-- `status`: The current status of the permission.
-
-:::info
-You can control when and how permissions are requested by passing an `options` object to the hook.
-:::
-
-### Customizing Permission Request Behavior
-
-By default, `get` is called automatically (auto fetch is `true`), and `request` is not (auto request is `false`). You can change this behavior:
-
-```tsx
-import { useCameraPermissions } from "@fishjam-cloud/react-native-client";
-// Do not auto-fetch permission status, enable auto-request
-const [permission, requestPermission] = useCameraPermissions({
- get: false, // disables auto-fetch
- request: true, // enables auto-request
-});
-```
-
-Adjust these options to fit your app's needs.
diff --git a/docs/how-to/react-native/list-other-peers.mdx b/docs/how-to/react-native/list-other-peers.mdx
deleted file mode 100644
index 980f7c90..00000000
--- a/docs/how-to/react-native/list-other-peers.mdx
+++ /dev/null
@@ -1,54 +0,0 @@
----
-sidebar_position: 4
----
-
-# List other peers
-
-In order to see other streaming peers, you can use [`usePeers`](../../api/mobile/functions/usePeers). It will return all
-other peers, together with the tracks that they are streaming.
-
-### Example code that show all videos
-
-```tsx
-import React from "react";
-import { View } from "react-native";
-import {
- usePeers,
- VideoRendererView,
-} from "@fishjam-cloud/react-native-client";
-
-export function ShowAllPeers() {
- const { remotePeers, localPeer } = usePeers(); // [!code highlight]
-
- const videoTracks = remotePeers.flatMap(
- (peer) =>
- peer.tracks.filter((track) => track.type === "Video" && track.isActive), // [!code highlight]
- );
- const localTrack = localPeer?.tracks.find((t) => t.type === "Video"); // [!code highlight]
-
- return (
-
- {localTrack && (
-
- )}
- {videoTracks.map((track) => (
-
- ))}
-
- );
-}
-```
-
-:::tip[Enable Picture in Picture]
-
-To allow users to continue watching video in a floating window when they background your app, wrap your video call UI with the `PipContainerView` component. See the [Picture in Picture guide](./picture-in-picture) for more details.
-
-:::
diff --git a/docs/how-to/react-native/metadata.mdx b/docs/how-to/react-native/metadata.mdx
deleted file mode 100644
index 3c4bb899..00000000
--- a/docs/how-to/react-native/metadata.mdx
+++ /dev/null
@@ -1,101 +0,0 @@
----
-sidebar_position: 8
-title: "Metadata"
-description: "How to use metadata"
----
-
-import MetadataHeader from "../_common/metadata/header.mdx";
-import JoiningRoom from "../_common/metadata/joining_room.mdx";
-import UpdatingMetadata from "../_common/metadata/updating.mdx";
-import ReadingMetadata from "../_common/metadata/reading.mdx";
-
-
-
-
-
-```tsx
-const FISHJAM_ID = "fishjam-id";
-const PEER_TOKEN = "some-peer-token";
-// ---cut---
-import React, { useCallback } from "react";
-import { Button } from "react-native";
-import { useConnection } from "@fishjam-cloud/react-native-client";
-
-type PeerMetadata = {
- displayName: string;
-};
-
-export function JoinRoomButton() {
- const { joinRoom } = useConnection();
-
- const onPressJoin = useCallback(async () => {
- await joinRoom({
- fishjamId: FISHJAM_ID,
- peerToken: PEER_TOKEN,
- peerMetadata: { displayName: "John Wick" }, // [!code highlight]
- });
- }, [joinRoom]);
-
- return ;
-}
-```
-
-
-
-
-
-```tsx
-import React, { useCallback } from "react";
-import { Button } from "react-native";
-import { useUpdatePeerMetadata } from "@fishjam-cloud/react-native-client";
-
-type PeerMetadata = {
- displayName: string;
-};
-
-export function UpdateNameButton() {
- const { updatePeerMetadata } = useUpdatePeerMetadata(); // [!code highlight]
-
- const onPressUpdateName = useCallback(async () => {
- await updatePeerMetadata({ displayName: "Thomas A. Anderson" }); // [!code highlight]
- }, [updatePeerMetadata]);
-
- return ;
-}
-```
-
-
-
-
-
-```tsx
-import React from "react";
-import { Text, View } from "react-native";
-import { usePeers } from "@fishjam-cloud/react-native-client";
-
-type PeerMetadata = {
- displayName: string;
-};
-
-type ServerMetadata = {
- realName: string;
-};
-
-export function ListAllNames() {
- const { remotePeers } = usePeers(); // [!code highlight]
-
- return (
-
- {remotePeers.map((peer) => (
- // [!code highlight:4]
-
- Display name: {peer.metadata.peer?.displayName || "Unknown"}
- Real name: {peer.metadata.server?.realName || "Unknown"}
-
- ))}
-
- );
-}
-```
-
-
diff --git a/docs/how-to/react-native/start-streaming.mdx b/docs/how-to/react-native/start-streaming.mdx
deleted file mode 100644
index f3212a07..00000000
--- a/docs/how-to/react-native/start-streaming.mdx
+++ /dev/null
@@ -1,109 +0,0 @@
----
-sidebar_position: 3
----
-
-# Start streaming
-
-How to stream your camera
-
-:::tip[Enable devices before connecting]
-
-You can enable your camera and microphone before calling connect method.
-This way, you can show user camera preview. Once connect method is called,
-enabled camera and microphone will start streaming to Room.
-
-:::
-
-## Enable your camera
-
-First, you have to enable your camera by calling [`prepareCamera`](../../api/mobile/functions/useCamera#preparecamera) method.
-You can open show camera preview with [`VideoPreviewView`](../../api/mobile/variables/VideoPreviewView) component
-
-```tsx
-import React, { useEffect } from "react";
-import {
- useCamera,
- VideoPreviewView,
-} from "@fishjam-cloud/react-native-client";
-
-export function ViewPreview() {
- const { prepareCamera } = useCamera(); // [!code highlight]
-
- useEffect(() => {
- prepareCamera({ cameraEnabled: true }); // [!code highlight]
- }, [prepareCamera]);
-
- return ;
-}
-```
-
-### Listing user cameras
-
-To list all cameras available on device, you can use [`cameras`](../../api/mobile/functions/useCamera#cameras) property from [`useCamera`](../../api/mobile/functions/useCamera) hook.
-This way, you can either automatically choose camera (front/back) or allow user to select camera type.
-
-To change camera, simply call [`switchCamera`](../../api/mobile/functions/useCamera#switchcamera) method.
-
-```tsx
-import React, { useCallback } from "react";
-import { Button } from "react-native";
-import { useCamera } from "@fishjam-cloud/react-native-client";
-
-export function FlipButton() {
- const { cameras, switchCamera, currentCamera } = useCamera(); // [!code highlight]
-
- const onPressFlipCamera = useCallback(() => {
- // find first camera facing opposite direction than current camera
- const otherCamera = cameras.find(
- (camera) => camera.facingDirection !== currentCamera?.facingDirection,
- );
- if (otherCamera) {
- switchCamera(otherCamera.id); // [!code highlight]
- }
- }, [cameras, currentCamera?.facingDirection, switchCamera]);
-
- return ;
-}
-```
-
-### Disabling/enabling camera
-
-To change camera state, you use [`toggleCamera`](../../api/mobile/functions/useCamera#togglecamera) method.
-
-```tsx
-import { Button } from "react-native";
-import React from "react";
-import { useCamera } from "@fishjam-cloud/react-native-client";
-
-export function ToggleCameraButton() {
- const { isCameraOn, toggleCamera } = useCamera(); // [!code highlight]
-
- return (
-
- );
-}
-```
-
-## Enable microphone
-
-Microphone works similar to camera. In order to enable it, you have to call [`toggleMicrophone`](../../api/mobile/functions/useMicrophone#togglemicrophone) method.
-
-```tsx
-import { Button } from "react-native";
-import React from "react";
-import { useMicrophone } from "@fishjam-cloud/react-native-client";
-
-export function ToggleMicrophoneButton() {
- const { isMicrophoneOn, toggleMicrophone } = useMicrophone(); // [!code highlight]
-
- return (
-
- );
-}
-```
diff --git a/docs/how-to/react/_category_.json b/docs/how-to/react/_category_.json
deleted file mode 100644
index e4ca8099..00000000
--- a/docs/how-to/react/_category_.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "label": "React/Web",
- "position": 2
-}
diff --git a/docs/how-to/react/_common/metadata/header.mdx b/docs/how-to/react/_common/metadata/header.mdx
deleted file mode 100644
index 1602f006..00000000
--- a/docs/how-to/react/_common/metadata/header.mdx
+++ /dev/null
@@ -1,10 +0,0 @@
-Alongside audio and video, it is possible to send additional metadata with each peer. Metadata is just
-JSON that can contain arbitrary information. Its most common use is sending a user name associated with a peer.
-However, it can be also used to send the peer's camera type, application information etc.
-
-:::info
-
-You can also set metadata on [the server side, when adding user to the room](../backend/server-setup#metadata). This metadata is persistent throughout its lifetime and is useful for attaching information that
-can't be overwritten by the peer, like information about real user names or basic permission info.
-
-:::
diff --git a/docs/how-to/react/_common/metadata/joining_room.mdx b/docs/how-to/react/_common/metadata/joining_room.mdx
deleted file mode 100644
index 517a5b30..00000000
--- a/docs/how-to/react/_common/metadata/joining_room.mdx
+++ /dev/null
@@ -1,5 +0,0 @@
-## Setting metadata when joining the room
-
-The `joinRoom` method from the `useConnection` hook has a `peerMetadata` parameter, that can be used for setting object metadata.
-
-{props.children}
diff --git a/docs/how-to/react/_common/metadata/reading.mdx b/docs/how-to/react/_common/metadata/reading.mdx
deleted file mode 100644
index b0516650..00000000
--- a/docs/how-to/react/_common/metadata/reading.mdx
+++ /dev/null
@@ -1,9 +0,0 @@
-## Reading metadata
-
-Peer metadata is available as the `metadata` property for each peer. Therefore, when you list your peers with the `usePeers` hook, you can read
-the metadata associated with them.
-Note that the `metadata.peer` property contains only the metadata set by the client SDK (as in the examples above).
-The metadata set on the server side is available as `metadata.server`.
-Learn more about server metadata [here](../backend/server-setup#metadata).
-
-{props.children}
diff --git a/docs/how-to/react/_common/metadata/updating.mdx b/docs/how-to/react/_common/metadata/updating.mdx
deleted file mode 100644
index 1a56a0f6..00000000
--- a/docs/how-to/react/_common/metadata/updating.mdx
+++ /dev/null
@@ -1,5 +0,0 @@
-## Updating metadata during connection
-
-Once you've joined the room, you can update your peer metadata with `updatePeerMetadata` from `useUpdatePeerMetadata`:
-
-{props.children}
diff --git a/docs/how-to/react/connecting.mdx b/docs/how-to/react/connecting.mdx
deleted file mode 100644
index ca3f0d28..00000000
--- a/docs/how-to/react/connecting.mdx
+++ /dev/null
@@ -1,52 +0,0 @@
----
-sidebar_position: 2
----
-
-# Connecting
-
-## Prerequisites
-
-In order to connect, you need to obtain a **Peer Token** to authorize the peer in your room.
-You can get the token using the [Sandbox API](../../how-to/features/sandbox-api-testing) if you're using the Sandbox environment, or implement your own backend service that will provide the user with a **Peer Token**.
-
-## Connecting
-
-Use the [`useConnection`](../../api/web/functions/useConnection) hook to get
-the [`joinRoom`](../../api/web/functions/useConnection#joinroom) function.
-
-```tsx
-const PEER_TOKEN = "some-peer-token";
-// ---cut-before---
-import { useConnection, useSandbox } from "@fishjam-cloud/react-client";
-import React, { useCallback } from "react";
-
-export function JoinRoomButton() {
- const { joinRoom } = useConnection(); // [!code highlight]
- // get the peer token from sandbox or your backend
- const { getSandboxPeerToken } = useSandbox();
-
- const onJoinRoomPress = useCallback(async () => {
- // [!code highlight:5]
- const peerToken = await getSandboxPeerToken("Room", "User");
- await joinRoom({ peerToken });
- }, [joinRoom]);
-
- return Join room;
-}
-```
-
-## Disconnecting
-
-In order to close connection, use the [`leaveRoom`](../../api/web/functions/useConnection#leaveroom) method
-from [`useConnection`](../../api/web/functions/useConnection) hook.
-
-```tsx
-import { useConnection } from "@fishjam-cloud/react-client";
-import React, { useCallback } from "react";
-
-export function LeaveRoomButton() {
- const { leaveRoom } = useConnection(); // [!code highlight]
-
- return Leave room;
-}
-```
diff --git a/docs/how-to/react/installation.mdx b/docs/how-to/react/installation.mdx
deleted file mode 100644
index ce903bb7..00000000
--- a/docs/how-to/react/installation.mdx
+++ /dev/null
@@ -1,49 +0,0 @@
----
-sidebar_position: 1
----
-
-import Tabs from "@theme/Tabs";
-import TabItem from "@theme/TabItem";
-
-# Installation
-
-## 1. Install the package
-
-```bash npm2yarn
-npm install @fishjam-cloud/react-client
-```
-
-## 2. Setup Fishjam context
-
-Wrap your app in our [`FishjamProvider`](../../api/web/functions/FishjamProvider) component. Get your Fishjam ID from [Fishjam Dashboard](https://fishjam.io/app) and pass it to the provider.
-
-```tsx
-const App = () => {
- return
Hello world
;
-};
-
-// ---cut---
-import React from "react";
-import ReactDOM from "react-dom/client";
-// import App from "./App";
-import { FishjamProvider } from "@fishjam-cloud/react-client";
-
-// Check https://fishjam.io/app/ for your Fishjam ID
-const FISHJAM_ID = "your-fishjam-id";
-
-ReactDOM.createRoot(document.getElementById("root")!).render(
- // [!code highlight:5]
-
-
-
-
- ,
-);
-```
-
-:::tip
-
-It's possible to have many independent Fishjam contexts in one app.
-Just render many [`FishjamProvider`](../../api/web/functions/FishjamProvider) components and make sure they don't overlap.
-
-:::
diff --git a/docs/how-to/react/list-other-peers.mdx b/docs/how-to/react/list-other-peers.mdx
deleted file mode 100644
index ad5536df..00000000
--- a/docs/how-to/react/list-other-peers.mdx
+++ /dev/null
@@ -1,38 +0,0 @@
----
-sidebar_position: 5
----
-
-# Display media of other peers
-
-To access data and media of other peers, use the [`usePeers`](../../api/web/functions/usePeers) hook.
-It returns two properties, [`remotePeers`](../../api/web/functions/usePeers) and [`localPeer`](../../api/web/functions/usePeers).
-They contain all the tracks of other peers and all the tracks of the local user, respectively.
-
-### Example of playing other peers' available media
-
-```tsx
-import React, { FC } from "react";
-
-const VideoRenderer: FC<{ stream?: MediaStream | null }> = (_) => ;
-
-const AudioPlayer: FC<{ stream?: MediaStream | null }> = (_) => ;
-
-// ---cut---
-import { usePeers } from "@fishjam-cloud/react-client";
-
-export function Component() {
- const { remotePeers } = usePeers();
-
- return (
-
- // remember to import
- your VideoRenderer component
-
-
- ))}
-
- );
-}
-```
diff --git a/docs/how-to/react/managing-devices.mdx b/docs/how-to/react/managing-devices.mdx
deleted file mode 100644
index 38e0a6aa..00000000
--- a/docs/how-to/react/managing-devices.mdx
+++ /dev/null
@@ -1,90 +0,0 @@
----
-sidebar_position: 4
----
-
-# Managing devices
-
-The Fishjam SDK provides functions for dynamically controlling media device streams. This includes selecting desired cameras and microphones, turning them on and off, as well as muting and unmuting microphones.
-
-### Selecting Camera and Microphone - [`selectCamera()`](../../api/web/functions/useCamera#selectcamera) and [`selectMicrophone()`](../../api/web/functions/useMicrophone#selectmicrophone)
-
-To select the desired camera or microphone, use the [`selectCamera()`](../../api/web/functions/useCamera#selectcamera) and [`selectMicrophone()`](../../api/web/functions/useMicrophone#selectmicrophone) functions.
-Lists of the available devices are available via the [`cameraDevices`](../../api/web/functions/useCamera#cameradevices) and [`microphoneDevices`](../../api/web/functions/useMicrophone#microphonedevices) properties.
-
-#### Usage Example
-
-```tsx
-import React from "react";
-import { useCamera } from "@fishjam-cloud/react-client";
-
-export function CameraControl() {
- const { cameraDevices, selectCamera } = useCamera();
-
- return (
-
- {cameraDevices.map(({ deviceId, label }) => (
-
- selectCamera(deviceId)}>{label}
-
- ))}
-
- );
-}
-```
-
-### Turning Camera On and Off - [`toggleCamera()`](../../api/web/functions/useCamera#togglecamera)
-
-This function controls the physical operational state of the camera.
-
-- **Turning the camera off**: This action stops the camera device, disables the media stream, and pauses streaming. The webcam indicator light will shut down.
-- **Turning the camera on**: This action starts the camera and resumes streaming, allowing other participants to see video after a brief initialization period.
-
-#### Usage Example
-
-```tsx
-import React from "react";
-import { useCamera } from "@fishjam-cloud/react-client";
-
-export function CameraControl() {
- const { toggleCamera } = useCamera();
-
- return toggleCamera()}>Toggle Camera Device;
-}
-```
-
-### Turning Microphone On and Off - [`toggleMicrophone()`](../../api/web/functions/useMicrophone#togglemicrophone)
-
-This function toggles the microphone's physical operational state. The function interacts with a physical device, so it might take a noticeable amount of time.
-
-- **Turning the microphone off**: Turns the microphone off, disables the media stream, and pauses any audio transmission.
-- **Turning the microphone on**: Turns the microphone on and resumes audio streaming.
-
-### Muting and Unmuting Microphone - [`toggleMicrophoneMute()`](../../api/web/functions/useMicrophone#togglemicrophonemute)
-
-This function manages the audio stream's operational status without affecting the microphone's hardware state.
-Muting and unmuting is faster, but a muted device still uses resources. This is useful, as it is common to mute and unmute during a meeting. Unmuting needs to be quick to capture the first word of a sentence.
-
-- **Muting the microphone**: This action disables the media stream and stops audio transmission while keeping the microphone active.
-- **Unmuting the microphone**: This action enables the media stream, allowing immediate transmission of sounds.
-
-#### Usage Example
-
-```tsx
-import React from "react";
-import { useMicrophone } from "@fishjam-cloud/react-client";
-
-export function MicrophoneControl() {
- const { toggleMicrophone, toggleMicrophoneMute } = useMicrophone();
-
- return (
-
- );
-}
-```
-
-
diff --git a/docs/how-to/react/start-streaming.mdx b/docs/how-to/react/start-streaming.mdx
deleted file mode 100644
index 0f074eaa..00000000
--- a/docs/how-to/react/start-streaming.mdx
+++ /dev/null
@@ -1,76 +0,0 @@
----
-sidebar_position: 3
----
-
-# Streaming media
-
-### Initialize access to your devices
-
-Fishjam provides an API to browse and manage media devices you can use.
-To ask the browser for permission to list the available devices,
-call the [`initializeDevices`](../../api/web/functions/useInitializeDevices#initializedevices)
-function from [`useInitializeDevices`](../../api/web/functions/useInitializeDevices) hook.
-
-You can choose whether to initialize both camera and microphone devices or just one of them by passing [`InitializeDevicesSettings`](../../api/web/type-aliases/InitializeDevicesSettings)
-as an argument. By default, both camera and microphone are initialized.
-
-The [`initializeDevices`](../../api/web/functions/useInitializeDevices#initializedevices) function will return a [`Promise`](../../api/web/type-aliases/InitializeDevicesResult) object.
-
-```ts
-import React, { useEffect } from "react";
-import { useInitializeDevices } from "@fishjam-cloud/react-client";
-
-export function useExample() {
- const { initializeDevices } = useInitializeDevices();
-
- useEffect(() => {
- initializeDevices().then((result) => {
- // optionally handle the result
- console.log(result);
- });
- }, [initializeDevices]);
-}
-```
-
-:::note
-The [`useInitializeDevices`](../../api/web/functions/useInitializeDevices) hook gives you the convenience of asking the user for all permissions at once.
-
-It is not the only way to enable the device. You can just toggle the device using [`useCamera`](../../api/web/functions/useCamera) or [`useMicrophone`](../../api/web/functions/useMicrophone) hooks.
-:::
-
-### Device API
-
-To manage users' camera and microphone devices, use the respective [`useCamera`](../../api/web/functions/useCamera)
-and [`useMicrophone`](../../api/web/functions/useMicrophone) hooks.
-Both of them has similar API. To keep things simple, we will just use the camera hook.
-
-```tsx
-import React, { useEffect, useRef } from "react";
-import { useCamera } from "@fishjam-cloud/react-client";
-
-export function ExampleCameraPreview() {
- const videoRef = useRef(null);
-
- const { activeCamera, selectCamera, cameraStream, cameraDevices } =
- useCamera(); // [!code highlight]
-
- useEffect(() => {
- if (!videoRef.current) return;
- videoRef.current.srcObject = cameraStream ?? null;
- }, [cameraStream]);
-
- return (
-
-
Active camera: {activeCamera?.label ?? "None"}
-
- {cameraStream && } // [!code highlight]
-
- );
-}
-```
diff --git a/docs/index.mdx b/docs/index.mdx
index 64c7ef92..460ff63b 100644
--- a/docs/index.mdx
+++ b/docs/index.mdx
@@ -63,8 +63,7 @@ To get started, we recommend you check out one of the two guides below:
-[React Native Installation and Setup](./how-to/react-native/installation.mdx)
-[React Installation and Setup](./how-to/react/installation.mdx)
+[Web & Mobile Installation](./how-to/client/installation.mdx)
 [Backend Production Deployment](./how-to/backend/production-deployment.mdx)