diff --git a/.github/workflows/publish-docs.yml b/.github/workflows/publish-docs.yml new file mode 100644 index 0000000..8b67f55 --- /dev/null +++ b/.github/workflows/publish-docs.yml @@ -0,0 +1,48 @@ +name: Publish Docs + +on: + push: + branches: [main] + paths: + - "docs/**" + - "skill/**" + - "AGENTS.md" + workflow_dispatch: + +permissions: + contents: write + +jobs: + publish: + name: Publish Docs Release + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Generate docs-version.json + run: | + cat > docs-version.json << EOF + { + "date": "$(date -u +%Y-%m-%dT%H:%M:%SZ)", + "commit": "${{ github.sha }}" + } + EOF + + - name: Create docs tarball + run: tar -czf mux-docs.tar.gz docs/ skill/ AGENTS.md docs-version.json + + - name: Delete existing docs-latest release + run: gh release delete docs-latest --yes --cleanup-tag || true + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Create docs-latest release + run: | + gh release create docs-latest \ + --title "Latest Documentation" \ + --notes "Automatically updated documentation bundle. 
Last updated: $(date -u +%Y-%m-%dT%H:%M:%SZ)" \ + --prerelease \ + mux-docs.tar.gz \ + docs-version.json + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 495978a..6b5fabf 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -3,7 +3,7 @@ name: Release on: push: tags: - - 'v*' + - "v*" permissions: contents: write @@ -38,9 +38,30 @@ jobs: name: mux-${{ matrix.target }} path: ./dist/mux-${{ matrix.target }} + docs: + name: Package Docs + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Generate docs-version.json + run: | + cat > docs-version.json << EOF + { + "date": "$(date -u +%Y-%m-%dT%H:%M:%SZ)", + "commit": "${{ github.sha }}" + } + EOF + - name: Create docs tarball + run: tar -czf mux-docs.tar.gz docs/ skill/ AGENTS.md docs-version.json + - uses: actions/upload-artifact@v4 + with: + name: mux-docs + path: mux-docs.tar.gz + release: name: Create Release - needs: build + needs: [build, docs] + if: always() && needs.build.result == 'success' runs-on: ubuntu-latest steps: - uses: actions/download-artifact@v4 @@ -61,8 +82,8 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-node@v4 with: - node-version: '24' - registry-url: 'https://registry.npmjs.org' + node-version: "24" + registry-url: "https://registry.npmjs.org" - uses: actions/download-artifact@v4 with: path: artifacts @@ -88,6 +109,12 @@ jobs: echo "darwin_x64=$(sha256sum artifacts/mux-darwin-x64 | cut -d ' ' -f1)" >> "$GITHUB_OUTPUT" echo "linux_arm64=$(sha256sum artifacts/mux-linux-arm64 | cut -d ' ' -f1)" >> "$GITHUB_OUTPUT" echo "linux_x64=$(sha256sum artifacts/mux-linux-x64 | cut -d ' ' -f1)" >> "$GITHUB_OUTPUT" + if [ -f artifacts/mux-docs.tar.gz ]; then + echo "has_docs=true" >> "$GITHUB_OUTPUT" + echo "docs=$(sha256sum artifacts/mux-docs.tar.gz | cut -d ' ' -f1)" >> "$GITHUB_OUTPUT" + else + echo "has_docs=false" >> "$GITHUB_OUTPUT" + fi - name: Generate GitHub App 
token id: app-token @@ -108,6 +135,28 @@ jobs: VERSION="${GITHUB_REF_NAME#v}" BASE_URL="https://github.com/muxinc/cli/releases/download/${GITHUB_REF_NAME}" + DOCS_RESOURCE="" + DOCS_INSTALL="" + + if [ "${{ steps.checksums.outputs.has_docs }}" = "true" ]; then + DOCS_RESOURCE=$(cat < Formula/mux.rb << RUBY class Mux < Formula @@ -136,9 +185,13 @@ jobs: end end + ${DOCS_RESOURCE} + def install binary = Dir.glob("mux-*").first || "mux" bin.install binary => "mux" + + ${DOCS_INSTALL} end test do diff --git a/.github/workflows/sync-docs.yml b/.github/workflows/sync-docs.yml new file mode 100644 index 0000000..1f1d96d --- /dev/null +++ b/.github/workflows/sync-docs.yml @@ -0,0 +1,44 @@ +name: Sync Mux Docs + +on: + schedule: + - cron: '0 6 * * *' # Daily at 6am UTC + workflow_dispatch: + +permissions: + contents: write + pull-requests: write + +jobs: + sync: + runs-on: ubuntu-latest + steps: + - name: Checkout CLI repo + uses: actions/checkout@v4 + + - name: Generate GitHub App token + id: app-token + uses: actions/create-github-app-token@v1 + with: + app-id: ${{ secrets.MUX_COM_APP_ID }} + private-key: ${{ secrets.MUX_COM_APP_PRIVATE_KEY }} + owner: muxinc + repositories: mux.com + + - name: Run sync script + run: bash scripts/sync-docs.sh + env: + MUX_COM_TOKEN: ${{ steps.app-token.outputs.token }} + + - name: Create pull request + uses: peter-evans/create-pull-request@v7 + with: + commit-message: 'docs: sync guides from muxinc/mux.com' + title: 'docs: sync guides from muxinc/mux.com' + body: | + Automated sync of MDX guide files from `muxinc/mux.com`. + + This PR was created automatically by the `sync-docs` workflow. 
+ branch: docs-sync + delete-branch: true + labels: documentation diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 0000000..88faaee --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,13 @@ +# Agent Instructions — Mux CLI + +## Mux Documentation + +The `docs/guides/` directory contains the full Mux documentation as MDX files, synced from the `muxinc/mux.com` repo. Use these files as the authoritative source for Mux API behavior, concepts, and integration patterns. + +For searching and using the docs, see the canonical guide in `skill/SKILL.md`. + +### Repo-specific notes + +- MDX files have YAML frontmatter with `title`, `product`, and `description` fields. They may contain JSX components; focus on the prose and code blocks for factual content. +- Docs are synced daily via GitHub Actions. Run `bash scripts/sync-docs.sh` to update locally. +- For installed copies of the CLI, prefer `mux docs path --json` over guessing npm, Homebrew, or shell-install locations. diff --git a/Formula/mux.rb b/Formula/mux.rb index 6251938..c2612d3 100644 --- a/Formula/mux.rb +++ b/Formula/mux.rb @@ -24,9 +24,20 @@ class Mux < Formula end end + resource "docs" do + url "https://github.com/muxinc/cli/releases/download/v#{version}/mux-docs.tar.gz" + sha256 "PLACEHOLDER" + end + def install binary = Dir.glob("mux-*").first || "mux" bin.install binary => "mux" + + resource("docs").stage do + (share/"mux").install "docs" + (share/"mux").install "skill" + (share/"mux").install "AGENTS.md" + end end test do diff --git a/README.md b/README.md index 9b9ff8a..be0d1b9 100644 --- a/README.md +++ b/README.md @@ -52,6 +52,19 @@ npx @mux/cli curl -fsSL https://raw.githubusercontent.com/muxinc/cli/main/install.sh | bash ``` +### Agent skill and bundled docs + +The CLI ships with bundled Mux docs plus a standards-aligned `SKILL.md` so agents can use the CLI consistently across npm, Homebrew, and shell installs. 
+ +To discover the installed paths: + +```bash +mux docs path --json +``` + +This prints the paths to the bundled docs, packaged skill, and `AGENTS.md`. +AI agents, use `mux docs path --json` to discover the Mux docs and skills + ### Download the binary Download the latest binary for your platform from [GitHub Releases](https://github.com/muxinc/cli/releases): diff --git a/docs/guides/core/ai-agents.mdx b/docs/guides/core/ai-agents.mdx new file mode 100644 index 0000000..db50184 --- /dev/null +++ b/docs/guides/core/ai-agents.mdx @@ -0,0 +1,194 @@ +--- +title: Getting started for AI agents +product: video +description: A reference for LLMs and AI agents writing code against the Mux API. +--- + +# Getting started for AI agents + +This guide is written for LLMs and AI coding agents. It contains everything you need to write working code against the Mux API on the first try. + +## CLI + +The [Mux CLI](/docs/integrations/mux-cli) (`@mux/cli`) lets you manage Mux resources directly from the terminal. It is useful for quick operations, scripting, automation, and CI/CD pipelines. + +Install: + +```bash +npm install -g @mux/cli # global install +# or run directly without installing: +npx @mux/cli +# or install via Homebrew: +brew install muxinc/tap/mux +``` + +After installing, authenticate with `mux login` or set `MUX_TOKEN_ID` and `MUX_TOKEN_SECRET` environment variables. Always pass `--agent` to optimize output for AI agents (includes JSON output). 
+ +Common commands: + +| Command | What it does | +| :--- | :--- | +| `mux assets create --input-url URL` | Create an asset from a URL | +| `mux assets create --file video.mp4` | Upload a local file | +| `mux assets list` | List assets | +| `mux assets get ${ASSET_ID}` | Get asset details | +| `mux assets delete ${ASSET_ID}` | Delete an asset | +| `mux live-streams create` | Create a live stream | +| `mux uploads create` | Create a direct upload URL | +| `mux sign ${PLAYBACK_ID}` | Generate a signed playback URL | + +Use `--wait` to block until an asset is ready: + +```bash +mux assets create --input-url "https://example.com/video.mp4" --wait --agent +``` + +For the full list of commands, see the [CLI documentation](/docs/integrations/mux-cli). + +## Common resources + +- [Authentication and making API requests](/docs/core/make-api-requests) — how to authenticate with the Mux API +- [Rate limits](/docs/core/make-api-requests#api-rate-limits) — API rate limit details +- [Full API reference](/docs/api-reference/video) — complete endpoint documentation +- [Webhooks guide](/docs/core/listen-for-webhooks) — listen for events like `video.asset.ready` +- [Mux Player guide](/docs/guides/play-your-videos) — the easiest way to play Mux videos +- [Direct uploads guide](/docs/guides/upload-files-directly) — let users upload video files from the browser +- [Stream video files guide](/docs/core/stream-video-files) — ingest a video, wait for it to be ready, get a playback URL +- [Live streaming guide](/docs/guides/start-live-streaming) — set up live streams +- [Secure video playback](/docs/guides/secure-video-playback) — signed playback for private content +- [Static MP4 renditions guide](/docs/guides/enable-static-mp4-renditions) — downloadable MP4/M4A files +- [MCP Server](/docs/integrations/mcp-server) — use Mux from AI tools that support MCP +- [Docs for LLMs](/docs/core/llms-txt) — machine-readable documentation bundles +- [OpenAPI spec](https://www.mux.com/api-spec.json) — 
full API specification + +## Instructions for your human + +Before you can make API calls, your user needs to set up a Mux account and provide you with API credentials. + +1. **Sign up for Mux** at the [Mux dashboard](https://dashboard.mux.com/signup). No credit card is required. The free tier includes enough usage to develop and test. +2. **Go to the [Mux dashboard](https://dashboard.mux.com)**, then navigate to **Settings → Access Tokens**. +3. **Create an API access token** with all permissions — **Mux Video**, **Mux Data**, and **System** — with read and write access. +4. **Save the Access Token ID and Access Token Secret.** The secret is only shown once. + +Store the credentials as environment variables: + +```bash +MUX_TOKEN_ID=your-token-id +MUX_TOKEN_SECRET=your-token-secret +``` + +For details on how authentication works, see the [make API requests guide](/docs/core/make-api-requests). + + + Never expose API credentials in client-side code. All Mux API calls must be made from a server. + + +## Two main ways to use Mux + +### Website embed + +This is for when you have a small, constrained number of videos — a hero video or background video on your homepage, a demo reel, or a few dozen videos across your site. The key characteristic is that the set of videos doesn't change often and is manageable enough to hardcode. + +In this case, you can hardcode playback IDs directly in your code or extract them into a JSON config file with metadata: + +```json +{ + "videos": [ + { "title": "Hero Video", "playbackId": "TXjw00EgPBPS6acv7gBUEJ14PEr5XNWOe" }, + { "title": "Product Demo", "playbackId": "a4nOgR00sKz6cMWLeM5skT8ePBn7U6gC5" } + ] +} +``` + +Then use [Mux Player](/docs/guides/play-your-videos) to embed each video: + +```jsx +import MuxPlayer from '@mux/mux-player-react'; + + +``` + +To create your assets and get playback IDs, use the CLI (`mux assets create --input-url URL --wait --json`) or the [stream video files guide](/docs/core/stream-video-files). 
+ +### User uploaded + +This is for when videos are uploaded dynamically as part of your application. Common scenarios include: + +- **Admin-managed content** — an admin area where authorized users upload and manage videos (e.g., a course platform, media library, or CMS) +- **User-generated content (UGC)** — end users upload their own videos (e.g., a social platform, portfolio site, or community forum) +- **Programmatic ingestion** — videos are created automatically from external sources or pipelines + +For these use cases, you will need to: + +1. **Accept uploads** — use [direct uploads](/docs/guides/upload-files-directly) to let users upload video files from the browser, or create assets server-side from URLs using the [stream video files guide](/docs/core/stream-video-files) +2. **Listen for events** — use [webhooks](/docs/core/listen-for-webhooks) to know when a video is ready for playback (`video.asset.ready`), when it errors, or when it's deleted +3. **Persist video data** — store asset IDs, playback IDs, status, and metadata in your database (see [Persisting Mux data](#persisting-mux-data) below) +4. 
**Play videos** — use [Mux Player](/docs/guides/play-your-videos) with the stored playback ID + +## SDKs + +Official server-side SDKs: + +| Language | Package | Docs | +| :--- | :--- | :--- | +| Node.js | `@mux/mux-node` | [Guide](/docs/integrations/mux-node-sdk) | +| Python | `mux_python` | [Guide](/docs/integrations/mux-python-sdk) | +| Ruby | `mux_ruby` | [Guide](/docs/integrations/mux-ruby-sdk) | +| PHP | `mux-php` | [Guide](/docs/integrations/mux-php-sdk) | +| Go | `mux-go` | [GitHub](https://github.com/muxinc/mux-go) | +| Java | `com.mux:mux-sdk-java` | [Guide](/docs/integrations/mux-java-sdk) | +| C# | `Mux.Csharp.Sdk` | [Guide](/docs/integrations/mux-csharp-sdk) | +| Elixir | `mux` | [Guide](/docs/integrations/mux-elixir-sdk) | + +## Sensible defaults + +Unless the user specifies otherwise, use these values: + +| Parameter | Default to use | Notes | +| :--- | :--- | :--- | +| `playback_policy` | `["public"]` | Use `"signed"` only if the user needs secure/private playback | +| `video_quality` | `"basic"` | No encoding costs and great for most use cases. Use `"plus"` if the user needs higher quality encoding | +| `static_renditions` | Do not set | Only set if the user explicitly needs downloadable MP4/M4A files. See the [static renditions guide](/docs/guides/enable-static-mp4-renditions) | +| `max_resolution_tier` | Do not set | Defaults to `1080p`. Set to `"2160p"` only if the user requests 4K | + +### Cross-references for defaults + +- `playback_policy`: `"public"` allows open access. Use `"signed"` for [secure video playback](/docs/guides/secure-video-playback) with [signed JWTs](/docs/guides/signing-jwts). +- `video_quality`: See [pricing](/docs/pricing/video) for the difference between `"basic"` and `"plus"`. +- `static_renditions`: Replaces the deprecated `mp4_support` parameter. Use `[{ "resolution": "highest" }]` for an MP4 download or `[{ "resolution": "audio-only" }]` for an M4A file. 
See the [static renditions guide](/docs/guides/enable-static-mp4-renditions) for all options. + +## Persisting Mux data + +After creating an asset, save the relevant data into your database or persistence layer. At minimum, store the **asset ID** and **playback ID**. You will also want to save metadata as it becomes available: `status`, `duration`, `aspect_ratio`, `resolution_tier`, and any `static_renditions` information. + +For **simple integrations** with a fixed set of videos — hero videos, background videos, demo reels on a marketing site — you can hardcode playback IDs in a JSON file or config object. These rarely change and don't need a database. + +For **production applications** where users upload videos, videos are created programmatically, or the video catalog changes over time, always persist asset data in a database. Use [webhooks](/docs/core/listen-for-webhooks) to keep your database in sync — listen for `video.asset.ready` to update status, and `video.asset.deleted` to clean up records. + +## IDs reference + +| ID type | What it's for | +| :--- | :--- | +| Asset ID | Managing the asset (get, update, delete) via `api.mux.com` | +| Playback ID | Streaming the video via `stream.mux.com` | +| Upload ID | Tracking direct upload status | +| Stream Key | Broadcasting to a live stream (keep secret) | +| Live Stream ID | Managing the live stream via `api.mux.com` | + +## Common mistakes + +**Do NOT confuse Asset IDs with Playback IDs.** Asset IDs are for API operations (`api.mux.com`). Playback IDs are for streaming (`stream.mux.com`). They are different strings. + +**Do NOT use the playback URL before the asset is ready.** Always check `status === "ready"` first. A playback URL for a `preparing` asset will not work. + +**Do NOT construct playback URLs with the Asset ID.** The correct URL is `https://stream.mux.com/{PLAYBACK_ID}.m3u8`, not `https://stream.mux.com/{ASSET_ID}.m3u8`. 
+ +**Do NOT expose API keys in client-side code.** API credentials (Token ID and Token Secret) must never be included in frontend JavaScript, mobile apps, or any code that runs on the user's device. All Mux API requests must be made from a trusted server. + +**Do NOT expose stream keys in client-side code.** Stream keys allow anyone to broadcast to your live stream. Keep them server-side only. + +**Do NOT hardcode playback URLs.** Always construct them from the playback ID returned by the API. + +**Do NOT poll more than once per second.** The API has rate limits. Poll every 2 seconds for asset status. + +**Do NOT use `POST` endpoints at high volume without backoff.** POST requests are rate limited to ~1 request per second sustained. GET requests allow ~5 per second. diff --git a/docs/guides/core/content-security-policy.mdx b/docs/guides/core/content-security-policy.mdx new file mode 100644 index 0000000..b1fdb3d --- /dev/null +++ b/docs/guides/core/content-security-policy.mdx @@ -0,0 +1,163 @@ +--- +title: Content Security Policy for Mux +product: system +description: Learn how to configure Content Security Policy (CSP) to work with Mux Video and Data services. +steps: + - title: Understanding CSP with Mux + description: Learn what Content Security Policy is and why it matters for Mux integrations. + topic: security + - title: Basic CSP configuration + description: Get started with a simple CSP that covers all Mux services. + topic: configure + - title: Granular CSP configuration + description: Use specific directives for more restrictive security policies. + topic: configure + - title: Upload and media handling + description: Configure CSP to support file uploads and media processing. + topic: configure + - title: Product-specific requirements + description: Understand CSP requirements for specific Mux features. 
+ topic: configure +--- + + + +Content Security Policy (CSP) is a security feature that helps protect your web application from cross-site scripting (XSS) attacks and other code injection attacks. CSP works by restricting the resources (such as scripts, stylesheets, images, and network connections) that a web page can load. + +When integrating Mux Video and Mux Data into your application, you'll need to configure your CSP to allow connections to Mux services. This guide will help you set up the appropriate CSP directives to ensure your Mux integration works securely. + + + If you're new to Content Security Policy, we recommend reading [Google's CSP guide](https://developers.google.com/web/fundamentals/security/csp) for a comprehensive introduction to CSP concepts and implementation. + + + + +For most applications, the simplest approach is to use a basic CSP that allows all Mux services. This configuration ensures compatibility with all current and future Mux features: + +``` +Content-Security-Policy: default-src 'self' *.mux.com *.litix.io storage.googleapis.com +``` + +This CSP directive allows your application to: +- Load resources from your own domain (`'self'`) +- Connect to all Mux Video services (`*.mux.com`) +- Connect to all Mux Data services (`*.litix.io`) +- Connect to Google Cloud Storage (`storage.googleapis.com`) -- this is needed for [Direct Uploads](/docs/guides/upload-files-directly) + +The wildcard approach for `mux.com` and `litix.io` is recommended because Mux utilizes multiple CDNs and subdomains to provide optimal performance globally. These hostnames may change without notice as we optimize our infrastructure. + + + +If your security requirements call for a more restrictive CSP, you can use specific directives instead of the broad `default-src` approach. 
Here's a granular configuration that covers all Mux functionality: + +``` +Content-Security-Policy: + connect-src 'self' https://*.mux.com https://*.litix.io https://storage.googleapis.com; + media-src 'self' blob: https://*.mux.com; + img-src 'self' https://image.mux.com https://*.litix.io; + script-src 'self' https://src.litix.io; + worker-src 'self' blob: +``` + + + The above configuration must be merged with your existing CSP directives. Each directive should combine values from both your current policy and the Mux requirements. + + + + +If your application uploads media files to Mux via [Direct Uploads](/docs/guides/upload-files-directly), you'll need additional CSP directives to handle binary data and file uploads: + +``` +Content-Security-Policy: + connect-src 'self' https://*.mux.com https://*.litix.io https://storage.googleapis.com; + media-src 'self' blob: https://*.mux.com; + img-src 'self' https://image.mux.com https://*.litix.io; + script-src 'self' https://src.litix.io; + worker-src 'self' blob:; + form-action 'self' https://*.mux.com https://storage.googleapis.com +``` + +The key additions for upload functionality are: + +| Directive | Purpose | +| :-------- | :------ | +| `https://storage.googleapis.com` in `connect-src` | Allows uploads to Google Cloud Storage endpoints used by Mux | +| `form-action` directive | Permits form submissions and PUT/POST requests to upload endpoints | +| `blob:` in `media-src` and `worker-src` | Enables handling of binary file data during upload processing | + + + +Different Mux features have specific CSP requirements. 
Here's what you need for each: + +### Mux Video Playback + +For video playback functionality, you **must** include: + +``` +connect-src https://*.mux.com; +media-src blob: https://*.mux.com; +worker-src blob: +``` + +This is required because: +- HLS manifests and video segments are delivered via `https://stream.mux.com` and other `*.mux.com` subdomains +- Video players use web workers and blob URLs for optimal performance +- Mux uses multiple CDNs with different hostnames for global performance + +### Video Thumbnails and Storyboards + +If you're displaying video thumbnails or timeline hover previews, include: + +``` +img-src https://image.mux.com; +connect-src https://image.mux.com +``` + +The `connect-src` directive is needed for dynamic thumbnail loading in timeline hover previews, while `img-src` covers standard image embedding. + +### Mux Data Integration + +For Mux Data analytics, you **must** allow: + +``` +connect-src https://*.litix.io; +img-src https://*.litix.io +``` + +This covers: +- Data collection endpoints across multiple subdomains +- Fallback beacon loading through image tags +- Various monitoring and analytics endpoints + + + For tighter security, you can replace `https://*.litix.io` with `https://img.litix.io` and `https://<env_key>.litix.io` where `<env_key>` is your Mux environment key. However, the wildcard approach is recommended for maximum compatibility. + + +### Hosted Mux Data Integrations + +If you're loading pre-built Mux Data integrations from our hosted domain (rather than installing via NPM), add: + +``` +script-src https://src.litix.io +``` + +This is not required if you bundle the Mux Data SDK directly into your application code. 
+ +### Complete Example + +Here's a complete CSP that supports all Mux features including uploads: + +``` +Content-Security-Policy: + default-src 'self'; + connect-src 'self' https://*.mux.com https://*.litix.io https://storage.googleapis.com; + media-src 'self' blob: https://*.mux.com; + img-src 'self' https://image.mux.com https://*.litix.io; + script-src 'self' https://src.litix.io; + worker-src 'self' blob:; + form-action 'self' https://*.mux.com https://storage.googleapis.com +``` + + + After implementing your CSP, test all Mux functionality in your application including video playback, uploads, thumbnails, and analytics to ensure everything works as expected. + diff --git a/docs/guides/core/listen-for-webhooks.mdx b/docs/guides/core/listen-for-webhooks.mdx new file mode 100644 index 0000000..d3f952d --- /dev/null +++ b/docs/guides/core/listen-for-webhooks.mdx @@ -0,0 +1,238 @@ +--- +title: Listen for webhooks +product: system +description: Learn how to listen for webhooks from Mux. +--- + +Mux uses [webhooks](https://webhooks.fyi) to let your application know when things happen asynchronously, outside of an API request cycle. For example, you may want to update something on your end when an asset transitions its status from `processing` to `ready`, or when a live stream starts or ends. When these asynchronous events happen, we'll make a POST request to the address you give us and you can do whatever you need with it on your end. + +After a webhook is configured for an environment, notifications will be sent for all events for that environment. + + + Note that webhooks are scoped per *environment*. If you have configured webhooks and you are not seeing them show up, double check that the webhook is correctly configured for the environment you are working in. + + +If Mux doesn't receive a `2xx` response from your system, we will continue to try the message for the next 24 hours (with an increasing delay between attempts). 
+ + + Mux makes an effort to deliver each message successfully once, but in certain + situations duplicate webhook messages may be sent even if your service + responds with a 2xx response code. Please ensure that your webhook handling + mechanism treats duplicated event delivery appropriately. + + +# Webhooks vs. polling + +Please use webhooks to track asset status rather than polling the Asset API. Webhooks are much more efficient for both you and Mux, and we rate limit GET requests to the `/assets` endpoint, which means polling the `/assets` API doesn't scale. + +# Handling webhooks locally + +A common gotcha for anyone new to working with webhooks is figuring out how to receive them when working in a local environment. Since your application runs on a local URL like `http://localhost:3000`, Mux can't reach it directly to deliver webhook events. + +The recommended approach is to use the [Mux CLI](/docs/integrations/mux-cli) to listen for events and forward them to your local server. + +## Using the Mux CLI + +The Mux CLI can connect to Mux's event stream and forward webhook events to your local development server in real-time. + + + CLI webhook forwarding is for **local development only** and provides **no delivery guarantees**. In production, you must configure a webhook endpoint in the [Mux Dashboard](https://dashboard.mux.com) that points to your server's webhook URL. + + +### Listen and forward events + +```bash +mux webhooks listen --forward-to http://localhost:3000/api/webhooks/mux +``` + +When using `--forward-to`, the CLI displays a webhook signing secret and signs each forwarded request with a `mux-signature` header. Set `MUX_WEBHOOK_SECRET` in your app's environment to [verify these signatures](/docs/core/verify-webhook-signatures): + +```typescript +const event = mux.webhooks.unwrap(body, headers, process.env.MUX_WEBHOOK_SECRET); +``` + +The signing secret is unique per environment and persisted between sessions, so you only need to configure it once. 
+ +### Replay past events + +The CLI stores the last 100 events received during `listen` sessions. You can replay them to re-test your webhook handler without creating new resources: + +```bash +# List stored events +mux webhooks events list + +# Replay a specific event +mux webhooks events replay --forward-to http://localhost:3000/api/webhooks/mux + +# Replay all stored events +mux webhooks events replay --all --forward-to http://localhost:3000/api/webhooks/mux +``` + +### Trigger synthetic events + +You can also send synthetic webhook events to your local server for testing, without making any API calls or creating real resources: + +```bash +mux webhooks trigger video.asset.ready --forward-to http://localhost:3000/api/webhooks/mux +``` + +Run `mux webhooks trigger ` to see all supported event types. + +For the full list of webhook CLI commands, see the [Mux CLI docs](/docs/integrations/mux-cli#webhook-forwarding). + +## Alternative: using ngrok + +If you prefer, you can also use a tunneling tool like [ngrok](https://ngrok.com/docs/integrations/webhooks/mux-webhooks) to expose your local server to the internet and receive webhooks directly from Mux. + +```bash +ngrok http 3000 +``` + +This gives you a public URL (e.g. `https://abc123.ngrok.io`) that you can configure as a webhook endpoint in the [Mux Dashboard](https://dashboard.mux.com). Your full webhook URL would be something like `https://abc123.ngrok.io/api/webhooks/mux`. + + + You'll need to create an ngrok account (a free account works for most testing purposes). See [ngrok's Mux integration docs](https://ngrok.com/docs/integrations/webhooks/mux-webhooks) for more details. + + +# Configuring endpoints + +Webhook endpoints are configured in the Mux dashboard under "Settings." + + + +Enter a URL from your application that Mux will call for event notifications. + + + +# Receiving events + +Mux will submit a POST request to the configured URL, which your application can treat the same as any other route. 
Your event handler can do things like update the state of the specified asset in your database, or trigger other work. + +Note that a single request attempt will timeout after 5 seconds, after which the attempt is considered failed and will be reattempted. If you expect this will be a problem in your workflow, consider doing the work in an asynchronous task so you can respond to the event immediately. + +For more details on the Webhook event object definition, see [the example response](#example-response). + +# Example response + +```json +{ + "type": "video.asset.ready", + "object": { + "type": "asset", + "id": "0201p02fGKPE7MrbC269XRD7LpcHhrmbu0002" + }, + "id": "3a56ac3d-33da-4366-855b-f592d898409d", + "environment": { + "name": "Demo pages", + "id": "j0863n" + }, + "data": { + "tracks": [ + { + "type": "video", + "max_width": 1280, + "max_height": 544, + "max_frame_rate": 23.976, + "id": "0201p02fGKPE7MrbC269XRD7LpcHhrmbu0002", + "duration": 153.361542 + }, + { + "type": "audio", + "max_channels": 2, + "max_channel_layout": "stereo", + "id": "FzB95vBizv02bYNqO5QVzNWRrVo5SnQju", + "duration": 153.361497 + } + ], + "status": "ready", + "max_stored_resolution": "SD", + "max_stored_frame_rate": 23.976, + "id": "0201p02fGKPE7MrbC269XRD7LpcHhrmbu0002", + "duration": 153.361542, + "created_at": "2018-02-15T01:04:45.000Z", + "aspect_ratio": "40:17" + }, + "created_at": "2018-02-15T01:04:45.000Z", + "accessor_source": null, + "accessor": null, + "request_id": null +} +``` + +# Types of Events + +## Asset Events + + +| Event | Description | +|-------|-------------| +| `video.asset.created` | Asset has been created | +| `video.asset.ready` | Asset is ready for playback. You can now use the asset's `playback_id` to successfully start streaming this asset. | +| `video.asset.errored` | Asset has encountered an error. Use this to notify your server about assets with errors. 
Asset errors can happen for a number of reasons, most commonly an input URL that Mux is unable to download or a file that is not a valid video file. | +| `video.asset.updated` | Asset has been updated. Use this to make sure your server is notified about changes to assets. | +| `video.asset.deleted` | Asset has been deleted. Use this so that your server knows when an asset has been deleted, at which point it will no longer be playable. | +| `video.asset.live_stream_completed` | The live stream for this asset has completed. Every time a live stream starts and ends a new asset gets created and this event fires. | +| `video.asset.static_rendition.created` | A new static rendition for this asset has been created. Static renditions are streamable mp4 files that are most commonly used for allowing users to download files for offline viewing. | +| `video.asset.static_rendition.ready` | A static rendition for this asset is ready. Static renditions are streamable mp4 files that are most commonly used for allowing users to download files for offline viewing. | +| `video.asset.static_rendition.skipped` | A static rendition for this asset was skipped, due to the source not being suitable for the requested static rendition. Static renditions are streamable mp4 files that are most commonly used for allowing users to download files for offline viewing. | +| `video.asset.static_rendition.deleted` | A static rendition for this asset was deleted. The static renditions (mp4 files) for this asset will no longer be available. | +| `video.asset.static_rendition.errored` | A static rendition for this asset errored. This indicates that there was some error when creating a static rendition (mp4s) of your asset. This should be rare and if you see it unexpectedly please open a support ticket. | +| `video.asset.master.ready` | Master access for this asset is ready. Master access is used when downloading an asset for purposes of editing or post-production work. 
The master access file is not intended to be streamed or downloaded by end-users. | +| `video.asset.master.preparing` | Master access for this asset is being prepared. After requesting master access you will get this webhook while it is being prepared. | +| `video.asset.master.deleted` | Master access for this asset has been deleted. Master access for this asset has been removed. You will no longer be able to download the master file. If you want it again you should re-request it. | +| `video.asset.master.errored` | Master access for this asset has encountered an error. This indicates that there was some error when creating master access for this asset. This should be rare and if you see it unexpectedly please open a support ticket. | +| `video.asset.track.created` | A new track for this asset has been created, for example a subtitle text track. | +| `video.asset.track.ready` | A track for this asset is ready. In the example of a subtitle text track the text track will now be delivered with your HLS stream. | +| `video.asset.track.errored` | A track for this asset has encountered an error. There was some error preparing this track. Most commonly this could be a text track file that Mux was unable to download for processing. | +| `video.asset.track.deleted` | A track for this asset has been deleted. | +| `video.asset.warning` | This event fires when Mux has encountered a non-fatal issue with the recorded asset of the live stream. At this time, the event is only fired when Mux is unable to download a slate image from the URL set as `reconnect_slate_url` parameter value. More details on this event is available [here](/docs/guides/handle-live-stream-disconnects#reconnect-window-and-slates). | + + +## Upload Events + + +| Event | Description | +|-------|-------------| +| `video.upload.asset_created` | An asset has been created from this upload. 
This is useful to know when a user of your application has finished uploading a file using the URL created by a [Direct Upload](/docs/guides/upload-files-directly). | +| `video.upload.cancelled` | Upload has been canceled. This event fires after hitting the cancel direct upload API. | +| `video.upload.created` | Upload has been created. This event fires after creating a direct upload. | +| `video.upload.errored` | Upload has encountered an error. This event fires when the asset created by the direct upload fails. Most commonly this happens when an end-user uploads a non-video file. | + + +## Live Stream Events + + +| Event | Description | +|-------|-------------| +| `video.live_stream.created` | A new live stream has been created. Broadcasters with a `stream_key` can start sending encoder feed to this live stream. | +| `video.live_stream.connected` | An encoder has successfully connected to this live stream. | +| `video.live_stream.recording` | Recording on this live stream has started. Mux has successfully processed the first frames from the encoder. If you show a _red dot_ icon in your UI, this would be a good time to show it. | +| `video.live_stream.active` | This live stream is now "active". The live stream's `playback_id` OR the `playback_id` associated with this live stream's asset can be used right now to create HLS URLs (`https://stream.mux.com/{PLAYBACK_ID}.m3u8`) and start streaming in your player. Note that before the live stream is `"active"`, trying to stream the HLS URL will result in HTTP `412` errors. | +| `video.live_stream.disconnected` | An encoder has disconnected from this live stream. Note that while disconnected the live stream is still `status: "active"`. | +| `video.live_stream.idle` | The `reconnect_window` for this live stream has elapsed. The live stream `status` will now transition to `"idle"`. | +| `video.live_stream.updated` | This live stream has been updated. For example, after resetting the live stream's stream key. 
| `video.live_stream.enabled` | This live stream has been enabled. This event fires after enable live stream API. | +| `video.live_stream.disabled` | This live stream has been disabled. This event fires after disable live stream API. Disabled live streams will no longer accept new RTMP connections. | +| `video.live_stream.deleted` | This live stream has been deleted. This event fires after delete live stream API. | +| `video.live_stream.warning` | This live stream event fires when Mux has encountered a non-fatal issue. There is no disruption to the live stream ingest and playback. At this time, the event is only fired when Mux is unable to download an image from the URL set as `reconnect_slate_url` parameter value. More details on this event are available [here](/docs/guides/handle-live-stream-disconnects#reconnect-window-and-slates). | + + +## Simulcast Target Events + +These simulcast target events are useful when creating a UI that shows your users the status of their configured 3rd party endpoints. These events are handy when you want to build a UI that shows the state of each simulcast target and keep track of the state changes as they happen. + + +| Event | Description | +|-------|-------------| +| `video.live_stream.simulcast_target.created` | A new simulcast target has been created for this live stream. | +| `video.live_stream.simulcast_target.idle` | When the parent live stream is `"disconnected"`, all simulcast targets will be `"idle"`. | +| `video.live_stream.simulcast_target.starting` | When the parent live stream fires `"connected"` then the simulcast targets transition to `"starting"`. | +| `video.live_stream.simulcast_target.broadcasting` | This fires when Mux has successfully connected to the simulcast target and has begun pushing content to that third party. 
| +| `video.live_stream.simulcast_target.errored` | This fires when Mux has encountered an error either while attempting to connect to the third party streaming service or while broadcasting. Mux will try to re-establish the connection and if it does successfully the simulcast target will transition back to `"broadcasting"`. | +| `video.live_stream.simulcast_target.updated` | This simulcast target has been updated. | +| `video.live_stream.simulcast_target.deleted` | This simulcast target has been deleted. | + + +# Webhook specification + +A machine-readable specification of all Mux webhook events is available at [`https://www.mux.com/webhook-spec.json`](https://www.mux.com/webhook-spec.json). You can use this to generate types, validate payloads, or integrate with any tooling that supports OpenAPI-style schemas. diff --git a/docs/guides/core/make-api-requests.mdx b/docs/guides/core/make-api-requests.mdx new file mode 100644 index 0000000..2ff0e74 --- /dev/null +++ b/docs/guides/core/make-api-requests.mdx @@ -0,0 +1,260 @@ +--- +title: Make API requests +product: system +description: Learn how to work with Mux's API through HTTP requests. +videoWalkthrough: + src: EcHgOK9coz5K4rjSwOkoE7Y7O01201YMIC200RI6lNxnhs + thumbnailTime: 25 + created: '2021-06-09T20:36:00Z' +steps: + - title: HTTP basic auth + description: Mux uses HTTP basic auth with your access token to authenticate requests. + topic: authentication + - title: Access token permissions + description: Configure the necessary permissions for your access tokens. + topic: authorization + - title: CORS and client side API requests + description: Mux APIs are meant to be requested from a trusted server environment, not from clients directly. + topic: API + - title: Using Mux with serverless functions + description: Serverless functions are a great way to make API calls to Mux. + topic: configure + - title: API pagination + description: Learn the best way to paginate through large API responses. 
- title: API rate limits + description: Understand rate limits when accessing the Mux API. +--- + + + +| Term | Description | +| :----------- | :----------------------------------------------------- | +| Token ID | access token ID, the "username" in HTTP basic auth | +| Token secret | access token secret, the "password" in HTTP basic auth | + +Every request to the API is authenticated via an [Access Token](https://dashboard.mux.com/settings/access-tokens), which includes the ID and the secret key. You can think of the Access Token’s ID as its username and secret as the password. Mux only stores a hash of the secret, not the secret itself. If you lose the secret key for your access token, Mux cannot recover it; you will have to create a new Access Token. If the secret key for an Access Token is leaked you should revoke that Access Token on the settings page: https://dashboard.mux.com/settings/access-tokens. + +Note that in order to access the settings page for access tokens you must be an admin on the Mux organization. + +API requests are authenticated via HTTP Basic Auth, where the username is the Access Token ID, and the password is the Access Token secret key. Due to the use of Basic Authentication and because doing so is just a Really Good Idea™, all API requests must be made via HTTPS (to `https://api.mux.com`). + + + Access tokens are scoped to an environment, for example: a development token cannot be used in requests to production. Verify the intended environment when creating an access token. + + + +This is an example of authenticating a request with cURL, which automatically handles HTTP Basic Auth. If you run this request yourself it will not work, you should replace the Access Token ID (`44c819de-4add-4c9f-b2e9-384a0a71bede`) and secret (`INKxCoZ+cX6l1yrR6vqzYHVaeFEcqvZShznWM1U/No8KsV7h6Jxu1XXuTUQ91sdiGONK3H7NE7H`) in this example with your own credentials. 
+ +```shell +curl https://api.mux.com/video/v1/assets \ + -H "Content-Type: application/json" \ + -X POST \ + -d '{ "inputs": [{ "url": "https://muxed.s3.amazonaws.com/leds.mp4" }], "playback_policies": ["public"], "video_quality": "basic" }' \ + -u 44c819de-4add-4c9f-b2e9-384a0a71bede:INKxCoZ+cX6l1yrR6vqzYHVaeFEcqvZShznWM1U/No8KsV7h6Jxu1XXuTUQ91sdiGONK3H7NE7H +``` + +HTTP basic auth works by base64 encoding the username and password in an `Authorization` header on the request. + +Specifically, the header looks something like this: + +```bash +'Authorization': 'Basic base64(MUX_TOKEN_ID:MUX_TOKEN_SECRET)' +``` + +1. The access token ID and secret are concatenated with a `:` and the string is base64 encoded. +1. The value for the `Authorization` header is the string `Basic` plus a space ` ` followed by the base64 encoded result from Step 1. + +In the cURL example above, the cURL library is taking care of the base64 encoding and setting the header value internally. The HTTP library you use in your server-side language will probably have something similar for handling basic auth. You should be able to pass in the `username` (Access Token ID) and `password` (Access Token secret) and the library will handle the details of formatting the header. + + + + + If you're just getting started with Mux Video, use Read and Write. + + + +If you are creating or modifying resources with Mux Video then you need **Read** and **Write** permissions. This includes things like: + +- Creating new assets +- Creating direct uploads +- Creating new live streams + +If you need to create signed tokens for secure video playback, your access token needs **System** write permissions. Learn more about [secure video playback](/docs/guides/secure-video-playback) and signing keys. + +Mux Data only requires **Write** permissions if you need to create Annotations via API. Annotations created in the Dashboard do not require **Write** permissions. 
+ +Mux access token permissions + +If your code is not creating anything and only doing `GET` requests then you can restrict the access token to **Read** only. + + + +Mux API endpoints do not have CORS headers, which means if you try to call the Mux API from the browser you will get an error: + + + request has been blocked by CORS policy: Response to preflight request doesn't pass access control check: No 'Access-Control-Allow-Origin' header is present on the requested resource. + + + +This is expected. Although making API requests directly from the browser or your mobile app would be convenient, it leaves a massive security hole in your application by the fact that your client side code would contain your API keys. Anyone who accesses your application would have the ability to steal your API credentials and make requests to Mux on your behalf. An attacker would be able to gain full control of your Mux account. + +Mux API Credentials should never be stored in a client application. All Mux API calls should be made from a trusted server. + +Instead of trying to make API requests from the client, the flow that your application should follow is: + +1. Client makes a request to your server +1. Your server makes an authenticated API request to Mux +1. Your server saves whatever it needs in your database +1. Your server responds to the client with only the information that the client needs. For example, with live streaming that's the stream key for a specific stream, for uploads that's just the direct upload URL + + + +Serverless functions are a great way to add pieces of secure server-side code to your client heavy application. 
Examples of services that help you run serverless functions are: + +- [AWS Lambda](https://aws.amazon.com/lambda/) +- [Firebase Cloud Functions](https://firebase.google.com/docs/functions) +- [Cloudflare Workers](https://workers.cloudflare.com/) +- [Vercel Functions](https://vercel.com/docs/functions) +- [Netlify Functions](https://docs.netlify.com/functions/overview/) + +The basic idea behind serverless functions is that you can write a bit of server code and deploy it to run on these platforms. Your client application can make requests to these endpoints to perform specific actions. Below is an example from [with-mux-video](https://github.com/vercel/next.js/blob/canary/examples/with-mux-video/pages/api/upload.js) of a serverless function endpoint that makes an API call to create a Mux Direct Upload. + +```js +// pages/api/upload.js +// see: https://github.com/vercel/next.js/tree/canary/examples/with-mux-video +import Mux from '@mux/mux-node'; + +const mux = new Mux(); + +export default async function uploadHandler(req, res) { + const { method } = req; + + switch (method) { + case 'POST': + try { + const upload = await mux.video.uploads.create({ + new_asset_settings: { playback_policy: ['public'], video_quality: 'basic' }, + cors_origin: '*', + }); + res.json({ + id: upload.id, + url: upload.url, + }); + } catch (e) { + console.error('Request error', e); + res.status(500).json({ error: 'Error creating upload' }); + } + break; + default: + res.setHeader('Allow', ['POST']); + res.status(405).end(`Method ${method} Not Allowed`); + } +} +``` + + +Our list endpoints (such as List Assets) do not return every single relevant record. +To offer everyone the best performance we limit the amount of records you can receive and offer pagination parameters to help you navigate through your list. + +### Page/limit pagination +Our most common pagination controls are `page` and `limit`. 
+ +| Parameter | Default | Maximum | Description | +| :-------- | :------ | :---- | :--------------------------------------------------| +| `page` | `1` | None | The page number to return. The first page is `1`. | +| `limit` | `10` | `100` | The number of records to return per page. | + +If you have 100 assets and you want to get the first 10, you would make a request like this: + +```http +GET /video/v1/assets?page=1&limit=10 +``` + +And if you want to get the next 10, you would increment the page parameter from `1` to `2` and make a request like this: + +```http +GET /video/v1/assets?page=2&limit=10 +``` + +### Cursor pagination +In addition to `page`/`limit`, the List Assets endpoint also supports cursor pagination. +Cursor pagination is a more efficient and reliable way of paginating through very large collections. + + +Cursor pagination is only available on the List Assets endpoint, but we plan to add it to more endpoints in the future. If you want it added to any specific endpoints please [let us know!](/support) + + +When you make a request to the list assets endpoint we return a `next_cursor` value. + +```json +// GET /video/v1/assets +{ + "data": [ + { + "id": "asset_id", + "status": "ready", + ... + } + ], + "next_cursor": "eyJwYWdlX2xpbWl0IjoxMDAwLCJwYWdlX2NvdW50IjoxfQ" +} +``` + +Take that `next_cursor` value and make a new request to the list assets endpoint with the `cursor` parameter. + +```json +// GET /video/v1/assets?cursor=eyJwYWdlX2xpbWl0IjoxMDAwLCJwYWdlX2NvdW50IjoxfQ +{ + "data": [ + { + "id": "asset_id", + "status": "ready", + ... + } + ], + "next_cursor": null +} +``` + +If `next_cursor` is `null`, you've reached the end of your list. If `next_cursor` is not `null` you can use that value to get the next page, repeating this pattern until `next_cursor` is `null`. + + + + +Mux Video implements a simple set of rate limits. Rate limits are set per account (not per environment). These rate limits exist for two reasons: + +1. 
First, to protect you, our customers, from runaway scripts or batch processes - we don't want you to accidentally delete all your content, or run up a large bill if you're not expecting it. +1. Second, to ensure that there's always Mux infrastructure available when our customers need it, for example to start that critical live stream, or ingest that urgent video. + + +When the rate limit threshold is exceeded, the API will return an HTTP status code `429`. + + + +### Video API + +1. All Video API activities that include a `POST` request to `https://api.mux.com/video/` are rate limited to a sustained 1 request per second (RPS) with the ability to burst above this for short periods of time. This includes creating new Assets, Live Streams, and Uploads. + +1. All other request methods are limited to 5 sustained requests per second (RPS) with the ability to burst above this for short periods of time. This includes `GET`, `PUT`, `PATCH`, & `DELETE` verbs. Examples include (but are not limited to) requests for retrieving an asset, updating mp4 support, & listing delivery usage. + +### Playback + +There are no limits as to the number of viewers that your streams can have; all we ask is that you let us know if you're planning an event expected to receive more than 100,000 concurrent live viewers. + +### Monitoring Data API + +Requests against the Monitoring Data APIs are rate limited to a sustained 1 request per second (RPS) with the ability to burst above this for short periods of time. + +### General Data API + +Requests against all other General Data APIs are rate limited to a sustained 5 requests per second (RPS) with the ability to burst above this for short periods of time. + +# OpenAPI specification + +The complete Mux API is described by an OpenAPI specification, available at [`https://www.mux.com/api-spec.json`](https://www.mux.com/api-spec.json). 
You can use this spec to generate API clients, import endpoints into tools like [Postman](/docs/core/postman), or integrate with any tooling that supports OpenAPI. diff --git a/docs/guides/core/mux-fundamentals.mdx b/docs/guides/core/mux-fundamentals.mdx new file mode 100644 index 0000000..ec896db --- /dev/null +++ b/docs/guides/core/mux-fundamentals.mdx @@ -0,0 +1,299 @@ +--- +title: Mux fundamentals +product: system +description: A reference guide covering the essential concepts, terminology, and components you need to understand when building with Mux. +--- + +Whether you're just getting started with Mux or need a quick refresher on how the pieces fit together, this guide covers the fundamental concepts you'll encounter when building video, audio, and live streaming applications. + +# Quick reference + +| Term | Description | +| :--- | :---------- | +| [**Organization**](#organizations) | The top-level account container. You can belong to multiple organizations, each with its own billing, team members, and environments. | +| [**Environment**](#environments) | A container within an organization for organizing your Mux resources (assets, live streams, API tokens, etc.). Each organization can have multiple environments. | +| [**Access Token**](#access-tokens) | A credential pair (Token ID + Token Secret) used to authenticate API requests. Scoped to a single environment. | +| [**Asset**](#assets) | A video or audio file that has been uploaded to Mux and processed for streaming playback. | +| [**Playback ID**](#playback-ids) | A unique identifier used to stream an asset or live stream to viewers. | +| [**Live Stream**](#live-streams) | A resource representing a live broadcast that can receive RTMP/SRT input and deliver to viewers. | +| [**Stream Key**](#live-streams) | A secret credential that allows a broadcaster to push video to a specific live stream. 
| +| [**Signing Key**](#signing-keys) | A public/private key pair used to create signed tokens (JWTs) for secure playback. | +| [**Webhook**](#webhooks) | An HTTP callback that Mux sends to your server when events occur (e.g., asset ready, live stream started). | + +# Organizations + +An **organization** is your top-level Mux account. It's the highest container in the Mux hierarchy and contains everything else: environments, team members, and billing settings. + +Key things to know about organizations: + +- **You can belong to multiple organizations.** This is useful if you work with different companies or clients, each with their own Mux account. +- **Each organization has its own billing.** Usage charges are tracked and billed per organization. +- **Team members are managed at the organization level.** You can invite collaborators and assign roles (Admin, Member) within each organization. +- **Organizations contain environments.** All your media resources live inside environments, which live inside organizations. + +You can switch between organizations and create new ones from the [Mux Dashboard](https://dashboard.mux.com/organizations). + +# Environments + +An **environment** is a container within an organization for organizing your Mux resources. Each environment has its own isolated set of assets, live streams, access tokens, signing keys, and webhooks. + +Common use cases for multiple environments: +- Separate **development** and **production** resources +- Isolate resources for different websites or domains (e.g., `site1.com`, `site2.com`) +- Organize by project or use case (e.g., CMS media, marketing site, customer uploads) +- Keep test data separate from production content + + +Resources are scoped to their environment. An access token created in Development cannot be used to manage assets in Production, and webhooks configured for one environment won't fire for events in another. 
+ + +You can view and manage environments in the [Mux Dashboard](https://dashboard.mux.com/organizations). + +# Access Tokens + +**Access tokens** are credentials that authenticate your API requests to Mux. Each token consists of two parts: + +| Part | Description | +| :--- | :---------- | +| **Token ID** | The "username" portion of your credential. Safe to log (but not expose publicly). | +| **Token Secret** | The "password" portion. Keep this secure and never expose it in client-side code. | + + +Mux only stores a hash of your token secret. If you lose it, you'll need to create a new access token. + + + +Mux API requests must be made from a server, not from client-side code. The API does not support CORS, and exposing your credentials in a browser or mobile app is a security risk. + + +## Token permissions + +When creating an access token, you configure which permissions it has: + +| Permission | Use case | +| :--------- | :------- | +| **Mux Video Read** | Retrieve information about assets and live streams | +| **Mux Video Write** | Create, update, and delete assets and live streams | +| **Mux Data Read** | Access playback performance metrics | +| **Mux Data Write** | Create Data annotations | +| **System Read** | View signing keys and other system resources | +| **System Write** | Create and manage signing keys | + +For most use cases when getting started, you'll want **Mux Video Read** and **Write** permissions. + +You can create and manage access tokens in the [Mux Dashboard](https://dashboard.mux.com/settings/access-tokens). + +**Learn more:** [Make API requests](/docs/core/make-api-requests) | [Use an SDK](/docs/core/sdks) + +# Assets + +An **asset** is a video or audio file that has been ingested into Mux and processed for adaptive bitrate streaming. When you create an asset, Mux: + +1. Downloads the file from your provided URL (or receives it via [direct upload](/docs/guides/upload-files-directly)) +2. Transcodes it into multiple quality levels +3. 
Packages it for HLS streaming +4. Generates a unique **asset ID** + +```json +// Example asset response +{ + "data": { + "id": "01itgOBvgjAbES7Inwvu4kEBtsQ44HFL6", + "status": "ready", + "playback_ids": [ + { + "id": "TXjw00EgPBPS6acv7gBUEJ14PEr5XNWOe", + "policy": "public" + } + ], + "duration": 120.5, + "aspect_ratio": "16:9" + } +} +``` + +## Asset status lifecycle + +Assets progress through several statuses: + +| Status | Description | +| :----- | :---------- | +| `preparing` | Mux is downloading and processing the file | +| `ready` | The asset is ready for playback | +| `errored` | Something went wrong during processing | + +Rather than polling the API to check status, use [webhooks](/docs/core/listen-for-webhooks) to be notified when an asset is ready. + +**Learn more:** [Stream videos in five minutes](/docs/core/stream-video-files) | Assets API + +# Playback IDs + +A **playback ID** is what you use to actually stream content to viewers. While asset IDs are used to _manage_ your content (via `api.mux.com`), playback IDs are used to _stream_ your content (via `stream.mux.com`). + +``` +https://stream.mux.com/{PLAYBACK_ID}.m3u8 +``` + +## Playback policies + +Each playback ID has a policy that controls how it can be accessed: + +| Policy | Description | +| :----- | :---------- | +| `public` | Anyone with the URL can access the content | +| `signed` | Viewers need a valid JWT token to watch | + +An asset can have multiple playback IDs with different policies. This lets you, for example, have a public playback ID for trailers and a signed playback ID for the full content. + + +You can add and remove playback IDs without affecting the underlying asset. This is useful for revoking access without re-encoding your content. + + +**Learn more:** [Play your videos](/docs/guides/play-your-videos) | [Secure video playback](/docs/guides/secure-video-playback) + +# Live streams + +A **live stream** represents a live broadcast channel. 
Unlike assets (which are created from existing files), live streams receive real-time input and deliver it to viewers with low latency. + +## Key live stream components + +| Component | Description | +| :-------- | :---------- | +| **Stream Key** | A secret credential broadcasters use to connect their encoder to Mux | +| **RTMP URL** | The ingest endpoint (`rtmp://global-live.mux.com:5222/app`) | +| **SRT URL** | Alternative ingest endpoint for SRT protocol | +| **Playback ID** | Used to stream to viewers (same concept as asset playback IDs) | + + +Anyone with your stream key can broadcast to your live stream. Treat it like a password. + + +## Live stream lifecycle + +| Status | Description | +| :----- | :---------- | +| `idle` | No one is broadcasting; waiting for input | +| `active` | A broadcaster is connected and viewers can watch | +| `disabled` | The live stream has been disabled and won't accept connections | + +When a live stream ends, Mux automatically creates a new asset from the recording (if recording is enabled). + +**Learn more:** [Configure broadcast software](/docs/guides/configure-broadcast-software) | [Handle disconnections](/docs/guides/handle-live-stream-disconnects) | Live Streams API + +# Signing keys + +**Signing keys** are cryptographic key pairs used to generate JWTs (JSON Web Tokens) for [secure video playback](/docs/guides/secure-video-playback). When you have assets or live streams with `signed` playback policies, you need signing keys to create valid playback tokens. + +| Component | Description | +| :-------- | :---------- | +| **Key ID** | A unique identifier for the signing key | +| **Private Key** | Used by your server to sign JWTs. Keep this secret. | + +Your server uses the private key to create short-lived tokens that grant access to specific content. 
The token can include claims for: + +- **Expiration time** - When the token becomes invalid +- **Playback restrictions** - Additional rules like allowed domains + + +Signing keys and access tokens serve different purposes: +- **Access tokens** authenticate your server-to-Mux API requests +- **Signing keys** create tokens that authenticate viewer playback requests + + +You can create and manage signing keys in the [Mux Dashboard](https://dashboard.mux.com/settings/signing-keys). + +**Learn more:** [Secure video playback](/docs/guides/secure-video-playback) | Signing Keys API + +# Webhooks + +**Webhooks** are HTTP callbacks that Mux sends to your application when events occur. Instead of repeatedly polling the API to check if an asset is ready, you configure a webhook URL and Mux notifies you automatically. + +Common webhook events: + +| Event | Description | +| :---- | :---------- | +| `video.asset.ready` | An asset has finished processing and is ready for playback | +| `video.asset.errored` | An asset failed to process | +| `video.live_stream.active` | A live stream has started broadcasting | +| `video.live_stream.idle` | A live stream has stopped broadcasting | +| `video.upload.asset_created` | A direct upload has completed and created an asset | + + +Webhooks are configured per environment. Make sure your webhook is set up in the same environment where your resources are created. + + +**Learn more:** [Listen for webhooks](/docs/core/listen-for-webhooks) | [Verify webhook signatures](/docs/core/verify-webhook-signatures) + +# IDs at a glance + +Mux uses several different types of identifiers. 
Here's a quick reference: + +| ID Type | Format Example | Purpose | +| :------ | :------------- | :------ | +| **Organization ID** | `abc123` | Identify your organization | +| **Environment ID** | `j0863n` | Identify specific environments within an organization | +| **Asset ID** | `01itgOBvgj...` | Identify and manage assets via the API | +| **Playback ID** | `TXjw00EgPB...` | Stream content to viewers | +| **Live Stream ID** | `aA02skpHX...` | Identify and manage live streams via the API | +| **Upload ID** | `OA02dANZ...` | Track direct upload status | +| **Token ID** | `44c819de-4add-...` | Identify access tokens (part of API auth) | +| **Signing Key ID** | `JjPXgkqO...` | Identify signing keys for JWT creation | + +# SDKs + +Mux provides official SDKs for several languages that handle authentication and make it easier to work with the API: + +- [Node.js](/docs/integrations/mux-node-sdk) +- [Python](/docs/integrations/mux-python-sdk) +- [Ruby](/docs/integrations/mux-ruby-sdk) +- [PHP](/docs/integrations/mux-php-sdk) +- [Java](/docs/integrations/mux-java-sdk) +- [C# / .NET](/docs/integrations/mux-csharp-sdk) +- [Elixir](/docs/integrations/mux-elixir-sdk) + +For client-side playback, see [Mux Player](/docs/guides/mux-player-web) and the various player SDK guides. 
+ +**Learn more:** [Use an SDK](/docs/core/sdks) + +# API and webhook specifications + +Mux publishes machine-readable specifications for both the API and webhook events: + +| Specification | URL | Description | +| :------------ | :-- | :---------- | +| **Combined spec** | [`mux.com/full-combined-spec.json`](https://www.mux.com/full-combined-spec.json) | All API endpoints and webhook events in one spec | +| **API spec** | [`mux.com/api-spec.json`](https://www.mux.com/api-spec.json) | Core API endpoints only | +| **Webhook spec** | [`mux.com/webhook-spec.json`](https://www.mux.com/webhook-spec.json) | Webhook event schemas only | +| **Image API spec** | [`mux.com/image-spec.json`](https://www.mux.com/image-spec.json) | Thumbnail, animated GIF, and storyboard endpoints | +| **Streaming API spec** | [`mux.com/stream-spec.json`](https://www.mux.com/stream-spec.json) | HLS and MP4 streaming playback endpoints | +| **Engagement Counts spec** | [`mux.com/stats-spec.json`](https://www.mux.com/stats-spec.json) | Real-time view and viewer count endpoints | + +These are useful for generating API clients, importing into tools like [Postman](/docs/core/postman), validating webhook payloads, or integrating with any tooling that supports OpenAPI. Use the combined spec if you want everything in one file. + +# What's next? + +Now that you understand the fundamentals, here are some recommended next steps: + + + + + + diff --git a/docs/guides/core/postman.mdx b/docs/guides/core/postman.mdx new file mode 100644 index 0000000..cfe003b --- /dev/null +++ b/docs/guides/core/postman.mdx @@ -0,0 +1,130 @@ +--- +title: Make API requests with Postman +product: system +description: In this guide you will learn how to fork, set up, and work with Mux's API collection using Postman's API interface. 
+videoWalkthrough: + src: ub1CcCuqhGNuMuJ9fh3OdDD0001w7oQxDn38wjpdbbUP8 + thumbnailTime: 233 + created: "2022-07-11T16:01:00Z" +steps: + - title: Fork the collection + description: Create your own copy of Mux's API collection. + topic: customize + - title: Basic authentication + description: Setup environment variables in Postman using your Mux credentials in order to authenticate each request. + topic: authentication + - title: Sample request body and responses + description: To reduce friction, we provide sample data and example responses so you always have data to work with. + topic: code + - title: Stay up to date with the main collection + description: Similar to GitHub, pull changes to keep your fork in sync with the main collection. + topic: fetch +--- + + + +We recommend [Postman](https://postman.com) as a way to easily explore and interact with our API. + +Similar to forking a repository on GitHub, forking a collection on Postman allows you to create a new instance of the collection. +Here, you can send requests, collaborate, and submit changes to the original collection. +Without forking the collection, the collection will be **read-only** and you will not be able to make requests unless you're a member of the workspace — even if the collection is public. + +If you're already a Postman user, you can fork our [officially supported Postman collection](https://www.postman.com/muxinc/workspace/mux-apis/overview?utm_campaign=postman-collab&utm_medium=guide&utm_source=mux) and add it to your workspace by clicking the button below. + +You can then stay up to date with future changes to our API specification by pulling changes. More on that in the sections below. 
+ +[![Run in Postman](https://run.pstmn.io/button.svg)](https://god.gw.postman.com/run-collection/18282356-97f1767e-f35a-4fca-b1c5-bf612e6f8e76?action=collection%2Ffork&collection-url=entityId%3D18282356-97f1767e-f35a-4fca-b1c5-bf612e6f8e76%26entityType%3Dcollection%26workspaceId%3D2bcc854d-f831-4c9f-ac0a-3b4382f3a5cd) + + + +| Term | Description | +| :----------- | :--------------------------------------------------------- | +| Token ID | access token ID, the "username" in basic auth | +| Token secret | access token secret key, the "password" in basic auth | + +## Set up credentials + +Once you've created your access tokens via your [Mux account](https://dashboard.mux.com/signup?type=video?utm_campaign=postman-collab&utm_medium=guide&utm_source=mux), you can input them into their respective fields under authorization. + +Basic authentication in Postman + +## Environment variables + +You can use [environment variables](https://learning.postman.com/docs/sending-requests/variables/?utm_campaign=mux-collab&utm_medium=site&utm_source=mux) to store and reuse values — like your credentials — +across requests and collections. Variables can either be scoped to the environment or globally, available to all collections within a workspace. + +To create environment variables, click the eye icon on the right-hand side of the collection and choose the scope you want your credentials to apply to. + +Environment variables menu in Postman + +Next, add your credentials and set the type to **secret**. This will hide values on-screen. Once you've finished setting up your environment variables, +you can go back to basic authentication and use the variables instead of the values directly. To do this, use `{{variable_name}}` in the form field. + +Hidden authentication in Postman + + + +Even with extensive documentation, it can be hard to navigate an API for the first time. 
To help you make requests and understand their responses, we use Postman's
+[examples feature](https://learning.postman.com/docs/sending-requests/examples/?utm_campaign=mux-collab&utm_medium=site&utm_source=mux) for all Mux Video and Mux Data endpoints.
+
+You can view an endpoint's sample request body by clicking the endpoint on the left-hand API menu and then clicking **body** in the main section of the interface.
+
+Sample API request body in Postman
+
+You can view an endpoint's sample request response by clicking the right-facing caret on the endpoint. A new item will appear in the collection with the icon **e.g.**.
+
+Sample API request response in Postman
+
+
+
+Similar to a forked repository on GitHub, your Postman fork will only stay up to date with the origin collection if you periodically [pull changes](https://learning.postman.com/docs/collaborating-in-postman/version-control/#pulling-updates)
+to keep your fork in sync.
+
+You can pull changes by clicking the three dots next to the name of your fork. This will open a sub-menu. Click on **merge changes** near the bottom of the menu.
+
+Forked Postman collection's sub-menu
+
+If your fork is not in sync with the origin collection, there will be a yellow banner that states, "The destination has been modified since you last updated the fork. We’d recommend pulling changes." Click **pull changes** on the right.
+
+You will then see a diff where source is the origin and destination is your fork.
+
+API diff when pulling changes
+
+Sometimes there will be merge conflicts. If you encounter them, you can choose whether you keep the source or destination version of a change.
+
+Once everything looks good, click the orange button labeled **pull changes**.
diff --git a/docs/guides/core/sdks.mdx b/docs/guides/core/sdks.mdx new file mode 100644 index 0000000..3f93bc2 --- /dev/null +++ b/docs/guides/core/sdks.mdx @@ -0,0 +1,14 @@ +--- +title: Use a Mux SDK +description: Mux SDKs are available for a variety of languages and platforms. +--- + +Mux has API SDKs for several major languages. You are not required to use them, but these SDKs handle the details of authentication for you and make it a little nicer to send API requests to Mux; in languages with static typing or type hints, they also will help you form correct requests and reduce development time. + +- [Node](/docs/integrations/mux-node-sdk) +- [Python](/docs/integrations/mux-python-sdk) +- [PHP](/docs/integrations/mux-php-sdk) +- [Ruby](/docs/integrations/mux-ruby-sdk) +- [Elixir](/docs/integrations/mux-elixir-sdk) +- [Java](/docs/integrations/mux-java-sdk) +- [C# and other .NET languages](/docs/integrations/mux-csharp-sdk) \ No newline at end of file diff --git a/docs/guides/core/stream-video-files.mdx b/docs/guides/core/stream-video-files.mdx new file mode 100644 index 0000000..195ad09 --- /dev/null +++ b/docs/guides/core/stream-video-files.mdx @@ -0,0 +1,203 @@ +--- +title: Stream videos in five minutes +product: video +description: Upload and play back your video files in your application using Mux in five minutes or less. +videoWalkthrough: + src: g11xsFT2MA9E92016CuQTSh8kv01aaUhJK + thumbnailTime: 0 + created: "2024-05-31T12:00:00Z" +steps: + - title: 1. Get an API Access Token + description: The Mux Video API uses a token key pair that consists of a Token ID and Token Secret for authentication. Generate a new Access Token in the settings of your Mux account dashboard. + - title: 2. POST a video + description: Videos stored in Mux are called assets. To create your first video asset, send a POST request to the `/assets` endpoint and set the `input` property to the URL of a video file that's accessible online. + - title: 3. 
Wait for `ready` + description: As soon as you POST a video, Mux begins downloading and processing the video. For shorter files, this often takes just a few seconds. + - title: 4. Watch your Video + description: To play back an asset, create a playback URL using the `playback_id` you received when you created the asset. + - title: 5. Manage your Mux assets + description: Delete, update, and more asset functionalities are available via the Video asset API methods + +--- + + + +The Mux Video API uses a token key pair that consists of a **Token ID** and **Token Secret** for authentication. If you haven't already, generate a new Access Token in the [Access Token settings](https://dashboard.mux.com/settings/access-tokens) of your Mux account dashboard. + +Mux access token settings + +You'll be presented with a form to create your new Access Token. + +Mux Video access token permissions + +- **Access Tokens** belong to an **Environment** — a container for the various Access Tokens, Signing Keys, and assets that you'll come to add to Mux. For this guide, you can keep the **Production** environment selected. +- **Access Tokens** can have varying permissions to control what kinds of changes they have the ability to make. For this guide, your **Access Token** should have Mux Video **Read** and **Write** permissions. +- You can give your **Access Token** an internal-only name like "Onboarding" so you know where you've used it within your application. + +Now, click the **Generate token** button. + +You'll be presented with your new **Access Token ID** and **Secret Key**. + +Mux access token environment + +Once you have your new **Access Token ID** and **Secret Key**, you're ready to upload your first video. + + + +Videos stored in Mux are called assets. To create your first video asset, you need to send a POST request to the /assets endpoint and set the `input` value to the URL of a video file that's accessible online. 
+ +Here are a few demo videos you can use that are stored on common cloud storage services: + +- Amazon S3: https://muxed.s3.amazonaws.com/leds.mp4 +- Google Drive: https://drive.google.com/uc?id=13ODlJ-Dxrd7aJ7jy6lsz3bwyVW-ncb3v +- Dropbox: https://www.dropbox.com/scl/fi/l2sm1zyk6pydtosk3ovwo/get-started.mp4?rlkey=qjb34b0b7wgjbs5xj9vn4yevt&dl=0 + +To start making API requests to Mux, you might want to install one of our officially supported API SDKs. These are lightweight wrapper libraries that use your API credentials to make authenticated HTTP requests to the Mux API. + + + + + For an example of how to make API Requests from your local environment, see the [Make API Requests](/docs/core/make-api-requests) guide. + + + + + + +The response will include an **Asset ID** and a **Playback ID**. + +- Asset IDs are used to manage assets using `api.mux.com` (e.g. to read or delete an asset). +- Playback IDs are used to stream an asset to a video player through `stream.mux.com`. You can add multiple playback IDs to an asset to create playback URLs with different viewing permissions, and you can delete playback IDs to remove access without deleting the asset. + +```json +{ + "data": { + "status": "preparing", + "playback_ids": [ + { + "policy": "public", + "id": "TXjw00EgPBPS6acv7gBUEJ14PEr5XNWOe" + } + ], + "video_quality": "basic", + "mp4_support": "none", + "master_access": "none", + "id": "01itgOBvgjAbES7Inwvu4kEBtsQ44HFL6", + "created_at": "1607876845" + } +} +``` + + + Mux does not store the original file in its exact form, so if your original quality files are important to you, don't delete them after submitting them to Mux. + + + + +As soon as you make the `POST` request, Mux begins downloading and processing the video. For shorter files, this often takes just a few seconds. Very large files over poor connections may take a few minutes (or longer). + +When the video is ready for playback, the asset `status` changes to `ready`. 
You should wait until the asset status is `ready` before you attempt to play the video. + +The best way to be notified of asset status updates is via **webhooks**. Mux can send a webhook notification as soon as the asset is ready. See the [webhooks guide](/docs/core/listen-for-webhooks) for details. + +If you can't use webhooks for some reason, you can manually **poll** the asset API to see asset status. Note that this only works at low volume. Try this example: + +## Try an example request + + + + + +Please don't poll this API more than once per second. + + + +To play back an asset, create a playback URL using the `PLAYBACK_ID` you received when you created the asset. + + + +```curl +https://stream.mux.com/{PLAYBACK_ID}.m3u8 +``` + +## Preview in a player + + + +See the [playback guide](/docs/guides/play-your-videos) for more information about how to integrate with a video player. + +## Preview with `stream.new` + +[Stream.new](https://stream.new/) is an open source project by Mux that allows you to add a video and get a shareable link to stream it. + +Go to `stream.new/v/{PLAYBACK_ID}` to preview your video streaming. This URL is shareable and automatically generated using the video playback ID. Copy the link below and open it in a browser to view your video. + +``` +https://stream.new/v/{PLAYBACK_ID} +``` + +After you have everything working [integrate Mux Data](/docs/guides/track-your-video-performance) with your player for monitoring playback performance. + + + +After you have assets created in your Mux environment, you may find some of these other endpoints handy: + +- Create an asset +- List assets +- Retrieve an asset +- Delete an asset +- Retrieve asset input info +- Create asset playback ID +- Retrieve asset playback ID +- Delete asset playback ID +- Update MP4 support on asset +- Update master access on asset +- Update asset track +- Delete an asset track + +More Video methods and descriptions are available at the API Docs. 
+ +# Next Steps + + + + + + diff --git a/docs/guides/core/verify-webhook-signatures.mdx b/docs/guides/core/verify-webhook-signatures.mdx new file mode 100644 index 0000000..ed23b63 --- /dev/null +++ b/docs/guides/core/verify-webhook-signatures.mdx @@ -0,0 +1,50 @@ +--- +title: Verify webhook signatures +product: system +description: You have the option to verify webhook requests that Mux sends to your endpoints. Mux will include a signature in the request's header. You can use this signature in your code to make sure the request was sent by Mux and not a third party. +--- + +## Obtain your signing secret + +Before you get started, you will need your signing secret for your webhook. You can find that where you configure webhooks on the [webhooks settings page](https://dashboard.mux.com/settings/webhooks). Please note that the signing secret is different for each webhook endpoint that we notify. + + + +Webhooks contain a header called `mux-signature` with the timestamp and a signature. The timestamp is prefixed by `t=` and the signature is prefixed by a scheme. Schemes start with `v`, followed by an integer. Currently, the only valid signature scheme is `v1`. Mux generates signatures using [HMAC](https://en.wikipedia.org/wiki/HMAC) with [SHA-256](https://en.wikipedia.org/wiki/SHA-2). +```text +Mux-Signature: t=1565220904,v1=20c75c1180c701ee8a796e81507cfd5c932fc17cf63a4a55566fd38da3a2d3d2` +``` + +## How to verify webhook signatures + +### Step 1: Extract the timestamp and signature + +Split the header at the `,` character and get the values for `t` (timestamp) and `v1` (the signature) + +### Step 2: Prepare the `signed_payload` string + +You will need: + * the timestamp from Step 1 as a string (for example: "1565220904") + * the dot character `.` + * the raw request body (this will be JSON in a string format) + +### Step 3: Determine the expected signature + +Use the 3 components from Step 2 to compute an HMAC with the SHA256 hash function. 
Depending on the language that you are using this will look something like the following: + +```js +secret = 'my secret' // your signing secret +payload = timestamp + "." + request_body +expected_signature = createHmacSha256(payload, secret) +``` + +### Step 4: Compare signature + +Compare the signature in the header to the expected signature. If the signature matches, compute the difference between the current timestamp and the received timestamp, then check to make sure that the timestamp is within our tolerance. By default, our SDKs allow a tolerance of 5 minutes. + +## Examples + +Our official SDKs for [Node](https://github.com/muxinc/mux-node-sdk) and [Elixir](https://github.com/muxinc/mux-elixir) contain helper methods for verifying Mux webhooks. If you're using one of these languages it's best to use our available helper methods. Note that the helper methods use the raw request body instead of a payload including the timestamp. + + + diff --git a/docs/guides/developer/add-alternate-audio-tracks-to-your-videos.mdx b/docs/guides/developer/add-alternate-audio-tracks-to-your-videos.mdx new file mode 100644 index 0000000..a7d3ff1 --- /dev/null +++ b/docs/guides/developer/add-alternate-audio-tracks-to-your-videos.mdx @@ -0,0 +1,76 @@ +--- +title: Add alternate audio tracks to videos +product: video +description: >- + Learn how to use multi-track audio to add alternate audio tracks to your + videos +--- + +## Introduction to multi-track audio + +The multi-track audio feature allows you to add alternate audio tracks to the video assets in your Mux account. + +Videos with multi-track audio can be used for increased accessibility or multi-language support, or just to allow viewers to opt into a different audio experience, like a director's commentary. + +## (Optional) Set the language and name for your primary audio track + + + Optional but highly recommended to increase accessibility if you're delivering alternate audio tracks. 
+
+
+When you create an asset in Mux, you can also specify the `language_code` and `name` of the primary audio track that's embedded in your first input file.
+
+```json
+// POST https://api.mux.com/video/assets
+
+{
+  "inputs": [
+    {
+      "url": "{VIDEO_INPUT_URL}",
+      "language_code" : "en",
+      "name" : "English"
+    }
+  ],
+  "playback_policies": [
+    "public"
+  ],
+  "video_quality": "basic"
+}
+```
+
+A `name` is optional but highly recommended. If you don't specify it, we'll generate it for you based on the `language_code` you provided. The `language_code` must be a [BCP-47 language tag](https://en.wikipedia.org/wiki/IETF_language_tag), such as `en` for English, or `es` for Spanish. You can find a list of common [BCP-47 language tags here](https://en.wikipedia.org/wiki/IETF_language_tag#List_of_common_primary_language_subtags).
+
+You can still use multi-track audio with assets that don't have a language or name set on your initial upload; we'll just call your primary audio track "Default," with no language.
+
+## Add alternate audio tracks to your asset
+
+Once you've created your asset with a primary audio track, you can add alternate audio tracks using the create asset track API, specifying the URL of the audio file you wish to add, and the `language_code` of the alternate audio track. This is the same API that you can use to add captions to your assets.
+
+Mux supports most audio file formats and codecs, such as M4A, WAV, or MP3 files, but for fastest processing, you should [use standard inputs wherever possible](/docs/guides/minimize-processing-time).
+
+```json
+// POST https://api.mux.com/video/assets/${ASSET_ID}/tracks
+
+{
+  "url": "https://example.com/bar.m4a",
+  "type": "audio",
+  "language_code": "fr",
+  "name": "Français"
+}
+```
+
+Assets must be in the `ready` state before you can use the create asset track API to add the alternate audio track.
+
+You always need to specify the `language_code` for an alternate audio track, but the `name` is optional.
If you don't specify a `name`, we'll generate it for you based on the language code you provided. + +You will need to call the API once for each alternate audio track that you want to add. + +## Play your videos with multi-track audio + +When the alternate audio track has been processed, Mux will automatically add it to the HLS playback URL for your asset. + +Many video players already support multi-track audio right out of the box, including [Mux Player](/docs/guides/mux-player-web), Video.js, ExoPlayer, and AVPlayer. So just drop your usual playback URL into your favorite video player, and click play. If your player doesn't support multi-track audio, you'll just hear the primary audio track. + +Switching between audio tracks differs in each video player, but this will usually be a menu on the bottom right allowing you to change the track. For example below in Mux Player, you need to click the waveform icon. + + diff --git a/docs/guides/developer/add-autogenerated-captions-and-use-transcripts.mdx b/docs/guides/developer/add-autogenerated-captions-and-use-transcripts.mdx new file mode 100644 index 0000000..1e55391 --- /dev/null +++ b/docs/guides/developer/add-autogenerated-captions-and-use-transcripts.mdx @@ -0,0 +1,179 @@ +--- +title: Add auto-generated captions to your videos and use transcripts +product: video +description: >- + Learn how to add auto-generated captions to your on-demand Mux Video assets, + to increase accessibility and to create transcripts for further processing. +videoWalkthrough: + src: 9AIgOif3kR1TMBm700xDcKO3FPmDpMOeY + thumbnailTime: 59 +--- + +## How auto-generated captions work + +Mux uses [OpenAI's Whisper model](https://openai.com/index/whisper) to automatically generate captions for on-demand assets. This guide shows you how to enable this feature, what you can do with it, and what some of the limitations you might encounter are. 
+
+Generally, you should expect auto-generated captions to work well for content with reasonably clear audio. It may work less well with assets that contain a lot of non-speech audio (music, background noise, extended periods of silence).
+
+We recommend that you try it out on some of your typical content, and see if the results meet your expectations.
+
+This feature is designed to generate captions in the same language that your content's audio is produced in. It should not be used to programmatically generate translated captions in other languages.
+
+## Enable auto-generated captions
+
+When you create a Mux Asset, you can add a `generated_subtitles` array to the API call, as follows:
+
+```json
+// POST /video/v1/assets
+{
+  "inputs": [
+    {
+      "url": "...",
+      "generated_subtitles": [
+        {
+          "language_code": "en",
+          "name": "English CC"
+        }
+      ]
+    }
+  ],
+  "playback_policies": [
+    "public"
+  ],
+  "video_quality": "basic"
+}
+```
+
+Mux supports the following languages and corresponding language codes for VOD generated captions. Languages labeled as "beta" may have lower accuracy.
+ +| Language | Language Code | Status | +| :-- | :-- | :-- | +| English | en | Stable | +| Spanish | es | Stable | +| Italian | it | Stable | +| Portuguese | pt | Stable | +| German | de | Stable | +| French | fr | Stable | +| Automatic Detection | auto | Stable | +| Polish | pl | Beta | +| Russian | ru | Beta | +| Dutch | nl | Beta | +| Catalan | ca | Beta | +| Turkish | tr | Beta | +| Swedish | sv | Beta | +| Ukrainian | uk | Beta | +| Norwegian | no | Beta | +| Finnish | fi | Beta | +| Slovak | sk | Beta | +| Greek | el | Beta | +| Czech | cs | Beta | +| Croatian | hr | Beta | +| Danish | da | Beta | +| Romanian | ro | Beta | +| Bulgarian | bg | Beta | + +You can also enable autogenerated captions if you're using Direct Uploads by specifying the `generated_subtitles` configuration in the first entry of the `input` list of the `new_asset_settings` object, like this: + +```json +// POST /video/v1/uploads +{ + "new_asset_settings": { + "playback_policies": [ + "public" + ], + "video_quality": "basic", + "inputs": [ + { + "generated_subtitles": [ + { + "language_code": "en", + "name": "English CC" + } + ] + } + ] + }, + "cors_origin": "*" +} +``` + +Auto-captioning happens separately from the initial asset ingest, so that this doesn't delay the asset being available for playback. If you want to know when the text track for the captions is ready, listen for the `video.asset.track.ready` webhook for a track with `"text_source": "generated_vod"`. + +### Retroactively enable auto-generated captions + +You can retroactively add captions to any asset by POSTing to the `generate-subtitles` endpoint on the asset audio track that you want to generate captions for, as shown below: + +```json +// POST /video/v1/assets/${ASSET_ID}/tracks/${AUDIO_TRACK_ID}/generate-subtitles + +{ + "generated_subtitles": [ + { + "language_code": "en", + "name": "English (generated)" + } + ] +} +``` + +**For self-service customers:** You can add captions to any asset using this API. 
+ +**For contract customers:** If you need to add captions to assets older than 7 days, [please contact support](/support/human) and we'd be happy to help. Please note that there may be a charge for backfilling captions onto large libraries. + + +## Retrieve a transcript + +For assets that have a `ready` auto-generated captions track, you can also request a transcript (a plain text file) of the speech recognized in your asset. + +To get this, use a playback id for your asset and the track id for the `generated_vod` text track: + + + If you don't know the `TRACK_ID`, you can retrieve it by listing the asset's tracks using the{' '} + Asset endpoint under `tracks` and the corresponding `track.id`. + + +``` +https://stream.mux.com/{PLAYBACK_ID}/text/{TRACK_ID}.txt +``` + +Signed assets require a `token` parameter specifying a JWT with the same `aud` claim used for [video playback](/docs/guides/secure-video-playback#4-generate-a-json-web-token-jwt): + +``` +https://stream.mux.com/{PLAYBACK_ID}/text/{TRACK_ID}.txt?token={JWT} +``` + +You can also retrieve a WebVTT version of the text track by replacing `.txt` with `.vtt` in the URL. + +You might find this transcript useful for doing further processing in other systems. For example, content moderation, sentiment analysis, summarization, extracting insights from your content, and many more. + +## FAQ + +### How much does auto-generated captioning cost for on-demand assets? + +There is no additional charge for this feature. It's included as part of the standard encoding and storage charges for Mux Video assets. + +### How long does it take to generate captions? + +It depends on the length of the asset, but generally it takes about 0.1x content duration. As an example, a 1 hour asset would take about 6 minutes to generate captions for. + +### Help, the captions you generated are full of mistakes! + +We're sorry to hear that! 
Unfortunately, though automatic speech recognition has improved enormously in recent years, sometimes it can still get things wrong. + +One option you have is to edit and replace the mis-recognized speech in the captions track: +1. Download the full VTT file we generated at `https://stream.mux.com/{PLAYBACK_ID}/text/{TRACK_ID}.vtt` +1. Edit the VTT file using your preferred text editor +1. Delete the autogenerated track with the 'delete track' API +1. Add a new track to your asset using the edited VTT file, using the `create track` API + +### My content is in multiple languages + +We currently do not recommend using this feature on mixed-language content. + +### I want to generate captions in a different language to my content + +We currently do not support automatic translation in generated captions - you should only generate captions in the language that matches your audio track. + +### My content is in a language you don't support + +We'd love to hear more about the languages that you'd like to see us support, please [reach out](/support) with details. diff --git a/docs/guides/developer/add-autogenerated-live-captions.mdx b/docs/guides/developer/add-autogenerated-live-captions.mdx new file mode 100644 index 0000000..bc4ebb8 --- /dev/null +++ b/docs/guides/developer/add-autogenerated-live-captions.mdx @@ -0,0 +1,277 @@ +--- +title: Add Auto-Generated Live Captions +product: video +description: In this guide you will learn how to add auto-generated live captions to your Mux live stream. +seoTitle: Add your own live stream closed captions +seoDescription: Learn how to use Mux's auto-generated closed caption integration for live streaming video and how it improves accessibility. +steps: + - title: Overview + description: We use A.I. based speech-to-text technology to automatically generate the closed captions. + topic: overview + - title: 1. Is my content suitable for auto-generated live closed captions? 
+ description: Non technical content with clear audio and minimal background noise is most suitable for auto-generated live captions. + - title: 2. Increase accuracy of captions with transcription vocabulary + description: Providing vocabulary for technical terms and proper nouns can increase accuracy of auto-generated live captions + - title: 3. Create a new transcription vocabulary + description: + - title: 4. Enable auto-generated live closed captions + description: Get started with adding auto-generated live closed captions to your Mux live stream. + - title: 5. Update stream to not auto-generate closed captions for future connections + description: Let Mux know to not auto-generate closed captions when the live stream starts again. + - title: 6. Manage and update your transcription vocabulary + - title: FAQs + description: + topic: FAQ +--- + + + +Mux is excited to offer auto-generated live closed captions in English, French, German, Italian, Portuguese, and Spanish. Closed captions make video more accessible to people who are deaf or hard of hearing, but the benefits go beyond accessibility. Captions empower your viewers to consume video content in whichever way is best for them, whether it be audio, text, or a combination. + +For auto-generated live closed captions, we use artificial intelligence based speech-to-text technology to generate the closed captions. Closed captions refer to the visual display of the audio in a program. + + + +Non technical content with clear audio and minimal background noise is most suitable for auto-generated live captions. Content with music and multiple speakers speaking over each other are not good use cases for auto-generated live captions. + +Accuracy ranges for auto-generated live captions range from 70-95%. + + + +For all content, we recommend you provide transcription vocabulary of technical terms (e.g. CODEC) and proper nouns. 
By providing the transcription vocabulary beforehand, you can **increase the accuracy** of the closed captions. + +The transcription vocabulary helps the speech to text engine transcribe terms that otherwise may not be part of general library. Your use case may involve brand names or proper names that are not normally part of a language model’s library (e.g. "Mux"). Or perhaps you have a term, say "Orchid" which is a brand name of a toy. The engine will recognize "orchid" as a flower but you would want the word transcribed with proper capitalization in the context as a brand. + +Please note that it can take up to 20 seconds for the transcription vocabulary to be applied to your live stream. + + + +You can create a new transcription library by making a `POST` request to `/video/v1/transcription-vocabularies` endpoint API and define the input parameters. Each transcription library can have up to 1,000 phrases. + +## Request Body Parameters + +| Input parameters | Type | Description | +| ---------------- | -------- | ----------------------------------------------------------------------------------------------------------------------------------------------------- | +| name | `string` | The human readable description of the transcription library. | +| phrases | `array` | An array of phrases to populate the transcription library. A phrase can be one word or multiple words, usually describing a single object or concept. 
| + +### API Request + +```json +POST /video/v1/transcription-vocabularies +{ + "name": "TMI vocabulary", + "phrases": ["Mux", "Demuxed", "The Mux Informational", "video.js", "codec", "rickroll"] +} +``` + +### API Response + +```json +{ + "data": { + "updated_at": "1656630612", + "phrases": ["Mux", "Demuxed", "The Mux Informational", "video.js", "codec", "rickroll"], + "name": "TMI vocabulary", + "id": "4uCfJqluoYxl8KjXxNF00TgB56OyM152B5ZR00cLKXFlc", + "created_at": "1656630612" + } +} +``` + + + +Add the `generated_subtitles` array at time of stream creation or to an existing live stream. + +## Request Body Parameters + +| Input parameters | Type | Description | +| ------------------------------ | -------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `name` | `string` | The human readable description for the generated subtitle track. This value must be unique across all the text type and subtitles text type tracks. If not provided, the name is generated from the chosen `language_code`. | +| `passthrough` | `string` | Arbitrary metadata set for the generated subtitle track. | +| `language_code` | `string` | BCP-47 language tag for captions. Defaults to `"en"`. | +| `transcription_vocabulary_ids` | `array` | The IDs of existing Transcription Vocabularies that you want to be applied to the live stream. If the vocabularies together contain more than 1,000 unique phrases, only the first 1,000 will be used. | + +Mux supports the following languages and corresponding language codes for live generated captions. 
+
+| Language | Language Code |
+| :-- | :-- |
+| English | `"en"` |
+| Spanish | `"es"` |
+| Italian | `"it"` |
+| Portuguese | `"pt"` |
+| German | `"de"` |
+| French | `"fr"` |
+
+Locale codes such as `"en-US"` or `"es-MX"` are accepted and will be parsed down to their language code (e.g., `"en-US"` → `"en"`).
+
+# Step 1A: Create a live stream in Mux
+
+Create a live stream using the Live Stream Creation API. Let Mux know that you want auto-generated live closed captions.
+
+### API Request
+
+```json
+POST /video/v1/live-streams
+
+Request Body
+{
+  "playback_policy" : ["public"],
+  "generated_subtitles": [
+    {
+      "name": "English CC (auto)",
+      "passthrough": "English closed captions (auto-generated)",
+      "language_code": "en",
+      "transcription_vocabulary_ids": ["4uCfJqluoYxl8KjXxNF00TgB56OyM152B5ZR"]
+    }
+  ],
+  "new_asset_settings" : {
+    "playback_policy" : ["public"]
+  }
+}
+```
+
+### API Response
+
+```json
+Response
+{
+  "data": {
+    "stream_key": "5bd28537-7491-7ffa-050b-bbb506401234",
+    "playback_ids": [
+      {
+        "policy": "public",
+        "id": "U00gVu02hfLPdaGnlG1dFZ00ZkBUm2m0"
+      }
+    ],
+    "new_asset_settings": {
+      "playback_policies": [
+        "public"
+      ]
+    },
+    "generated_subtitles" : [
+      {
+        "name": "English CC (auto)",
+        "passthrough": "English closed captions (auto-generated)",
+        "language_code": "en",
+        "transcription_vocabulary_ids": ["4uCfJqluoYxl8KjXxNF00TgB56OyM152B5ZR"]
+      }
+    ],
+    "id": "e00Ed01C9ws015d5SLU00ZsaUZzh5nYt02u",
+    "created_at": "1624489336"
+  }
+}
+```
+
+# Step 1B: Configure live captions for an existing live stream
+
+Use the Generated Subtitles API to configure generated closed captions for an existing live stream. Live closed captions cannot be configured for an active live stream. 
+ +### API Request + +```json +PUT /video/v1/live-streams/{live_stream_id}/generated-subtitles + +Request Body +{ + "generated_subtitles": [ + { + "name": "English CC (auto)", + "passthrough": "{\"description\": \"English closed captions (auto-generated)\"}", + "language_code": "en", + "transcription_vocabulary_ids": ["4uCfJqluoYxl8KjXxNF00TgB56OyM152B5ZR"] + } + ] +} +``` + +### API Response + +```json +Response +{ + "data": { + "stream_key": "5bd28537-7491-7ffa-050b-bbb506401234", + "playback_ids": [ + { + "policy": "public", + "id": "U00gVu02hfLPdaGnlG1dFZ00ZkBUm2m0" + } + ], + "new_asset_settings": { + "playback_policies": [ + "public" + ] + }, + "generated_subtitles": [ + { + "name": "English CC (auto)", + "passthrough": "{\"description\": \"English closed captions (auto-generated)\"}", + "language_code": "en", + "transcription_vocabulary_ids": ["4uCfJqluoYxl8KjXxNF00TgB56OyM152B5ZR"] + } + ] + } +} +``` + +# Step 2: Start your live stream + +- At the start of the Live Stream, two text tracks will be created for the active asset, with `text_source` attributes of `generated_live` and `generated_live_final`, respectively. + +- While the stream is live, the `generated_live` track will be available and include predicted text for the audio. + +- At the end of the stream, the `generated_live_final` track will transition from the preparing to ready state; this track will include finalized predictions of text and result in higher-accuracy, better-timed text. + +- After the live event has concluded, the playback experience of the asset created will only include the more accurate `generated_live_final` track, but the sidecar VTT files for both tracks will continue to exist. + + + +To prevent future connections to your live stream from receiving auto-generated closed captions, update the `generated_subtitles` configuration to `null` or an empty array. 
+
+### API Request
+
+```json
+PUT /video/v1/live-streams/{live_stream_id}/generated-subtitles
+
+Request Body
+{
+  "generated_subtitles" : []
+}
+```
+
+
+
+### Update phrases in a transcription vocabulary
+
+Phrases can be updated at any time, but the updates won't take effect for active live streams that have auto-generated live closed captions enabled and the transcription vocabulary applied. If updates are made while a live stream is active, they will not be applied until the next time the stream is active.
+
+### API Request
+
+```json
+PUT /video/v1/transcription-vocabularies/$ID
+{
+  "phrases": ["Demuxed", "HLS.js"]
+}
+```
+
+
+
+### What happens if my live stream has participants speaking languages other than the caption stream I've chosen?
+
+If you've added an auto-generated English caption stream and your audio contains a non-English language, we will attempt to auto-generate captions for all the content in English. For example, if French and English are spoken, we will create captions for the French language content using the English model and the output would be incomprehensible.
+
+### When can I edit my live caption configuration?
+
+Only when the live stream is idle. You cannot make any changes while the live stream is active.
+
+### How do I download my auto-generated closed caption track?
+
+```json
+https://stream.mux.com/{PLAYBACK_ID}/text/{TRACK_ID}.vtt
+```
+
+More details can be found at [Advanced Playback features](/docs/guides/play-your-videos#advanced-playback-features)
+
+### Do live captions work with low latency live streams?
+
+Not at this time.
diff --git a/docs/guides/developer/add-live-captions.mdx b/docs/guides/developer/add-live-captions.mdx
new file mode 100644
index 0000000..ef206d0
--- /dev/null
+++ b/docs/guides/developer/add-live-captions.mdx
@@ -0,0 +1,230 @@
+---
+title: Add your own live closed captions
+product: video
+description: >-
+  Learn how to add your own closed captions to your live stream for
+  accessibility. 
+--- + +## Why are closed captions important? + +Closed captions refers to the visual display of the audio in a program. Closed captions make video more accessible to people who are deaf or hard of hearing, but the benefits go beyond accessibility. Closed captions empower your viewers to consume video content in whichever way is best for them, whether it be audio, text, or a combination. + +## Supported live caption formats + +There are many types of closed caption sources, and each streaming standard may use a different format for embedding captions on the output. Mux supports receiving closed captions embedded in the H.264 video stream using the CEA-608 standard for a single language. + +CEA-608 stems from the analog era where closed captions data was carried directly in the transmission in a line of the video content that wasn’t displayed unless the decoder was told to look for it. These were often referred to as “Line 21” captions. CEA-608 is still the primary standard for transmitting closed captions within the same stream as audio/video content. + +Most major live caption providers (e.g. AI-Media, EEG Falcon, 3Play, Verbit) will support the CEA-608 standard. Mux will translate the CEA-608 captions into WebVTT that will be delivered as part of the HLS stream/manifest, in a standard HLS-supported manner. We will continue to evaluate demand for supporting captions for multiple languages and other caption formats. + +## Integrate your own closed captions + +Add the `embedded_subtitles` array at time of stream creation or to an existing live stream. Closed captions are a type of subtitle. The resulting Asset's subtitle text track will have `closed_captions: true` set. 
+ +| Input Parameters | Type | Description | +| ---------------- | ------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| name | string | The name of the track containing a human-readable description. This value must be unique across all the text type and subtitles text type tracks. Defaults to `language_code` if not provided. | +| passthrough | string | Arbitrary metadata set for the live closed caption track. Max 255 characters. | +| language_code | string | The language of the closed caption stream. Value must be BCP 47 compliant. Defaults to `en` if not provided | +| language_channel | string | CEA-608 caption channel to read caption data from. Possible values: "cc1" | + +### Step 1A: Create a live stream in Mux + +Create a live stream using the Live Stream Creation API. Let Mux know that closed captions will be embedded in the RTMP stream at time of live stream creation. 
+ +#### API Request + +```json +POST /video/v1/live-streams + +Request Body +{ + "playback_policy" : [ + "public" + ], + "embedded_subtitles" : [ + { + "name": "English CC", + "passthrough": "English closed captions", + "language_code": "en-US", + "language_channel" : "cc1" + } + ], + "new_asset_settings" : { + "playback_policy" : [ + "public" + ] + } +} +``` + +#### API Response + +```json +{ + "data": { + "stream_key": "5bd28537-7491-7ffa-050b-bbb506401234", + "playback_ids": [ + { + "policy": "public", + "id": "U00gVu02hfLPdaGnlG1dFZ00ZkBUm2m0" + } + ], + "new_asset_settings": { + "playback_policies": ["public"] + }, + "embedded_subtitles": [ + { + "name": "English CC", + "passthrough": "English closed captions", + "language_code": "en-US", + "language_channel": "cc1" + } + ], + "id": "e00Ed01C9ws015d5SLU00ZsaUZzh5nYt02u", + "created_at": "1624489336" + } +} +``` + +### Step 1B: Configure live closed captions for an existing live stream + +Use the Live Stream Closed Captions API to configure closed captions to an existing live stream. Live closed captions can not be configured to an active live stream. + +#### API Request + +```json +PUT / video/v1/live-streams/{live_stream_id}/embedded-subtitles + +Request Body +{ + "embedded_subtitles": [ + { + "name": "en-US", + "language_code": "en-US", + "language_channel": "cc1" + } + ] +} +``` + +#### API Response + +```json +Response +{ + "data": { + "stream_key": "5bd28537-7491-7ffa-050b-bbb506401234", + "playback_ids": [ + { + "policy": "public", + "id": "U00gVu02hfLPdaGnlG1dFZ00ZkBUm2m0" + } + ], + "new_asset_settings": { + "playback_policies": [ + "public" + ] + }, + "embedded_subtitles" : [ + { + "name": "English", + "language_code": "en-US", + "language_channel": "cc1" + } + ], + "id": "e00Ed01C9ws015d5SLU00ZsaUZzh5nYt02u", + "created_at": "1624489336" + } +} +``` + +### Step 2: Create an event with your preferred closed caption vendor + +Log into your preferred closed caption provider account (e.g. 
AI-Media, 3Play, Verbit) and create an event that needs to be captioned. To create an event, you will need to provide the following inputs + +- Start date and time +- Language of audio to be captioned +- Destination Stream URL and Stream Key (Mux). The caption vendor will send video with captions encoded via the 608 standard to this destination. + +| RTMP Server URL | Description | Common Applications | +| ----------------------------------- | -------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------- | +| rtmp://global-live.mux.com:5222/app | Mux's standard RTMP ingest URL. Compatible with the majority of streaming applications and services. | Open Source RTMP SDKs, most [app-store streaming applications](https://mux.com/blog/guide-to-rtmp-broadcast-apps-for-ios/). | +| rtmps://global-live.mux.com:443/app | Mux's secure RTMPS ingest URL. Compatible with fewer streaming applications, but offers a higher level of security. | OBS, Wirecast, Streamaxia RTMP SDKs | + +Mux's global RTMP or RTMPS ingest urls will connect you to the closest ingest region. While these ingest URLs typically provide optimal performance, you can also select a specific region using our [regional ingest URLs.](/docs/guides/configure-broadcast-software#available-ingest-urls). 
+
+Upon successful event creation, the closed caption provider will provide the following:
+
+- Stream URL
+- Stream Key
+
+Learn more about:
+
+- [How to set up live captions with AI-Media EEG Falcon](https://www.ai-media.tv/wp-content/uploads/Manual_Falcon-User-Guide-.pdf)
+- [How to set up live captions with 3Play Media](https://support.3playmedia.com/hc/en-us/articles/360048839533-Live-Captions-Schedule-Live-Captions-for-an-RTMP-Stream)
+- [How to set up live captions with Verbit](https://verbit-ai.zendesk.com/hc/en-us/articles/4403013880594-Verbit-s-RTMP-Solution-for-Livestreaming-Events)
+
+### Step 3: Point your RTMP stream to your caption provider
+
+Configure your video encoder with the Stream URL and Stream Key provided by the closed caption provider in Step 2.
+
+### Step 4: Start your live stream
+
+When the stream goes live, a new live asset is created and tracks will be created for the corresponding captions.
+
+### Step 5: Monitor closed caption stream health
+
+When your stream is live, visit the Live Health Dashboard to monitor closed caption stream health. The dashboard will show whether Mux is receiving closed captions. More details can be found at [Debug live stream issues](/docs/guides/debug-live-stream-issues)
+
+## Update stream to not expect live captioning for future connections
+
+Let Mux know to not expect closed captions when the live stream starts again. This can be done by configuring your live stream to not have any captions. This request can only be made while the live stream is idle.
+
+### API Request
+
+```json
+PUT /video/v1/live-streams/{live_stream_id}/embedded-subtitles
+
+Request Body
+{
+  "embedded_subtitles" : []
+}
+```
+
+## FAQ
+
+### Are there any language restrictions?
+
+Yes. The 608 standard only supports the following languages: English, Spanish, French, German, Dutch, Portuguese, and Italian. We currently only support live closed captions for a single language. 
We will evaluate supporting multiple languages based off of customer feedback. + +### Is the 608 standard supported by my closed caption vendor? + +Caption vendors known to support the 608 captions: 3Play, AI-Media EEG Falcon, Verbit + +Caption vendors known to not support 608 captions: Rev.ai + +### When can I edit my live closed caption configuration? + +You can only edit your live caption configuration while the live stream is idle; you cannot make any changes while the live stream is active. + +### Will formatting be preserved? + +Mux will translate the CEA-608 captions into WebVTT. Though Mux attempts to preserve the caption formatting, some formatting may be lost. + +### Does live captions work with audio-only? + +No. If you have a use case for this, please let us know. + +### How do I download my closed caption track? + +```json +https://stream.mux.com/{PLAYBACK_ID}/text/{TRACK_ID}.vtt +``` + +More details can be found at [Advanced Playback features](/docs/guides/play-your-videos#advanced-playback-features) + +### Does live closed captions work with low latency? + +Not at this time. If you have a use case for this, please let us know. diff --git a/docs/guides/developer/add-subtitles-to-your-videos.mdx b/docs/guides/developer/add-subtitles-to-your-videos.mdx new file mode 100644 index 0000000..d3fd6aa --- /dev/null +++ b/docs/guides/developer/add-subtitles-to-your-videos.mdx @@ -0,0 +1,121 @@ +--- +title: Add subtitles/captions to videos +product: video +description: >- + Learn how to add subtitles or captions to your videos for accessibility and + multi-language support. +--- + +## Introduction to subtitles and captions + +Subtitles and captions allow for text overlays on a video to be shown at a specified time. First, let's clarify these two terms which are often used interchangeably. + +* **Subtitles** refers to text on screen for translation purposes. +* **Captions** refers to text on screen for use by deaf and hard of hearing audiences. 
If you see text like `[crowd cheers]`, you are seeing *captions* on your screen. + +In any case, Mux supports both in the form of [WebVTT](https://www.w3.org/TR/webvtt1/) or [SRT](https://en.wikipedia.org/wiki/SubRip) and these files can be human or computer generated. From Mux's perspective these files are converted into "text tracks" associated with the asset. If the text track provided is *captions* then supply the attribute `closed_captions: true` when creating the text track. + +The rest of this guide will use the term "subtitles" to refer to adding text tracks that can be either subtitles or captions. + +## How to add subtitles to your video + +You can add subtitles to any video asset in Mux. To add subtitles, you will need to provide either a `SRT` or `WebVTT` file containing the subtitle information to the Mux API. + +Here's an example of what a WebVTT file looks like: + +```html +00:28.000 --> 00:30.000 position:90% align:right size:35% +...you have your robotics, and I +just want to be awesome in space. + +00:31.000 --> 00:33.000 position:90% align:right size:35% +Why don't you just admit that +you're freaked out by my robot hand? +``` + + + Mux can also [automatically generate your captions](/docs/guides/add-autogenerated-captions-and-use-transcripts) + + +## Create a subtitle track + +When you create an asset in Mux, you can also include text tracks as part of the input. There's no limit on the number of tracks you can include when you make the request. + +The first input in your array of inputs must be the video file. After that, the caption tracks should be appended to the list, each including the source URL to the caption track, plus additional metadata. 
Here's an example of the order to use here: + +```json +{ + "inputs": [ + { + "url": "{VIDEO_INPUT_URL}" + }, + { + "url": "https://tears-of-steel-subtitles.s3.amazonaws.com/tears-en.vtt", + "type": "text", + "text_type": "subtitles", + "closed_captions": false, + "language_code": "en", + "name": "English" + }, + { + "url": "https://tears-of-steel-subtitles.s3.amazonaws.com/tears-fr.vtt", + "type": "text", + "text_type": "subtitles", + "closed_captions": false, + "language_code": "fr", + "name": "Français" + } + ], + "playback_policies": [ + "public" + ], + "video_quality": "basic" +} +``` + +This will enable WebVTT subtitles in the stream URL, which can then be used by many different players. + +You can also add text tracks using the create asset track. This can be helpful for adding captions to live stream recordings once they have finished, or if you need to update or remove additional languages for a video after it was first added to Mux. Assets must be in the `ready` state before you can use the create asset track API to add a text track. + +## Showing subtitles by default + +To show subtitles by default, you can include an additional playback modifier with the HLS stream request like this: + +``` +https://stream.mux.com/{PLAYBACK_ID}.m3u8?default_subtitles_lang=en +``` + +The `default_subtitles_lang` playback modifier requires a valid [BCP-47](https://tools.ietf.org/rfc/bcp/bcp47.txt) language value to set the DEFAULT attribute value to YES for that language. +If there's no exact language match, the closest match of the same language is selected. + +For instance, subtitles text track with language `en-US` is selected for `default_subtitles_lang=en`. This helps with regional variations and gives more flexibility. + +Video players play the default text track for autoplaying videos even when muted. + + + +If you are using [signed playback URLs](/docs/guides/secure-video-playback) make sure you include the extra parameter in your signed token. 
+ + +## Accessibility + +The [A11Y project](https://www.a11yproject.com/) is a community-driven effort to make digital accessibility easier and includes checking videos for accessibility. + +With Mux videos, the `jsx-a11y/media-has-caption` rule fails because it looks for a `` attribute on the player. However, Mux videos include subtitles with HLS manifest when you request the stream. +If you have added text tracks to your Mux videos you can safely disable this linting rule and still provide accessible video. + +## Workflow for generating subtitles + +You may want to generate subtitle tracks for your Mux assets. These might be machine generated or human-generated by yourself or a 3rd party. Some example third-party services you might use to do this are [Rev.com](https://www.rev.com/) and [Simon Says](https://www.simonsays.ai/). + + + Mux can also [automatically generate your captions](/docs/guides/add-autogenerated-captions-and-use-transcripts) + + +Using static renditions and webhooks from Mux, your automated flow might look like this: + +1. Create a Mux asset (either with a Direct Upload, an `input` parameter, or the recording of a live stream). +2. Add `mp4_support` to your asset either at asset creation time or add `mp4_support` to your asset if it is already created. See [Download your videos guide](/docs/guides/download-your-videos) for details about how to do this. +3. Wait for the `video.asset.static_renditions.ready` webhook. This lets you know that the mp4 rendition(s) are now available. +4. Fire off a request to the 3rd party you are using for creating subtitles. You should pass along the mp4 file and get back either an SRT file or WebVTT file when the subtitle track is ready. +5. Wait for the subtitle track to be ready, when it is, make an API request to add this text track to your asset, as described above. 
diff --git a/docs/guides/developer/add-video-metadata.mdx b/docs/guides/developer/add-video-metadata.mdx new file mode 100644 index 0000000..975d049 --- /dev/null +++ b/docs/guides/developer/add-video-metadata.mdx @@ -0,0 +1,171 @@ +--- +title: Add metadata to your videos +product: video +description: >- + Learn how to add titles and other metadata to your videos for better organization, + discoverability, and actionable analytics. +steps: + - title: What is asset metadata? + description: Learn what asset metadata is, and how it can help you organize your video assets + topic: start + - title: Manage metadata through the Dashboard + description: Learn how to add and modify asset metadata when you're using the Mux dashboard + topic: info + - title: Manage metadata through the API + description: Learn how to add and modify asset metadata when you're using the Mux Video API + topic: API +--- + + + +Metadata provides additional descriptive information about your video assets. Mux currently supports three key optional metadata fields that help you organize and manage your video content across the API and dashboard: + +* `title`: A descriptive name for your video content. We limit this to 512 code points. +* `creator_id`: A value you set to identify the creator or owner of the video. We limit this to 128 code points. +* `external_id`: Another value you set to reference this asset in your system, such as the video ID in your database. We limit this to 128 code points. + + + **What is a code point?** Many of us use the term "characters" when referring to letters in a string, but when storing those characters some cost more than others. This cost is called a "code point".

While each ASCII character can be stored with a single code point, some Unicode characters, such as `é`, are stored as two code points: one for the `e`, and one for the combining accent (` ́`). You can easily test this in JavaScript. JavaScript's `.length` property counts code points, not characters, so `"é".length` will be `2`.
+
+ +Here's an example of what a `meta` object might look like: + +```json +{ + "title": "Guide: Adding metadata to videos", + "creator_id": "user_23456", + "external_id": "cdef2345" +} +``` + + + **Note:** Do not include personally identifiable information in these fields. They will be accessible by browsers to display player UI. + + +Once set on an asset, you'll find this metadata on assets across the Mux API and dashboard, including asset management, [engagement](/beta/engagement) and [data](/data). + + + +We've deeply integrated asset metadata throughout the Mux dashboard: + + + +* When uploading, we use your filename as the title - but you can change it at any time +* For live streams, you can set the default metadata for recordings on the stream details page +* When viewers watch your content, all metadata flows into Mux Data and the engagement dashboard - making it easy to find videos by title, or filter by creator id. + + + +## Create an asset with metadata + +When creating an asset you can include your metadata in the body of the request. + +#### Example request + +```json +// POST /video/v1/assets +{ + "inputs": [ + { + "url": "https://storage.googleapis.com/muxdemofiles/mux.mp4" + } + ], + "playback_policies": [ + "public" + ], + "video_quality": "basic", + "meta": { + "title": "Mux demo video", + "creator_id": "abcd1234", + "external_id": "bcde2345" + } +} +``` + +#### Need more help? + +- Check out our [getting started guide](/docs/core/stream-video-files) for a more thorough introduction to creating assets. +- Check out our Create an asset for a list of all possible parameters. + +## Update the metadata on an asset + +Once an asset has been created the metadata can be changed at any time. Make a request to update the asset and include your metadata in the request body. 
+ +#### Example request + +```json +// PATCH /video/v1/assets/{ASSET_ID} +{ + "meta": { + "title": "Updated Mux demo video", + "creator_id": "cdef3456", + "external_id": "defg4567" + } +} +``` + +#### Need more help? + +- Check out our Update asset API reference for more details. + +## Directly upload a video with metadata + +Direct uploads are a [multi-step process](/docs/guides/upload-files-directly), and metadata should be attached in the very first step. When creating your authenticated URL in that first step you can include your metadata alongside the rest of the asset settings in `new_asset_settings`. + +#### Example Request + +```json +// POST /video/v1/uploads +{ + "new_asset_settings": { + "playback_policies": [ + "public" + ], + "video_quality": "basic", + "meta": { + "title": "Mux demo video", + "creator_id": "abcd1234", + "external_id": "bcde2345" + } + }, + "cors_origin": "*", +} +``` + +#### Need more help? + +- Check out our [direct upload guide](/docs/guides/upload-files-directly) for details on every step. + +## Set live stream metadata defaults for creating assets + +Mux automatically creates a new asset each time you connect to a live stream. When creating or updating your live stream you can include metadata that gets automatically set on the generated assets in the request body, under `new_asset_settings`. + +#### Example "Create Live Stream" request + +```json +// POST /video/v1/live-streams +{ + "playback_policies": [ + "public" + ], + "new_asset_settings": { + "playback_policies": [ + "public" + ], + }, + "meta": { + "title": "Mux demo live stream recording", + "creator_id": "abcd1234", + "external_id": "bcde2345" + } +} +``` + +#### Need more help? + +- Check out our "[start live streaming](/docs/guides/start-live-streaming)" guide for a deeper walkthrough. 
diff --git a/docs/guides/developer/add-watermarks-to-your-videos.mdx b/docs/guides/developer/add-watermarks-to-your-videos.mdx new file mode 100644 index 0000000..6b0726f --- /dev/null +++ b/docs/guides/developer/add-watermarks-to-your-videos.mdx @@ -0,0 +1,103 @@ +--- +title: Add watermarks to your videos +product: video +description: >- + This guide will show how to add watermarks (overlays) to your videos. + Watermarks can be added to assets, live streams and direct uploads. +--- + +A watermark is an image overlaid on a video, often used to brand a video or visually label a specific version of a video. + + +You can add a watermark to your video using the `overlay_settings` in the asset creation API. The first input in your array of inputs must be the video file you want to apply the watermark to, and the second should be the URL to the source watermark image along with placement details. Multiple watermarks are possible using additional inputs as described in our API documentation for creating an asset. + + + Valid file types for watermarks are `.png` and `.jpg`. + + Other file types such as `.gif`, `.webp`, and `.svg` are not supported at this time. + + +For a live stream, the `overlay_settings` must be embedded under the `input` array within `new_asset_settings` in the live stream creation API, and the overlays will apply both to playback through the live stream's playback IDs _and_ all assets created from the live stream. The watermark image will be retrieved from this URL at the start of each live stream, so you should make sure that the image will be available at that URL for as long as you plan to use the live stream. + + + +## Positioning with percents vs. pixels + +The overlay settings are made to help you position and size a watermark consistently no matter what the size or shape of the input video. When setting the width, height, and margins you have the option of using either percents or pixels. 
+ +With percent values the watermark `width` and `horizontal_margin` will be relative to the width of the video while the `height` and `vertical_margin` will be relative to the height of the video. For example if you set the watermark `horizontal_margin` to 10% for a video that is 1920 pixels wide, the watermark will be 192 pixels from the edge. + +```json +{ + "inputs": [ + { + "url": "{VIDEO_INPUT_URL}" + }, + { + "url": "{WATERMARK_URL}", + "overlay_settings": { + "vertical_align": "top", + "vertical_margin": "10%", + "horizontal_align": "left", + "horizontal_margin": "10%" + } + } + ], + "playback_policies": ["public"] +} +``` + + + +While the result of using percents is probably easiest to understand, the one shortcoming is positioning a watermark with an exact margin. For example you may want your horizontal and vertical margins to be equal, or for there to be the same exact horizontal margin for vertical videos as with horizontal videos. Both of those examples can be a challenge with percents, where the actual result can be different depending on the width and height of the video. + +Setting margins with pixels allows you to get exact with your margins, widths, and heights. However, you can't always control the size of the input video, and a watermark that is 80px wide would look very different on a video that is 960 pixels wide compared to a video that is 1920 pixels wide. For that reason, when you use pixel values in your overlay settings they will always be applied as if the video is first scaled to fit 1920x1080 for horizontal videos or 1080x1920 for vertical videos. So in the previous example, the watermark would be 80px wide on the 1920px wide video, and 40px wide on the 960px wide video. 
+ +```json +{ + "inputs": [ + { + "url": "{INPUT_URL}" + }, + { + "url": "{WATERMARK_URL}", + "overlay_settings": { + "width": "80px", + "vertical_align": "top", + "vertical_margin": "40px", + "horizontal_align": "left", + "horizontal_margin": "40px" + } + } + ], + "playback_policies": ["public"] +} +``` + + + +The reason behind this is that your watermark should look the same no matter what the original size of the input video, and videos are most often scaled to fit the player window or the screen of the device. + +## Center a watermark + + + +To center a watermark on the video, simply set `vertical_align` to "middle" and `horizontal_align` to "center". + +```json +{ + "inputs": [ + { + "url": "{INPUT_URL}" + }, + { + "url": "{WATERMARK_URL}", + "overlay_settings": { + "vertical_align": "middle", + "horizontal_align": "center" + } + } + ], + "playback_policies": ["public"] +} +``` \ No newline at end of file diff --git a/docs/guides/developer/adjust-audio-levels.mdx b/docs/guides/developer/adjust-audio-levels.mdx new file mode 100644 index 0000000..765d558 --- /dev/null +++ b/docs/guides/developer/adjust-audio-levels.mdx @@ -0,0 +1,80 @@ +--- +title: Adjust audio levels +product: video +description: >- + This guide will show how to adjust the audio level to your videos. Audio + normalization can be added to on-demand assets. +--- +## What is audio normalization? + +Here at Mux, When we refer to audio normalization, we are referring to loudness normalization. Loudness normalization adjusts the recording based on perceived loudness. + +Below, is an audio stream _before_ normalization +Audio norm before + +An audio stream _after_ normalization + +Audio norm after + +LUFS, which stands for Loudness Units relative to Full Scale, are a measurement of loudness over the entire length of an audio stream. This is the measurement used in the normalization process. 
+The whole goal of normalizing is adjusting the gain to bring the average amplitude to a target level; the "norm".
+
+## When to use audio normalization
+
+The main use of audio normalization is to standardize the perceived loudness of your assets. Whether to use normalization at all depends on the content.
+When audio gain is normal and audio quality is high, normalization can be beneficial. Please note however, similar to other video and audio processing, this processing on your audio is going to change it in some way.
+So make an informed decision on whether to use this feature or not.
+
+## How to turn on audio normalization
+
+At this moment, the only way to enable audio normalization on a Mux asset is through the create asset endpoint. You cannot update this after the asset has been created. This option also only applies to on-demand assets (audio-only included) but not live streams.
+
+To enable audio normalization on your asset ingest, set the `normalize_audio` key to `true` in the body of your asset creation. By default, this boolean is set to false.
+
+A typical request and response might look something like this:
+
+### Example request
+
+```bash
+curl https://api.mux.com/video/v1/assets \
+  -H "Content-Type: application/json" \
+  -X POST \
+  -d '{
+    "inputs": [
+      {
+        "url": "https://example.com/myVideo.mp4"
+      }
+    ],
+    "playback_policies": ["public"],
+    "video_quality": "basic",
+    "normalize_audio": true
+  }' \
+  -u ${MUX_TOKEN_ID}:${MUX_TOKEN_SECRET}
+```
+
+### Example response
+
+```json
+{
+  "data": {
+    "status": "preparing",
+    "playback_ids": [
+      {
+        "policy": "public",
+        "id": "006Hx6bozgZv2sL9700Y8TT02MKdw4nq01ipMVawIGV9j000"
+      }
+    ],
+    "normalize_audio": true,
+    "mp4_support": "none",
+    "master_access": "none",
+    "id": "jlJydoVkYh01Z3JrLr02RGcp4mJdLvPRbk9n00000",
+    "video_quality": "basic",
+    "created_at": "1612979762"
+  }
+}
+```
+
+
+## Target loudness
+
+Our target loudness value for audio normalization in our video stack is currently -24 LUFS.
So, if possible, master your audio with this value in mind. diff --git a/docs/guides/developer/build-a-custom-dashboard.mdx b/docs/guides/developer/build-a-custom-dashboard.mdx new file mode 100644 index 0000000..44404df --- /dev/null +++ b/docs/guides/developer/build-a-custom-dashboard.mdx @@ -0,0 +1,310 @@ +--- +title: Build a custom dashboard +product: data +description: Create custom dashboards in Mux Data to visualize and track the metrics that matter most to your video performance. Custom dashboards allow you to combine multiple metrics, apply filters, and organize data in a way that best serves your monitoring and analysis needs. + +--- + +## What are Custom Dashboards? + +Custom dashboards provide a centralized view of your video performance data through configurable components. You can create dashboards with multiple visualization types, apply filters, and customize time periods to focus on specific aspects of your video performance. + +### Key features: + +* Four component types: Timeseries, Bar charts, Lists, and Metric numbers +* 10 components per Dashboard +* Dashboard and component-level filtering +* Flexible time period selection +* Comparison intervals +* Dashboard sharing and duplication + + +Custom Dashboards are available on **Mux Data Media** plans. Learn more about [Mux Data Plans](https://data.mux.com/pricing) or [contact support](https://mux.com/support). + + +## Creating a Dashboard + +To create a new custom dashboard: + +1. Navigate to the **Dashboards** section in Mux Data +2. Select **Create Dashboard** from the left menu or main window +3. Enter a descriptive name for your dashboard +4. Select **Create Dashboard** + +Your new dashboard will be created and ready for customization with components and filters. 
+ + + +## Dashboard Configuration + +### Time Periods + +Configure the time period for your entire dashboard to focus on specific date ranges: + +* **Default**: Last 24 hours +* **Relative periods**: Choose from predefined options like last 7 days or last 30 days +* **Specific periods**: Set exact start and end dates for consistent historical analysis + +Time period changes apply to all dashboard components. Save your dashboard to preserve time period settings. + + +Custom Dashboards are currently only available for the standard 100 days of data. Long-term Metrics are not yet available with Custom Dashboards. + + +### Dashboard Filters + +Dashboard filters apply to all components within the dashboard providing consistent data filtering across visualizations. + +#### Dimension Filters + +Filter by dimension values such as country, operating system, or player version: + +1. Select the **Filter Dimensions** button +2. Search for and select the dimension type +3. Choose specific values to include or exclude +4. Multiple values use OR logic (e.g., selecting iOS and Android shows views from either platform) + +New Dashboard creation screen in Mux, showing a dimension filter panel with “Device Model” selected. Viewer Device Model is filtered to “iPhone,” and view counts for different iPhone models are listed. + +#### Metric Filters + +Filter by metric values to focus on specific performance thresholds: + +1. Select the **Filter Metrics** button +2. Choose a metric (e.g., rebuffering percentage) +3. Select an operator (≤, ≥, \=, etc.) +4. Set the value threshold + +Metrics filter interface in Mux dashboard builder, showing a filter applied to only include results where Rebuffer Percentage is greater than 5%. + +Filter changes can be previewed without saving. Click **Save** at the bottom of the dashboard to apply filters permanently. + +### Component Filters + +Components can have their own filters in addition to Dashboard filters. 
Dashboard filters act as parent filters affecting all components. Component level filters are additive to Dashboard filters but only apply to the component. + + +If dashboard and component filters conflict, the component may show no data. Ensure filter combinations are logical and compatible. + + +## Dashboard Components + +Components visualize individual metrics within your dashboard. Each component type serves different analytical purposes and can be customized with specific filters and options. + +1. To add a Dashboard component to a new Dashboard, select the Create Component button. +2. To add a Dashboard component to an existing Dashboard, select the Edit Icon next to the date selector. + +### Metric Numbers + +Display key performance indicators in a prominent metrics bar at the top of your dashboard. Up to 5 Metric numbers can be added per dashboard. Metric numbers (up to 5\) collectively count as 1 component. + +A dashboard titled “Platform Player Key Metrics” displaying metrics for the last 24 hours, including Views, Unique Viewers, Video Startup Failure Percentage, Playback Failure Percentage, and Rebuffer Percentage. + +#### Configuration: + +1. Select **Metric Number** as the component type +2. Choose the metric to display +3. Provide a descriptive name (50 character limit) +4. **Optional**: Add a comparison time period to show rate of change +5. **Optional**: Apply component-specific dimension or metric filters + + +Metric number components appear in creation order and cannot be reordered. + + +### Timeseries + +Line graph showing “Video Startup Time” over a 24-hour period in Mux, comparing performance for “Last 24 hours” (orange line) versus “One day ago” (purple dashed line). + +Track metrics over time to identify trends, patterns, and anomalies in your video performance. + +#### Configuration: + +1. Select **Timeseries** as the component type +2. Choose the metric to chart over time +3. Set a descriptive component name +4. 
Select component size (half or full width) +5. **Optional**: Choose either: + * **Comparison interval**: Compare current period with a previous timeframe + * **Breakdown values**: Chart multiple values for a single dimension type (e.g., different device types) +6. **Optional**: Apply component-specific filters + + +Comparison intervals and breakdown values are mutually exclusive options. Also note that breakdown dimensions will take priority over dashboard and component filters of the same dimension. + + +### Bars + +Bar chart titled “Video Startup Failure Percentage” broken down by browser. Chrome has the highest failure rate, followed by Firefox, Safari, and Edge. A tooltip highlights Firefox with a failure percentage of 1.39%. + +Compare performance across different dimension values using horizontal bars. + +#### Configuration: + +1. Select **Bars** as the component type +2. Choose the metric to measure in the bars visualization +3. Select component size (half or full width) +4. Choose breakdown dimension type and values that you wish to display +5. **Optional**: Add a comparison interval to compare current period with a previous timeframe +6. **Optional**: Apply component-specific filters + + +Breakdown values must come from a single dimension category. + + +### Lists + +Rank and organize data to quickly identify top performers or problem areas. + +Table showing Rebuffer Percentage broken down by operating system. Windows has the highest rebuffer rate at 0.70%, followed by iOS and Android, with directional trend indicators in green or red. + +#### Configuration: + +1. Select **List** as the component type +2. Choose the metric to measure for each list item +3. Select the dimension to list (e.g., player names, video titles) +4. Set sort order (ascending or descending) +5. Specify the number of items to display in the list component +6. Provide a descriptive component name +7. **Optional**: Add a comparison interval +8. 
**Optional**: Apply component-specific filters + + +Lists are only available in half-width size. + + +## Dashboard Management + +### Sharing Dashboards + +When creating a new dashboard, you can choose to share it with everyone in your environment. Public dashboards appear in the Shared folder for all users in your environment. You can change the sharing level at any time from the More Options dropdown. + +All users can view public dashboards. To save an editable version of a public dashboard, create a duplicate (see below). + +### Sharing via Dashboard Link + +Any dashboard can be shared with users who have access to your Mux environment via the dashboard link, even if it's not marked as public. Users who receive a link can: + +- View the dashboard +- Favorite it to save it to their personal list +- Create a duplicate to make their own editable copy (see below) + +### Editing Dashboard Permissions + +Users have the ability to edit dashboards they are the owner of but do not have the ability to edit public dashboards they do not own. Admins have full editing abilities for all dashboards. + +Advanced role-based permissions are coming soon. + +### Favoriting Dashboards + +Favorite personal or shared dashboards to allow quick access to your most frequently used dashboards. You can have up to 20 favorited dashboards across your environments. + +Favorite a dashboard by pressing the favoriting star in the dashboard menu. When a dashboard is favorited, the star will be highlighted and the dashboard will be added to the top of the custom dashboard navigation sidebar in the favorites section. + +Star a custom dashboard by pressing the star icon + +### Saving Dashboard Copies + +Save a modified version without affecting the original: + +1. Make your desired changes to the dashboard +2. Use the **Save As** option in the save menu +3. 
Provide a new name for the copy + +Bottom section of a Mux dashboard displaying two metric widgets: one for “Exits Before Video Start” (line chart) and another for “Rebuffer Percentage” by Windows. Save, Save As, and Cancel buttons appear below. + +### Exporting Dashboards + +Export a dashboard to a PDF to save a snapshot of your dashboard: + +1. Select the **More Options** menu (⋯) next to the favorite button +2. Choose **Export PDF** + +Dropdown menu under the time range selector “Last 24 hours” with options to “Export PDF“, “Duplicate“ or “Delete” the dashboard. + +### Duplicating Dashboards + +Create an exact copy of an existing dashboard: + +1. Select the **More Options** menu (⋯) next to the favorite button +2. Choose **Duplicate** + + +Duplication is not available while a dashboard is being edited. + + +### Deleting Dashboards + +Permanently remove dashboards you no longer need: + +1. Select the **More Options** menu (⋯) next to the favorite button +2. Choose **Delete** +3. Confirm the deletion + + +Deleting a dashboard removes it for all users. Duplicate dashboards are not affected. + + +### Dashboard Navigation + +#### Exploring Metric Details + +Access detailed metric analysis directly from dashboard components: + +1. Select the **Go To Metrics** icon on any component +2. The metrics page opens with: + * Selected filters from your dashboard applied + * The component's metric pre-selected diff --git a/docs/guides/developer/build-a-custom-data-integration.mdx b/docs/guides/developer/build-a-custom-data-integration.mdx new file mode 100644 index 0000000..6e52bef --- /dev/null +++ b/docs/guides/developer/build-a-custom-data-integration.mdx @@ -0,0 +1,61 @@ +--- +title: Build a Custom Integration +product: data +description: If Mux does not have an SDK specific to your player, you may want to build a custom integration. 
+seoTitle: Build a custom data integration for your player +seoDescription: If your player doesn’t have a Mux integration, this article will show you how you can integrate Mux data into your playback environment with a custom integration. +--- + +Mux provides pre-built SDKs and integrations for most major platforms, but there are some platforms for which there is no pre-built integration. In this case, Mux provides core SDKs for multiple languages, including JavaScript, Java, and Objective-C. These core libraries encapsulate the majority of the business and metric calculation logic, while exposing a common API for plugging in individual player integrations. + +# Integration Overview +Mux Data SDKs operate by tracking the playback events that occur through the idea of a `Player`. To Mux, a `Player` is an object that encapsulates the playback of videos, exposing APIs for playback events and retrieving playback state information. + +In most cases, the `Player` is a single object exposed by the player technology. For instance, for our Video.js integration (`videojs-mux`), the `Player` is just the Video.js [Player object](https://docs.videojs.com/player). However, in some scenarios, there may be one or more underlying player instances that are unified through a single composite API/object. In these cases, the `Player` would be that higher-level object. + +There are three major steps for building an integration for a `Player`: +1. Initialize a monitor for the `Player` that is being tracked. +2. Provide a set of callbacks for the core SDK to retrieve player/device information +3. Emit events for each of the important playback events. + +The core SDKs share the above common architecture, but there are differences driven primarily by each programming language. 
The individual documentation for each will describe the exact steps for integration: +- [JavaScript - Building a custom Integration](/docs/guides/data-custom-javascript-integration) +- [Java - Building a custom Integration](/docs/guides/data-custom-java-integration) +- [Objective-C - Building a custom Integration](/docs/guides/data-custom-objectivec-integration) + +Read on for an overview of each of these steps. + +# Initialize a Player monitor +Because each core SDK supports the idea of tracking multiple `Player`s (for instance, if more than one video is being played in the same view/web page), each `Player` must be identifiable with a unique ID. This ID is used when initializing the monitor, as well as when emitting events to the core SDK. + +The first step that a custom integration must do is initialize a monitor for the `Player`. This is done differently for each core SDK, but the goal is just to allow the core library to prepare the state necessary for tracking a `Player`. + +In this step, some information must be provided: + - the `Player` ID + - some integration-specific metadata + - methods to retrieve information from the `Player` (more on this in a later section) + +## Integration-level Metadata +When initializing a monitor for a `Player`, metadata about the integration itself should be passed. The possible fields that should be passed are the following (all are strings): + + - `player_software_name`: the name of the underlying player software (i.e. 'Video.js') + - `player_software_version`: the version of this player software + - `player_mux_plugin_name`: the name of the plugin + - `player_mux_plugin_version`: the version of the plugin + +# Provide Callbacks + +To ease the burden of sending a lot of data with each event that is emitted, the Mux core SDKs accept callbacks that allow the core to retrieve information from the player when necessary. 
The callbacks required differ by core SDK, so read the appropriate section for the core SDK that you are developing with: + +- [JavaScript SDK Callbacks](/docs/guides/data-custom-javascript-integration#provide-callbacks) +- [Java SDK Callbacks](/docs/guides/data-custom-java-integration#provide-callbacks) +- [Objective-C SDK Callbacks](/docs/guides/data-custom-objectivec-integration#provide-callbacks) + +# Emit events + +The majority of the work in an integration is creating and emitting the specific playback events as playback occurs. Most players have a concept of `events` such as `play`, `pause`, `error`, and others, but these events are often named differently depending on the player in use. The Mux core SDKs expect events named in a consistent manner, as defined in [Mux Playback Events](/docs/guides/mux-data-playback-events). + +Each core SDK has a different mechanism for emitting these events, so read the appropriate section for the core SDK that you are developing with: +- [JavaScript SDK Emit Events](/docs/guides/data-custom-javascript-integration#emit-events) +- [Java SDK Emit Events](/docs/guides/data-custom-java-integration#emit-events) +- [Objective-C SDK Emit Events](/docs/guides/data-custom-objectivec-integration#emit-events) diff --git a/docs/guides/developer/configure-broadcast-software.mdx b/docs/guides/developer/configure-broadcast-software.mdx new file mode 100644 index 0000000..269e9a2 --- /dev/null +++ b/docs/guides/developer/configure-broadcast-software.mdx @@ -0,0 +1,143 @@ +--- +title: Configure Broadcast Software +product: video +description: There are a number of popular (and even free) software encoders that you can use with the Mux live streaming API. Hardware encoders that allow for custom RTMP server configuration have similar settings. This guide details how to configure a few common encoders. 
+seoTitle: Configuring your broadcast software for RTMP streams
+seoDescription: Learn about the variety of settings needed to configure your RTMP live video streams originating in desktop broadcasting software like OBS and Streamlabs.
+---
+
+## Overview / configuration term glossary
+
+Most broadcasting software uses some standard set of terms. Mux has chosen a set of terms that are very commonly used.
+
+- **Server URL** - This is the URL of the Mux RTMP server, as listed in the table below.
+- **Stream Key** - The Stream Key is essentially used to authenticate your live stream with the Mux RTMP server. This is your secret key to live streaming. Mux does not use additional authentication.
+
+---
+
+| RTMP Server URL | Description | Common Applications |
+| :-- | :-- | :-- |
+| rtmp://global-live.mux.com:5222/app | Mux's standard RTMP entry point. Compatible with the majority of streaming applications and services | Open Source RTMP SDKs, most [app-store streaming applications](https://mux.com/blog/guide-to-rtmp-broadcast-apps-for-ios/) |
+| rtmps://global-live.mux.com:443/app | Mux's secure RTMPS entry point. Compatible with fewer streaming applications, but offers a higher level of security | OBS, Wirecast, Streamaxia RTMP SDKs |
+
+---
+
+Here is a list of other terms that we have heard:
+
+- **Stream Name** - A common alias and the technically correct term (in the RTMP specification) for *Stream Key*.
+- **Location** or **URL** - Many times, broadcast software that just asks for a location or a URL wants a combination of the *Stream URL* and the *Stream Key* like `rtmp://global-live.mux.com:5222/app/{STREAM_KEY}`. If location or URL are asked for with a stream name/key, then this is an alias for *Server URL*.
+- **FMS URL** - Flash Media Server URL, an alias for *Server URL*.
+
+Seen or heard a term that you don't understand? Ask us! Think we missed something that you know? Leave a comment at the bottom of the page!
+ + + +Mux's RTMP server URL uses port number 5222 and not the standard RTMP port number 1935. If your encoder does not provide a method to change the port number, please contact [support](/support) with your encoder details. + + + +## Recommended encoder settings + +Twitch has a clear and concise [guide to broadcast encoder settings](https://help.twitch.tv/s/article/broadcasting-guidelines?language=en_US). YouTube has [a bit more detailed guide](https://support.google.com/youtube/answer/2853702) as well. Here's a very simple recommendation of where to start, but we do recommend playing with your settings to see what works best for your content: + +### Common +- **Video CODEC** - H.264 (Main Profile) +- **Audio CODEC** - AAC + +### Great - 1080p 30fps +- **Bitrate** - 5000 kbps +- **Keyframe Interval** - 2 seconds + +### Good - 720p 30fps +- **Bitrate** - 3500 kbps +- **Keyframe Interval** - 2 seconds + +### Works - 480p 30fps +- **Bitrate** - 1000 kbps +- **Keyframe Interval** - 5 seconds + + + +You should also consider your available upload bandwidth when choosing an encoder bitrate. For a more reliable connection, we recommend using no more than ~50% of the available upload bandwidth for your live stream ingest. + + +## Alternate ingest protocols + +Mux Video also supports [Secure Reliable Transport (SRT) for receiving live streams](/docs/guides/use-srt-to-live-stream). + +## Available Ingest URLs + +Mux's regional ingest urls let you manually select your ingest region. This may be useful if you notice DNS is not routing your traffic efficiently, or if you would like to manage your own failover process. + +| Region | RTMP Ingest URL | SRT Ingest URL | +| :-- | :-- | :-- | +|Global (Auto-Select) | `rtmp://global-live.mux.com/app` | `srt://global-live.mux.com:6001?streamid={STREAM_KEY}&passphrase={SRT_PASSPHRASE}` | +|U.S. East | `rtmp://us-east.live.mux.com/app` | `srt://us-east.live.mux.com:6001?streamid={STREAM_KEY}&passphrase={SRT_PASSPHRASE}` | +|U.S. 
West | `rtmp://us-west.live.mux.com/app` | `srt://us-west.live.mux.com:6001?streamid={STREAM_KEY}&passphrase={SRT_PASSPHRASE}` | +|Europe | `rtmp://eu-west.live.mux.com/app` | `srt://eu-west.live.mux.com:6001?streamid={STREAM_KEY}&passphrase={SRT_PASSPHRASE}` | + + +All of these RTMP URLs support RTMPS. + +For example, `rtmp://us-east.live.mux.com/app` becomes `rtmps://us-east.live.mux.com/app` + + +### Choosing the right ingest URL + +- If you want Mux to automatically route to the best region, use `global-live.mux.com`. +- If you prefer manual control over routing, use a specific regional ingest URL (e.g., `us-east.live.mux.com`). +- For redundancy, configure your encoder to failover to another regional endpoint. + + +### Using regional ingest URLs in OBS + + +To set up OBS with Mux Live Streaming: +1. Go to: Settings → Stream +2. Select "Custom..." as the service +3. Enter the Ingest URL based on your preferred region + + `rtmps://us-east.live.mux.com/app` + +4. Enter your Stream Key (found in your Mux Live settings) +5. Click "Start Streaming" + +### Building your SRT URL + +Note: Before you use a SRT URL, make sure your encoder supports SRT Caller mode. + +The SRT URL is composed of three parts. + +1. The protocol and host: `srt://us-east.live.mux.com:6001` +2. A streamid query parameter +3. A passphrase query parameter + +Here's an example: +``` +srt://us-east.live.mux.com:6001?streamid=abc-123-def-456&passphrase=GHI789JKL101112 +``` + +For more information on SRT, check out our [Use SRT to live stream](https://www.mux.com/docs/guides/use-srt-to-live-stream) docs. + +## Software encoders + +Any encoder that supports RTMP should work with Mux Video. +- [OBS](https://obsproject.com/) (Free and Open Source) +- [Wirecast](https://www.telestream.net/wirecast/) (Commercial) +- [XSplit](https://www.xsplit.com/broadcaster) (Commercial) +- [vMix](https://www.vmix.com) (Commercial) + +## Hardware encoders + +Any encoder that supports RTMP should work with Mux Video. 
+- [VidiU](https://teradek.com/collections/vidiu-family) +- [DataVideo RTMP Encoders](https://www.datavideo.com/global/category/video-encoder) +- [Magewell Ultra Stream](https://www.magewell.com/ultra-stream) +- [Osprey Talon](https://www.ospreyvideo.com/talon-encoders) (contact [sales@ospreyvideo.com](mailto:sales@ospreyvideo.com) for documentation) +- [Videon](https://support.videonlabs.com/hc/en-us/articles/4408934112659-Stream-to-Mux-RTMP-) + +## Mobile devices (iOS, Android) + +If you just want a pre-built iOS application you can stream from, [check out our write up here](https://mux.com/blog/guide-to-rtmp-broadcast-apps-for-ios/). + +If you want to build your own application, [check out this documentation](/docs/guides/live-streaming-from-your-app). diff --git a/docs/guides/developer/control-playback-resolution.mdx b/docs/guides/developer/control-playback-resolution.mdx new file mode 100644 index 0000000..ceff7f6 --- /dev/null +++ b/docs/guides/developer/control-playback-resolution.mdx @@ -0,0 +1,75 @@ +--- +title: Control playback resolution +product: video +description: Control the video resolution your users receive in order to give the best user experience as well as take advantage of Mux's resolution based pricing. +--- + +# Default playback URL + +The default playback URL will contain all available resolutions of your video. The resolutions available will depend on the video source file. + +By default if the source file contains 1080p or higher, then the highest resolution provided by Mux will be 1080p. If the source file is lower than 1080p, the highest resolution available will be the resolution of the source. + +You can also stream 4K content using Mux Video, which will be delivered at higher resolutions including 2.5K and 4K. For more details see the [guide to streaming 4K videos](/docs/guides/stream-videos-in-4k). + +``` +https://stream.mux.com/{PLAYBACK_ID}.m3u8 +``` + +Use the default playback URL for most use cases. 
The video player will determine the best resolution based on the available bandwidth of the viewer.
+
+# Using playback modifiers to manipulate playback resolution
+
+Mux exposes a set of [playback modifiers](/docs/guides/modify-playback-behavior), which give you extra control over the available resolutions of your content.
+
+## Specify maximum resolution
+
+The playback URL below with the `max_resolution` query parameter modifies the resolutions available for the player to choose from.
+
+```
+https://stream.mux.com/{PLAYBACK_ID}.m3u8?max_resolution=720p
+```
+
+The `max_resolution` parameter can be set to `270p`, `360p`, `480p`, `540p`, `720p`, `1080p`, `1440p`, or `2160p`. You may want to do this in order to reduce your delivery costs, or build a feature to your product where only certain viewers get lower resolution video.
+
+_Please note that not all resolutions are available for all assets. If you specify a max resolution that is not available for the asset, Mux will limit the resolution to the highest resolution available below the one you specified. For example, if you specify `max_resolution=1080p` but the highest resolution available for the asset is 720p, then the manifest will be capped at 720p._
+
+## Specify minimum resolution
+
+The playback URL below with the `min_resolution` query parameter modifies the resolutions available for the player to choose from.
+
+```
+https://stream.mux.com/{PLAYBACK_ID}.m3u8?min_resolution=720p
+```
+
+The `min_resolution` parameter can be set to `270p`, `360p`, `480p`, `540p`, `720p`, `1080p`, `1440p`, or `2160p`. You may want to use this to omit the lowest quality renditions from the HLS manifest when the visual quality of your content is critical to the delivery, for example in live streams where detailed screen share content is present.
+
+_Please note that not all resolutions are available for all assets.
If you specify a min resolution that is not available for the asset, Mux will limit the resolution to the next lowest resolution available above the one you specified. For example, if you specify `min_resolution=270p` but the lowest resolution available for the asset is 360p, then the manifest will start at 360p._

+## Specify rendition order
+
+By default the top resolution in the playlist is one of the middle resolutions. Many players will start with the first one listed so this default behavior strikes a balance by giving the player something that's not too low in terms of quality but also not too high in terms of bandwidth.
+
+You may want to change this behavior by specifying `rendition_order=desc` which will sort the list of renditions from highest (highest quality, most bandwidth) to lowest (lowest quality, least bandwidth). Players that start with the first rendition in the list will now attempt to start playback with the highest resolution. The tradeoff is that users on slow connections will experience increased startup time.
+
+```
+https://stream.mux.com/{PLAYBACK_ID}.m3u8?rendition_order=desc
+```
+
+# Usage with signed URLs
+
+If you are using `signed` Playback IDs according to the [Secure video playback guide](/docs/guides/secure-video-playback) then your playback modifiers must be encoded in the `token` that you generate on your server. [See the modify playback behaviour guide](/docs/guides/modify-playback-behavior) about embedding extra params in your JWT.
+
+# Using playback modifiers in Mux Player
+
+Mux Player supports `min_resolution`, `max_resolution` and `rendition_order` as attributes on the web component and props on the React component.
+
+For example to set the `max_resolution=` parameter with Mux Player, you can set `max-resolution="720p"` attribute (`maxResolution="720p"` in React). When setting this attribute Mux Player will internally add it on as a query parameter on the streaming URL.
+ +As with all playback modifiers, if you're using signed URLs, your parameters should be encoded in the `playback-token` attribute (`tokens.playback` in React). + +# When using AVPlayer on iOS + +Set the playback modifier by appending a `URLQueryItem` to the playback `URL`. [Initialize `AVPlayer` using the `URL` itself](https://developer.apple.com/documentation/avfoundation/avplayer/1385706-init) as shown in an example below using `max_resolution` or [initialize with an `AVPlayerItem` constructed with the URL](https://developer.apple.com/documentation/avfoundation/avplayer/1387104-init). + + diff --git a/docs/guides/developer/control-recording-resolution.mdx b/docs/guides/developer/control-recording-resolution.mdx new file mode 100644 index 0000000..8980817 --- /dev/null +++ b/docs/guides/developer/control-recording-resolution.mdx @@ -0,0 +1,93 @@ +--- +title: Control recording resolution +product: video +description: If the video being captured in your app doesn't need to be played back in full resolution, specify a lower resolution when recording to take advantage of Mux's resolution dependent pricing. +--- + +## Android +The way you control the resolution of a recorded video depends on the API used to record or encode it. All of Google's major camera and recording APIs have a method for setting either the exact or maximum resolution of the videos they create. + +### CameraX +With the CameraX library provide a `QualitySelector` that doesn't allow for resolutions beyond 720p (1280x720). +```kotlin +// Selects only Standard HD (720p) and Standard Definition (480p) +val selector = QualitySelector.fromOrderedList( + listOf(Quality.HD, Quality.SD), + FallbackStrategy.lowerQualityOrHigherThan(Quality.SD) + ) + +val recorder = Recorder.Builder() + .setQualitySelector(selector) + ... + .build() +``` + +### MediaCodec +If you are encoding video yourself via the `MediaCodec` API, you can set the encoder's output resolution by setting it in the input `MediaFormat`. 
For more information on how to configure and use `MediaCodec`, [try the docs](https://developer.android.com/reference/android/media/MediaCodec) +```kotlin +val mediaCodec = MediaCodec.createByCodecName(codecName) +val encodeFormat = MediaFormat().apply { + setInteger(MediaFormat.KEY_FRAME_RATE, myExampleFrameRate) + //... Other required params + // Output 720p + setInteger(MediaFormat.KEY_HEIGHT, 720) + setInteger(MediaFormat.KEY_WIDTH, 1280) +} +mediaCodec.configure( + encodeFormat, + myInputSurface, + null, + MediaCodec.CONFIGURE_FLAG_ENCODE +) +``` + +### Camera2 +Camera2 doesn't have an API to set the video resolution directly, but it infers it from the input surface. You have to call `SurfaceHolder.setFixedSize()` on your capture requests' targets. This can only be done on Lollipop/API 21 or higher. Please refer to [the camera2 docs](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#createCaptureSession(android.hardware.camera2.params.SessionConfiguration)) for more information +```kotlin +val supportedCameraResolutions = streamConfigMap.getOutputSizes(ImageFormat.NV21) +val size = + supportedCameraResolutions.toList().sortedBy { it.height }.findLast { it.height <= 720 && it.width <= 1280 } +size?.let { cameraSurfaceHolder.setFixedSize(it.width, it.height) } +cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD) + .apply { addTarget(cameraSurfaceHolder.surface) } + // ... + .build() +cameraDevice.createCaptureSession(...) +``` + +### MediaRecorder +MediaRecorder's output size can be configured by calling `MediaRecorder.setVideoSize()` before calling `prepare()`. +```kotlin +mediaRecord.setVideoSize(1280, 720) +mediaRecord.prepare() +``` + +## iOS and iPadOS + +This guide covers setting maximum video resolution when recording video on iOS and iPadOS. The directions and code examples on this page assume you are using AVFoundation to setup and configure your camera. 
If you’ve never used AVFoundation before we recommend you brush up on the basics before proceeding further, see the official Apple [documentation](https://developer.apple.com/documentation/avfoundation) for a quick introduction and sample code. + + +Video recording on iOS is managed using [AVCaptureSession](https://developer.apple.com/documentation/avfoundation/avcapturesession). The resolution for video output from AVCaptureSession can be configured using a settings preset and this example shows how to configure AVCaptureSession to output video at a resolution of 720p (1280 x 720 pixels). + +```swift +let session = fetchYourCaptureSession() +session.beginConfiguration() + +let updatedSessionPreset = AVCaptureSession.Preset.hd1280x720 +if session.canSetSessionPreset(updatedSessionPreset) { + session.sessionPreset = updatedSessionPreset +} + +session.commitConfiguration() +``` + +Don’t forget to call `beginConfiguration()` before applying any configuration changes. When all the configuration changes have been applied, make sure your implementation calls `commitConfiguration()`. + +It is best for any work that is done in-between calls to `beginConfiguration()` and `commitConfiguration()` to be synchronous. If you need to perform any asynchronous tasks, such as fetching the preferred resolution from your backend, make sure those are complete before you begin to configure `AVCaptureSession`. + + +## OBS + +Streams initiated via [OBS](https://obsproject.com/) can be configured in Settings > Video > Output (Scaled) Resolution. + + diff --git a/docs/guides/developer/create-clips-from-your-videos.mdx b/docs/guides/developer/create-clips-from-your-videos.mdx new file mode 100644 index 0000000..4f2974a --- /dev/null +++ b/docs/guides/developer/create-clips-from-your-videos.mdx @@ -0,0 +1,157 @@ +--- +title: Create clips from your videos +product: video +description: >- + Learn how to create clips from your video files or live stream event + recordings. 
+videoWalkthrough: + src: Hv8S2SfibcU6C8HR7iJo1001dgfodzJMmdD353PEYEsk + thumbnailTime: 135 + created: '2022-02-02T14:54:00Z' +--- + +To drive higher viewer engagement with the videos already on your service, you can create additional videos from your existing library or catalog. These videos could: +* Provide quick previews +* Highlight key moments +* Be polished versions of a live stream with the extra minutes trimmed from the beginning & end (aka preroll and postroll slates) for on-demand replays + +Mux can now help you quickly create these kinds of videos using the asset clipping functionality. + + + If you do not need frame accurate clips, or require immediate availability of clips, you may find that the [instant clipping feature may meet your requirements](/docs/guides/create-instant-clips). + + +## 1. Create a clip + +When you [POST a new video](/docs/core/stream-video-files) or [start live streaming](/docs/guides/start-live-streaming), Mux creates a new asset for the video file or live stream event recording. +You can create a clip from an existing asset by making a POST request to /assets endpoint and defining the `input` object's clipping parameters. +* `url` is defined with `mux://assets/{asset_id}` template where `asset_id` is the source Asset Identifier to create the clip from. +* `start_time` is the time offset in seconds from the beginning of the video, indicating the clip's start marker. The default value is 0 when not included. +* `end_time` is the time offset in seconds from the beginning of the video, indicating the clip's end marker. The default value is the duration of the video when not included. 
+ +A request and response might look something like this: + +### Example request + +```bash +curl https://api.mux.com/video/v1/assets \ + -H "Content-Type: application/json" \ + -X POST \ + -d '{ + "inputs": [ + { + "url": "mux://assets/01itgOBvgjAbES7Inwvu4kEBtsQ44HFL6", + "start_time": 10.0, + "end_time": 51.10 + } + ], + "playback_policies": [ + "public" + ], + "video_quality" : "basic" + }' \ + -u ${MUX_TOKEN_ID}:${MUX_TOKEN_SECRET} +``` + +### Example response +```json +{ + "data": { + "status": "preparing", + "playback_ids": [ + { + "policy": "public", + "id": "TXjw00EgPBPS6acv7gBUEJ14PEr5XNWOe" + } + ], + "mp4_support": "none", + "master_access": "none", + "id": "kcP3wS3pKcEPywS5zjJk7Q1Clu99SS1O", + "created_at": "1607876845", + "video_quality" : "basic", + "source_asset_id": "01itgOBvgjAbES7Inwvu4kEBtsQ44HFL6" + } +} +``` + +Mux creates a new asset for the clip. And the response will include an **Asset ID** and a **Playback ID**. + +* Asset IDs are used to manage assets using `api.mux.com` (e.g. to read or delete an asset). +* Playback IDs are used to stream an asset to a video player through `stream.mux.com`. You can add multiple playback IDs to an asset to create playback URLs with different viewing permissions, and you can delete playback IDs to remove access without deleting the asset. +* `source_asset_id` is the video or live stream event recording asset used to create the clip. The `source_asset_id` can be useful for associating clips with the source video object in your CMS. + +## 2. Wait for "ready" event + +When the clip is ready for playback, the asset "status" changes to "ready". + +The best way to do this is via **webhooks**. Mux can send a webhook notification as soon as the asset is ready. See the [webhooks guide](/docs/core/listen-for-webhooks) for details. + +If you can't use webhooks for some reason, you can manually **poll** the asset API to see asset status. Note that this only works at low volume. 
+ +### Build your own request + + + + + +Please don't poll this API more than once per second. + +## 3. Play your clip + +To play back the video, create a playback URL using the `PLAYBACK_ID` you received when you created the clip. + + + +```curl +https://stream.mux.com/{PLAYBACK_ID}.m3u8 +``` + + + +See the [playback guide](/docs/guides/play-your-videos) for more information about how to integrate with a video player. + + +## FAQs +A few commonly asked questions: + +### How many clips can be created from a single source asset? + +Unlimited! Mux creates a new asset for each clip. Hence, there is no limit to how many clips you can create. + +### Is there a cost to create clips? + +Each clip is a new asset and is considered an on-demand video. On-Demand video pricing applies and that includes Encoding, Storage, and Delivery usage. + +### Can I use basic video quality on clips? + +Yes! Clips can be created as either `basic` or `plus`. + +### Can I create clips when adding new video files? + +Mux only allows creating clips from existing videos in your account. That means, clipping specific parameters (`start_time` and `end_time`) added to Asset Creation are only applicable for `input.url` with `mux://assets/{asset_id}` format. + +### Can I create clips from live streams? + +Yes! Mux supports creating clips from the active asset being generated by a live stream while broadcasting. If you clip an asset while the broadcast is active, just remember that the active asset is still growing, so if you don't provide `end_time`, it will default to the end of the asset at the time of creation. As such, when clipping an active asset during the broadcast, for best results you should always provide an `end_time`. + +### My source asset has subtitles/captions text tracks. Will the clip have them? + +Mux copies all the text tracks from the source asset to the new asset created for the clip. Mux also trims the text tracks to match the clip's start and end markers. 
+ +### What other data is copied from the source asset? + +Mux copies the captions and watermark image from the source asset to the clips created. If your source asset does not have a watermark image and you want your clipped +asset to have a watermark, pass it through in `overlay_settings`. See more details in the [watermark guide](/docs/guides/add-watermarks-to-your-videos). + +All other fields, such as `passthrough`, are not copied over. + +### What is the minimum duration for a clip? + +Clips must have a duration of at least 500 milliseconds. diff --git a/docs/guides/developer/create-instant-clips.mdx b/docs/guides/developer/create-instant-clips.mdx new file mode 100644 index 0000000..6706e46 --- /dev/null +++ b/docs/guides/developer/create-instant-clips.mdx @@ -0,0 +1,274 @@ +--- +title: Create instant clips +product: video +description: >- + Learn how to create instant clips at no extra cost. +videoWalkthrough: + src: LOMMdhiOET521ZEsVVyM01blbZXEgfgxj + thumbnailTime: 6 + created: '2024-05-07T16:00:00Z' +--- + +## Use cases for instant clipping + +Instant clipping allows you to set the start and end times of the streaming URL to make clips that are instantly available without the wait time or expense of a new asset being created. This feature can be used to build a variety of viewer facing workflows. + + + If you require frame accurate clips, clipped masters, or clipped MP4s, you should use the [asset-based clipping feature](/docs/guides/create-clips-from-your-videos). + + +Here are examples of workflows that can be built with instant clipping: + +### Pre-live workflows + +Sometimes you need to connect your contribution encoder to a live stream and test that the video is working end-to-end before exposing the live stream to your audience. 
But when you have [DVR mode](/docs/guides/stream-recordings-of-live-streams) turned on for your stream, it's often necessary to prevent viewers being able to seek back into the parts of the live stream where your announcers are saying "testing, testing, 1… 2… 3…". + +Instant clipping can be used to specify a start time to allow playback of a live stream, stopping users from seeking back into the stream beyond where you want. You can also specify an end time if you're worried about extra content at the end of your live events. + +### Post-live trimming without re-encoding + +With our [asset-based clipping feature](/docs/guides/create-clips-from-your-videos) you're able to create clipped on-demand assets, which are shortened versions of a given asset - this is commonly called "top and tail editing". These assets always incur an encoding cost to process the clipped version, and can take some time to process. + +With instant clipping, for any asset generated from a live stream, you can simply specify the start and end times of the content you want clipped directly during playback without the need for time-consuming and costly re-processing. + +For example, if you broadcast multiple sports events back-to-back on a single live stream, you can use instant clipping to generate instant on-demand streams of each match as it ends for no extra cost. + +### Highlight clips + +Sometimes a really exciting moment happens on a live stream, and you want to clip out a short highlight for others to enjoy. You can use instant clipping to pull out short clips from a currently active asset for promoting on your homepage or embedding into news articles. + +This can be used for example to instantly show just the 90th-minute equalizer goal on your home page while having extra time and penalties to watch live on your pay-to-view platform. 
+ +## How instant clipping works + +### From a live stream + +Every live stream or asset generated from a live stream contains a timestamp that is close (usually within a second) to the time that Mux received the source video from the contribution encoder. This timestamp is known as ["Program Date Time"](https://www.mux.com/video-glossary/pdt-program-date-and-time) or "PDT" for short. + + + "PDT" has nothing to do with the Pacific Daylight time zone; all times are represented in UTC or with unix timestamps. + + +Instant clipping works by trimming the HLS manifests from live streams and VOD assets originating from live streams using these PDT timestamps, without re-encoding any segments. This means that instant clipping operates at the segment level of accuracy, so you should expect that the content that you clip out may be several seconds longer than you've requested. We always make sure to include the timestamps that you request, but your content may start a few seconds earlier, and end a few seconds later. The exact accuracy depends on the latency settings of the live stream that you're clipping from. + +### From a VOD asset + +Regardless if an asset has originated from a live stream or was uploaded, you can create instant clips using relative time markers for the start and end to generate the trimmed HLS manifest. The relative time markers are based on the beginning of the asset and so specifying a range of `10` - `20` would result in a 10 second clip between `0:00:10` and `0:00:20`. + +## Creating an instant clip URL + +Instant clipping is controlled by passing [playback modifiers](/docs/guides/modify-playback-behavior) (query string arguments or JWT claims) to the playback URL of your live stream or VOD assets. If you're using signed URLs, these playback modifiers need to be embedded into your JWT. 
+ +### Live stream instant clips + +While Mux timestamps video frames when they are received, there is a delay while enough frames are processed to form sufficient segments for a live stream to be started. + +This means that you should expect some delay from wall-clock time to when you can use a given timestamp as a `program_start_time`. + +For example, if a commentator presses a “Go Live” button at 13:00 UTC, which sets the `program_start_time` of a Live Stream to that timestamp, you should expect request for the live stream's manifest to respond with a HTTP 412 error for up to 15 seconds after (this will depend on the `latency_mode` of your live stream). + +The start and end time of your trimmed live stream or on-demand asset are specified by using the following two parameters: + +#### Using `program_start_time` + +This parameter accepts an epoch time and can be set on a playback URL, and sets the start time of the content within the live stream or asset, for example: + +``` +# Format +https://stream.mux.com/${PLAYBACK_ID}.m3u8?program_start_time=${EPOCH_TIME} + +# Example +https://stream.mux.com/sp9WNcgcktsmlvFLKgNm3jjSGRD00RPlq.m3u8?program_start_time=1707740400 +``` + +When used on a live stream, this will cause the live stream to behave as if it is idle prior to this time. + +When used on an asset, this will trim the start of the streamed media to this timestamp if needed. + +#### Using `program_end_time` + +This parameter accepts an epoch time and can be set on a playback URL, and sets the end time of the content within the live stream or asset, for example: + +``` +# Format +https://stream.mux.com/${PLAYBACK_ID}.m3u8?program_end_time=${EPOCH_TIME} + +# Example +https://stream.mux.com/sp9WNcgcktsmlvFLKgNm3jjSGRD00RPlq.m3u8?program_end_time=1707740460 +``` + +When used on a live stream, this will cause the live stream to behave as if it is idle after this time. + +When used on an asset, this will trim the end of the streamed media to this timestamp. 
+ +#### Combining `program_start_time` and `program_end_time` + +These parameters can be used together to extract a specific clip of a live stream or asset, for example: + +``` +# Format +https://stream.mux.com/${PLAYBACK_ID}.m3u8?program_start_time=${EPOCH_TIME}&program_end_time=${EPOCH_TIME} + +# Example +https://stream.mux.com/sp9WNcgcktsmlvFLKgNm3jjSGRD00RPlq.m3u8?program_start_time=1707740400&program_end_time=1707740460 +``` + +### VOD instant clips + +The start and end time of your trimmed on-demand asset are specified by using the following two parameters: + +#### Using `asset_start_time` + +This parameter accepts relative time and can be set on a Playback URL, and sets the start time of the content within the asset, for example: + +``` +# Format +https://stream.mux.com/${PLAYBACK_ID}.m3u8?asset_start_time=${RELATIVE_TIME} + +# Example +https://stream.mux.com/sp9WNcgcktsmlvFLKgNm3jjSGRD00RPlq.m3u8?asset_start_time=10 +``` + +#### Using `asset_end_time` + +This parameter accepts relative time and can be set on a Playback URL, and sets the end time of the content within the asset, for example: + +``` +# Format +https://stream.mux.com/${PLAYBACK_ID}.m3u8?asset_end_time=${RELATIVE_TIME} + +# Example +https://stream.mux.com/sp9WNcgcktsmlvFLKgNm3jjSGRD00RPlq.m3u8?asset_end_time=20 +``` + +#### Combining `asset_start_time` and `asset_end_time` + +You can also use both of these parameters to create an instant clip of specific portion of your asset, for example: + +``` +# Format +https://stream.mux.com/${PLAYBACK_ID}.m3u8?asset_start_time=${RELATIVE_TIME}&asset_end_time=${RELATIVE_TIME} + +# Example +https://stream.mux.com/sp9WNcgcktsmlvFLKgNm3jjSGRD00RPlq.m3u8?asset_start_time=10&asset_end_time=20 +``` + +## Thumbnail & Storyboard support + +### Images for VOD assets + +To generate images for VOD assets, the `time` [query string parameter](/docs/guides/get-images-from-a-video#thumbnail-query-string-parameters) can be used to retrieve an image from the video, for 
example: + +``` +# Format +https://image.mux.com/${PLAYBACK_ID}/thumbnail.png?time=${RELATIVE_TIME} + +# Example +https://image.mux.com/sp9WNcgcktsmlvFLKgNm3jjSGRD00RPlq/thumbnail.png?time=15 +``` + +Storyboard generation for VOD assets support these parameters as a way to generate storyboard tiles for frames between the `asset_start_time` and `asset_end_time` values, for example: + +``` +#Format +https://image.mux.com/${PLAYBACK_ID}/storyboard.png?asset_start_time=${RELATIVE_TIME}&asset_end_time=${RELATIVE_TIME} + +# Example +https://image.mux.com/sp9WNcgcktsmlvFLKgNm3jjSGRD00RPlq/storyboard.png?asset_start_time=10&asset_end_time=20 +``` + +### Images for live streams + +For thumbnails, you can now pass an absolute time using the `program_time` parameter, for example: + +``` +# Format +https://image.mux.com/${PLAYBACK_ID}/thumbnail.png?program_time=${EPOCH_TIME} + +# Example +https://image.mux.com/sp9WNcgcktsmlvFLKgNm3jjSGRD00RPlq/thumbnail.png?program_time=1707740460 +``` + +You can pass the same set of [playback modifiers](/docs/guides/modify-playback-behavior) (`program_start_time` and `program_end_time`) on a request for a storyboard and the storyboard will be trimmed appropriately, for example: + +``` +#Format +https://image.mux.com/${PLAYBACK_ID}/storyboard.png?program_start_time=${RELATIVE_TIME}&program_end_time=${RELATIVE_TIME} + +# Example +https://image.mux.com/sp9WNcgcktsmlvFLKgNm3jjSGRD00RPlq/storyboard.png?program_start_time=1707740400&program_end_time=1707740460 + +``` + +## Using instant clipping in Mux Player + +We've also made sure it's easy to pass these parameters to Mux Player when you're using it for playback. + +Instant clipping is supported in Mux Player through two paths: + +### Using Public Playback IDs: Via extra source params + + + This feature was added in mux-player 2.3.0, but we recommend using the latest version at all times. 
+ + +Here's an example of using the extra source params for using the `asset_start_time` and `asset_end_time` parameters with mux-player for both video delivery and storyboards: + +```html + +``` + +Using the extra source params can also be used for instant clipping for live streams for video and storyboards as well: + +```html + +``` + +### Via signed URLs + +When using signed URLs, it's required to include the clipping parameters as [claims inside the respective JWTs](/docs/guides/modify-playback-behavior#jwt-claim-with-signed-playback-url) passed to Mux Player. + +For the playback token and the storyboard token, the following parameters should be injected into the JWT claims: +* `asset_start_time` and/or `asset_end_time` +* `program_start_time` and/or `program_end_time` + +For the thumbnail token, the `program_time` parameter should be injected into the JWT claim. + +Then Mux Player can be loaded in the usual way, passing in the signed tokens: + +```html + +``` + +## Stream security considerations + +We strongly recommend using this feature alongside [signed URLs](/docs/guides/secure-video-playback). When using this feature without signed URLs, it is possible for users to manipulate the manifest playback URL to expose parts of the media that you want to keep hidden. + +## Choosing between asset clipping and instant clipping + +Not sure if you should be [generating a new asset](/docs/guides/create-clips-from-your-videos) when clipping, or using instant clipping for your workflow? Here are some tips that can help you choose the right approach for your product. 
Instant clipping is a great choice when: +* You require a clip to be instantly available +* You need the clips to not incur additional encoding fees +* You need to pre-emptively limit the availability of content to build pre-live workflows for live streaming + +You should use our [asset-based clipping](/docs/guides/create-clips-from-your-videos) when: +* You require frame accuracy in your clips +* You require trimmed [MP4s](/docs/guides/enable-static-mp4-renditions) or [masters](/docs/guides/download-for-offline-editing) diff --git a/docs/guides/developer/create-timeline-hover-previews.mdx b/docs/guides/developer/create-timeline-hover-previews.mdx new file mode 100644 index 0000000..63ce941 --- /dev/null +++ b/docs/guides/developer/create-timeline-hover-previews.mdx @@ -0,0 +1,169 @@ +--- +title: Create timeline hover previews +product: video +description: Learn how to add hover image previews to your player. +--- + +## What are timeline hover previews? + +Timeline hover previews, also known as trick play or scrub bar previews, make player operations like fast-forward, rewind, and seeking more visual to the user. Here it is in action: + + + +Each image (also called a thumbnail or tile) you see when hovering over the scrub bar (or player timeline) on the video player is part of a larger image called a storyboard. +A storyboard is a collection of thumbnails or tiles, created from video frames selected at regular time intervals and arranged in a grid layout. + +Below is an example storyboard for the video, [Tears of Steel](https://mango.blender.org/), the same video used to demo timeline hover previews above: + + + +## Add timeline hover previews to your player + +There are a few different ways to add this functionality to your players, depending on which methods your chosen player exposes to support timeline hover previews. 
+ +The storyboard image can be requested from the following URL in either `webp`, `jpg`, or `png` format from Mux: + + + +Each storyboard has an associated metadata file and can be used as a `metadata` text track. The storyboard image is referenced from the metadata in this case. + +The storyboard metadata provides the x-axis and y-axis coordinates of each image in the storyboard image and the corresponding time range. The metadata is available in both WebVTT and JSON format. + +Storyboard images will contain 50 tiles within the image if the asset is less than 15 minutes in duration. If the asset is more than 15 minutes, then there will be 100 tiles populated in the storyboard image. +### WebVTT +Most popular video players use WebVTT file for describing individual tiles of the storyboard image. You can request the WebVTT file by making a request to generate a storyboard of the image. + + + +``` +WEBVTT + +00:00:00.000 --> 00:01:06.067 +https://image.mux.com/Dk8pvMnvTeqDk9dy5nqmXz02MM4YtdElW/storyboard.jpg#xywh=0,0,256,160 + +00:01:06.067 --> 00:02:14.067 +https://image.mux.com/Dk8pvMnvTeqDk9dy5nqmXz02MM4YtdElW/storyboard.jpg#xywh=256,0,256,160 + +00:02:14.067 --> 00:03:22.067 +https://image.mux.com/Dk8pvMnvTeqDk9dy5nqmXz02MM4YtdElW/storyboard.jpg#xywh=512,0,256,160 + +00:03:22.067 --> 00:04:28.067 +https://image.mux.com/Dk8pvMnvTeqDk9dy5nqmXz02MM4YtdElW/storyboard.jpg#xywh=768,0,256,160 + +00:04:28.067 --> 00:05:36.067 +https://image.mux.com/Dk8pvMnvTeqDk9dy5nqmXz02MM4YtdElW/storyboard.jpg#xywh=1024,0,256,160 + +00:05:36.067 --> 00:06:44.067 +https://image.mux.com/Dk8pvMnvTeqDk9dy5nqmXz02MM4YtdElW/storyboard.jpg#xywh=0,160,256,160 +``` + +By default, this request will generate a `jpg` for the storyboard image. If you'd like to change the format to `webp`, you can do so by adding `?format=webp` to the end of the request URL: + + + +#### WebVTT Compatible Video Players +The list below shows the various video players supporting the WebVTT files for trick play. 
If your player isn't listed here, [please reach out](/support), and we'll help where we can! + +- [VideoJS](https://videojs.com/) + [VTT Thumbnails plugin](https://www.npmjs.com/package/videojs-vtt-thumbnails) +- [JW Player](https://docs.jwplayer.com/players/docs/ios-add-preview-thumbnails) +- [THEOplayer](https://www.theoplayer.com/docs/theoplayer/how-to-guides/texttrack/how-to-implement-preview-thumbnails/) +- [Bitmovin](https://bitmovin.com/demos/thumbnail-seeking) +- [Flow Player](https://flowplayer.com/demos/video-thumbnails) +- [Plyr](https://plyr.io) + + +Using a WebVTT file may be limited to HTML5 browser-based video players and may not be supported in Device specific SDKs including iOS and Android. iOS, Android, and other device platforms use a HLS iFrame Playlist. Generating HLS iFrame Playlists is on Mux's roadmap. + + +### JSON +There are many other scenarios for using storyboards. For instance: +- A quick way of previewing the entire video can save the video editor/reviewer's time without requiring a full video playback +- Ease of developing trick play like functionality in Chromeless Video players like [hls.js](https://github.com/video-dev/hls.js/) + +Using a WebVTT file for metadata can be burdensome to implement. Storyboard metadata expressed in an easy to understand & widely supported format like JSON helps in taking advantage of storyboards in new ways. Mux provides the same storyboard metadata in JSON format. 
+ + +```json +{ + "url": "https://image.mux.com/Dk8pvMnvTeqDk9dy5nqmXz02MM4YtdElW/storyboard.jpg", + "tile_width": 256, + "tile_height": 160, + "duration": 6744.1, + "tiles": [ + { + "start": 0, + "x": 0, + "y": 0 + }, + { + "start": 66.066667, + "x": 256, + "y": 0 + }, + { + "start": 134.066667, + "x": 512, + "y": 0 + }, + { + "start": 202.066667, + "x": 768, + "y": 0 + }, + { + "start": 268.066667, + "x": 1024, + "y": 0 + }, + { + "start": 336.066667, + "x": 0, + "y": 160 + } + ] +} +``` + +By default, this request will generate a `jpg` for the storyboard image. If you'd like to change the format to `webp`, you can do so by adding `?format=webp` to the end of the request URL: + + + +## Cross-Origin Resource Sharing (CORS) requirements +Storyboard URLs use the `image.mux.com` hostname, while the `stream.mux.com` hostname is used for video playback URLs. Because the URLs use different hostnames, it is recommended to add the `crossorigin` attribute to the [`