diff --git a/core/src/main/java/io/github/thibaultbee/streampack/data/VideoConfig.kt b/core/src/main/java/io/github/thibaultbee/streampack/data/VideoConfig.kt index 469051a5f..529384cb0 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/data/VideoConfig.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/data/VideoConfig.kt @@ -85,11 +85,12 @@ class VideoConfig( /** * Video encoder I-frame interval in seconds. * This is a best effort as few camera can not generate a fixed framerate. - * For live streaming, I-frame interval should be really low. For recording, I-frame interval should be higher. + * For live streaming, a longer interval reduces bandwidth at the cost of resilience to packet loss. + * For recording, I-frame interval should be higher. * A value of 0 means that each frame is an I-frame. * On device with API < 25, this value will be rounded to an integer. So don't expect a precise value and any value < 0.5 will be considered as 0. */ - val gopDuration: Float = 1f // 1s between I frames + val gopDuration: Float = 3f // 3s between I frames for better compression efficiency ) : Config(mimeType, startBitrate, profile) { init { require(mimeType.isVideo) { "MimeType must be video" } @@ -220,10 +221,10 @@ class VideoConfig( fun getBestBitrate(resolution: Size): Int { val numOfPixels = resolution.width * resolution.height return when { - numOfPixels <= 320 * 240 -> 800000 - numOfPixels <= 640 * 480 -> 1000000 - numOfPixels <= 1280 * 720 -> 2000000 - numOfPixels <= 1920 * 1080 -> 3500000 + numOfPixels <= 320 * 240 -> 500000 // Lower for efficiency + numOfPixels <= 640 * 480 -> 800000 // Reduced for 640x480 (our default) + numOfPixels <= 1280 * 720 -> 1500000 // Slightly reduced + numOfPixels <= 1920 * 1080 -> 3000000 else -> 4000000 } } diff --git a/core/src/main/java/io/github/thibaultbee/streampack/internal/encoders/MediaCodecEncoder.kt b/core/src/main/java/io/github/thibaultbee/streampack/internal/encoders/MediaCodecEncoder.kt index 8f064c702..633931979 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/internal/encoders/MediaCodecEncoder.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/internal/encoders/MediaCodecEncoder.kt @@ -16,11 +16,14 @@ package io.github.thibaultbee.streampack.internal.encoders import android.media.MediaCodec +import android.media.MediaCodecInfo import android.media.MediaFormat import android.os.Build +import android.os.Bundle import android.os.Handler import android.os.HandlerThread import io.github.thibaultbee.streampack.data.Config +import io.github.thibaultbee.streampack.data.VideoConfig import io.github.thibaultbee.streampack.error.StreamPackError import io.github.thibaultbee.streampack.internal.data.Frame import io.github.thibaultbee.streampack.internal.events.EventHandler @@ -156,7 +159,25 @@ abstract class MediaCodecEncoder( open fun createMediaFormat(config: Config, withProfileLevel: Boolean) = config.getFormat(withProfileLevel) - open fun extendMediaFormat(config: Config, format: MediaFormat) {} + open fun extendMediaFormat(config: Config, format: MediaFormat) { + // Quality-focused parameters + if (config is VideoConfig) { + try { + // Bitrate mode: prefer quality over constant bitrate + if (config.mimeType == MediaFormat.MIMETYPE_VIDEO_AVC || + config.mimeType == MediaFormat.MIMETYPE_VIDEO_HEVC) { + // Use VBR mode for better quality/size ratio for H.264/H.265 + format.setInteger(MediaFormat.KEY_BITRATE_MODE, + MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_VBR) + } + + // Prioritize quality over 
speed when using AVC/HEVC for streaming + format.setInteger("quality", 1) + } catch (e: Exception) { + Logger.d(TAG, "Could not set quality parameters: ${e.message}") + } + } + } private fun createCodec(config: Config, withProfileLevel: Boolean): MediaCodec { val format = createMediaFormat(config, withProfileLevel) @@ -180,6 +201,18 @@ abstract class MediaCodecEncoder( codec.setCallback(encoderCallback) } + // Power-efficient encoding parameters + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { + try { + // Best effort: apply the target video bitrate as a codec parameter. setParameters() may be rejected before configure(); the catch below handles that. + val params = Bundle() + params.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, _bitrate) + codec.setParameters(params) + } catch (e: Exception) { + Logger.d(TAG, "Could not set encoder parameters: ${e.message}") + } + } + try { codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE) } catch (e: Exception) { diff --git a/core/src/main/java/io/github/thibaultbee/streampack/internal/encoders/VideoMediaCodecEncoder.kt b/core/src/main/java/io/github/thibaultbee/streampack/internal/encoders/VideoMediaCodecEncoder.kt index 9d0b312b1..5ff4a7ac4 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/internal/encoders/VideoMediaCodecEncoder.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/internal/encoders/VideoMediaCodecEncoder.kt @@ -32,6 +32,7 @@ import io.github.thibaultbee.streampack.internal.orientation.ISourceOrientationL import io.github.thibaultbee.streampack.internal.orientation.ISourceOrientationProvider import io.github.thibaultbee.streampack.internal.utils.av.video.DynamicRangeProfile import io.github.thibaultbee.streampack.listeners.OnErrorListener +import io.github.thibaultbee.streampack.logger.Logger import java.util.concurrent.Executors /** @@ -130,10 +131,20 @@ class VideoMediaCodecEncoder( private var eglSurface: EglWindowSurface? = null private var fullFrameRect: FullFrameRect? = null private var textureId = -1 - private val executor = Executors.newSingleThreadExecutor() + // Single-thread executor running at minimum priority for power savings + private val executor = Executors.newSingleThreadExecutor { r -> + Thread(r).apply { + priority = Thread.MIN_PRIORITY + name = "encoder-power-save-thread" + } + } private var isRunning = false private var surfaceTexture: SurfaceTexture? = null private val stMatrix = FloatArray(16) + + // Power optimization: batch frame processing to reduce wake-ups - strict 24fps cap + private var lastFrameTimeMs = 0L + private val minFrameIntervalMs = 41L // ~24fps max to match video encoding settings private var _inputSurface: Surface? = null val inputSurface: Surface?
@@ -244,23 +255,85 @@ class VideoMediaCodecEncoder( } } + // Track how many frames we're dropping + private var totalFramesReceived = 0L + private var totalFramesProcessed = 0L + private var lastLogTime = 0L + + // System time when the first frame was received + private var streamStartTimeMs = 0L + override fun onFrameAvailable(surfaceTexture: SurfaceTexture) { if (!isRunning) { return } - + + // Initialize stream start time if needed + if (streamStartTimeMs == 0L) { + streamStartTimeMs = System.currentTimeMillis() + } + + // Count incoming frames for statistics + totalFramesReceived++ + + // Get system time for frame rate control + val currentTimeMs = System.currentTimeMillis() + val timeSinceLastFrame = currentTimeMs - lastFrameTimeMs + + // CRITICAL: Skip enqueueing to executor if we're falling behind + // This prevents executor queue buildup which is a major cause of latency + if (timeSinceLastFrame < minFrameIntervalMs && lastFrameTimeMs > 0) { + // Skip this frame entirely - don't even queue it + return + } + + // Queue for processing only if we're not backed up executor.execute { synchronized(this) { + // Check running state again after potential queue delay + if (!isRunning) return@synchronized + eglSurface?.let { it.makeCurrent() - surfaceTexture.updateTexImage() + + // Critical: Aggressively flush ALL pending frames to get to latest + // This ensures we stay current even with a burst of frames + var frameCount = 0 + var lastTimestamp: Long + do { + lastTimestamp = surfaceTexture.timestamp + surfaceTexture.updateTexImage() + frameCount++ + } while (frameCount < 20 && // Limit loop iterations for safety + surfaceTexture.timestamp != 0L && + surfaceTexture.timestamp != lastTimestamp) // Stop when no new frames + + // Get latest transform matrix surfaceTexture.getTransformMatrix(stMatrix) - // Use the identity matrix for MVP so our 2x2 FULL_RECTANGLE covers the viewport. 
+ // Draw and send the frame fullFrameRect?.drawFrame(textureId, stMatrix) it.setPresentationTime(surfaceTexture.timestamp) it.swapBuffers() + lastFrameTimeMs = currentTimeMs + totalFramesProcessed++ + + // Release texture image surfaceTexture.releaseTexImage() + + // Log statistics every 5 seconds + if (currentTimeMs - lastLogTime > 5000) { + val streamTimeSeconds = (currentTimeMs - streamStartTimeMs) / 1000.0 + val droppedFrames = totalFramesReceived - totalFramesProcessed + val droppedPercent = if (totalFramesReceived > 0) + (droppedFrames * 100.0 / totalFramesReceived) else 0.0 + + Logger.d(TAG, "Stream stats: Received=${totalFramesReceived}, " + + "Processed=${totalFramesProcessed}, " + + "Dropped=${droppedFrames} (${droppedPercent.toInt()}%), " + + "Avg FPS=${totalFramesProcessed / streamTimeSeconds}") + lastLogTime = currentTimeMs + } } } } @@ -300,5 +373,9 @@ class VideoMediaCodecEncoder( surfaceTexture?.release() surfaceTexture = null } + + companion object { + private const val TAG = "CodecSurface" + } } -} +} \ No newline at end of file diff --git a/core/src/main/java/io/github/thibaultbee/streampack/internal/sources/camera/CameraController.kt b/core/src/main/java/io/github/thibaultbee/streampack/internal/sources/camera/CameraController.kt index 85405e91a..bbd340f3f 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/internal/sources/camera/CameraController.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/internal/sources/camera/CameraController.kt @@ -20,6 +20,9 @@ import android.content.Context import android.hardware.camera2.* import android.hardware.camera2.CameraDevice.AUDIO_RESTRICTION_NONE import android.hardware.camera2.CameraDevice.AUDIO_RESTRICTION_VIBRATION_SOUND +import android.hardware.camera2.CameraCaptureSession.CaptureCallback +import android.hardware.camera2.TotalCaptureResult +import android.hardware.camera2.CaptureFailure import android.hardware.camera2.params.OutputConfiguration import android.os.Build import android.util.Range @@ -54,23 +57,35 @@ class CameraController( var fpsRangeList = context.getCameraFpsList(cameraId) Logger.i(TAG, "Supported FPS range list: $fpsRangeList") - // Get range that contains FPS - fpsRangeList = - fpsRangeList.filter { it.contains(fps) or it.contains(fps * 1000) } // On Samsung S4 fps range is [4000-30000] instead of [4-30] + // Power optimization - try to use a low FPS range to save power + // First try to find a fixed range at a low FPS (15fps) + val targetLowFps = 15 + val lowFpsFixedRange = fpsRangeList.find { it.lower == it.upper && it.lower == targetLowFps } + + if (lowFpsFixedRange != null) { + Logger.d(TAG, "Found low fixed fps range: $lowFpsFixedRange") + return lowFpsFixedRange + } + + // Try to find a range that includes our target fps + fpsRangeList = fpsRangeList.filter { it.contains(fps) } if (fpsRangeList.isEmpty()) { - throw InvalidParameterException("Failed to find a single FPS range that contains $fps") + // If no range contains our target fps, use the original list + fpsRangeList = context.getCameraFpsList(cameraId) } - - // Get smaller range - var selectedFpsRange = fpsRangeList[0] - fpsRangeList = fpsRangeList.drop(0) - fpsRangeList.forEach { - if ((it.upper - it.lower) < (selectedFpsRange.upper - selectedFpsRange.lower)) { - selectedFpsRange = it - } + + // Look for a range with a lower bound not higher than our target fps + val suitableRanges = fpsRangeList.filter { it.lower <= fps } + if (suitableRanges.isNotEmpty()) { + // Get the range with lower bound closest to our target 
fps + val selectedRange = suitableRanges.minWith(compareBy { fps - it.lower }) + Logger.d(TAG, "Using range with lower bound close to target fps: $selectedRange") + return selectedRange } - - Logger.d(TAG, "Selected Fps range $selectedFpsRange") + + // Fallback - just get the first range + val selectedFpsRange = fpsRangeList[0] + Logger.d(TAG, "Fallback fps range: $selectedFpsRange") return selectedFpsRange } @@ -110,11 +125,40 @@ class CameraController( } private val captureCallback = object : CameraCaptureSession.CaptureCallback() { + private var frameCount = 0 + private var lastLogTime = System.currentTimeMillis() + + override fun onCaptureCompleted( + session: CameraCaptureSession, + request: CaptureRequest, + result: TotalCaptureResult + ) { + super.onCaptureCompleted(session, request, result) + + // Log frame rate every second to monitor performance + frameCount++ + val currentTime = System.currentTimeMillis() + if (currentTime - lastLogTime >= 1000) { + Logger.d(TAG, "Camera capture framerate: $frameCount fps") + frameCount = 0 + lastLogTime = currentTime + } + } + override fun onCaptureFailed( session: CameraCaptureSession, request: CaptureRequest, failure: CaptureFailure ) { super.onCaptureFailed(session, request, failure) - Logger.e(TAG, "Capture failed with code ${failure.reason}") + Logger.e(TAG, "Capture failed with code ${failure.reason}") + } + + override fun onCaptureSequenceCompleted( + session: CameraCaptureSession, + sequenceId: Int, + frameNumber: Long + ) { + super.onCaptureSequenceCompleted(session, sequenceId, frameNumber) + Logger.d(TAG, "Capture sequence $sequenceId completed at frame $frameNumber") } } @@ -161,10 +205,32 @@ class CameraController( throw RuntimeException("No target surface") } - return camera.createCaptureRequest(CameraDevice.TEMPLATE_RECORD).apply { - surfaces.forEach { addTarget(it) } - set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange) - threadManager.setRepeatingSingleRequest(captureSession, build(), captureCallback) + // Use the PREVIEW template instead of RECORD: it requests lighter post-processing, which reduces power usage + val captureBuilder = camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW) + + try { + // Add all surfaces + surfaces.forEach { captureBuilder.addTarget(it) } + + // Basic settings - balance power and functionality + captureBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange) + captureBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO) + + // Save power by disabling features that are CPU intensive + captureBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF) + captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO) // Auto-focus but continuous video mode uses less CPU than picture mode + captureBuilder.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_AUTO) // Keep auto white balance for usable image + captureBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE, CaptureRequest.NOISE_REDUCTION_MODE_FAST) + captureBuilder.set(CaptureRequest.EDGE_MODE, CaptureRequest.EDGE_MODE_FAST) + captureBuilder.set(CaptureRequest.HOT_PIXEL_MODE, CaptureRequest.HOT_PIXEL_MODE_FAST) + + // Start the repeating request right away to ensure continuous capture + threadManager.setRepeatingSingleRequest(captureSession, captureBuilder.build(), captureCallback) + + return captureBuilder + } catch (e: Exception) { + Logger.e(TAG, "Error creating camera request session", e) + throw e } } @@ -249,21 +315,43 @@ class CameraController( } fun
updateRepeatingSession() { - require(captureSession != null) { "capture session must not be null" } - require(captureRequest != null) { "capture request must not be null" } + try { + if (captureSession == null) { + Logger.e(TAG, "Cannot update repeating session: capture session is null") + return + } + if (captureRequest == null) { + Logger.e(TAG, "Cannot update repeating session: capture request is null") + return + } - threadManager.setRepeatingSingleRequest( - captureSession!!, captureRequest!!.build(), captureCallback - ) + // Build the request and set it as a repeating request to ensure continuous capture + val request = captureRequest!!.build() + threadManager.setRepeatingSingleRequest(captureSession!!, request, captureCallback) + Logger.d(TAG, "Updated repeating request") + } catch (e: Exception) { + Logger.e(TAG, "Error updating repeating session", e) + } } private fun updateBurstSession() { - require(captureSession != null) { "capture session must not be null" } - require(captureRequest != null) { "capture request must not be null" } + try { + if (captureSession == null) { + Logger.e(TAG, "Cannot update burst session: capture session is null") + return + } + if (captureRequest == null) { + Logger.e(TAG, "Cannot update burst session: capture request is null") + return + } - threadManager.captureBurstRequests( - captureSession!!, listOf(captureRequest!!.build()), captureCallback - ) + // Build the request and capture it in burst mode + val request = captureRequest!!.build() + threadManager.captureBurstRequests(captureSession!!, listOf(request), captureCallback) + Logger.d(TAG, "Updated burst request") + } catch (e: Exception) { + Logger.e(TAG, "Error updating burst session", e) + } } fun <T> getSetting(key: CaptureRequest.Key<T>?): T? { diff --git a/core/src/main/java/io/github/thibaultbee/streampack/internal/sources/camera/CameraExecutorManager.kt b/core/src/main/java/io/github/thibaultbee/streampack/internal/sources/camera/CameraExecutorManager.kt index 075d7ab29..5d5a0b934 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/internal/sources/camera/CameraExecutorManager.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/internal/sources/camera/CameraExecutorManager.kt @@ -32,7 +32,13 @@ import java.util.concurrent.Executors * A [ICameraThreadManager] that manages camera API >= 28.
*/ class CameraExecutorManager : ICameraThreadManager { - private val cameraExecutor = Executors.newSingleThreadExecutor() + // Use a low-priority thread factory to reduce CPU load from camera processing + private val cameraExecutor = Executors.newSingleThreadExecutor { r -> + Thread(r).apply { + priority = Thread.MIN_PRIORITY + name = "camera-low-power-thread" + } + } @RequiresApi(Build.VERSION_CODES.P) @RequiresPermission(Manifest.permission.CAMERA) @@ -60,8 +66,11 @@ class CameraExecutorManager : ICameraThreadManager { outputConfigurations: List<OutputConfiguration>, callback: CameraCaptureSession.StateCallback ) { + // Use a regular capture session; high-speed sessions are not needed here + val sessionType = SessionConfiguration.SESSION_REGULAR + SessionConfiguration( - SessionConfiguration.SESSION_REGULAR, + sessionType, outputConfigurations, cameraExecutor, callback diff --git a/core/src/main/java/io/github/thibaultbee/streampack/internal/sources/camera/CameraSource.kt b/core/src/main/java/io/github/thibaultbee/streampack/internal/sources/camera/CameraSource.kt index 9d7328c4e..3e8933162 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/internal/sources/camera/CameraSource.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/internal/sources/camera/CameraSource.kt @@ -30,6 +30,7 @@ import io.github.thibaultbee.streampack.internal.utils.extensions.deviceOrientat import io.github.thibaultbee.streampack.internal.utils.extensions.isDevicePortrait import io.github.thibaultbee.streampack.internal.utils.extensions.landscapize import io.github.thibaultbee.streampack.internal.utils.extensions.portraitize +import io.github.thibaultbee.streampack.logger.Logger import io.github.thibaultbee.streampack.utils.CameraSettings import io.github.thibaultbee.streampack.utils.cameraList import io.github.thibaultbee.streampack.utils.defaultCameraId @@ -45,6 +46,9 @@ class CameraSource( ) : IVideoSource { var previewSurface: Surface? = null override var encoderSurface: Surface?
= null + + // Set an extremely low-resolution preview size for maximum power saving + var maxPreviewSize: Size = Size(160, 120) // QQVGA (quarter-QVGA) resolution var cameraId: String = context.defaultCameraId get() = cameraController.cameraId ?: field @@ -87,19 +91,66 @@ class CameraSource( @RequiresPermission(Manifest.permission.CAMERA) suspend fun startPreview(cameraId: String = this.cameraId, restartStream: Boolean = false) { - var targets = mutableListOf<Surface>() - previewSurface?.let { targets.add(it) } - encoderSurface?.let { targets.add(it) } - cameraController.startCamera(cameraId, targets, dynamicRangeProfile.dynamicRange) - - targets = mutableListOf() - previewSurface?.let { targets.add(it) } - if (restartStream) { - encoderSurface?.let { targets.add(it) } + try { + // First, collect all surfaces for camera initialization + var targets = mutableListOf<Surface>() + val localPreviewSurface = previewSurface + if (localPreviewSurface != null) { + if (localPreviewSurface.isValid) { + targets.add(localPreviewSurface) + Logger.d(TAG, "Adding valid preview surface to camera targets") + } else { + Logger.w(TAG, "Preview surface is invalid, skipping") + } + } else { + Logger.d(TAG, "No preview surface available") + } + + val localEncoderSurface = encoderSurface + if (localEncoderSurface != null) { + if (localEncoderSurface.isValid) { + targets.add(localEncoderSurface) + Logger.d(TAG, "Adding valid encoder surface to camera targets") + } else { + Logger.w(TAG, "Encoder surface is invalid, skipping") + } + } else { + Logger.d(TAG, "No encoder surface available") + } + + if (targets.isEmpty()) { + Logger.e(TAG, "No valid surfaces available for camera preview") + return + } + + // Start the camera with all available surfaces + Logger.i(TAG, "Starting camera $cameraId with ${targets.size} surfaces") + cameraController.startCamera(cameraId, targets, dynamicRangeProfile.dynamicRange) + + // Now create targets for the request session + targets = mutableListOf() + + val surfaceForRequest = previewSurface + if (surfaceForRequest != null && surfaceForRequest.isValid) { + targets.add(surfaceForRequest) + } + + if (restartStream) { + val encoderSurfaceForRequest = encoderSurface + if (encoderSurfaceForRequest != null && encoderSurfaceForRequest.isValid) { + targets.add(encoderSurfaceForRequest) + } + } + + Logger.i(TAG, "Starting request session with ${targets.size} surfaces at $fps fps") + cameraController.startRequestSession(fps, targets) + isPreviewing = true + orientationProvider.cameraId = cameraId + Logger.i(TAG, "Camera preview started successfully") + } catch (e: Exception) { + Logger.e(TAG, "Error starting camera preview", e) + throw e } - cameraController.startRequestSession(fps, targets) - isPreviewing = true - orientationProvider.cameraId = cameraId } fun stopPreview() { @@ -179,4 +230,8 @@ class CameraSource( return Size(max(size.width, size.height), min(size.width, size.height)) } } + + companion object { + private const val TAG = "CameraSource" + } } \ No newline at end of file diff --git a/core/src/main/java/io/github/thibaultbee/streampack/views/PreviewView.kt b/core/src/main/java/io/github/thibaultbee/streampack/views/PreviewView.kt index 1d425b996..a3c82b751 100644 --- a/core/src/main/java/io/github/thibaultbee/streampack/views/PreviewView.kt +++ b/core/src/main/java/io/github/thibaultbee/streampack/views/PreviewView.kt @@ -328,10 +328,11 @@ class PreviewView @JvmOverloads constructor( ): ViewfinderSurfaceRequest { /** * Get the closest available preview size to the view size.
+ * Using a smaller target size (160x120) to save power. */ val previewSize = getPreviewOutputSize( context.getCameraCharacteristics(camera), - targetViewSize, + Size(160, 120), // Small preview size to save power, but not too small to cause compatibility issues SurfaceHolder::class.java ) diff --git a/demos/camera/src/main/java/io/github/thibaultbee/streampack/app/configuration/Configuration.kt b/demos/camera/src/main/java/io/github/thibaultbee/streampack/app/configuration/Configuration.kt index afcb4eb7d..60e64e956 100644 --- a/demos/camera/src/main/java/io/github/thibaultbee/streampack/app/configuration/Configuration.kt +++ b/demos/camera/src/main/java/io/github/thibaultbee/streampack/app/configuration/Configuration.kt @@ -43,17 +43,23 @@ class Configuration(context: Context) { ) { var enable: Boolean = true get() = sharedPref.getBoolean(resources.getString(R.string.video_enable_key), field) + + var powerSavingMode: Boolean = true + get() = sharedPref.getBoolean(resources.getString(R.string.video_power_saving_key), field) + + var disablePreview: Boolean = true + get() = sharedPref.getBoolean(resources.getString(R.string.video_disable_preview_key), field) var encoder: String = MediaFormat.MIMETYPE_VIDEO_AVC get() = sharedPref.getString(resources.getString(R.string.video_encoder_key), field)!! - var fps: Int = 30 + var fps: Int = 15 // Lower framerate to reduce CPU usage get() = sharedPref.getString( resources.getString(R.string.video_fps_key), field.toString() )!!.toInt() - var resolution: Size = Size(1280, 720) + var resolution: Size = Size(640, 480) // VGA resolution - good balance of quality and performance get() { val res = sharedPref.getString( resources.getString(R.string.video_resolution_key), @@ -66,7 +72,7 @@ class Configuration(context: Context) { ) } - var bitrate: Int = 2000 + var bitrate: Int = 250 // Low bitrate (250 kbps) to reduce encoding load get() = sharedPref.getInt(resources.getString(R.string.video_bitrate_key), field) var profile: Int = MediaCodecInfo.CodecProfileLevel.AVCProfileBaseline @@ -99,19 +105,19 @@ class Configuration(context: Context) { var encoder: String = MediaFormat.MIMETYPE_AUDIO_AAC get() = sharedPref.getString(resources.getString(R.string.audio_encoder_key), field)!! - var numberOfChannels: Int = 2 + var numberOfChannels: Int = 1 get() = sharedPref.getString( resources.getString(R.string.audio_number_of_channels_key), field.toString() )!!.toInt() - var bitrate: Int = 128000 + var bitrate: Int = 24000 get() = sharedPref.getString( resources.getString(R.string.audio_bitrate_key), field.toString() )!!.toInt() - var sampleRate: Int = 48000 + var sampleRate: Int = 16000 get() = sharedPref.getString( resources.getString(R.string.audio_sample_rate_key), field.toString() @@ -211,13 +217,13 @@ class Configuration(context: Context) { field )!! 
- var enableBitrateRegulation: Boolean = false + var enableBitrateRegulation: Boolean = true get() = sharedPref.getBoolean( resources.getString(R.string.server_enable_bitrate_regulation_key), field ) - var videoBitrateRange: Range<Int> = Range(300, 5000000) + var videoBitrateRange: Range<Int> = Range(100, 500000) get() = Range( sharedPref.getInt( resources.getString(R.string.server_video_min_bitrate_key), diff --git a/demos/camera/src/main/java/io/github/thibaultbee/streampack/app/ui/main/PreviewFragment.kt b/demos/camera/src/main/java/io/github/thibaultbee/streampack/app/ui/main/PreviewFragment.kt index 4ab7ac19c..85bd8df6c 100644 --- a/demos/camera/src/main/java/io/github/thibaultbee/streampack/app/ui/main/PreviewFragment.kt +++ b/demos/camera/src/main/java/io/github/thibaultbee/streampack/app/ui/main/PreviewFragment.kt @@ -177,20 +177,28 @@ class PreviewFragment : Fragment() { private fun createStreamer() { viewModel.createStreamer() - // Set camera settings button when camera is started - binding.preview.listener = object : PreviewView.Listener { - override fun onPreviewStarted() { - viewModel.onPreviewStarted() + // Check if preview is disabled in configuration + val configuration = Configuration(requireContext()) + if (!configuration.video.disablePreview) { + // Set camera settings button when camera is started + binding.preview.listener = object : PreviewView.Listener { + override fun onPreviewStarted() { + viewModel.onPreviewStarted() + } + + override fun onZoomRationOnPinchChanged(zoomRatio: Float) { + viewModel.onZoomRationOnPinchChanged() + } } - override fun onZoomRationOnPinchChanged(zoomRatio: Float) { - viewModel.onZoomRationOnPinchChanged() - } + // Wait till streamer exists to set it to the SurfaceView. + viewModel.inflateStreamerView(binding.preview) + binding.preview.visibility = View.VISIBLE + } else { + // Hide the preview view to save CPU resources + binding.preview.visibility = View.GONE } - // Wait till streamer exists to set it to the SurfaceView.
- viewModel.inflateStreamerView(binding.preview) - // Wait till streamer exists lifecycle.addObserver(viewModel.streamerLifeCycleObserver) } diff --git a/demos/camera/src/main/java/io/github/thibaultbee/streampack/app/ui/main/PreviewViewModel.kt b/demos/camera/src/main/java/io/github/thibaultbee/streampack/app/ui/main/PreviewViewModel.kt index a598d3702..1aac8fa8b 100644 --- a/demos/camera/src/main/java/io/github/thibaultbee/streampack/app/ui/main/PreviewViewModel.kt +++ b/demos/camera/src/main/java/io/github/thibaultbee/streampack/app/ui/main/PreviewViewModel.kt @@ -214,12 +214,20 @@ class PreviewViewModel(private val streamerManager: StreamerManager) : Observabl private fun notifyCameraChanged() { streamerManager.cameraSettings?.let { - // Set optical stabilization first - // Do not set both video and optical stabilization at the same time - if (it.stabilization.availableOptical) { - it.stabilization.enableOptical = true - } else { - it.stabilization.enableVideo = true + // Power saving settings + it.stabilization.enableOptical = false + it.stabilization.enableVideo = false + + // Set low-resolution preview to save power - removed as cameraSource is not directly accessible + // We'll rely on the PreviewView optimization instead + + // Use auto focus instead of continuous to save power + try { + if (it.focus.availableAutoModes.contains(CaptureResult.CONTROL_AF_MODE_AUTO)) { + it.focus.autoMode = CaptureResult.CONTROL_AF_MODE_AUTO + } + } catch (e: Exception) { + Log.w(TAG, "Failed to set focus mode: ${e.message}") } isAutoWhiteBalanceAvailable.postValue(it.whiteBalance.availableAutoModes.size > 1) diff --git a/demos/camera/src/main/java/io/github/thibaultbee/streampack/app/utils/StreamerManager.kt b/demos/camera/src/main/java/io/github/thibaultbee/streampack/app/utils/StreamerManager.kt index 10ab35419..f6646c4b0 100644 --- a/demos/camera/src/main/java/io/github/thibaultbee/streampack/app/utils/StreamerManager.kt +++ b/demos/camera/src/main/java/io/github/thibaultbee/streampack/app/utils/StreamerManager.kt @@ -166,6 +166,9 @@ class StreamerManager( null } } + + // Expose the private streamer property + fun getStreamer(): IStreamer? 
= streamer var isMuted: Boolean get() = streamer?.settings?.audio?.isMuted ?: true diff --git a/demos/camera/src/main/res/values/array.xml b/demos/camera/src/main/res/values/array.xml index 06f1b5665..4b776feab 100644 --- a/demos/camera/src/main/res/values/array.xml +++ b/demos/camera/src/main/res/values/array.xml @@ -1,6 +1,7 @@ + 15 24 25 30 diff --git a/demos/camera/src/main/res/values/strings.xml b/demos/camera/src/main/res/values/strings.xml index dcc0e97fb..938224bc7 100644 --- a/demos/camera/src/main/res/values/strings.xml +++ b/demos/camera/src/main/res/values/strings.xml @@ -46,7 +46,7 @@ ts_muxer_provider_key StreamPack Inc Provider name - 1280x720 + 640x480 audio_enable_key Enable audio audio_settings_key @@ -101,6 +101,10 @@ Video minimum bitrate (kb/s) video_enable_key Enable video + video_power_saving_key + Power saving mode + video_disable_preview_key + Disable preview (saves CPU) video_settings_key Warning Set white balance diff --git a/demos/camera/src/main/res/xml/root_preferences.xml b/demos/camera/src/main/res/xml/root_preferences.xml index 96262a56e..b438fd166 100644 --- a/demos/camera/src/main/res/xml/root_preferences.xml +++ b/demos/camera/src/main/res/xml/root_preferences.xml @@ -6,6 +6,16 @@ android:defaultValue="true" app:key="@string/video_enable_key" app:title="@string/video_enable" /> + + + + @@ -171,13 +181,13 @@ app:useSimpleSummaryProvider="true" /> 1000) { // Max once per second + Logger.d(TAG, "RTMP stats: backpressured=${isBackpressured}, " + + "timeSinceLastVideo=${now - lastVideoTimeMs}ms") + lastLogTime = now + } + } + + // Critical latency management: if we're severely backlogged, + // selectively drop packets to catch up + if (isBackpressured) { + // Skip some video frames when backpressured + // We can't easily detect keyframes in this class, so we'll use + // a simple sampling approach - keep 1 out of every 3 packets + if (packetCounter % 3 != 0) { + return + } + } + + // Check if we're falling behind based on video packet timing + if (lastVideoTimeMs > 0) { + val elapsed = now - lastVideoTimeMs + // If time between video packets is very large, we're falling behind + isBackpressured = elapsed > MAX_BACKLOG_MS + } + lastVideoTimeMs = now + } + try { socket.write(packet.buffer) } catch (e: Exception) { diff --git a/stream.sh b/stream.sh new file mode 100755 index 000000000..b10d707d0 --- /dev/null +++ b/stream.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +ffplay -fflags nobuffer -flags low_delay -framedrop -sync ext -listen 1 -i rtmp://0.0.0.0:1935/s/streamKey