+package com.mobilenext.devicekit
+
+import android.hardware.display.VirtualDisplay
+import android.media.MediaCodec
+import android.media.MediaCodecInfo
+import android.media.MediaFormat
+import android.os.IBinder
+import android.util.Log
+import android.view.Display
+import android.view.Surface
+import java.io.FileDescriptor
+import java.io.FileOutputStream
+import java.io.IOException
+import java.nio.channels.Channels
+import java.util.concurrent.CountDownLatch
+import kotlin.system.exitProcess
+
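+/**
+ * Streams the device screen as a raw H.264/AVC elementary stream on stdout.
+ *
+ * The screen is mirrored into a virtual display backed by the encoder's input
+ * surface, and every encoded buffer is written straight to stdout, so all
+ * diagnostics go to logcat or stderr rather than stdout.
+ *
+ * @param bitrate target encoder bitrate in bits per second
+ * @param scale   scale factor applied to the physical display size (0.1 to 2.0)
+ * @param fps     target frame rate (1 to 60)
+ */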
+class AvcServer(private val bitrate: Int, private val scale: Float, private val fps: Int) {
+    companion object {
+        private const val TAG = "AvcServer"
+        private const val DEFAULT_BITRATE = 10_000_000 // 10 Mbps (Google's default)
+        private const val DEFAULT_SCALE = 1.0f
+        private const val DEFAULT_FPS = 30
+        private const val MIN_FPS = 1
+        private const val MAX_FPS = 60
+        private const val I_FRAME_INTERVAL = 1 // 1 second
+
+        @JvmStatic
+        fun main(args: Array<String>) {
+            try {
+                val (bitrate, scale, fps) = parseArguments(args)
+                val server = AvcServer(bitrate, scale, fps)
+                server.start()
+            } catch (e: Exception) {
+                Log.e(TAG, "Failed to start AVC stream", e)
+                System.err.println("Error: ${e.message}")
+                exitProcess(1)
+            }
+        }
+
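+        // Parses --bitrate, --scale and --fps. Invalid --bitrate/--scale values fall back
+        // to their defaults, an invalid --fps is a fatal error, and a flag passed as the
+        // last argument without a value is ignored.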
+        private fun parseArguments(args: Array<String>): Triple<Int, Float, Int> {
+            var bitrate = DEFAULT_BITRATE
+            var scale = DEFAULT_SCALE
+            var fps = DEFAULT_FPS
+
+            var i = 0
+            while (i < args.size) {
+                when (args[i]) {
+                    "--bitrate" -> {
+                        if (i + 1 < args.size) {
+                            bitrate = args[i + 1].toIntOrNull()?.coerceAtLeast(100_000) ?: DEFAULT_BITRATE
+                            i++
+                        }
+                    }
+                    "--scale" -> {
+                        if (i + 1 < args.size) {
+                            scale = args[i + 1].toFloatOrNull()?.coerceIn(0.1f, 2.0f) ?: DEFAULT_SCALE
+                            i++
+                        }
+                    }
+                    "--fps" -> {
+                        if (i + 1 < args.size) {
+                            val parsedFps = args[i + 1].toIntOrNull()
+                            if (parsedFps == null) {
+                                throw IllegalArgumentException("Invalid fps value: ${args[i + 1]}. Must be an integer between $MIN_FPS and $MAX_FPS")
+                            }
+                            if (parsedFps < MIN_FPS || parsedFps > MAX_FPS) {
+                                throw IllegalArgumentException("fps value out of range: $parsedFps. Must be between $MIN_FPS and $MAX_FPS")
+                            }
+                            fps = parsedFps
+                            i++
+                        }
+                    }
+                }
+                i++
+            }
+
+            return Triple(bitrate, scale, fps)
+        }
+    }
+
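+    // Counted down by the shutdown hook and polled (not awaited) by the encoding loop,
+    // so the stream stops cleanly when the process is asked to terminate.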
+    private val shutdownLatch = CountDownLatch(1)
+
+    private fun start() {
+        try {
+            // Register shutdown hook for graceful termination
+            Runtime.getRuntime().addShutdownHook(Thread {
+                Log.d(TAG, "Shutdown hook triggered")
+                shutdown()
+            })
+
+            // Start H.264 streaming
+            streamAvcFrames()
+
+        } catch (e: Exception) {
+            Log.e(TAG, "Error in AVC stream", e)
+            System.err.println("Error: ${e.message}")
+            exitProcess(1)
+        }
+    }
+
+    private fun shutdown() {
+        shutdownLatch.countDown()
+    }
+
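+    // Capture/encode pipeline: physical display -> scaled virtual display -> encoder input
+    // surface -> encoded H.264 buffers -> stdout. Runs until the output pipe breaks or a
+    // shutdown is requested.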
+    private fun streamAvcFrames() {
+        val displayInfo = DisplayUtils.getDisplayInfo()
+        val scaledWidth = (displayInfo.width * scale).toInt()
+        val scaledHeight = (displayInfo.height * scale).toInt()
+
+        Log.d(TAG, "Starting AVC stream: ${displayInfo.width}x${displayInfo.height} -> ${scaledWidth}x${scaledHeight}")
+        Log.d(TAG, "Configuration: bitrate=$bitrate, fps=$fps, I-frame interval=${I_FRAME_INTERVAL}s")
+        Log.d(TAG, "Scaled dimensions: width=$scaledWidth, height=$scaledHeight")
+
+        // Validate dimensions
+        if (scaledWidth <= 0 || scaledHeight <= 0) {
+            throw IllegalArgumentException("Invalid dimensions: ${scaledWidth}x${scaledHeight}")
+        }
+        if (scaledWidth % 2 != 0 || scaledHeight % 2 != 0) {
+            Log.w(TAG, "Dimensions ${scaledWidth}x${scaledHeight} are not even; some AVC encoders reject or crop odd sizes")
+        }
+
+        // Check codec capabilities before attempting to configure
+        val codec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC)
+        val codecInfo = codec.codecInfo
+        val capabilities = codecInfo.getCapabilitiesForType(MediaFormat.MIMETYPE_VIDEO_AVC)
+        val videoCapabilities = capabilities.videoCapabilities
+
+        Log.d(TAG, "Codec capabilities:")
+        Log.d(TAG, " Supported widths: ${videoCapabilities.supportedWidths}")
+        Log.d(TAG, " Supported heights: ${videoCapabilities.supportedHeights}")
+        Log.d(TAG, " Width alignment: ${videoCapabilities.widthAlignment}")
+        Log.d(TAG, " Height alignment: ${videoCapabilities.heightAlignment}")
+
+        // Check if dimensions are supported
+        if (!videoCapabilities.isSizeSupported(scaledWidth, scaledHeight)) {
+            val maxWidth = videoCapabilities.supportedWidths.upper
+            val maxHeight = videoCapabilities.supportedHeights.upper
+            Log.e(TAG, "Dimensions ${scaledWidth}x${scaledHeight} not supported by codec")
+            Log.e(TAG, "Maximum supported: ${maxWidth}x${maxHeight}")
+            codec.release()
+            throw IllegalArgumentException(
+                "Video dimensions ${scaledWidth}x${scaledHeight} exceed codec capabilities. " +
+                    "Maximum supported: ${maxWidth}x${maxHeight}. " +
+                    "Try using --scale parameter to reduce resolution (e.g., --scale 0.5)"
+            )
+        }
+
+        // Configure MediaCodec for H.264 encoding (mirrors Google's screen-sharing configuration).
+        // Several of these keys (profile, latency, priority, operating rate) are hints that a
+        // given encoder may ignore.
+        val format = MediaFormat.createVideoFormat(
+            MediaFormat.MIMETYPE_VIDEO_AVC,
+            scaledWidth,
+            scaledHeight
+        ).apply {
+            setInteger(MediaFormat.KEY_BIT_RATE, bitrate)
+            setInteger(MediaFormat.KEY_FRAME_RATE, fps)
+            setInteger(MediaFormat.KEY_CAPTURE_RATE, fps) // Capture rate matches the target frame rate
+            setFloat(MediaFormat.KEY_OPERATING_RATE, fps.toFloat()) // Hint the codec to operate at the same rate
+            setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, I_FRAME_INTERVAL)
+            setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface)
+            // Use High profile for better VUI support
+            setInteger(MediaFormat.KEY_PROFILE, MediaCodecInfo.CodecProfileLevel.AVCProfileHigh)
+            // Low-latency settings
+            setInteger(MediaFormat.KEY_LATENCY, 0) // Request the lowest encoder latency
+            setInteger(MediaFormat.KEY_PRIORITY, 0) // 0 = realtime priority
+        }
+
+        Log.d(TAG, "MediaFormat created: $format")
+        Log.d(TAG, "Codec created, attempting to configure...")
+
+        try {
+            codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
+            Log.d(TAG, "Codec configured successfully")
+
+            // Log the actual output format to see what the codec set
+            val outputFormat = codec.outputFormat
+            Log.d(TAG, "Codec output format: $outputFormat")
+            val actualFrameRate = outputFormat.getInteger(MediaFormat.KEY_FRAME_RATE, -1)
+            Log.d(TAG, "Actual frame rate in output: $actualFrameRate")
+        } catch (e: Exception) {
+            Log.e(TAG, "Failed to configure codec with format: $format", e)
+            codec.release()
+            throw e
+        }
+
+        // Get input surface from codec
+        val inputSurface = codec.createInputSurface()
+
+        // Create virtual display to render to codec's input surface
+        val virtualDisplay = DisplayUtils.createVirtualDisplay(
+            "avc.screen.capture",
+            scaledWidth,
+            scaledHeight,
+            displayInfo.dpi,
+            inputSurface
+        )
+
+        if (virtualDisplay == null) {
+            System.err.println("Error: Failed to create virtual display")
+            codec.release()
+            exitProcess(1)
+        }
+
+        // Start codec
+        codec.start()
+        Log.d(TAG, "AVC encoder started")
+
+        val bufferInfo = MediaCodec.BufferInfo()
+        val timeout = 10_000L // dequeue timeout in microseconds (10,000 µs = 10 ms) for lower latency
+
+        // FileChannel for stdout so encoded buffers can be written directly from the
+        // ByteBuffer without an intermediate heap copy
+        val stdoutChannel = FileOutputStream(FileDescriptor.out).channel
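+        // Note: stdout carries only the raw AVC bitstream; diagnostics go to logcat
+        // (android.util.Log) or stderr so they cannot corrupt the stream.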
+
+        var frameCount = 0
+        var lastPts = 0L
+        var firstPts = 0L
+
+        try {
+            // Encoding loop - matches Google's libscreen-sharing-agent.so behavior
+            while (!Thread.currentThread().isInterrupted) {
+                // Check if shutdown requested
+                if (shutdownLatch.count == 0L) {
+                    break
+                }
+
+                // Dequeue encoded output buffer
+                val outputBufferIndex = codec.dequeueOutputBuffer(bufferInfo, timeout)
+
+                when {
+                    outputBufferIndex >= 0 -> {
+                        val outputBuffer = codec.getOutputBuffer(outputBufferIndex)
+                        if (outputBuffer != null && bufferInfo.size > 0) {
+                            // Write the encoded H.264 data directly from the codec's ByteBuffer to
+                            // stdout: the direct buffer stays in native memory, so no copy into a
+                            // heap byte[] is needed, and the blocking write provides backpressure
+                            // (same idea as Google's SocketWriter).
+                            outputBuffer.position(bufferInfo.offset)
+                            outputBuffer.limit(bufferInfo.offset + bufferInfo.size)
+
+                            // Channel writes can be partial, so loop until the buffer is drained
+                            try {
+                                while (outputBuffer.hasRemaining()) {
+                                    stdoutChannel.write(outputBuffer)
+                                }
+                            } catch (e: IOException) {
+                                // Pipe broken - client disconnected
+                                Log.d(TAG, "Output pipe broken, shutting down")
+                                shutdown()
+                                break
+                            }
+
+                            // Classify the buffer for diagnostics
+                            val frameType = when {
+                                (bufferInfo.flags and MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0 -> "config"
+                                (bufferInfo.flags and MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0 -> "keyframe"
+                                else -> "frame"
+                            }
+
+                            // Track presentation timestamps to estimate the actual frame rate.
+                            // Codec config buffers (SPS/PPS) are forwarded as-is and carry no
+                            // meaningful presentation timestamp, so they are excluded.
+                            if (frameType != "config") {
+                                if (frameCount == 0) {
+                                    firstPts = bufferInfo.presentationTimeUs
+                                }
+
+                                if (frameCount > 0 && frameCount % 60 == 0) {
+                                    val deltaPts = bufferInfo.presentationTimeUs - lastPts
+                                    val totalTime = (bufferInfo.presentationTimeUs - firstPts) / 1_000_000.0
+                                    val avgFps = frameCount / totalTime
+                                    Log.d(TAG, "Frame $frameCount: pts=${bufferInfo.presentationTimeUs}µs, delta=${deltaPts}µs, avg_fps=%.2f".format(avgFps))
+                                }
+
+                                lastPts = bufferInfo.presentationTimeUs
+                                frameCount++
+                            }
+
+                            // Log.v(TAG, "AVC $frameType: ${bufferInfo.size} bytes")
+                        }
+
+                        // Release buffer back to codec (enables backpressure when slow)
+                        codec.releaseOutputBuffer(outputBufferIndex, false)
+                    }
+                    outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED -> {
+                        val newFormat = codec.outputFormat
+                        Log.d(TAG, "Output format changed: $newFormat")
+                    }
+                    outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER -> {
+                        // No buffer available, continue (normal)
+                    }
+                    else -> {
+                        Log.w(TAG, "Unexpected output buffer index: $outputBufferIndex")
+                    }
+                }
+            }
+        } finally {
+            Log.d(TAG, "Stopping AVC encoder")
+            stdoutChannel.close()
+            codec.stop()
+            codec.release()
+            virtualDisplay.release()
+        }
+    }
+}
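
DisplayUtils.getDisplayInfo() and DisplayUtils.createVirtualDisplay() are referenced above but are not part of this file. As a rough sketch only, inferred from the call sites in AvcServer (the names, fields and nullability here are assumptions, not the project's actual implementation), the expected surface looks roughly like this:

// Hypothetical sketch of the DisplayUtils surface AvcServer relies on; the real
// implementation lives elsewhere in the module and may differ.
data class DisplayInfo(val width: Int, val height: Int, val dpi: Int)

object DisplayUtils {
    // Current physical display size and density.
    fun getDisplayInfo(): DisplayInfo = TODO("provided elsewhere in the module")

    // Mirroring virtual display rendering into the given surface; null on failure
    // (AvcServer checks for null and exits).
    fun createVirtualDisplay(
        name: String,
        width: Int,
        height: Int,
        dpi: Int,
        surface: android.view.Surface
    ): android.hardware.display.VirtualDisplay? = TODO("provided elsewhere in the module")
}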