135 changes: 135 additions & 0 deletions Extras/stereo_script.swift
@@ -0,0 +1,135 @@
#!/usr/bin/swift

import Cocoa
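
// This script splits an interleaved capture (alternating left-eye and right-eye
// frames, as produced by ParallaxCamera) into separate per-eye image sequences
// plus ffconcat playlists for reassembling each eye as a video. Frames are
// classified by sampling indicator pixels that differ between the two views.
// Run it from the directory containing the extracted frames.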

// First extract the individual frames from the source video (using the same
// input and output rate extracts exactly one image per source frame):
// ffmpeg -r 1 -i trim.mov -r 1 output_%04d.bmp

let contents = try? FileManager.default.contentsOfDirectory(atPath: ".")

// Only keep .bmp files (hasSuffix avoids matching names that merely contain ".bmp")
let images = contents?.filter({ $0.hasSuffix(".bmp") }).sorted()

guard let images = images, !images.isEmpty else {
    fatalError("No .bmp frames found in the current directory")
}

// Assuming 60 fps constant rate
let timeBetweenFrames: Double = 1/60.0 // seconds
var currentTimestamp: Double = 0 // seconds

var leftCount = 0
var rightCount = 0
var droppedCount = 0

var currentIndex = 0
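
// Each entry in these arrays is a (fileName, timestampInSeconds) pair, used
// later to compute per-frame durations for the ffconcat playlists.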

var leftFramesInfo = [(String, Double)]()
var rightFramesInfo = [(String, Double)]()

for imagePath in images {

    currentTimestamp = Double(currentIndex) * timeBetweenFrames
    currentIndex += 1

    guard let image = NSImage(contentsOfFile: imagePath) else {
        print("\(imagePath) dropped")
        droppedCount += 1
        continue
    }

    var imageRect = CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height)

    // These points and colors are specific to this video. They will look different
    // for frames that should be on the right and frames that should be on the left.
    guard let cgImage = image.cgImage(forProposedRect: &imageRect, context: nil, hints: nil),
          let colors = cgImage.colors(at: [CGPoint(x: 1660, y: 1160), CGPoint(x: 1530, y: 1070), CGPoint(x: 1630, y: 1160), CGPoint(x: 1500, y: 1070)])
    else {
        print("\(imagePath) dropped")
        droppedCount += 1
        continue
    }

    let isLeft = colors[0].redComponent > 0.9 || colors[1].redComponent > 0.9
    let isRight = colors[2].redComponent > 0.9 || colors[3].redComponent > 0.9

    if isRight && isLeft {
        print("\(imagePath) ambiguous")
        droppedCount += 1
        continue
    }

    if isRight {
        print("\(imagePath) right")
        let newPath = String(format: "right_%04d.bmp", rightCount)
        rightFramesInfo.append((newPath, currentTimestamp))
        rightCount += 1
        try? FileManager.default.moveItem(atPath: imagePath, toPath: newPath)
    } else if isLeft {
        print("\(imagePath) left")
        let newPath = String(format: "left_%04d.bmp", leftCount)
        leftFramesInfo.append((newPath, currentTimestamp))
        leftCount += 1
        try? FileManager.default.moveItem(atPath: imagePath, toPath: newPath)
    } else {
        print("\(imagePath) dropped")
        droppedCount += 1
    }
}

print("Left: \(leftCount) Right: \(rightCount) Dropped: \(droppedCount)")

var leftConcatString = "ffconcat version 1.0"

// The very last frame is dropped: computing its duration would require the
// timestamp of a frame after it.
for i in 0 ..< leftFramesInfo.count - 1 {
    let (fileName, timestamp) = leftFramesInfo[i]
    let (_, nextTimestamp) = leftFramesInfo[i + 1]
    leftConcatString += "\nfile \(fileName)\nduration \(nextTimestamp - timestamp)"
}

try? leftConcatString.write(toFile: "left.ffconcat", atomically: true, encoding: .utf8)
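
// The resulting left.ffconcat looks roughly like this (file names and durations
// below are illustrative):
//
// ffconcat version 1.0
// file left_0000.bmp
// duration 0.03333333333333333
// file left_0001.bmp
// duration 0.016666666666666666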

var rightConcatString = "ffconcat version 1.0"

// As above, the very last frame is dropped.
for i in 0 ..< rightFramesInfo.count - 1 {
    let (fileName, timestamp) = rightFramesInfo[i]
    let (_, nextTimestamp) = rightFramesInfo[i + 1]
    rightConcatString += "\nfile \(fileName)\nduration \(nextTimestamp - timestamp)"
}

try? rightConcatString.write(toFile: "right.ffconcat", atomically: true, encoding: .utf8)

// ffmpeg -i left.ffconcat -r 30 -c:v libx264 -preset slow -crf 23 -vsync 2 left.mp4

// ffmpeg -i right.ffconcat -r 30 -c:v libx264 -preset slow -crf 23 -vsync 2 right.mp4

// After creating the left/right video, add the 3D metadata with:
// ffmpeg -i stereo.mp4 -map 0 -c copy -metadata:s:v stereo_mode=left_right out.mkv
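
// Note: stereo.mp4 above is assumed to be the left and right videos combined
// side by side into a single frame, which can be done with e.g.:
// ffmpeg -i left.mp4 -i right.mp4 -filter_complex hstack stereo.mp4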

extension CGImage {

    /// Samples the color at each of the given points. Points are offsets into
    /// the bitmap buffer, whose first row is the top row of the image.
    func colors(at: [CGPoint]) -> [NSColor]? {
        let colorSpace = CGColorSpaceCreateDeviceRGB()
        let bytesPerPixel = 4
        let bytesPerRow = bytesPerPixel * width
        let bitsPerComponent = 8
        let bitmapInfo: UInt32 = CGImageAlphaInfo.premultipliedLast.rawValue | CGBitmapInfo.byteOrder32Big.rawValue

        guard let context = CGContext(data: nil, width: width, height: height, bitsPerComponent: bitsPerComponent, bytesPerRow: bytesPerRow, space: colorSpace, bitmapInfo: bitmapInfo),
              let ptr = context.data?.assumingMemoryBound(to: UInt8.self) else {
            return nil
        }

        context.draw(self, in: CGRect(x: 0, y: 0, width: width, height: height))

        return at.map { p in
            let i = bytesPerRow * Int(p.y) + bytesPerPixel * Int(p.x)

            // The context stores premultiplied alpha, so divide by alpha to
            // recover the original components, guarding against division by
            // zero on fully transparent pixels.
            let a = CGFloat(ptr[i + 3]) / 255.0

            guard a > 0 else { return NSColor(red: 0, green: 0, blue: 0, alpha: 0) }

            let r = (CGFloat(ptr[i]) / a) / 255.0
            let g = (CGFloat(ptr[i + 1]) / a) / 255.0
            let b = (CGFloat(ptr[i + 2]) / a) / 255.0

            return NSColor(red: r, green: g, blue: b, alpha: a)
        }
    }
}
47 changes: 47 additions & 0 deletions RealityMixer/Capture/Misc/CameraPoseSender.swift
@@ -250,3 +250,50 @@ final class TemporaryCalibrationStorage {
        self.calibrationPose = pose
    }
}

final class ParallaxCamera {
    private weak var client: TCPClient?

    private let rightEyePosition: Vector3
    private let leftEyePosition: Vector3
    private let rotation: Quaternion

    private var count = 0

    init?(client: TCPClient) {
        guard let pose = TemporaryCalibrationStorage.shared.calibrationPose else {
            return nil
        }

        let ipd = 0.06 // meters (hard-coded approximate interpupillary distance)

        // In SceneKit's row-vector convention, the first row of the rotation
        // matrix (m11, m12, m13) is the camera's local x axis (its "right"
        // direction) expressed in world coordinates.
        let rotationMatrix = SCNMatrix4(simd_double4x4(pose.rotation))
        let rightVector = Vector3(
            Double(rotationMatrix.m11), Double(rotationMatrix.m12), Double(rotationMatrix.m13)
        ).normalized

        // Each eye sits half an IPD away from the calibrated camera position,
        // along the camera's right axis.
        self.rightEyePosition = pose.position + (0.5 * ipd) * rightVector
        self.leftEyePosition = pose.position - (0.5 * ipd) * rightVector
        self.rotation = pose.rotation
        self.client = client
    }

    private func sendCameraUpdate(pose: Pose) {
        _ = client?.send(data: CameraPositionPayload(position: pose.position).data)
        _ = client?.send(data: CameraRotationPayload(rotation: pose.rotation).data)
    }

    // Alternates between the two eye poses on successive ticks: even ticks send
    // the right-eye pose, odd ticks the left-eye pose.
    func update(elapsedTime: TimeInterval) {
        // Note: elapsedTime is currently unused.
        if count == 0 {
            sendCameraUpdate(
                pose: .init(position: rightEyePosition, rotation: rotation)
            )
        } else {
            sendCameraUpdate(
                pose: .init(position: leftEyePosition, rotation: rotation)
            )
        }

        count = (count + 1) % 2
    }
}
MixedRealityViewController.swift
@@ -41,7 +41,8 @@ final class MixedRealityViewController: UIViewController {
        true
    }

-    private let cameraPoseSender: CameraPoseSender?
+    // private let cameraPoseSender: CameraPoseSender?
+    private let parallaxCamera: ParallaxCamera?

    init(
        client: TCPClient,
@@ -52,7 +53,8 @@
        self.configuration = configuration
        self.chromaConfiguration = chromaConfiguration
        self.factory = ARConfigurationFactory(mrConfiguration: configuration)
-        self.cameraPoseSender = configuration.enableMovingCamera ? CameraPoseSender(client: client):nil
+        self.parallaxCamera = ParallaxCamera(client: client)
+        // self.cameraPoseSender = configuration.enableMovingCamera ? CameraPoseSender(client: client):nil
        super.init(nibName: String(describing: type(of: self)), bundle: Bundle(for: type(of: self)))
    }

@@ -279,6 +281,8 @@
    }

    @objc func update(with sender: CADisplayLink) {
+        let interval = sender.targetTimestamp - sender.timestamp
+        parallaxCamera?.update(elapsedTime: interval)
        receiveData()
        oculusMRC?.update()
    }
@@ -347,7 +351,7 @@ extension MixedRealityViewController: ARSessionDelegate {
            configureForeground(with: frame)
            first = false
        } else {
-            cameraPoseSender?.didUpdate(frame: frame)
+            // cameraPoseSender?.didUpdate(frame: frame)
        }

        updateMiddle(with: frame.capturedImage)