1616package com .projecttango .experiments .augmentedrealitysample ;
1717
1818import java .util .ArrayList ;
19- import java .util .concurrent .atomic .AtomicBoolean ;
2019
2120import android .app .Activity ;
2221import android .content .Intent ;
2322import android .os .Bundle ;
23+ import android .util .Log ;
24+ import android .view .MotionEvent ;
25+ import android .view .View ;
2426import android .widget .Toast ;
2527
2628import com .google .atap .tangoservice .Tango ;
3234import com .google .atap .tangoservice .TangoPoseData ;
3335import com .google .atap .tangoservice .TangoXyzIjData ;
3436import com .projecttango .rajawali .ar .TangoRajawaliView ;
37+ import com .projecttango .tangosupport .TangoSupport ;
3538
3639/**
3740 * An example showing how to build a very simple augmented reality application in Java.
3841 * It uses Rajawali to do the rendering through the utility classes
3942 * <code>TangoRajawaliRenderer</code> and <code>TangoRajawaliView</code> from TangoUtils.
43+ * It also uses the TangoSupportLibrary to do plane fitting using the PointCloud data. Whenever the
44+ * user clicks on the camera display, plane detection will be done on the surface closest to the
45+ * click location and a 3D object will be placed in the scene anchored in that location.
4046 * <p/>
4248 * TangoRajawaliView is used in the same way as the TangoCameraPreview: we first need to initialize the
4248 * TangoRajawaliView class with the activity's context and connect to the camera we want by using
4955 * The implementation of the 3D world is done by subclassing the Renderer, just like any other
5056 * Rajawali application.
5157 * <p/>
52- * Note that it is important to include the KEY_BOOLEAN_LOWLATENCYIMUINTEGRATION configuration parameter in
53- * order to achieve best results synchronizing the Rajawali virtual world with the RGB camera.
58+ * Note that it is important to include the KEY_BOOLEAN_LOWLATENCYIMUINTEGRATION configuration
59+ * parameter in order to achieve best results synchronizing the Rajawali virtual world with
60+ * the RGB camera.
5461 */
55- public class AugmentedRealityActivity extends Activity {
62+ public class AugmentedRealityActivity extends Activity implements View .OnTouchListener {
63+ private static final String TAG = "AugmentedRealityActiv" ;
5664 private TangoRajawaliView mGLView ;
5765 private AugmentedRealityRenderer mRenderer ;
66+ private PointCloudManager mPointCloudManager ;
5867 private Tango mTango ;
5968 private boolean mIsConnected ;
6069 private boolean mIsPermissionGranted ;
@@ -65,6 +74,7 @@ protected void onCreate(Bundle savedInstanceState) {
6574 mGLView = new TangoRajawaliView (this );
6675 mRenderer = new AugmentedRealityRenderer (this );
6776 mGLView .setSurfaceRenderer (mRenderer );
77+ mGLView .setOnTouchListener (this );
6878 mTango = new Tango (this );
6979 startActivityForResult (
7080 Tango .getRequestPermissionIntent (Tango .PERMISSIONTYPE_MOTION_TRACKING ),
@@ -91,17 +101,17 @@ protected void onActivityResult(int requestCode, int resultCode, Intent data) {
91101 // Augmented reality view and renderer
92102 private void startAugmentedreality () {
93103 if (!mIsConnected ) {
104+ mIsConnected = true ;
94105 // Connect to color camera
95- mGLView .connectToTangoCamera (mTango ,
96- TangoCameraIntrinsics .TANGO_CAMERA_COLOR );
106+ mGLView .connectToTangoCamera (mTango , TangoCameraIntrinsics .TANGO_CAMERA_COLOR );
97107
98108 // Use default configuration for Tango Service, plus low latency IMU integration.
99109 TangoConfig config = mTango .getConfig (TangoConfig .CONFIG_TYPE_DEFAULT );
100110 // NOTE: low latency integration is necessary to achieve a precise alignment of
101111 // virtual objects with the RBG image and produce a good AR effect.
102112 config .putBoolean (TangoConfig .KEY_BOOLEAN_LOWLATENCYIMUINTEGRATION , true );
113+ config .putBoolean (TangoConfig .KEY_BOOLEAN_DEPTH , true );
103114 mTango .connect (config );
104- mIsConnected = true ;
105115
106116 // No need to add any coordinate frame pairs since we are not using
107117 // pose data. So just initialize.
@@ -123,17 +133,55 @@ public void onFrameAvailable(int cameraId) {
123133
124134 @ Override
125135 public void onXyzIjAvailable (TangoXyzIjData xyzIj ) {
126- // We are not using OnPoseAvailable for this app
136+ // Get the device pose at the time the point cloud was acquired
137+ TangoCoordinateFramePair framePair = new TangoCoordinateFramePair (
138+ TangoPoseData .COORDINATE_FRAME_START_OF_SERVICE ,
139+ TangoPoseData .COORDINATE_FRAME_DEVICE );
140+ TangoPoseData cloudPose = mTango .getPoseAtTime (xyzIj .timestamp , framePair );
141+
142+ // Save the cloud and point data for later use
143+ mPointCloudManager .updateXyzIjData (xyzIj , cloudPose );
127144 }
128145
            @Override
            public void onTangoEvent(TangoEvent event) {
                // Tango service events are not used by this app.
                // (The previous comment mentioned OnPoseAvailable — a copy/paste leftover.)
            }
133150 });
151+
152+ // Get extrinsics from device for use in transforms
153+ // This needs to be done after connecting Tango and listeners
154+ setupExtrinsics ();
155+
156+ // Set-up point cloud plane fitting library helper class
157+ mPointCloudManager = new PointCloudManager (mTango .getCameraIntrinsics (
158+ TangoCameraIntrinsics .TANGO_CAMERA_COLOR ));
159+
134160 }
135161 }
136162
163+ /**
164+ * Calculates and stores the fixed transformations between the device and the various sensors
165+ * to be used later for transformations between frames.
166+ */
167+ private void setupExtrinsics () {
168+ // Create Camera to IMU Transform
169+ TangoCoordinateFramePair framePair = new TangoCoordinateFramePair ();
170+ framePair .baseFrame = TangoPoseData .COORDINATE_FRAME_IMU ;
171+ framePair .targetFrame = TangoPoseData .COORDINATE_FRAME_CAMERA_COLOR ;
172+ TangoPoseData imuTrgbPose = mTango .getPoseAtTime (0.0 , framePair );
173+
174+ // Create Device to IMU Transform
175+ framePair .targetFrame = TangoPoseData .COORDINATE_FRAME_DEVICE ;
176+ TangoPoseData imuTdevicePose = mTango .getPoseAtTime (0.0 , framePair );
177+
178+ // Create Depth camera to IMU Transform
179+ framePair .targetFrame = TangoPoseData .COORDINATE_FRAME_CAMERA_DEPTH ;
180+ TangoPoseData imuTdepthPose = mTango .getPoseAtTime (0.0 , framePair );
181+
182+ mRenderer .setupExtrinsics (imuTdevicePose , imuTrgbPose , imuTdepthPose );
183+ }
184+
137185
138186 @ Override
139187 protected void onPause () {
@@ -152,4 +200,38 @@ protected void onResume() {
152200 startAugmentedreality ();
153201 }
154202 }
203+
204+ @ Override
205+ public boolean onTouch (View view , MotionEvent motionEvent ) {
206+ if (motionEvent .getAction () == MotionEvent .ACTION_UP ) {
207+ // Calculate click location in u,v (0;1) coordinates
208+ float u = motionEvent .getX () / view .getWidth ();
209+ float v = motionEvent .getY () / view .getHeight ();
210+
211+ try {
212+ doFitPlane (u , v );
213+ } catch (Throwable t ) {
214+ Log .e (TAG , "Exception measuring nomral" , t );
215+ }
216+ }
217+ return true ;
218+ }
219+
220+ /**
221+ * Use the TangoSupport library with point cloud data to calculate the plane of
222+ * the world feature pointed at the location the camera is looking at and update the
223+ * renderer to show a 3D object in that location.
224+ */
225+ private void doFitPlane (float u , float v ) {
226+ // Get the current device pose
227+ TangoCoordinateFramePair framePair = new TangoCoordinateFramePair (
228+ TangoPoseData .COORDINATE_FRAME_START_OF_SERVICE ,
229+ TangoPoseData .COORDINATE_FRAME_DEVICE );
230+ TangoPoseData devicePose = mTango .getPoseAtTime (0.0 , framePair );
231+
232+ // Perform plane fitting with the latest available point cloud data
233+ TangoSupport .IntersectionPointPlaneModelPair planeModel =
234+ mPointCloudManager .fitPlane (u , v , devicePose , mRenderer .getPoseCalculator ());
235+ mRenderer .updateObjectPose (planeModel .intersectionPoint , planeModel .planeModel , devicePose );
236+ }
155237}
0 commit comments