Augmented reality (AR) is a live, direct or indirect view of a physical, real-world environment whose elements are augmented (or supplemented) by computer-generated sensory input such as sound, video, graphics, or GPS data. It is related to the more general concept of mediated reality, in which a view of reality is modified (possibly even diminished rather than augmented) by a computer. In other words, the technology works by enhancing one's current perception of reality.
Smartphones and tablets have the processing power and hardware needed for developers to write interesting applications that combine live camera and video feeds, accurate sensor data, and other real-time user data.
In this tutorial, I will show how to build an Android-based AR application that displays the live camera preview and draws sensor readings as an overlay on top of it, updating the overlay as the sensor values change. The sensors we will read report the following:
- The accelerometer values are in SI units (m/s², meters per second squared; Earth's gravity is about 9.81 m/s²).
- The magnetic field sensor reports values in microteslas (µT). Because the readings come as x, y, and z components, we can combine that vector with gravity to work out where magnetic north lies relative to the device (see the sketch after this list).
- The gyroscope measures the rate of rotation around each axis in radians per second, which can also be used to compute the relative device orientation.
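To turn the raw accelerometer and magnetic field vectors into an actual heading, Android's SensorManager can compute a rotation matrix and orientation angles from them. The snippet below is a minimal sketch, not part of the sample project; the lastAccel and lastMagnetic arrays are hypothetical fields that you would fill with event.values in onSensorChanged().

// Sketch only: derive the device orientation from the two sensor vectors.
// lastAccel and lastMagnetic are hypothetical float[3] fields, filled in
// onSensorChanged() for TYPE_ACCELEROMETER and TYPE_MAGNETIC_FIELD.
private float[] computeOrientation(float[] lastAccel, float[] lastMagnetic) {
    float[] rotationMatrix = new float[9];
    float[] orientation = new float[3];
    // getRotationMatrix() returns false when the readings are unusable (e.g. free fall)
    if (SensorManager.getRotationMatrix(rotationMatrix, null, lastAccel, lastMagnetic)) {
        // orientation[0] = azimuth (heading relative to magnetic north),
        // orientation[1] = pitch, orientation[2] = roll -- all in radians
        SensorManager.getOrientation(rotationMatrix, orientation);
        return orientation;
    }
    return null;
}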
Download Sample Code
Have a look at the following code snippets:
//MainActivity.java
package com.example.harshalbenake.sensorsdemo_as;

import android.app.Activity;
import android.os.Bundle;
import android.widget.FrameLayout;

public class MainActivity extends Activity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        // The camera preview goes in first; the sensor overlay is drawn on top of it
        FrameLayout arViewPane = (FrameLayout) findViewById(R.id.ar_view_pane);

        ArDisplayView arDisplay = new ArDisplayView(getApplicationContext(), this);
        arViewPane.addView(arDisplay);

        OverlayView arContent = new OverlayView(getApplicationContext());
        arViewPane.addView(arContent);
    }
}
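MainActivity inflates activity_main.xml and looks up a FrameLayout with the id ar_view_pane, but that layout file is not among the snippets. A minimal version consistent with the IDs used above might look like this (an assumption, not the actual file from the sample project):

<?xml version="1.0" encoding="utf-8"?>
<!-- Hypothetical activity_main.xml: a single full-screen FrameLayout so the
     camera preview and the sensor overlay added in MainActivity can stack
     on top of each other. -->
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:id="@+id/ar_view_pane"
    android:layout_width="match_parent"
    android:layout_height="match_parent" />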
//ArDisplayView.java
package com.example.harshalbenake.sensorsdemo_as;

import java.io.IOException;
import java.util.List;

import android.app.Activity;
import android.content.Context;
import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.hardware.Camera.CameraInfo;
import android.hardware.Camera.Size;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

public class ArDisplayView extends SurfaceView implements SurfaceHolder.Callback {
    public static final String DEBUG_TAG = "ArDisplayView Log";

    Camera mCamera;
    SurfaceHolder mHolder;
    Activity mActivity;

    public ArDisplayView(Context context, Activity activity) {
        super(context);

        mActivity = activity;
        mHolder = getHolder();

        // This value is supposedly deprecated and set "automatically" when needed.
        // Without this, the application crashes.
        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);

        // callbacks implemented by ArDisplayView
        mHolder.addCallback(this);
    }

    public void surfaceCreated(SurfaceHolder holder) {
        Log.d(DEBUG_TAG, "surfaceCreated");

        // Grab the camera
        mCamera = Camera.open();

        // Set display orientation to match the current device rotation
        CameraInfo info = new CameraInfo();
        Camera.getCameraInfo(CameraInfo.CAMERA_FACING_BACK, info);
        int rotation = mActivity.getWindowManager().getDefaultDisplay().getRotation();
        int degrees = 0;
        switch (rotation) {
            case Surface.ROTATION_0:
                degrees = 0;
                break;
            case Surface.ROTATION_90:
                degrees = 90;
                break;
            case Surface.ROTATION_180:
                degrees = 180;
                break;
            case Surface.ROTATION_270:
                degrees = 270;
                break;
        }
        mCamera.setDisplayOrientation((info.orientation - degrees + 360) % 360);

        try {
            mCamera.setPreviewDisplay(mHolder);
        } catch (IOException e) {
            Log.e(DEBUG_TAG, "surfaceCreated exception: ", e);
        }
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        Log.d(DEBUG_TAG, "surfaceChanged");

        Camera.Parameters params = mCamera.getParameters();

        // Find an appropriate preview size that fits the surface
        List<Size> prevSizes = params.getSupportedPreviewSizes();
        for (Size s : prevSizes) {
            if ((s.height <= height) && (s.width <= width)) {
                params.setPreviewSize(s.width, s.height);
                break;
            }
        }

        // Set the preview format
        //params.setPreviewFormat(ImageFormat.JPEG);

        // Consider adjusting the frame rate to an appropriate rate for AR

        // Confirm the parameters
        mCamera.setParameters(params);

        // Begin previewing
        mCamera.startPreview();
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        Log.d(DEBUG_TAG, "surfaceDestroyed");

        // Shut down camera preview
        mCamera.stopPreview();
        mCamera.release();
    }
}
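The loop in surfaceChanged() picks the first supported preview size that fits inside the surface, which depends on the order in which the camera driver lists its sizes. If you want the closest fit, a small helper along these lines (a sketch, not part of the sample) could replace the loop:

// Sketch: choose the largest supported preview size that still fits the surface,
// instead of whichever fitting size the driver happens to list first.
private Camera.Size bestPreviewSize(List<Camera.Size> sizes, int width, int height) {
    Camera.Size best = null;
    for (Camera.Size s : sizes) {
        if (s.width <= width && s.height <= height) {
            if (best == null || (long) s.width * s.height > (long) best.width * best.height) {
                best = s;
            }
        }
    }
    return best; // null if nothing fits; in that case keep the camera's default size
}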
//OverlayView.java
package com.example.harshalbenake.sensorsdemo_as;

import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Paint.Align;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.util.Log;
import android.view.View;

public class OverlayView extends View implements SensorEventListener {
    public static final String DEBUG_TAG = "OverlayView Log";

    String accelData = "Accelerometer Data";
    String compassData = "Compass Data";
    String gyroData = "Gyro Data";

    public OverlayView(Context context) {
        super(context);

        SensorManager sensors = (SensorManager) context.getSystemService(Context.SENSOR_SERVICE);
        Sensor accelSensor = sensors.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
        Sensor compassSensor = sensors.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD);
        Sensor gyroSensor = sensors.getDefaultSensor(Sensor.TYPE_GYROSCOPE);

        boolean isAccelAvailable = sensors.registerListener(this, accelSensor, SensorManager.SENSOR_DELAY_NORMAL);
        boolean isCompassAvailable = sensors.registerListener(this, compassSensor, SensorManager.SENSOR_DELAY_NORMAL);
        boolean isGyroAvailable = sensors.registerListener(this, gyroSensor, SensorManager.SENSOR_DELAY_NORMAL);
    }

    @Override
    protected void onDraw(Canvas canvas) {
        Log.d(DEBUG_TAG, "onDraw");
        super.onDraw(canvas);

        // Draw something fixed (for now) over the camera view
        Paint contentPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        contentPaint.setTextAlign(Align.CENTER);
        contentPaint.setTextSize(20);
        contentPaint.setColor(Color.RED);

        canvas.drawText(accelData, canvas.getWidth() / 2, canvas.getHeight() / 4, contentPaint);
        canvas.drawText(compassData, canvas.getWidth() / 2, canvas.getHeight() / 2, contentPaint);
        canvas.drawText(gyroData, canvas.getWidth() / 2, (canvas.getHeight() * 3) / 4, contentPaint);
    }

    public void onAccuracyChanged(Sensor arg0, int arg1) {
        Log.d(DEBUG_TAG, "onAccuracyChanged");
    }

    public void onSensorChanged(SensorEvent event) {
        Log.d(DEBUG_TAG, "onSensorChanged");

        StringBuilder msg = new StringBuilder(event.sensor.getName()).append(" ");
        for (float value : event.values) {
            msg.append("[").append(value).append("]");
        }

        switch (event.sensor.getType()) {
            case Sensor.TYPE_ACCELEROMETER:
                accelData = msg.toString();
                break;
            case Sensor.TYPE_GYROSCOPE:
                gyroData = msg.toString();
                break;
            case Sensor.TYPE_MAGNETIC_FIELD:
                compassData = msg.toString();
                break;
        }

        // Redraw the overlay with the new sensor readings
        this.invalidate();
    }
}
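Note that OverlayView registers its sensor listeners in the constructor but never unregisters them, so they keep firing (and consuming battery) after the view is gone. A minimal way to handle this, assuming the SensorManager is kept in a field (here called mSensors, which the sample does not have), would be:

// Sketch: mSensors is a hypothetical field holding the SensorManager obtained
// in the constructor; unregister this view as a listener when it leaves the window.
@Override
protected void onDetachedFromWindow() {
    super.onDetachedFromWindow();
    mSensors.unregisterListener(this); // removes all of this listener's registrations
}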
//AndroidManifest.xml
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.example.harshalbenake.sensorsdemo_as">

    <uses-sdk android:minSdkVersion="10" />

    <uses-permission android:name="android.permission.ACCESS_FINE_LOCATION" />
    <uses-permission android:name="android.permission.CAMERA" />

    <uses-feature android:required="true" android:name="android.hardware.camera" />
    <uses-feature android:required="false" android:name="android.hardware.camera.autofocus" />
    <uses-feature android:required="true" android:name="android.hardware.location.gps" />
    <uses-feature android:required="true" android:name="android.hardware.sensor.accelerometer" />
    <uses-feature android:required="true" android:name="android.hardware.sensor.compass" />
    <uses-feature android:required="true" android:name="android.hardware.sensor.gyroscope" />

    <application
        android:allowBackup="true"
        android:icon="@mipmap/ic_launcher"
        android:label="@string/app_name"
        android:supportsRtl="true"
        android:theme="@style/AppTheme">
        <activity android:name=".MainActivity">
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>

</manifest>
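The manifest declares the CAMERA permission, which is enough at the old SDK level the sample targets (minSdkVersion 10). If you rebuild the project against API 23 or later, the camera permission must also be granted at runtime before Camera.open() succeeds. A sketch of such a check in MainActivity (not part of the original sample, and assuming imports of android.Manifest and android.content.pm.PackageManager) might be:

// Sketch for API 23+: call before adding ArDisplayView in onCreate().
// REQUEST_CAMERA is an arbitrary request code chosen for this example.
private static final int REQUEST_CAMERA = 1;

private boolean ensureCameraPermission() {
    if (checkSelfPermission(Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
        requestPermissions(new String[] { Manifest.permission.CAMERA }, REQUEST_CAMERA);
        return false; // wait for onRequestPermissionsResult() before starting the preview
    }
    return true;
}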