ARCore brings powerful augmented reality capabilities to Android applications. This comprehensive guide covers ARCore integration, 3D rendering, plane detection, and interactive AR experiences using Java.
ARCore Architecture Overview
ARCore Processing Pipeline:

┌─────────────────┐    ┌──────────────────┐    ┌─────────────────┐
│  Camera Input   │ -> │ Motion Tracking  │ -> │ Plane Detection │
└─────────────────┘    └──────────────────┘    └─────────────────┘
        │                      │                       │
┌─────────────────┐    ┌──────────────────┐    ┌─────────────────┐
│     Light       │    │  Environmental   │    │     Anchor      │
│   Estimation    │    │  Understanding   │    │   Management    │
└─────────────────┘    └──────────────────┘    └─────────────────┘
Project Setup and Dependencies
1. Android Manifest Configuration
<!-- AndroidManifest.xml --> <uses-permission android:name="android.permission.CAMERA" /> <uses-permission android:name="android.permission.INTERNET" /> <uses-feature android:name="android.hardware.camera.ar" android:required="true" /> <application> <!-- ARCore requirement --> <meta-data android:name="com.google.ar.core" android:value="required" /> <!-- Sceneform requirement --> <meta-data android:name="com.google.ar.core.min_apk_version" android:value="200304000" /> </application>
2. Gradle Dependencies
// Module-level build file: app/build.gradle
android {
// ARCore requires Android 7.0 (API 24) or higher at runtime.
compileSdkVersion 33
defaultConfig {
minSdkVersion 24
targetSdkVersion 33
}
// Sceneform 1.17.x is a Java 8 library.
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
}
dependencies {
// ARCore SDK (motion tracking, plane detection, anchors)
implementation 'com.google.ar:core:1.36.0'
// Sceneform 3D rendering framework
// NOTE(review): Sceneform 1.17.1 is no longer actively maintained by Google;
// confirm it is the intended rendering stack before shipping.
implementation 'com.google.ar.sceneform:core:1.17.1'
implementation 'com.google.ar.sceneform:animation:1.17.1'
implementation 'com.google.ar.sceneform:filament-android:1.17.1'
// UI Components
implementation 'androidx.appcompat:appcompat:1.6.1'
implementation 'androidx.constraintlayout:constraintlayout:2.1.4'
// Runtime permission handling
implementation 'com.karumi:dexter:6.2.3'
// Location-based AR helpers
// NOTE(review): ARCore-Location provides GPS-anchored AR, not 3D model
// loading as the original comment claimed -- confirm it is actually needed.
implementation 'com.github.appoly:ARCore-Location:1.2.0'
}
Core ARCore Implementation
1. Base AR Activity
package com.arcore.app;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.util.Log;
import android.widget.Toast;
import androidx.appcompat.app.AppCompatActivity;
import com.google.ar.core.*;
import com.google.ar.core.exceptions.*;
import com.google.ar.sceneform.ArSceneView;
import com.google.ar.sceneform.SceneView;
import com.google.ar.sceneform.Scene;
import com.google.ar.sceneform.ux.ArFragment;
import com.google.ar.sceneform.ux.BaseArFragment;
import java.util.EnumSet;
import java.util.Set;
/**
 * Base activity that owns the ARCore {@link Session} lifecycle and the
 * Sceneform {@link ArSceneView} that renders it. Subclasses get a configured
 * session with horizontal+vertical plane finding, environmental-HDR light
 * estimation, and auto-focus.
 */
public class BaseARActivity extends AppCompatActivity {

    private static final String TAG = "BaseARActivity";

    protected ArSceneView arSceneView;
    protected Scene arScene;
    protected Config arConfig;
    protected Session arSession;
    // True once we have asked ARCore to install/update itself, so the retry
    // on the next pass does not re-prompt the user.
    protected boolean installRequested;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_ar);
        initializeARComponents();
        setupARSession();
    }

    /**
     * Binds the AR scene view from the layout.
     *
     * BUG FIX: the original version also built the {@link Config} here via
     * {@code new Config(arSession)}, but {@code arSession} is not created
     * until {@link #setupARSession()}, so the Config was constructed with a
     * null session and crashed at startup. Config creation now happens after
     * the Session exists.
     */
    protected void initializeARComponents() {
        arSceneView = findViewById(R.id.ar_scene_view);
        arScene = arSceneView.getScene();
    }

    /**
     * Creates and configures the ARCore session, driving the ARCore
     * install/update flow and translating every documented session-creation
     * failure into a user-visible message.
     */
    protected void setupARSession() {
        Exception exception = null;
        String message = null;
        try {
            // Ask ARCore to install/update itself if needed. When an install
            // was requested we return; the flow re-runs on the next launch.
            switch (ArCoreApk.getInstance().requestInstall(this, !installRequested)) {
                case INSTALL_REQUESTED:
                    installRequested = true;
                    return;
                case INSTALLED:
                    break;
            }

            // Create the session FIRST, then build its configuration from it.
            arSession = new Session(this);

            arConfig = new Config(arSession);
            arConfig.setPlaneFindingMode(Config.PlaneFindingMode.HORIZONTAL_AND_VERTICAL);
            arConfig.setLightEstimationMode(Config.LightEstimationMode.ENVIRONMENTAL_HDR);
            arConfig.setFocusMode(Config.FocusMode.AUTO);

            arSession.configure(arConfig);
        } catch (UnavailableArcoreNotInstalledException e) {
            message = "Please install ARCore";
            exception = e;
        } catch (UnavailableUserDeclinedInstallationException e) {
            message = "Please install ARCore";
            exception = e;
        } catch (UnavailableApkTooOldException e) {
            message = "Please update ARCore";
            exception = e;
        } catch (UnavailableSdkTooOldException e) {
            message = "Please update this app";
            exception = e;
        } catch (UnavailableDeviceNotCompatibleException e) {
            message = "This device does not support AR";
            exception = e;
        } catch (Exception e) {
            message = "Failed to create AR session";
            exception = e;
        }

        if (message != null) {
            Log.e(TAG, "Exception creating session", exception);
            Toast.makeText(this, message, Toast.LENGTH_LONG).show();
            return;
        }

        // Hand the session to the Sceneform view for rendering.
        arSceneView.setupSession(arSession);
        // Removed: arSession.setCameraConfig(arSession.getCameraConfig()) --
        // setting the camera config back to its current value is a no-op.
    }

    @Override
    protected void onResume() {
        super.onResume();
        if (arSession != null) {
            try {
                // Resume the session before the view that pulls frames from it.
                arSession.resume();
                arSceneView.resume();
            } catch (CameraNotAvailableException e) {
                Log.e(TAG, "Camera not available", e);
                Toast.makeText(this, "Camera not available", Toast.LENGTH_LONG).show();
                finish();
            }
        }
    }

    @Override
    protected void onPause() {
        super.onPause();
        if (arSession != null) {
            // Pause the view first so it stops requesting frames, then the session.
            arSceneView.pause();
            arSession.pause();
        }
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Destroy the view BEFORE closing the session it renders from, so the
        // renderer never touches a closed session.
        if (arSceneView != null) {
            arSceneView.destroy();
        }
        if (arSession != null) {
            arSession.close();
        }
    }

    /**
     * Returns whether this device supports ARCore.
     * NOTE(review): checkAvailability() can report a transient "checking"
     * state on first call -- confirm callers tolerate a false negative here.
     */
    protected boolean isARSupported() {
        return ArCoreApk.getInstance().checkAvailability(this).isSupported();
    }
}
2. Advanced AR Session Manager
package com.arcore.manager;
import android.content.Context;
import android.util.Log;
import com.google.ar.core.*;
import com.google.ar.core.exceptions.*;
import java.util.concurrent.ArrayBlockingQueue;
public class ARSessionManager {
private static final String TAG = "ARSessionManager";
private Session arSession;
private Config arConfig;
private Context context;
private ARSessionListener sessionListener;
// Thread-safe queue for AR frame processing
private ArrayBlockingQueue<Frame> frameQueue = new ArrayBlockingQueue<>(10);
public ARSessionManager(Context context, ARSessionListener listener) {
this.context = context;
this.sessionListener = listener;
}
public boolean initializeSession() {
try {
// Create AR session
arSession = new Session(context);
// Configure session
arConfig = new Config(arSession);
configureSession(arConfig);
// Apply configuration
arSession.configure(arConfig);
// Set up session listeners
setupSessionListeners();
Log.i(TAG, "AR session initialized successfully");
return true;
} catch (Exception e) {
Log.e(TAG, "Failed to initialize AR session", e);
if (sessionListener != null) {
sessionListener.onSessionError("Failed to initialize AR: " + e.getMessage());
}
return false;
}
}
private void configureSession(Config config) {
// Enable plane detection
config.setPlaneFindingMode(Config.PlaneFindingMode.HORIZONTAL_AND_VERTICAL);
// Enable light estimation
config.setLightEstimationMode(Config.LightEstimationMode.ENVIRONMENTAL_HDR);
// Enable cloud anchors if needed
config.setCloudAnchorMode(Config.CloudAnchorMode.ENABLED);
// Set focus mode
config.setFocusMode(Config.FocusMode.AUTO);
// Enable depth if available
if (arSession.isDepthModeSupported(Config.DepthMode.AUTOMATIC)) {
config.setDepthMode(Config.DepthMode.AUTOMATIC);
}
}
private void setupSessionListeners() {
arSession.setCameraConfig(arSession.getCameraConfig());
// Add frame listener for custom processing
new Thread(this::processFrames).start();
}
public Frame acquireLatestFrame() {
try {
return arSession.update();
} catch (Exception e) {
Log.e(TAG, "Error acquiring frame", e);
return null;
}
}
public Anchor createAnchor(Pose pose) {
if (arSession != null) {
return arSession.createAnchor(pose);
}
return null;
}
public Trackable[] getAllTrackables() {
if (arSession != null) {
return arSession.getAllTrackables(Trackable.class).toArray(new Trackable[0]);
}
return new Trackable[0];
}
public Plane[] getDetectedPlanes() {
if (arSession != null) {
return arSession.getAllTrackables(Plane.class).toArray(new Plane[0]);
}
return new Plane[0];
}
public void resume() {
if (arSession != null) {
try {
arSession.resume();
} catch (CameraNotAvailableException e) {
Log.e(TAG, "Camera not available on resume", e);
if (sessionListener != null) {
sessionListener.onSessionError("Camera not available");
}
}
}
}
public void pause() {
if (arSession != null) {
arSession.pause();
}
}
public void close() {
if (arSession != null) {
arSession.close();
arSession = null;
}
}
public boolean isDepthSupported() {
return arSession != null &&
arSession.isDepthModeSupported(Config.DepthMode.AUTOMATIC);
}
public boolean isEnvironmentalHDREsupported() {
return arSession != null &&
arSession.isLightEstimationModeSupported(Config.LightEstimationMode.ENVIRONMENTAL_HDR);
}
private void processFrames() {
while (!Thread.currentThread().isInterrupted()) {
try {
Frame frame = frameQueue.take();
processARFrame(frame);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
break;
} catch (Exception e) {
Log.e(TAG, "Error processing frame", e);
}
}
}
private void processARFrame(Frame frame) {
// Custom frame processing logic
Camera camera = frame.getCamera();
TrackingState trackingState = camera.getTrackingState();
if (trackingState == TrackingState.TRACKING) {
// Process tracked frame
processTrackedFrame(frame, camera);
}
}
private void processTrackedFrame(Frame frame, Camera camera) {
// Implement custom tracking logic
LightEstimate lightEstimate = frame.getLightEstimate();
if (lightEstimate != null) {
float[] environmentalHDR = lightEstimate.getEnvironmentalHdrAmbientSphericalHarmonics();
float[] mainLightDirection = lightEstimate.getEnvironmentalHdrMainLightDirection();
float mainLightIntensity = lightEstimate.getEnvironmentalHdrMainLightIntensity();
// Update lighting in scene
updateSceneLighting(environmentalHDR, mainLightDirection, mainLightIntensity);
}
}
private void updateSceneLighting(float[] sphericalHarmonics,
float[] mainLightDirection,
float mainLightIntensity) {
// Update scene lighting based on environmental HDR data
if (sessionListener != null) {
sessionListener.onLightingUpdated(sphericalHarmonics, mainLightDirection, mainLightIntensity);
}
}
public interface ARSessionListener {
void onSessionError(String errorMessage);
void onPlaneDetected(Plane plane);
void onAnchorCreated(Anchor anchor);
void onLightingUpdated(float[] sphericalHarmonics, float[] mainLightDirection, float mainLightIntensity);
}
}
3D Object Rendering and Management
1. 3D Object Manager
package com.arcore.renderer;
import android.content.Context;
import android.util.Log;
import com.google.ar.core.Anchor;
import com.google.ar.core.Pose;
import com.google.ar.sceneform.AnchorNode;
import com.google.ar.sceneform.Node;
import com.google.ar.sceneform.NodeParent;
import com.google.ar.sceneform.math.Vector3;
import com.google.ar.sceneform.rendering.ModelRenderable;
import com.google.ar.sceneform.rendering.Renderable;
import com.google.ar.sceneform.rendering.Texture;
import com.google.ar.sceneform.rendering.Material;
import com.google.ar.sceneform.rendering.MaterialFactory;
import com.google.ar.sceneform.rendering.Color;
import java.util.concurrent.CompletableFuture;
import java.util.HashMap;
import java.util.Map;
/**
 * Loads, caches, and places 3D renderables and materials.
 *
 * Fixes over the previous revision:
 * - Loaded models are now stored in {@code objectCache} (previously declared
 *   but never used, so every loadModel() re-parsed the asset).
 * - rotateObject() and animateObject() were empty placeholders; both are
 *   implemented (Euler-angle rotation, ValueAnimator-driven lerp).
 */
public class Object3DManager {

    private static final String TAG = "Object3DManager";

    private final Context context;
    // Keyed by String.valueOf(resource id) for models; materials by name.
    private final Map<String, Renderable> objectCache;
    private final Map<String, Material> materialCache;

    public Object3DManager(Context context) {
        this.context = context;
        this.objectCache = new HashMap<>();
        this.materialCache = new HashMap<>();
        initializeDefaultMaterials();
    }

    /** Pre-creates a small palette of solid-color materials (async). */
    private void initializeDefaultMaterials() {
        createMaterial("default_red", Color.red());
        createMaterial("default_blue", Color.blue());
        createMaterial("default_green", Color.green());
        createMaterial("default_white", Color.white());
    }

    /**
     * Loads a model renderable from a raw resource, serving repeat requests
     * from the cache. Completes with null on failure (logged).
     */
    public CompletableFuture<ModelRenderable> loadModel(int modelResourceId) {
        String cacheKey = String.valueOf(modelResourceId);
        Renderable cached = objectCache.get(cacheKey);
        if (cached instanceof ModelRenderable) {
            return CompletableFuture.completedFuture((ModelRenderable) cached);
        }
        return ModelRenderable.builder()
            .setSource(context, modelResourceId)
            .build()
            .thenApply(renderable -> {
                objectCache.put(cacheKey, renderable);
                Log.i(TAG, "3D model loaded successfully");
                return renderable;
            })
            .exceptionally(throwable -> {
                Log.e(TAG, "Failed to load 3D model", throwable);
                return null;
            });
    }

    /**
     * Creates an opaque colored material and caches it under the given name.
     * Completes with null on failure (logged).
     */
    public CompletableFuture<Material> createMaterial(String materialName, Color color) {
        return MaterialFactory.makeOpaqueWithColor(context, color)
            .thenApply(material -> {
                materialCache.put(materialName, material);
                return material;
            })
            .exceptionally(throwable -> {
                Log.e(TAG, "Failed to create material: " + materialName, throwable);
                return null;
            });
    }

    /**
     * Wraps the renderable in a node attached to a new AnchorNode.
     * NOTE: the returned AnchorNode has no parent; the caller must attach it
     * to the scene for it to render.
     */
    public AnchorNode placeObjectAtAnchor(Anchor anchor, Renderable renderable) {
        AnchorNode anchorNode = new AnchorNode(anchor);
        Node objectNode = new Node();
        objectNode.setRenderable(renderable);
        objectNode.setParent(anchorNode);
        return anchorNode;
    }

    /**
     * Places a renderable at an arbitrary pose (no ARCore anchor backing it),
     * optionally parenting it to the given scene.
     */
    public AnchorNode placeObjectAtPose(Pose pose, Renderable renderable, NodeParent scene) {
        AnchorNode anchorNode = new AnchorNode();
        anchorNode.setWorldPosition(new Vector3(
            pose.tx(), pose.ty(), pose.tz()
        ));
        anchorNode.setWorldRotation(pose.getRotationQuaternion());
        Node objectNode = new Node();
        objectNode.setRenderable(renderable);
        objectNode.setParent(anchorNode);
        if (scene != null) {
            anchorNode.setParent(scene);
        }
        return anchorNode;
    }

    /** Uniformly scales the node in world space. */
    public void scaleObject(Node objectNode, float scale) {
        objectNode.setWorldScale(new Vector3(scale, scale, scale));
    }

    /**
     * Sets the node's world rotation from Euler angles in degrees.
     * Sceneform's Quaternion(Vector3) constructor interprets the vector as
     * Euler angles.
     */
    public void rotateObject(Node objectNode, float degreesX, float degreesY, float degreesZ) {
        objectNode.setWorldRotation(
            new com.google.ar.sceneform.math.Quaternion(
                new Vector3(degreesX, degreesY, degreesZ)));
    }

    /**
     * Linearly animates the node from start to end over {@code duration} ms.
     * Previously this created a ValueAnimator but never configured or started
     * it, so nothing moved.
     */
    public void animateObject(Node objectNode, Vector3 startPosition, Vector3 endPosition,
                              long duration) {
        objectNode.setWorldPosition(startPosition);
        android.animation.ValueAnimator animator =
            android.animation.ValueAnimator.ofFloat(0f, 1f);
        animator.setDuration(duration);
        animator.addUpdateListener(animation -> {
            float t = (float) animation.getAnimatedValue();
            objectNode.setWorldPosition(Vector3.lerp(startPosition, endPosition, t));
        });
        animator.start();
    }

    /**
     * Applies a cached material to the node's renderable.
     * NOTE(review): Sceneform renderables can be shared between nodes;
     * mutating the material affects every node using this renderable --
     * consider renderable.makeCopy() if per-node materials are wanted.
     */
    public void applyMaterialToObject(Node objectNode, String materialName) {
        Material material = materialCache.get(materialName);
        if (material != null && objectNode.getRenderable() != null) {
            objectNode.getRenderable().setMaterial(material);
        }
    }

    /** Drops all cached models and materials. */
    public void cleanup() {
        objectCache.clear();
        materialCache.clear();
    }
}
2. Interactive AR Object
package com.arcore.objects;
import android.view.MotionEvent;
import com.google.ar.sceneform.HitTestResult;
import com.google.ar.sceneform.Node;
import com.google.ar.sceneform.math.Quaternion;
import com.google.ar.sceneform.math.Vector3;
import com.google.ar.sceneform.rendering.Renderable;
/**
 * A scene node the user can tap, drag, rotate, and scale. Interaction
 * capabilities are individually toggleable; events are reported through
 * {@link InteractionListener}.
 */
public class InteractiveObject extends Node {

    /** Callbacks for user interaction with this node. */
    public interface InteractionListener {
        void onObjectTapped(InteractiveObject object);
        void onObjectDragged(InteractiveObject object, Vector3 newPosition);
        void onObjectRotated(InteractiveObject object, Quaternion newRotation);
    }

    private InteractionListener interactionListener;
    private boolean isDraggable = true;
    private boolean isRotatable = true;
    private boolean isScalable = true;
    // Captured lazily on the first scale() call so resetScale() can restore it.
    private Vector3 originalScale;

    public InteractiveObject() {
        super();
        setOnTapListener((hitResult, motionEvent) -> {
            if (interactionListener != null) {
                interactionListener.onObjectTapped(this);
            }
        });
    }

    @Override
    public boolean onTouchEvent(HitTestResult hitTestResult, MotionEvent motionEvent) {
        if (motionEvent.getAction() == MotionEvent.ACTION_MOVE) {
            handleDrag(motionEvent);
            return true;
        }
        return super.onTouchEvent(hitTestResult, motionEvent);
    }

    /**
     * Reports a drag gesture. This is a simplified placeholder: the actual
     * new position should come from hit-testing the screen coordinates; here
     * the current world position is reported unchanged.
     */
    private void handleDrag(MotionEvent motionEvent) {
        if (!isDraggable || getParent() == null) {
            return;
        }
        if (interactionListener != null) {
            interactionListener.onObjectDragged(this, getWorldPosition());
        }
    }

    public void setDraggable(boolean draggable) {
        this.isDraggable = draggable;
    }

    public void setRotatable(boolean rotatable) {
        this.isRotatable = rotatable;
    }

    public void setScalable(boolean scalable) {
        this.isScalable = scalable;
    }

    public void setInteractionListener(InteractionListener listener) {
        this.interactionListener = listener;
    }

    /** Uniformly scales relative to the scale captured on the first call. */
    public void scale(float scaleFactor) {
        if (isScalable) {
            if (originalScale == null) {
                originalScale = getWorldScale();
            }
            Vector3 newScale = new Vector3(
                originalScale.x * scaleFactor,
                originalScale.y * scaleFactor,
                originalScale.z * scaleFactor
            );
            setWorldScale(newScale);
        }
    }

    /** Restores the scale captured by the first scale() call, if any. */
    public void resetScale() {
        if (originalScale != null) {
            setWorldScale(originalScale);
        }
    }

    /**
     * Rotates the node by the given Euler angles (degrees).
     *
     * BUG FIX: the previous code passed the degree values as an
     * (unnormalized) axis to Quaternion.axisAngle() with a fixed 1-degree
     * angle, which did not rotate by the requested amounts. Sceneform's
     * Quaternion(Vector3) constructor builds a rotation from Euler angles.
     */
    public void rotate(float degreesX, float degreesY, float degreesZ) {
        if (isRotatable) {
            Quaternion currentRotation = getWorldRotation();
            Quaternion additionalRotation =
                new Quaternion(new Vector3(degreesX, degreesY, degreesZ));
            setWorldRotation(Quaternion.multiply(currentRotation, additionalRotation));
            if (interactionListener != null) {
                interactionListener.onObjectRotated(this, getWorldRotation());
            }
        }
    }
}
Plane Detection and Visualization
1. Plane Visualization Manager
package com.arcore.planes;
import android.content.Context;
import android.util.Log;
import com.google.ar.core.Plane;
import com.google.ar.core.Pose;
import com.google.ar.core.TrackingState;
import com.google.ar.sceneform.AnchorNode;
import com.google.ar.sceneform.Node;
import com.google.ar.sceneform.math.Vector3;
import com.google.ar.sceneform.rendering.Material;
import com.google.ar.sceneform.rendering.MaterialFactory;
import com.google.ar.sceneform.rendering.Color;
import com.google.ar.sceneform.rendering.ShapeFactory;
import com.google.ar.sceneform.rendering.ModelRenderable;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
/**
 * Renders translucent overlays on detected ARCore planes (green for
 * horizontal, blue for vertical).
 *
 * Fixes over the previous revision:
 * - Plane.getExtentXZ() does not exist in the ARCore API; replaced with
 *   getExtentX()/getExtentZ().
 * - The map now stores the per-plane visualization Node instead of the
 *   caller's shared AnchorNode, so removing one plane's visual no longer
 *   detaches unrelated content.
 * - Materials are created asynchronously; a null-material guard avoids
 *   creating invisible geometry before creation completes.
 */
public class PlaneVisualizationManager {

    private static final String TAG = "PlaneVisualizationManager";

    private final Context context;
    // One visualization node per tracked plane.
    private final Map<Plane, Node> planeNodes;
    private Material horizontalPlaneMaterial;
    private Material verticalPlaneMaterial;
    private boolean showPlanes = true;

    public PlaneVisualizationManager(Context context) {
        this.context = context;
        this.planeNodes = new HashMap<>();
        initializeMaterials();
    }

    /** Kicks off async creation of the translucent plane materials. */
    private void initializeMaterials() {
        MaterialFactory.makeTransparentWithColor(context, new Color(0, 1, 0, 0.3f))
            .thenAccept(material -> horizontalPlaneMaterial = material)
            .exceptionally(throwable -> {
                Log.e(TAG, "Failed to create horizontal plane material", throwable);
                return null;
            });
        MaterialFactory.makeTransparentWithColor(context, new Color(0, 0, 1, 0.3f))
            .thenAccept(material -> verticalPlaneMaterial = material)
            .exceptionally(throwable -> {
                Log.e(TAG, "Failed to create vertical plane material", throwable);
                return null;
            });
    }

    /**
     * Creates or refreshes the visualization for a plane. Planes that are no
     * longer TRACKING have their visuals removed.
     */
    public void updatePlaneVisualization(Plane plane, AnchorNode anchorNode) {
        if (!showPlanes) return;
        if (plane.getTrackingState() != TrackingState.TRACKING) {
            removePlaneVisualization(plane);
            return;
        }
        if (!planeNodes.containsKey(plane)) {
            createPlaneVisualization(plane, anchorNode);
        } else {
            updateExistingPlane(plane);
        }
    }

    /** Builds a thin translucent slab matching the plane's current extent. */
    private void createPlaneVisualization(Plane plane, AnchorNode anchorNode) {
        Material material = getMaterialForPlane(plane);
        if (material == null) {
            // Async material creation has not completed yet; try again on the
            // next update pass rather than rendering invisible geometry.
            return;
        }

        Node planeNode = new Node();
        planeNode.setParent(anchorNode);

        Pose centerPose = plane.getCenterPose();
        float extentX = plane.getExtentX();
        float extentZ = plane.getExtentZ();

        ModelRenderable planeRenderable = ShapeFactory.makeCube(
            new Vector3(extentX, 0.01f, extentZ),
            new Vector3(0, 0, 0),
            material
        );

        planeNode.setRenderable(planeRenderable);
        planeNode.setWorldPosition(new Vector3(
            centerPose.tx(), centerPose.ty(), centerPose.tz()
        ));

        planeNodes.put(plane, planeNode);
    }

    /** Re-centers an existing visualization on the plane's current pose. */
    private void updateExistingPlane(Plane plane) {
        Node planeNode = planeNodes.get(plane);
        if (planeNode != null) {
            Pose centerPose = plane.getCenterPose();
            planeNode.setWorldPosition(new Vector3(
                centerPose.tx(), centerPose.ty(), centerPose.tz()
            ));
            updatePlaneSize(plane, planeNode);
        }
    }

    /** Placeholder: regenerate the slab when the plane's extent grows. */
    private void updatePlaneSize(Plane plane, Node planeNode) {
        float extentX = plane.getExtentX();
        float extentZ = plane.getExtentZ();
        // Implementation would rebuild or rescale the renderable using the
        // new extents.
    }

    /** Picks the material by plane orientation; may be null while loading. */
    private Material getMaterialForPlane(Plane plane) {
        switch (plane.getType()) {
            case HORIZONTAL_DOWNWARD_FACING:
            case HORIZONTAL_UPWARD_FACING:
                return horizontalPlaneMaterial;
            case VERTICAL:
                return verticalPlaneMaterial;
            default:
                return horizontalPlaneMaterial;
        }
    }

    /** Detaches and forgets the visualization for one plane. */
    public void removePlaneVisualization(Plane plane) {
        Node planeNode = planeNodes.remove(plane);
        if (planeNode != null) {
            planeNode.setParent(null);
        }
    }

    /** Shows or hides all current plane visuals. */
    public void setPlaneVisibility(boolean visible) {
        this.showPlanes = visible;
        for (Node node : planeNodes.values()) {
            node.setEnabled(visible);
        }
    }

    /** Detaches every visualization node. */
    public void cleanup() {
        for (Node node : planeNodes.values()) {
            node.setParent(null);
        }
        planeNodes.clear();
    }
}
Advanced AR Features
1. Image Recognition and Tracking
package com.arcore.tracking;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.util.Log;
import com.google.ar.core.AugmentedImage;
import com.google.ar.core.AugmentedImageDatabase;
import com.google.ar.core.Config;
import com.google.ar.core.Session;
import com.google.ar.core.TrackingState;
import com.google.ar.sceneform.AnchorNode;
import com.google.ar.sceneform.Node;
import com.google.ar.sceneform.math.Vector3;
import com.google.ar.sceneform.rendering.ModelRenderable;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
/**
 * Manages ARCore augmented-image tracking: registers reference images and
 * attaches a 3D model to each image while it is tracked.
 *
 * Fixes over the previous revision:
 * - addTrackedImage caught IOException around calls that never throw it,
 *   which does not compile in Java; the stream is now opened with
 *   try-with-resources (whose close() can throw IOException) and decode
 *   failures are handled explicitly.
 * - The InputStream was never closed (resource leak).
 * - The model offset above the image now uses setLocalPosition so it is
 *   relative to the image anchor, not absolute world coordinates.
 */
public class ImageTrackingManager {

    private static final String TAG = "ImageTrackingManager";

    private final Context context;
    private final Session arSession;
    private final AugmentedImageDatabase imageDatabase;
    private final Map<String, ModelRenderable> trackedImageModels;
    private final Map<AugmentedImage, AnchorNode> trackedImageNodes;

    public ImageTrackingManager(Context context, Session session) {
        this.context = context;
        this.arSession = session;
        this.trackedImageModels = new HashMap<>();
        this.trackedImageNodes = new HashMap<>();
        this.imageDatabase = new AugmentedImageDatabase(arSession);
    }

    /**
     * Registers a raw-resource bitmap as a trackable image and associates a
     * model to display on it.
     *
     * @return true if the image was added to the database.
     */
    public boolean addTrackedImage(String imageName, int imageResourceId, ModelRenderable model) {
        try (InputStream is = context.getResources().openRawResource(imageResourceId)) {
            Bitmap bitmap = BitmapFactory.decodeStream(is);
            if (bitmap == null) {
                Log.e(TAG, "Could not decode bitmap for tracked image: " + imageName);
                return false;
            }
            int index = imageDatabase.addImage(imageName, bitmap);
            trackedImageModels.put(imageName, model);
            Log.i(TAG, "Added tracked image: " + imageName + " at index: " + index);
            return true;
        } catch (IOException | RuntimeException e) {
            // IOException from close(); RuntimeException covers missing
            // resources and images rejected by the database.
            Log.e(TAG, "Failed to add tracked image: " + imageName, e);
            return false;
        }
    }

    /**
     * Points the config at our image database. Plane finding is disabled to
     * save work when only marker tracking is needed.
     */
    public void configureSessionForImageTracking(Config config) {
        config.setAugmentedImageDatabase(imageDatabase);
        config.setPlaneFindingMode(Config.PlaneFindingMode.DISABLED);
    }

    /** Processes per-frame tracking-state changes for all known images. */
    public void updateTrackedImages(AugmentedImage[] updatedImages) {
        for (AugmentedImage image : updatedImages) {
            switch (image.getTrackingState()) {
                case TRACKING:
                    if (!trackedImageNodes.containsKey(image)) {
                        onImageTracked(image);
                    }
                    break;
                case STOPPED:
                    onImageTrackingStopped(image);
                    break;
                case PAUSED:
                    // Keep the node; the image may be tracked again shortly.
                    break;
            }
        }
    }

    /** Anchors the associated model to a newly tracked image. */
    private void onImageTracked(AugmentedImage image) {
        String imageName = image.getName();
        ModelRenderable model = trackedImageModels.get(imageName);
        if (model != null) {
            AnchorNode anchorNode = new AnchorNode(image.createAnchor(image.getCenterPose()));

            Node objectNode = new Node();
            objectNode.setRenderable(model);
            objectNode.setParent(anchorNode);
            // Lift the object above the image, relative to the anchor.
            objectNode.setLocalPosition(new Vector3(0, image.getExtentX() * 0.5f, 0));

            trackedImageNodes.put(image, anchorNode);
            Log.i(TAG, "Started tracking image: " + imageName);
        }
    }

    /** Detaches the model for an image whose tracking has stopped. */
    private void onImageTrackingStopped(AugmentedImage image) {
        AnchorNode anchorNode = trackedImageNodes.remove(image);
        if (anchorNode != null) {
            anchorNode.setParent(null);
            Log.i(TAG, "Stopped tracking image: " + image.getName());
        }
    }

    /** Detaches every node and forgets all registered models. */
    public void cleanup() {
        for (AnchorNode node : trackedImageNodes.values()) {
            node.setParent(null);
        }
        trackedImageNodes.clear();
        trackedImageModels.clear();
    }
}
2. Cloud Anchors for Multi-user AR
package com.arcore.cloud;
import android.util.Log;
import com.google.ar.core.Anchor;
import com.google.ar.core.Anchor.CloudAnchorState;
import com.google.ar.core.Session;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
/**
 * Hosts and resolves ARCore cloud anchors, exposing each operation as a
 * {@link CompletableFuture}.
 *
 * Fixes over the previous revision:
 * - getCloudAnchorId() returns an empty string until hosting reaches
 *   SUCCESS, so pending futures keyed by id were unreachable when the
 *   operation completed. Pending futures are now keyed by the Anchor itself.
 * - The bare new Handler() requires the calling thread to have a Looper and
 *   is deprecated; the main looper is now used explicitly.
 * - cleanup() fails pending futures instead of leaving callers hanging.
 */
public class CloudAnchorManager {

    private static final String TAG = "CloudAnchorManager";

    private final Session arSession;
    // Successfully hosted anchors, keyed by their (now valid) cloud anchor id.
    private final Map<String, Anchor> hostedAnchors;
    // Pending operations keyed by the Anchor object, NOT by its cloud id.
    private final Map<Anchor, CompletableFuture<String>> hostingFutures;
    private final Map<Anchor, CompletableFuture<Anchor>> resolvingFutures;

    public CloudAnchorManager(Session session) {
        this.arSession = session;
        this.hostedAnchors = new HashMap<>();
        this.hostingFutures = new HashMap<>();
        this.resolvingFutures = new HashMap<>();
    }

    /**
     * Begins hosting a local anchor in the ARCore cloud.
     *
     * @param ttlDays lifetime of the hosted anchor in days
     * @return a future that completes with the cloud anchor id on success
     */
    public CompletableFuture<String> hostCloudAnchor(Anchor localAnchor, int ttlDays) {
        CompletableFuture<String> future = new CompletableFuture<>();
        Anchor cloudAnchor = arSession.hostCloudAnchorWithTtl(localAnchor, ttlDays);
        hostingFutures.put(cloudAnchor, future);
        checkCloudAnchorState(cloudAnchor);
        return future;
    }

    /**
     * Begins resolving a previously hosted cloud anchor by id.
     *
     * @return a future that completes with the resolved Anchor on success
     */
    public CompletableFuture<Anchor> resolveCloudAnchor(String cloudAnchorId) {
        CompletableFuture<Anchor> future = new CompletableFuture<>();
        Anchor cloudAnchor = arSession.resolveCloudAnchor(cloudAnchorId);
        resolvingFutures.put(cloudAnchor, future);
        checkCloudAnchorState(cloudAnchor);
        return future;
    }

    /** Polls the anchor state, rescheduling while the task is in progress. */
    private void checkCloudAnchorState(Anchor cloudAnchor) {
        CloudAnchorState state = cloudAnchor.getCloudAnchorState();
        switch (state) {
            case SUCCESS:
                onCloudAnchorSuccess(cloudAnchor);
                break;
            case ERROR_INTERNAL:
            case ERROR_NOT_AUTHORIZED:
            case ERROR_SERVICE_UNAVAILABLE:
                onCloudAnchorError(cloudAnchor, state);
                break;
            case TASK_IN_PROGRESS:
                scheduleStateCheck(cloudAnchor);
                break;
        }
    }

    /** Completes whichever pending future (host or resolve) owns the anchor. */
    private void onCloudAnchorSuccess(Anchor cloudAnchor) {
        // The cloud anchor id is only guaranteed valid once the state is SUCCESS.
        String anchorId = cloudAnchor.getCloudAnchorId();

        CompletableFuture<String> hostingFuture = hostingFutures.remove(cloudAnchor);
        if (hostingFuture != null) {
            hostedAnchors.put(anchorId, cloudAnchor);
            hostingFuture.complete(anchorId);
        }
        CompletableFuture<Anchor> resolvingFuture = resolvingFutures.remove(cloudAnchor);
        if (resolvingFuture != null) {
            resolvingFuture.complete(cloudAnchor);
        }
        Log.i(TAG, "Cloud anchor operation successful: " + anchorId);
    }

    /** Fails whichever pending future owns the anchor. */
    private void onCloudAnchorError(Anchor cloudAnchor, CloudAnchorState errorState) {
        String errorMessage = "Cloud anchor error: " + errorState.toString();

        CompletableFuture<String> hostingFuture = hostingFutures.remove(cloudAnchor);
        if (hostingFuture != null) {
            hostingFuture.completeExceptionally(new RuntimeException(errorMessage));
        }
        CompletableFuture<Anchor> resolvingFuture = resolvingFutures.remove(cloudAnchor);
        if (resolvingFuture != null) {
            resolvingFuture.completeExceptionally(new RuntimeException(errorMessage));
        }
        Log.e(TAG, errorMessage);
    }

    /** Re-checks the anchor state after a short delay on the main thread. */
    private void scheduleStateCheck(Anchor cloudAnchor) {
        new android.os.Handler(android.os.Looper.getMainLooper()).postDelayed(() -> {
            checkCloudAnchorState(cloudAnchor);
        }, 500);
    }

    /** Detaches hosted anchors and fails any still-pending operations. */
    public void cleanup() {
        for (Anchor anchor : hostedAnchors.values()) {
            anchor.detach();
        }
        hostedAnchors.clear();
        RuntimeException closed = new RuntimeException("CloudAnchorManager closed");
        for (CompletableFuture<String> future : hostingFutures.values()) {
            future.completeExceptionally(closed);
        }
        hostingFutures.clear();
        for (CompletableFuture<Anchor> future : resolvingFutures.values()) {
            future.completeExceptionally(closed);
        }
        resolvingFutures.clear();
    }
}
Complete AR Application Example
1. Main AR Activity
package com.arcore.app;
import android.os.Bundle;
import android.widget.Button;
import android.widget.Toast;
import androidx.appcompat.app.AppCompatActivity;
import com.google.ar.core.*;
import com.google.ar.sceneform.ArSceneView;
import com.google.ar.sceneform.Scene;
import com.google.ar.sceneform.ux.ArFragment;
import com.arcore.manager.ARSessionManager;
import com.arcore.renderer.Object3DManager;
import com.arcore.planes.PlaneVisualizationManager;
import com.arcore.tracking.ImageTrackingManager;
public class MainARActivity extends AppCompatActivity
implements ARSessionManager.ARSessionListener {
private ArSceneView arSceneView;
private ArFragment arFragment;
private Button placeObjectButton;
private Button clearButton;
private ARSessionManager sessionManager;
private Object3DManager objectManager;
private PlaneVisualizationManager planeManager;
private ImageTrackingManager imageTrackingManager;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main_ar);
initializeUI();
initializeARComponents();
}
private void initializeUI() {
arFragment = (ArFragment) getSupportFragmentManager().findFragmentById(R.id.ar_fragment);
arSceneView = arFragment.getArSceneView();
placeObjectButton = findViewById(R.id.btn_place_object);
clearButton = findViewById(R.id.btn_clear);
placeObjectButton.setOnClickListener(v -> placeSampleObject());
clearButton.setOnClickListener(v -> clearScene());
}
private void initializeARComponents() {
// Initialize session manager
sessionManager = new ARSessionManager(this, this);
if (!sessionManager.initializeSession()) {
Toast.makeText(this, "ARCore not supported", Toast.LENGTH_LONG).show();
finish();
return;
}
// Initialize other managers
objectManager = new Object3DManager(this);
planeManager = new PlaneVisualizationManager(this);
// Set up AR scene
setupARScene();
}
private void setupARScene() {
Scene scene = arSceneView.getScene();
// Add plane detection
arFragment.setOnTapArPlaneListener((hitResult, plane, motionEvent) -> {
onPlaneTapped(hitResult, plane);
});
// Set up image tracking if needed
imageTrackingManager = new ImageTrackingManager(this, sessionManager.getSession());
// Add tracked images here
}
private void onPlaneTapped(HitResult hitResult, Plane plane) {
// Create anchor at tapped position
Anchor anchor = hitResult.createAnchor();
placeObjectAtAnchor(anchor);
}
private void placeSampleObject() {
// Place object at screen center
Frame frame = sessionManager.acquireLatestFrame();
if (frame != null && frame.getCamera().getTrackingState() == TrackingState.TRACKING) {
// Implementation for placing object at screen center
}
}
private void placeObjectAtAnchor(Anchor anchor) {
// Load and place a 3D object
objectManager.loadModel(R.raw.sample_object)
.thenAccept(renderable -> {
objectManager.placeObjectAtAnchor(anchor, renderable);
Toast.makeText(this, "Object placed", Toast.LENGTH_SHORT).show();
})
.exceptionally(throwable -> {
Toast.makeText(this, "Failed to load object", Toast.LENGTH_SHORT).show();
return null;
});
}
private void clearScene() {
// Clear all placed objects
Scene scene = arSceneView.getScene();
// Implementation to clear scene nodes
}
// ARSessionManager callbacks
@Override
public void onSessionError(String errorMessage) {
runOnUiThread(() -> Toast.makeText(this, errorMessage, Toast.LENGTH_LONG).show());
}
@Override
public void onPlaneDetected(Plane plane) {
// Handle plane detection
}
@Override
public void onAnchorCreated(Anchor anchor) {
// Handle anchor creation
}
@Override
public void onLightingUpdated(float[] sphericalHarmonics, float[] mainLightDirection,
float mainLightIntensity) {
// Update scene lighting
}
@Override
protected void onResume() {
super.onResume();
sessionManager.resume();
}
@Override
protected void onPause() {
super.onPause();
sessionManager.pause();
}
@Override
protected void onDestroy() {
super.onDestroy();
sessionManager.close();
objectManager.cleanup();
planeManager.cleanup();
if (imageTrackingManager != null) {
imageTrackingManager.cleanup();
}
}
}
Performance Optimization
1. AR Performance Monitor
package com.arcore.performance;
import android.util.Log;
import com.google.ar.core.Frame;
import com.google.ar.core.Session;
import java.util.concurrent.atomic.AtomicLong;
/**
 * Lightweight FPS/frame-count monitor for AR rendering.
 *
 * Thread-safety: recordFrame() may be called from the render thread while
 * start/stop/getStats() run elsewhere; the shared flags are volatile and the
 * counters atomic.
 *
 * Fixes over the previous revision:
 * - {@code monitoring} and {@code startTime} were plain fields read across
 *   threads; now volatile.
 * - getStats() before startMonitoring() previously computed a duration from
 *   epoch 0; it now returns zeroed stats.
 */
public class ARPerformanceMonitor {

    private static final String TAG = "ARPerformanceMonitor";

    private final AtomicLong frameCount = new AtomicLong();
    // Reserved for per-frame processing-time accumulation (not yet recorded).
    private final AtomicLong totalFrameTime = new AtomicLong();
    private volatile long startTime;
    private volatile boolean monitoring = false;

    /** Resets counters and begins a monitoring window. */
    public void startMonitoring() {
        frameCount.set(0);
        totalFrameTime.set(0);
        startTime = System.currentTimeMillis();
        monitoring = true;
    }

    /** Ends the monitoring window; counters are preserved for getStats(). */
    public void stopMonitoring() {
        monitoring = false;
    }

    /** Counts one rendered frame; no-op while monitoring is off. */
    public void recordFrame(Frame frame) {
        if (!monitoring) return;
        frameCount.incrementAndGet();
        // Frame timestamps are available for future processing-time tracking.
        long frameTimestamp = frame.getTimestamp();
    }

    /**
     * Returns FPS and frame totals for the current (or last) window.
     * Returns zeroed stats if monitoring was never started.
     */
    public PerformanceStats getStats() {
        if (startTime == 0) {
            return new PerformanceStats(0, 0, 0);
        }
        long currentTime = System.currentTimeMillis();
        long monitoringDuration = currentTime - startTime;
        long frames = frameCount.get();
        double fps = monitoringDuration > 0 ? (frames * 1000.0) / monitoringDuration : 0;
        return new PerformanceStats(fps, frames, monitoringDuration);
    }

    /** Immutable snapshot of monitoring results. */
    public static class PerformanceStats {
        public final double framesPerSecond;
        public final long totalFrames;
        public final long monitoringDurationMs;

        public PerformanceStats(double fps, long totalFrames, long duration) {
            this.framesPerSecond = fps;
            this.totalFrames = totalFrames;
            this.monitoringDurationMs = duration;
        }
    }
}
Conclusion
ARCore with Java enables powerful augmented reality experiences:
Key ARCore Features:
- Motion Tracking - Device position and orientation
- Environmental Understanding - Plane detection and lighting
- Light Estimation - Real-world lighting matching
- Cloud Anchors - Multi-user shared experiences
- Image Tracking - Marker-based AR
Best Practices:
- Optimize 3D models for mobile rendering
- Implement proper session management
- Handle ARCore availability checks
- Use appropriate plane detection modes
- Implement fallback for unsupported features
Use Cases:
- Product visualization - Furniture, retail
- Education - Interactive learning experiences
- Gaming - Immersive AR games
- Navigation - Indoor wayfinding
- Maintenance - Industrial AR instructions
ARCore's robust feature set combined with Java's development ecosystem creates compelling AR applications that bridge digital and physical worlds seamlessly.