This guide walks through building a complete desktop face detection application in Java, using OpenCV for computer vision, JavaFX for the user interface, and real-time webcam capture.
Project Setup and Dependencies
Step 1: Maven Configuration
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<!-- Required coordinates: a POM without modelVersion/groupId/artifactId/version is rejected by Maven. -->
<modelVersion>4.0.0</modelVersion>
<groupId>com.example</groupId>
<artifactId>face-detection</artifactId>
<version>1.0-SNAPSHOT</version>
<packaging>jar</packaging>
<properties>
<maven.compiler.source>17</maven.compiler.source>
<maven.compiler.target>17</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<javafx.version>21</javafx.version>
<!-- NOTE(review): org.openpnp:opencv releases carry a packaging suffix (e.g. 4.8.1-0);
     verify that this exact coordinate exists on Maven Central before building. -->
<opencv.version>4.8.0</opencv.version>
</properties>
<dependencies>
<!-- JavaFX UI toolkit (controls, FXML loader, Swing interop used for image saving) -->
<dependency>
<groupId>org.openjfx</groupId>
<artifactId>javafx-controls</artifactId>
<version>${javafx.version}</version>
</dependency>
<dependency>
<groupId>org.openjfx</groupId>
<artifactId>javafx-fxml</artifactId>
<version>${javafx.version}</version>
</dependency>
<dependency>
<groupId>org.openjfx</groupId>
<artifactId>javafx-swing</artifactId>
<version>${javafx.version}</version>
</dependency>
<!-- OpenCV with bundled native libraries (loaded via nu.pattern.OpenCV.loadLocally()) -->
<dependency>
<groupId>org.openpnp</groupId>
<artifactId>opencv</artifactId>
<version>${opencv.version}</version>
</dependency>
<!-- TwelveMonkeys: broader ImageIO format support -->
<dependency>
<groupId>com.twelvemonkeys.imageio</groupId>
<artifactId>imageio-core</artifactId>
<version>3.9.4</version>
</dependency>
<dependency>
<groupId>com.twelvemonkeys.imageio</groupId>
<artifactId>imageio-jpeg</artifactId>
<version>3.9.4</version>
</dependency>
<!-- JSON (de)serialization utilities -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.15.2</version>
</dependency>
<!-- Logging: SLF4J API with Logback backend -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>2.0.7</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.4.8</version>
</dependency>
</dependencies>
<build>
<plugins>
<!-- Enables `mvn javafx:run` with the module path set up automatically -->
<plugin>
<groupId>org.openjfx</groupId>
<artifactId>javafx-maven-plugin</artifactId>
<version>0.0.8</version>
<configuration>
<mainClass>com.example.facedetection.MainApp</mainClass>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-resources-plugin</artifactId>
<version>3.3.1</version>
<configuration>
<encoding>UTF-8</encoding>
</configuration>
</plugin>
</plugins>
</build>
</project>
Core Face Detection Engine
Step 2: OpenCV Face Detection Service
package com.example.facedetection.core;
import org.opencv.core.*;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;
import org.opencv.videoio.VideoCapture;
import org.opencv.videoio.Videoio;
import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
/**
 * Singleton service wrapping OpenCV Haar-cascade detection of faces, eyes and smiles.
 *
 * <p>Thread-safety: {@link #getInstance()} is synchronized; the detection methods
 * themselves are not synchronized — callers that share the instance across threads
 * must serialize calls (the CascadeClassifier instances are shared mutable state).
 */
public class FaceDetectionService {

    private CascadeClassifier faceClassifier;
    private CascadeClassifier eyeClassifier;
    private CascadeClassifier smileClassifier;

    private static FaceDetectionService instance;

    /** Returns the lazily created singleton; synchronized to make creation race-free. */
    public static synchronized FaceDetectionService getInstance() {
        if (instance == null) {
            instance = new FaceDetectionService();
        }
        return instance;
    }

    private FaceDetectionService() {
        initializeOpenCV();
        loadClassifiers();
    }

    /** Loads the OpenCV native library bundled with the openpnp artifact; fails fast otherwise. */
    private void initializeOpenCV() {
        try {
            nu.pattern.OpenCV.loadLocally();
            System.out.println("OpenCV loaded successfully: " + Core.VERSION);
        } catch (Exception e) {
            System.err.println("Failed to load OpenCV: " + e.getMessage());
            throw new RuntimeException("OpenCV initialization failed", e);
        }
    }

    /**
     * Loads the three Haar cascades, first from the development working-directory path,
     * then from the classpath (the only location available once packaged in a jar).
     */
    private void loadClassifiers() {
        String basePath = "src/main/resources/haarcascades/";
        faceClassifier = loadCascade(basePath, "haarcascade_frontalface_default.xml");
        eyeClassifier = loadCascade(basePath, "haarcascade_eye.xml");
        smileClassifier = loadCascade(basePath, "haarcascade_smile.xml");
        System.out.println("All classifiers loaded successfully");
    }

    /**
     * Loads a single cascade file. CascadeClassifier.load needs a real filesystem path,
     * so the classpath fallback copies the resource to a temp file instead of using
     * getResource().getPath(), which NPEs when the resource is missing and does not
     * work from inside a jar.
     */
    private CascadeClassifier loadCascade(String basePath, String fileName) {
        CascadeClassifier classifier = new CascadeClassifier();
        if (classifier.load(basePath + fileName)) {
            return classifier;
        }
        try (InputStream in = getClass().getResourceAsStream("/haarcascades/" + fileName)) {
            if (in == null) {
                throw new IOException("Classifier resource not found: " + fileName);
            }
            File temp = File.createTempFile("cascade-", ".xml");
            temp.deleteOnExit();
            java.nio.file.Files.copy(in, temp.toPath(),
                    java.nio.file.StandardCopyOption.REPLACE_EXISTING);
            if (!classifier.load(temp.getAbsolutePath())) {
                throw new IOException("Cascade failed to parse: " + fileName);
            }
            return classifier;
        } catch (IOException e) {
            System.err.println("Failed to load classifiers: " + e.getMessage());
            throw new RuntimeException("Classifier loading failed", e);
        }
    }

    /** Detects faces, eyes and smiles with annotations drawn — see the full overload. */
    public DetectionResult detectFaces(Mat image) {
        return detectFaces(image, true, true, true);
    }

    /**
     * Runs cascade detection on a copy of {@code image}; the input Mat is never modified.
     *
     * @param image           BGR or single-channel grayscale input
     * @param detectEyes      also search for eyes inside each detected face
     * @param detectSmiles    also search for smiles inside each detected face
     * @param drawAnnotations draw rectangles/circles/labels onto the returned image
     * @return result holding the (possibly annotated) image copy and all detections
     *         in absolute image coordinates; caller owns the returned Mat
     */
    public DetectionResult detectFaces(Mat image, boolean detectEyes, boolean detectSmiles, boolean drawAnnotations) {
        List<Rect> faces;
        List<Rect> eyes = new ArrayList<>();
        List<Rect> smiles = new ArrayList<>();
        Mat processedImage = image.clone();
        Mat grayImage = new Mat();
        try {
            // Grayscale + histogram equalization improve cascade hit rates.
            if (processedImage.channels() > 1) {
                Imgproc.cvtColor(processedImage, grayImage, Imgproc.COLOR_BGR2GRAY);
            } else {
                // Copy rather than alias: the original code aliased grayImage to
                // processedImage here and then released it in the finally block,
                // freeing the very Mat being returned to the caller.
                processedImage.copyTo(grayImage);
            }
            Imgproc.equalizeHist(grayImage, grayImage);

            MatOfRect faceDetections = new MatOfRect();
            faceClassifier.detectMultiScale(
                    grayImage,
                    faceDetections,
                    1.1,                 // scale factor per pyramid level
                    3,                   // min neighbors (higher = fewer false positives)
                    0,                   // flags (unused in OpenCV 4.x)
                    new Size(30, 30),    // min face size
                    new Size(500, 500)); // max face size
            faces = faceDetections.toList();
            faceDetections.release();

            // Search for eyes/smiles only inside each face ROI — cheaper and more accurate.
            for (Rect face : faces) {
                Mat faceROI = grayImage.submat(face);
                try {
                    if (detectEyes) {
                        detectInRoi(eyeClassifier, faceROI, face, eyes,
                                1.1, 2, new Size(20, 20), new Size(80, 80));
                    }
                    if (detectSmiles) {
                        // Aggressive scale/neighbor settings: smiles are noisy detections.
                        detectInRoi(smileClassifier, faceROI, face, smiles,
                                1.8, 20, new Size(25, 15), new Size(100, 50));
                    }
                } finally {
                    faceROI.release(); // submat headers must be released like any Mat
                }
            }

            if (drawAnnotations) {
                drawDetectionAnnotations(processedImage, faces, eyes, smiles);
            }
            return new DetectionResult(processedImage, faces, eyes, smiles);
        } finally {
            grayImage.release();
        }
    }

    /**
     * Runs one cascade over a face ROI and appends the hits, translated from
     * ROI-relative to absolute image coordinates, to {@code out}.
     */
    private void detectInRoi(CascadeClassifier classifier, Mat roi, Rect face,
                             List<Rect> out, double scaleFactor, int minNeighbors,
                             Size minSize, Size maxSize) {
        MatOfRect detections = new MatOfRect();
        classifier.detectMultiScale(roi, detections, scaleFactor, minNeighbors, 0, minSize, maxSize);
        for (Rect r : detections.toList()) {
            out.add(new Rect(face.x + r.x, face.y + r.y, r.width, r.height));
        }
        detections.release();
    }

    /** Draws blue labeled face boxes, green eye circles and red smile boxes (BGR colors). */
    private void drawDetectionAnnotations(Mat image, List<Rect> faces, List<Rect> eyes, List<Rect> smiles) {
        for (Rect face : faces) {
            Imgproc.rectangle(
                    image,
                    new Point(face.x, face.y),
                    new Point(face.x + face.width, face.y + face.height),
                    new Scalar(255, 0, 0), // blue in BGR
                    3);
            Imgproc.putText(
                    image,
                    "Face",
                    new Point(face.x, face.y - 10), // label just above the box
                    Imgproc.FONT_HERSHEY_SIMPLEX,
                    0.8,
                    new Scalar(255, 0, 0),
                    2);
        }
        for (Rect eye : eyes) {
            Point center = new Point(eye.x + eye.width / 2, eye.y + eye.height / 2);
            int radius = (int) Math.round((eye.width + eye.height) * 0.25);
            Imgproc.circle(image, center, radius, new Scalar(0, 255, 0), 2); // green
        }
        for (Rect smile : smiles) {
            Imgproc.rectangle(
                    image,
                    new Point(smile.x, smile.y),
                    new Point(smile.x + smile.width, smile.y + smile.height),
                    new Scalar(0, 0, 255), // red in BGR
                    2);
        }
    }

    /**
     * Converts a Mat to a BufferedImage by copying pixel bytes directly.
     *
     * <p>Replaces the original JPEG-encode round trip, which (a) mutated the caller's
     * Mat in place, (b) converted BGR->RGB before imencode — which expects BGR — thus
     * swapping the red/blue channels, (c) referenced Imgcodecs without importing it,
     * and (d) was lossy. OpenCV's BGR byte order matches TYPE_3BYTE_BGR exactly.
     */
    public BufferedImage matToBufferedImage(Mat mat) {
        int type = mat.channels() == 1
                ? BufferedImage.TYPE_BYTE_GRAY
                : BufferedImage.TYPE_3BYTE_BGR;
        BufferedImage image = new BufferedImage(mat.cols(), mat.rows(), type);
        byte[] data = ((java.awt.image.DataBufferByte) image.getRaster().getDataBuffer()).getData();
        mat.get(0, 0, data); // copies out of the Mat; the Mat is not modified
        return image;
    }

    /**
     * Converts a BufferedImage to a BGR Mat, the layout OpenCV expects.
     *
     * <p>Images loaded by ImageIO are frequently TYPE_INT_RGB/TYPE_INT_ARGB, whose
     * backing buffer is an int[] — the original unconditional DataBufferByte cast
     * would throw ClassCastException for those. Redraw into a 3-byte-BGR image first.
     */
    public Mat bufferedImageToMat(BufferedImage image) {
        try {
            BufferedImage bgr = image;
            if (image.getType() != BufferedImage.TYPE_3BYTE_BGR) {
                bgr = new BufferedImage(image.getWidth(), image.getHeight(),
                        BufferedImage.TYPE_3BYTE_BGR);
                bgr.getGraphics().drawImage(image, 0, 0, null);
            }
            Mat mat = new Mat(bgr.getHeight(), bgr.getWidth(), CvType.CV_8UC3);
            byte[] data = ((java.awt.image.DataBufferByte) bgr.getRaster().getDataBuffer()).getData();
            mat.put(0, 0, data);
            return mat;
        } catch (Exception e) {
            throw new RuntimeException("Failed to convert BufferedImage to Mat", e);
        }
    }

    /** Immutable carrier for one detection pass: annotated image plus all hit rectangles. */
    public static class DetectionResult {
        private final Mat processedImage;
        private final List<Rect> faces;
        private final List<Rect> eyes;
        private final List<Rect> smiles;

        public DetectionResult(Mat processedImage, List<Rect> faces, List<Rect> eyes, List<Rect> smiles) {
            this.processedImage = processedImage;
            this.faces = faces;
            this.eyes = eyes;
            this.smiles = smiles;
        }

        /** The annotated copy of the input; the caller is responsible for releasing it. */
        public Mat getProcessedImage() { return processedImage; }
        public List<Rect> getFaces() { return faces; }
        public List<Rect> getEyes() { return eyes; }
        public List<Rect> getSmiles() { return smiles; }
        public int getFaceCount() { return faces.size(); }
        public int getEyeCount() { return eyes.size(); }
        public int getSmileCount() { return smiles.size(); }
    }
}
Webcam Capture Service
Step 3: Real-time Camera Integration
package com.example.facedetection.core;
import org.opencv.core.Mat;
import org.opencv.videoio.VideoCapture;
import org.opencv.videoio.Videoio;
import javafx.animation.AnimationTimer;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import java.awt.image.BufferedImage;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * Wraps an OpenCV VideoCapture for live preview and single-frame grabs.
 *
 * <p>NOTE(review): capture.read() runs inside AnimationTimer.handle, i.e. on the
 * JavaFX Application Thread; a slow camera will stall the UI. Moving the grab to a
 * background thread would be a larger architectural change, so it is only flagged here.
 */
public class WebcamService {

    private VideoCapture capture;
    private final AtomicBoolean isRunning;
    private final FaceDetectionService faceDetector;
    private final int cameraIndex;

    public WebcamService() {
        this(0); // default camera
    }

    public WebcamService(int cameraIndex) {
        this.cameraIndex = cameraIndex;
        this.isRunning = new AtomicBoolean(false);
        this.faceDetector = FaceDetectionService.getInstance();
        initializeCamera();
    }

    /** Index of the camera this service was opened with (needed by the UI layer). */
    public int getCameraIndex() {
        return cameraIndex;
    }

    /** Opens the camera and requests 640x480 @ 30fps; throws if it cannot be opened. */
    private void initializeCamera() {
        try {
            capture = new VideoCapture(cameraIndex);
            if (!capture.isOpened()) {
                throw new RuntimeException("Cannot open camera: " + cameraIndex);
            }
            // Requested properties; drivers may silently substitute other values,
            // hence the echo of the actual values below.
            capture.set(Videoio.CAP_PROP_FRAME_WIDTH, 640);
            capture.set(Videoio.CAP_PROP_FRAME_HEIGHT, 480);
            capture.set(Videoio.CAP_PROP_FPS, 30);
            System.out.println("Camera initialized: " + cameraIndex);
            System.out.println("Resolution: " + capture.get(Videoio.CAP_PROP_FRAME_WIDTH) +
                    "x" + capture.get(Videoio.CAP_PROP_FRAME_HEIGHT));
            System.out.println("FPS: " + capture.get(Videoio.CAP_PROP_FPS));
        } catch (Exception e) {
            throw new RuntimeException("Failed to initialize camera: " + e.getMessage(), e);
        }
    }

    /**
     * Starts pushing frames into {@code imageView} until {@link #stopCapture()} is called.
     *
     * @param imageView   target view, updated once per animation pulse
     * @param detectFaces run face/eye/smile detection with annotations on each frame
     */
    public void startCapture(ImageView imageView, boolean detectFaces) {
        // Atomic test-and-set so two concurrent callers cannot both start a timer.
        if (!isRunning.compareAndSet(false, true)) {
            return;
        }
        AnimationTimer timer = new AnimationTimer() {
            @Override
            public void handle(long now) {
                if (!isRunning.get()) {
                    this.stop();
                    return;
                }
                Mat frame = new Mat();
                try {
                    if (!capture.read(frame)) {
                        System.err.println("Failed to capture frame");
                        this.stop();
                        return;
                    }
                    if (frame.empty()) {
                        return; // transient empty grab; try again next pulse
                    }
                    Mat processedFrame = frame;
                    try {
                        if (detectFaces) {
                            FaceDetectionService.DetectionResult result =
                                    faceDetector.detectFaces(frame, true, true, true);
                            processedFrame = result.getProcessedImage();
                        }
                        BufferedImage bufferedImage = faceDetector.matToBufferedImage(processedFrame);
                        // AnimationTimer.handle already runs on the JavaFX Application
                        // Thread, so the view can be updated directly (the original
                        // Platform.runLater hop was redundant).
                        imageView.setImage(convertToFxImage(bufferedImage));
                    } catch (Exception e) {
                        System.err.println("Error processing frame: " + e.getMessage());
                    } finally {
                        // Release only when it is a distinct Mat: the original released
                        // processedFrame and then frame even when they were the same
                        // object, a double release of one native buffer.
                        if (processedFrame != frame) {
                            processedFrame.release();
                        }
                    }
                } finally {
                    frame.release();
                }
            }
        };
        timer.start();
    }

    /** Signals the capture loop to stop; the timer shuts itself down on the next pulse. */
    public void stopCapture() {
        isRunning.set(false);
    }

    /** Grabs one raw frame; returns null on failure. Caller owns (and releases) the Mat. */
    public Mat captureSingleFrame() {
        Mat frame = new Mat();
        if (capture.read(frame) && !frame.empty()) {
            return frame;
        }
        frame.release();
        return null;
    }

    /** Grabs one frame and returns an annotated copy (null on failure); caller owns it. */
    public Mat captureSingleFrameWithDetection() {
        Mat frame = captureSingleFrame();
        if (frame != null) {
            FaceDetectionService.DetectionResult result = faceDetector.detectFaces(frame);
            Mat processed = result.getProcessedImage();
            frame.release();
            return processed;
        }
        return null;
    }

    /** PNG round trip from AWT to JavaFX; lossless, at the cost of an in-memory encode. */
    private Image convertToFxImage(BufferedImage image) {
        java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
        try {
            javax.imageio.ImageIO.write(image, "png", out);
            return new Image(new java.io.ByteArrayInputStream(out.toByteArray()));
        } catch (Exception e) {
            throw new RuntimeException("Failed to convert image", e);
        }
    }

    public void setCameraResolution(int width, int height) {
        capture.set(Videoio.CAP_PROP_FRAME_WIDTH, width);
        capture.set(Videoio.CAP_PROP_FRAME_HEIGHT, height);
    }

    public void setFrameRate(double fps) {
        capture.set(Videoio.CAP_PROP_FPS, fps);
    }

    /** Stops capture and frees the native camera handle. Safe to call more than once. */
    public void release() {
        stopCapture();
        if (capture != null) {
            capture.release();
        }
    }

    public boolean isRunning() {
        return isRunning.get();
    }

    /**
     * Probes indices 0-9 and returns those that open as cameras.
     * Types are fully qualified because this file does not import java.util collections
     * (the original referenced List/ArrayList without imports and did not compile).
     */
    public static java.util.List<Integer> getAvailableCameras() {
        java.util.List<Integer> cameras = new java.util.ArrayList<>();
        for (int i = 0; i < 10; i++) {
            VideoCapture testCapture = new VideoCapture(i);
            if (testCapture.isOpened()) {
                cameras.add(i);
            }
            // Release unconditionally — the original leaked handles that failed to open.
            testCapture.release();
        }
        return cameras;
    }
}
JavaFX Main Application
Step 4: Main Application UI
package com.example.facedetection;
import com.example.facedetection.core.FaceDetectionService;
import com.example.facedetection.core.WebcamService;
import javafx.application.Application;
import javafx.application.Platform;
import javafx.geometry.Insets;
import javafx.geometry.Pos;
import javafx.scene.Scene;
import javafx.scene.control.*;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.layout.*;
import javafx.stage.FileChooser;
import javafx.stage.Stage;
import org.opencv.core.Mat;
import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.File;
import java.util.List;
/**
 * JavaFX entry point: wires the detection and webcam services to a simple UI with
 * live preview, still-image detection, and saving of the processed result.
 */
public class MainApp extends Application {

    private Stage primaryStage;
    private BorderPane rootLayout;

    // Services
    private FaceDetectionService faceDetector;
    private WebcamService webcamService;

    // UI components
    private ImageView imageView;
    private Label statusLabel;
    private ProgressIndicator progressIndicator;
    private Button startWebcamButton;
    private Button stopWebcamButton;
    private Button loadImageButton;
    private Button detectFacesButton;
    private Button saveImageButton;
    private ComboBox<Integer> cameraComboBox;
    private CheckBox detectEyesCheckBox;
    private CheckBox detectSmilesCheckBox;

    // Current state
    private boolean webcamActive = false;
    private Mat currentImage; // last loaded still image, as an OpenCV Mat (owned here)

    /** Initializes services off the FX thread, before start() runs. */
    @Override
    public void init() throws Exception {
        super.init();
        faceDetector = FaceDetectionService.getInstance();
        List<Integer> availableCameras = WebcamService.getAvailableCameras();
        if (!availableCameras.isEmpty()) {
            webcamService = new WebcamService(availableCameras.get(0));
        }
    }

    @Override
    public void start(Stage primaryStage) {
        this.primaryStage = primaryStage;
        this.primaryStage.setTitle("Face Detection Application");
        initializeRootLayout();
        showMainInterface();
        primaryStage.setOnCloseRequest(event -> shutdown());
        primaryStage.show();
    }

    private void initializeRootLayout() {
        rootLayout = new BorderPane();
        Scene scene = new Scene(rootLayout, 1200, 800);
        // Guard against a missing stylesheet: getResource returns null in that case
        // and the original toExternalForm() call would NPE on startup.
        java.net.URL css = getClass().getResource("/styles/main.css");
        if (css != null) {
            scene.getStylesheets().add(css.toExternalForm());
        } else {
            System.err.println("Stylesheet /styles/main.css not found; using default styling");
        }
        primaryStage.setScene(scene);
    }

    /** Builds the main layout: title, image area, control panel, status bar. */
    private void showMainInterface() {
        VBox mainContainer = new VBox(20);
        mainContainer.setPadding(new Insets(20));
        mainContainer.setAlignment(Pos.TOP_CENTER);

        Label titleLabel = new Label("Face Detection Application");
        titleLabel.getStyleClass().add("title-label");

        imageView = new ImageView();
        imageView.setPreserveRatio(true);
        imageView.setFitWidth(800);
        imageView.setFitHeight(600);
        imageView.setStyle("-fx-border-color: #cccccc; -fx-border-width: 2px; -fx-background-color: #f8f8f8;");

        HBox controlPanel = createControlPanel();
        HBox statusBar = createStatusBar();

        mainContainer.getChildren().addAll(titleLabel, imageView, controlPanel, statusBar);
        rootLayout.setCenter(mainContainer);
        updateStatus("Ready to detect faces");
    }

    /** Builds webcam, image-file and detection-option controls in one horizontal panel. */
    private HBox createControlPanel() {
        HBox controlPanel = new HBox(15);
        controlPanel.setAlignment(Pos.CENTER);
        controlPanel.setPadding(new Insets(15));
        controlPanel.setStyle("-fx-background-color: #f0f0f0; -fx-border-color: #dddddd; -fx-border-radius: 5px;");

        Label webcamLabel = new Label("Webcam:");
        cameraComboBox = new ComboBox<>();
        cameraComboBox.setPrefWidth(80);
        List<Integer> cameras = WebcamService.getAvailableCameras();
        cameraComboBox.getItems().addAll(cameras);
        if (!cameras.isEmpty()) {
            cameraComboBox.setValue(cameras.get(0));
        }

        startWebcamButton = new Button("Start Webcam");
        startWebcamButton.setOnAction(e -> startWebcam());
        stopWebcamButton = new Button("Stop Webcam");
        stopWebcamButton.setOnAction(e -> stopWebcam());
        stopWebcamButton.setDisable(true);

        loadImageButton = new Button("Load Image");
        loadImageButton.setOnAction(e -> loadImage());
        detectFacesButton = new Button("Detect Faces");
        detectFacesButton.setOnAction(e -> detectFacesInImage());
        detectFacesButton.setDisable(true);
        saveImageButton = new Button("Save Result");
        saveImageButton.setOnAction(e -> saveImage());
        saveImageButton.setDisable(true);

        detectEyesCheckBox = new CheckBox("Detect Eyes");
        detectEyesCheckBox.setSelected(true);
        detectSmilesCheckBox = new CheckBox("Detect Smiles");
        detectSmilesCheckBox.setSelected(true);

        VBox webcamBox = new VBox(5, webcamLabel, cameraComboBox, startWebcamButton, stopWebcamButton);
        VBox imageBox = new VBox(5, loadImageButton, detectFacesButton, saveImageButton);
        VBox optionsBox = new VBox(5, detectEyesCheckBox, detectSmilesCheckBox);

        controlPanel.getChildren().addAll(webcamBox, new Separator(), imageBox, new Separator(), optionsBox);
        return controlPanel;
    }

    /** Builds the bottom status bar: spinner (hidden when idle) plus status text. */
    private HBox createStatusBar() {
        HBox statusBar = new HBox(10);
        statusBar.setAlignment(Pos.CENTER_LEFT);
        statusBar.setPadding(new Insets(10));
        statusBar.setStyle("-fx-background-color: #e8e8e8; -fx-border-color: #cccccc; -fx-border-width: 1px 0 0 0;");
        statusLabel = new Label("Ready");
        statusLabel.setStyle("-fx-font-weight: bold;");
        progressIndicator = new ProgressIndicator();
        progressIndicator.setVisible(false);
        progressIndicator.setPrefSize(20, 20);
        statusBar.getChildren().addAll(progressIndicator, statusLabel);
        HBox.setHgrow(statusLabel, Priority.ALWAYS);
        return statusBar;
    }

    /** Starts live preview, reopening the service if a different camera was selected. */
    private void startWebcam() {
        if (webcamService == null) {
            showError("No webcam available");
            return;
        }
        try {
            Integer selectedCamera = cameraComboBox.getValue();
            if (selectedCamera != null && selectedCamera != webcamService.getCameraIndex()) {
                webcamService.release();
                webcamService = new WebcamService(selectedCamera);
            }
            webcamService.startCapture(imageView, true);
            webcamActive = true;
            startWebcamButton.setDisable(true);
            stopWebcamButton.setDisable(false);
            loadImageButton.setDisable(true);
            detectFacesButton.setDisable(true);
            updateStatus("Webcam active - detecting faces in real-time");
        } catch (Exception e) {
            showError("Failed to start webcam: " + e.getMessage());
        }
    }

    private void stopWebcam() {
        if (webcamService != null) {
            webcamService.stopCapture();
            webcamActive = false;
            startWebcamButton.setDisable(false);
            stopWebcamButton.setDisable(true);
            loadImageButton.setDisable(false);
            imageView.setImage(null);
            updateStatus("Webcam stopped");
        }
    }

    /** Lets the user pick an image file and loads/converts it on a background thread. */
    private void loadImage() {
        FileChooser fileChooser = new FileChooser();
        fileChooser.setTitle("Open Image File");
        fileChooser.getExtensionFilters().addAll(
                new FileChooser.ExtensionFilter("Image Files", "*.png", "*.jpg", "*.jpeg", "*.bmp", "*.gif"),
                new FileChooser.ExtensionFilter("All Files", "*.*"));
        File selectedFile = fileChooser.showOpenDialog(primaryStage);
        if (selectedFile == null) {
            return;
        }
        showProgress(true);
        updateStatus("Loading image...");
        Thread loader = new Thread(() -> {
            try {
                BufferedImage bufferedImage = ImageIO.read(selectedFile);
                if (bufferedImage == null) {
                    throw new java.io.IOException("Unsupported or corrupt image file");
                }
                Image fxImage = convertToFxImage(bufferedImage);
                Mat newImage = faceDetector.bufferedImageToMat(bufferedImage);
                Platform.runLater(() -> {
                    // Release the previously loaded Mat before replacing it (the
                    // original leaked one native buffer per load).
                    if (currentImage != null) {
                        currentImage.release();
                    }
                    currentImage = newImage;
                    imageView.setImage(fxImage);
                    detectFacesButton.setDisable(false);
                    saveImageButton.setDisable(true);
                    updateStatus("Image loaded: " + selectedFile.getName());
                    showProgress(false);
                });
            } catch (Exception e) {
                Platform.runLater(() -> {
                    showError("Failed to load image: " + e.getMessage());
                    showProgress(false);
                });
            }
        });
        loader.setDaemon(true); // don't keep the JVM alive if the window closes mid-load
        loader.start();
    }

    /** Runs detection on the loaded still image on a background thread. */
    private void detectFacesInImage() {
        if (currentImage == null) {
            showError("No image loaded");
            return;
        }
        showProgress(true);
        updateStatus("Detecting faces...");
        // Read control state on the FX thread; JavaFX controls must not be touched
        // from the worker thread (the original read the checkboxes off-thread).
        final boolean detectEyes = detectEyesCheckBox.isSelected();
        final boolean detectSmiles = detectSmilesCheckBox.isSelected();
        Thread worker = new Thread(() -> {
            try {
                FaceDetectionService.DetectionResult result =
                        faceDetector.detectFaces(currentImage, detectEyes, detectSmiles, true);
                BufferedImage processedImage = faceDetector.matToBufferedImage(result.getProcessedImage());
                result.getProcessedImage().release(); // annotated copy no longer needed
                Image fxImage = convertToFxImage(processedImage);
                Platform.runLater(() -> {
                    imageView.setImage(fxImage);
                    saveImageButton.setDisable(false);
                    String status = String.format("Detection complete: %d faces, %d eyes, %d smiles found",
                            result.getFaceCount(), result.getEyeCount(), result.getSmileCount());
                    updateStatus(status);
                    showProgress(false);
                });
            } catch (Exception e) {
                Platform.runLater(() -> {
                    showError("Face detection failed: " + e.getMessage());
                    showProgress(false);
                });
            }
        });
        worker.setDaemon(true);
        worker.start();
    }

    /**
     * Saves the currently displayed image to a user-chosen file.
     * The original method was a stub that reported success without writing anything.
     */
    private void saveImage() {
        if (imageView.getImage() == null) {
            showError("No image to save");
            return;
        }
        FileChooser fileChooser = new FileChooser();
        fileChooser.setTitle("Save Processed Image");
        fileChooser.getExtensionFilters().addAll(
                new FileChooser.ExtensionFilter("PNG Image", "*.png"),
                new FileChooser.ExtensionFilter("JPEG Image", "*.jpg"),
                new FileChooser.ExtensionFilter("All Files", "*.*"));
        File file = fileChooser.showSaveDialog(primaryStage);
        if (file == null) {
            return;
        }
        try {
            // javafx-swing is already on the classpath; fully qualified to avoid
            // touching this file's import list.
            BufferedImage snapshot =
                    javafx.embed.swing.SwingFXUtils.fromFXImage(imageView.getImage(), null);
            String lower = file.getName().toLowerCase();
            String format = (lower.endsWith(".jpg") || lower.endsWith(".jpeg")) ? "jpg" : "png";
            BufferedImage toWrite = snapshot;
            if (format.equals("jpg")) {
                // JPEG has no alpha channel; flatten to RGB or ImageIO.write fails.
                toWrite = new BufferedImage(snapshot.getWidth(), snapshot.getHeight(),
                        BufferedImage.TYPE_INT_RGB);
                toWrite.getGraphics().drawImage(snapshot, 0, 0, null);
            }
            if (!ImageIO.write(toWrite, format, file)) {
                throw new java.io.IOException("No image writer available for format: " + format);
            }
            updateStatus("Image saved: " + file.getName());
        } catch (Exception e) {
            showError("Failed to save image: " + e.getMessage());
        }
    }

    /** Lossless AWT-to-JavaFX conversion via an in-memory PNG round trip. */
    private Image convertToFxImage(BufferedImage image) {
        java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
        try {
            ImageIO.write(image, "png", out);
            return new Image(new java.io.ByteArrayInputStream(out.toByteArray()));
        } catch (Exception e) {
            throw new RuntimeException("Failed to convert image", e);
        }
    }

    private void updateStatus(String message) {
        statusLabel.setText(message);
    }

    private void showProgress(boolean show) {
        progressIndicator.setVisible(show);
    }

    private void showError(String message) {
        Alert alert = new Alert(Alert.AlertType.ERROR);
        alert.setTitle("Error");
        alert.setHeaderText("An error occurred");
        alert.setContentText(message);
        alert.showAndWait();
    }

    /** Releases native resources on window close. */
    private void shutdown() {
        if (webcamService != null) {
            webcamService.release();
        }
        if (currentImage != null) {
            currentImage.release();
            currentImage = null;
        }
    }

    public static void main(String[] args) {
        launch(args);
    }
}
Advanced Features
Step 5: Face Recognition and Analytics
package com.example.facedetection.advanced;
import org.opencv.core.*;
import org.opencv.face.FaceRecognizer;
import org.opencv.face.LBPHFaceRecognizer;
import org.opencv.imgproc.Imgproc;
import java.io.File;
import java.util.*;
public class FaceRecognitionService {
private FaceRecognizer faceRecognizer;
private Map<Integer, String> labelMap;
private int nextLabelId;
private boolean isTrained;
public FaceRecognitionService() {
this.faceRecognizer = LBPHFaceRecognizer.create();
this.labelMap = new HashMap<>();
this.nextLabelId = 0;
this.isTrained = false;
}
public void trainFromDirectory(String directoryPath) {
List<Mat> images = new ArrayList<>();
List<Integer> labels = new ArrayList<>();
File rootDir = new File(directoryPath);
if (!rootDir.exists() || !rootDir.isDirectory()) {
throw new IllegalArgumentException("Directory does not exist: " + directoryPath);
}
// Process each subdirectory (each represents a person)
for (File personDir : rootDir.listFiles(File::isDirectory)) {
String personName = personDir.getName();
int labelId = getOrCreateLabel(personName);
// Process each image in the person's directory
for (File imageFile : personDir.listFiles((dir, name) ->
name.toLowerCase().endsWith(".jpg") ||
name.toLowerCase().endsWith(".png") ||
name.toLowerCase().endsWith(".jpeg"))) {
try {
Mat image = loadAndPreprocessImage(imageFile.getAbsolutePath());
images.add(image);
labels.add(labelId);
} catch (Exception e) {
System.err.println("Failed to process image: " + imageFile.getName() + " - " + e.getMessage());
}
}
}
if (images.isEmpty()) {
throw new IllegalStateException("No training images found");
}
// Train the recognizer
MatOfInt labelsMat = new MatOfInt();
labelsMat.fromList(labels);
faceRecognizer.train(images, labelsMat);
isTrained = true;
// Clean up
images.forEach(Mat::release);
labelsMat.release();
System.out.println("Training completed. " + labelMap.size() + " persons, " + images.size() + " images");
}
public RecognitionResult recognizeFace(Mat faceImage) {
if (!isTrained) {
throw new IllegalStateException("Recognizer is not trained");
}
Mat processedFace = preprocessFace(faceImage);
int[] label = new int[1];
double[] confidence = new double[1];
faceRecognizer.predict(processedFace, label, confidence);
String personName = labelMap.getOrDefault(label[0], "Unknown");
processedFace.release();
return new RecognitionResult(personName, confidence[0], label[0]);
}
private Mat loadAndPreprocessImage(String imagePath) {
Mat image = Imgcodecs.imread(imagePath, Imgcodecs.IMREAD_GRAYSCALE);
if (image.empty()) {
throw new RuntimeException("Failed to load image: " + imagePath);
}
return preprocessFace(image);
}
private Mat preprocessFace(Mat faceImage) {
Mat processed = new Mat();
// Resize to standard size
Imgproc.resize(faceImage, processed, new Size(100, 100));
// Equalize histogram
Imgproc.equalizeHist(processed, processed);
return processed;
}
private int getOrCreateLabel(String personName) {
for (Map.Entry<Integer, String> entry : labelMap.entrySet()) {
if (entry.getValue().equals(personName)) {
return entry.getKey();
}
}
int newLabel = nextLabelId++;
labelMap.put(newLabel, personName);
return newLabel;
}
public void saveModel(String modelPath) {
if (!isTrained) {
throw new IllegalStateException("No model to save - recognizer is not trained");
}
faceRecognizer.save(modelPath);
// Save label mapping
try (java.io.ObjectOutputStream out = new java.io.ObjectOutputStream(
new java.io.FileOutputStream(modelPath + ".labels"))) {
out.writeObject(labelMap);
} catch (Exception e) {
throw new RuntimeException("Failed to save label mapping", e);
}
}
@SuppressWarnings("unchecked")
public void loadModel(String modelPath) {
faceRecognizer.read(modelPath);
// Load label mapping
try (java.io.ObjectInputStream in = new java.io.ObjectInputStream(
new java.io.FileInputStream(modelPath + ".labels"))) {
labelMap = (Map<Integer, String>) in.readObject();
nextLabelId = labelMap.keySet().stream().max(Integer::compareTo).orElse(0) + 1;
isTrained = true;
} catch (Exception e) {
throw new RuntimeException("Failed to load label mapping", e);
}
}
public boolean isTrained() {
return isTrained;
}
public Map<Integer, String> getLabelMap() {
return Collections.unmodifiableMap(labelMap);
}
public static class RecognitionResult {
private final String personName;
private final double confidence;
private final int label;
public RecognitionResult(String personName, double confidence, int label) {
this.personName = personName;
this.confidence = confidence;
this.label = label;
}
// Getters
public String getPersonName() { return personName; }
public double getConfidence() { return confidence; }
public int getLabel() { return label; }
public boolean isConfident(double threshold) {
return confidence < threshold;
}
@Override
public String toString() {
return String.format("%s (%.2f confidence)", personName, confidence);
}
}
}
CSS Styling
Step 6: Application Styling
```css
/* resources/styles/main.css */
.root {
-fx-font-family: "Segoe UI", Arial, sans-serif;
-fx-font-size: 14