JavaFX with OpenCV: Computer Vision Desktop Applications

This comprehensive guide covers integrating OpenCV with JavaFX to create powerful computer vision applications with modern user interfaces.

Project Setup and Dependencies

Maven Configuration

<!-- pom.xml -->
<!-- Build configuration: JavaFX 21 UI + OpenCV 4.8 via the org.openpnp
     repackaging, which bundles the native libraries inside the jar. -->
<project>
<properties>
<!-- Java 17 is the minimum release supported by JavaFX 21. -->
<maven.compiler.release>17</maven.compiler.release>
<javafx.version>21</javafx.version>
<!-- NOTE(review): org.openpnp opencv releases use a "-N" suffix
     (e.g. 4.8.1-0); verify "4.8.0" resolves on Maven Central. -->
<opencv.version>4.8.0</opencv.version>
</properties>
<dependencies>
<!-- JavaFX -->
<dependency>
<groupId>org.openjfx</groupId>
<artifactId>javafx-controls</artifactId>
<version>${javafx.version}</version>
</dependency>
<dependency>
<groupId>org.openjfx</groupId>
<artifactId>javafx-fxml</artifactId>
<version>${javafx.version}</version>
</dependency>
<!-- OpenCV -->
<!-- NOTE(review): OpenCVUtils uses javafx.embed.swing.SwingFXUtils, which
     also requires the org.openjfx:javafx-swing artifact. -->
<dependency>
<groupId>org.openpnp</groupId>
<artifactId>opencv</artifactId>
<version>${opencv.version}</version>
</dependency>
<!-- ImageIO extensions for more formats -->
<dependency>
<groupId>com.twelvemonkeys.imageio</groupId>
<artifactId>imageio-core</artifactId>
<version>3.10.1</version>
</dependency>
</dependencies>
<build>
<plugins>
<!-- Runs the app with the JavaFX modules wired up: mvn javafx:run -->
<plugin>
<groupId>org.openjfx</groupId>
<artifactId>javafx-maven-plugin</artifactId>
<version>0.0.8</version>
<configuration>
<mainClass>com.example.opencvapp.OpenCVApp</mainClass>
</configuration>
</plugin>
</plugins>
</build>
</project>

Core OpenCV-JavaFX Integration

OpenCV Initialization and Utility Classes

package com.example.opencvapp.utils;

import org.opencv.core.*;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;
import org.opencv.videoio.VideoCapture;
import javafx.scene.image.Image;
import javafx.scene.image.PixelFormat;
import javafx.scene.image.WritableImage;
import javafx.scene.image.WritablePixelFormat;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import java.nio.ByteBuffer;

/**
 * Bridging utilities between OpenCV {@link Mat} and JavaFX {@link Image},
 * plus small load/save/resize helpers.
 */
public class OpenCVUtils {

    static {
        // Load the OpenCV native library once, when this class is first touched.
        nu.pattern.OpenCV.loadLocally();
        // Alternative when using the official OpenCV jar instead of openpnp:
        // System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    }

    private OpenCVUtils() {
        // Static utility class - no instances.
    }

    /**
     * Explicitly triggers class initialization so callers (e.g.
     * {@code OpenCVApp.start()}) can force the native library to load
     * at startup rather than on first use.
     */
    public static void loadLibrary() {
        // Intentionally empty: loading this class runs the static block above.
    }

    /**
     * Convert an OpenCV Mat to a JavaFX Image.
     *
     * @param mat source image; 1 (gray), 3 (BGR) or 4 (BGRA) channels supported
     * @return a WritableImage, or {@code null} if {@code mat} is null/empty
     */
    public static Image mat2Image(Mat mat) {
        if (mat == null || mat.empty()) {
            return null;
        }
        Mat converted;
        WritablePixelFormat<ByteBuffer> format;
        if (mat.channels() == 1) {
            converted = new Mat();
            // Expand grayscale to 3 channels; channel order is irrelevant (R==G==B).
            Imgproc.cvtColor(mat, converted, Imgproc.COLOR_GRAY2RGB);
            format = PixelFormat.getByteRgbInstance();
        } else if (mat.channels() == 3) {
            converted = new Mat();
            // OpenCV stores BGR; JavaFX's byte-RGB format expects R,G,B order.
            Imgproc.cvtColor(mat, converted, Imgproc.COLOR_BGR2RGB);
            format = PixelFormat.getByteRgbInstance();
        } else if (mat.channels() == 4) {
            // OpenCV BGRA already matches JavaFX's BGRA byte format directly.
            // (The original converted to RGBA but then wrote it with a BGRA
            // pixel format, swapping the red and blue channels.)
            converted = mat.clone();
            format = PixelFormat.getByteBgraInstance();
        } else {
            // Unusual channel count: pass the bytes through unchanged.
            converted = mat.clone();
            format = PixelFormat.getByteRgbInstance();
        }
        try {
            byte[] buffer = new byte[converted.cols() * converted.rows() * converted.channels()];
            converted.get(0, 0, buffer);
            WritableImage writableImage = new WritableImage(converted.cols(), converted.rows());
            writableImage.getPixelWriter().setPixels(0, 0, converted.cols(), converted.rows(),
                    format, buffer, 0, converted.cols() * converted.channels());
            return writableImage;
        } finally {
            converted.release();
        }
    }

    /**
     * Convert a JavaFX Image to an OpenCV Mat (8-bit, 3-channel BGR).
     *
     * @param image source image; may be {@code null}
     * @return a CV_8UC3 Mat, empty when {@code image} is null
     */
    public static Mat image2Mat(Image image) {
        if (image == null) {
            return new Mat();
        }
        int width = (int) image.getWidth();
        int height = (int) image.getHeight();
        // fromFXImage may ignore an incompatible destination and allocate its
        // own (ARGB) BufferedImage, so always use the RETURNED instance and
        // then repaint it into the 3-byte-BGR layout that maps onto CV_8UC3.
        BufferedImage fxImage = javafx.embed.swing.SwingFXUtils.fromFXImage(image, null);
        BufferedImage bgrImage = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR);
        java.awt.Graphics2D g = bgrImage.createGraphics();
        try {
            g.drawImage(fxImage, 0, 0, null);
        } finally {
            g.dispose();
        }
        byte[] pixels = ((DataBufferByte) bgrImage.getRaster().getDataBuffer()).getData();
        Mat mat = new Mat(height, width, CvType.CV_8UC3);
        mat.put(0, 0, pixels);
        return mat;
    }

    /**
     * Load an image from disk (BGR, 8-bit).
     *
     * @return an empty Mat when the file cannot be read
     */
    public static Mat loadImage(String filePath) {
        return Imgcodecs.imread(filePath);
    }

    /**
     * Save an image to disk; the format is inferred from the file extension.
     *
     * @return {@code true} on success
     */
    public static boolean saveImage(Mat mat, String filePath) {
        return Imgcodecs.imwrite(filePath, mat);
    }

    /**
     * Resize an image to fit within {@code maxWidth} x {@code maxHeight},
     * preserving the aspect ratio. Images already within bounds are copied
     * at their original size.
     */
    public static Mat resizeImage(Mat src, int maxWidth, int maxHeight) {
        if (src.empty()) return src;
        double aspectRatio = (double) src.width() / src.height();
        int newWidth;
        int newHeight;
        if (src.width() > maxWidth || src.height() > maxHeight) {
            if (aspectRatio > 1) {
                // Landscape: width is the binding constraint.
                newWidth = maxWidth;
                newHeight = (int) (maxWidth / aspectRatio);
            } else {
                // Portrait (or square): height is the binding constraint.
                newHeight = maxHeight;
                newWidth = (int) (maxHeight * aspectRatio);
            }
        } else {
            newWidth = src.width();
            newHeight = src.height();
        }
        Mat resized = new Mat();
        Imgproc.resize(src, resized, new Size(newWidth, newHeight));
        return resized;
    }
}

Main Application Class

package com.example.opencvapp;
import javafx.application.Application;
import javafx.scene.Scene;
import javafx.scene.control.*;
import javafx.scene.image.ImageView;
import javafx.scene.layout.BorderPane;
import javafx.scene.layout.VBox;
import javafx.stage.Stage;
import org.opencv.core.Mat;
public class OpenCVApp extends Application {
private ImageView imageView;
private Mat currentMat;
private ImageProcessor imageProcessor;
@Override
public void start(Stage primaryStage) {
// Initialize OpenCV
OpenCVUtils.loadLibrary();
// Create UI components
createUI(primaryStage);
// Initialize image processor
imageProcessor = new ImageProcessor();
}
private void createUI(Stage stage) {
// Main layout
BorderPane root = new BorderPane();
// Create menu bar
MenuBar menuBar = createMenuBar();
// Create toolbar
ToolBar toolBar = createToolBar();
// Create image view
imageView = new ImageView();
imageView.setPreserveRatio(true);
imageView.setFitWidth(800);
imageView.setFitHeight(600);
ScrollPane imageScrollPane = new ScrollPane(imageView);
imageScrollPane.setFitToWidth(true);
imageScrollPane.setFitToHeight(true);
// Create control panel
VBox controlPanel = createControlPanel();
// Assemble layout
root.setTop(menuBar);
root.setLeft(toolBar);
root.setCenter(imageScrollPane);
root.setRight(controlPanel);
// Create scene
Scene scene = new Scene(root, 1200, 800);
scene.getStylesheets().add(getClass().getResource("/styles.css").toExternalForm());
stage.setTitle("JavaFX OpenCV Application");
stage.setScene(scene);
stage.show();
}
private MenuBar createMenuBar() {
MenuBar menuBar = new MenuBar();
// File menu
Menu fileMenu = new Menu("File");
MenuItem openItem = new MenuItem("Open Image");
MenuItem saveItem = new MenuItem("Save Image");
MenuItem exitItem = new MenuItem("Exit");
openItem.setOnAction(e -> openImage());
saveItem.setOnAction(e -> saveImage());
exitItem.setOnAction(e -> System.exit(0));
fileMenu.getItems().addAll(openItem, saveItem, new SeparatorMenuItem(), exitItem);
// Process menu
Menu processMenu = new Menu("Process");
MenuItem grayscaleItem = new MenuItem("Grayscale");
MenuItem blurItem = new MenuItem("Blur");
MenuItem edgeItem = new MenuItem("Edge Detection");
MenuItem faceItem = new MenuItem("Face Detection");
grayscaleItem.setOnAction(e -> applyGrayscale());
blurItem.setOnAction(e -> applyBlur());
edgeItem.setOnAction(e -> detectEdges());
faceItem.setOnAction(e -> detectFaces());
processMenu.getItems().addAll(grayscaleItem, blurItem, edgeItem, faceItem);
menuBar.getMenus().addAll(fileMenu, processMenu);
return menuBar;
}
private ToolBar createToolBar() {
ToolBar toolBar = new ToolBar();
Button openBtn = new Button("Open");
Button saveBtn = new Button("Save");
Button resetBtn = new Button("Reset");
Button grayscaleBtn = new Button("Gray");
Button blurBtn = new Button("Blur");
Button edgeBtn = new Button("Edges");
Button faceBtn = new Button("Faces");
openBtn.setOnAction(e -> openImage());
saveBtn.setOnAction(e -> saveImage());
resetBtn.setOnAction(e -> resetImage());
grayscaleBtn.setOnAction(e -> applyGrayscale());
blurBtn.setOnAction(e -> applyBlur());
edgeBtn.setOnAction(e -> detectEdges());
faceBtn.setOnAction(e -> detectFaces());
toolBar.getItems().addAll(openBtn, saveBtn, resetBtn, 
new Separator(), grayscaleBtn, blurBtn, edgeBtn, faceBtn);
return toolBar;
}
private VBox createControlPanel() {
VBox controlPanel = new VBox(10);
controlPanel.setStyle("-fx-padding: 10; -fx-spacing: 10;");
// Blur controls
Label blurLabel = new Label("Blur Settings");
Slider blurSlider = new Slider(1, 15, 3);
blurSlider.setMajorTickUnit(2);
blurSlider.setShowTickLabels(true);
blurSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
if (currentMat != null && !currentMat.empty()) {
applyCustomBlur(newVal.intValue());
}
});
// Threshold controls
Label thresholdLabel = new Label("Threshold");
Slider thresholdSlider = new Slider(0, 255, 127);
thresholdSlider.setMajorTickUnit(50);
thresholdSlider.setShowTickLabels(true);
thresholdSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
if (currentMat != null && !currentMat.empty()) {
applyThreshold(newVal.intValue());
}
});
// Brightness/Contrast
Label brightnessLabel = new Label("Brightness");
Slider brightnessSlider = new Slider(-100, 100, 0);
Label contrastLabel = new Label("Contrast");
Slider contrastSlider = new Slider(0.1, 3.0, 1.0);
Button applyAdjustments = new Button("Apply Adjustments");
applyAdjustments.setOnAction(e -> {
if (currentMat != null && !currentMat.empty()) {
adjustBrightnessContrast(
brightnessSlider.getValue(),
contrastSlider.getValue()
);
}
});
controlPanel.getChildren().addAll(
blurLabel, blurSlider,
thresholdLabel, thresholdSlider,
brightnessLabel, brightnessSlider,
contrastLabel, contrastSlider,
applyAdjustments
);
return controlPanel;
}
private void openImage() {
FileChooser fileChooser = new FileChooser();
fileChooser.setTitle("Open Image File");
fileChooser.getExtensionFilters().addAll(
new FileChooser.ExtensionFilter("Image Files", "*.png", "*.jpg", "*.jpeg", "*.bmp", "*.gif")
);
File file = fileChooser.showOpenDialog(null);
if (file != null) {
currentMat = OpenCVUtils.loadImage(file.getAbsolutePath());
if (!currentMat.empty()) {
updateImageView();
} else {
showAlert("Error", "Could not load image: " + file.getName());
}
}
}
private void saveImage() {
if (currentMat == null || currentMat.empty()) {
showAlert("Error", "No image to save");
return;
}
FileChooser fileChooser = new FileChooser();
fileChooser.setTitle("Save Image File");
fileChooser.getExtensionFilters().addAll(
new FileChooser.ExtensionFilter("PNG", "*.png"),
new FileChooser.ExtensionFilter("JPEG", "*.jpg"),
new FileChooser.ExtensionFilter("BMP", "*.bmp")
);
File file = fileChooser.showSaveDialog(null);
if (file != null) {
boolean success = OpenCVUtils.saveImage(currentMat, file.getAbsolutePath());
if (success) {
showAlert("Success", "Image saved successfully");
} else {
showAlert("Error", "Could not save image");
}
}
}
private void updateImageView() {
if (currentMat != null && !currentMat.empty()) {
imageView.setImage(OpenCVUtils.mat2Image(currentMat));
}
}
private void showAlert(String title, String message) {
Alert alert = new Alert(Alert.AlertType.INFORMATION);
alert.setTitle(title);
alert.setHeaderText(null);
alert.setContentText(message);
alert.showAndWait();
}
// Image processing methods will be implemented next...
private void applyGrayscale() { }
private void applyBlur() { }
private void applyCustomBlur(int size) { }
private void detectEdges() { }
private void detectFaces() { }
private void resetImage() { }
private void applyThreshold(int value) { }
private void adjustBrightnessContrast(double brightness, double contrast) { }
public static void main(String[] args) {
launch(args);
}
}

Image Processing Implementation

Image Processor Class

package com.example.opencvapp;

import org.opencv.core.*;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;
import java.util.ArrayList;
import java.util.List;

/**
 * Collection of OpenCV processing operations used by the UI.
 * Every method returns a NEW Mat (or the source unchanged when it is empty);
 * the caller owns the returned Mat and should release it when done.
 */
public class ImageProcessor {

    private final CascadeClassifier faceDetector;
    private final CascadeClassifier eyeDetector;

    public ImageProcessor() {
        faceDetector = new CascadeClassifier();
        eyeDetector = new CascadeClassifier();
        // CascadeClassifier.load() reports failure via its return value, it
        // does NOT throw - the original try/catch could never fire. The
        // cascade files are expected in the working directory.
        if (!faceDetector.load("haarcascade_frontalface_default.xml")) {
            System.err.println("Warning: could not load face cascade; face detection disabled");
        }
        if (!eyeDetector.load("haarcascade_eye.xml")) {
            System.err.println("Warning: could not load eye cascade; eye detection disabled");
        }
    }

    /**
     * Convert an image to grayscale.
     *
     * @param src BGR, BGRA or already-gray image
     * @return single-channel copy (a plain copy when src is already gray)
     */
    public Mat applyGrayscale(Mat src) {
        if (src.empty()) return src;
        Mat dst = new Mat();
        if (src.channels() == 3) {
            Imgproc.cvtColor(src, dst, Imgproc.COLOR_BGR2GRAY);
        } else if (src.channels() == 4) {
            Imgproc.cvtColor(src, dst, Imgproc.COLOR_BGRA2GRAY);
        } else {
            src.copyTo(dst);
        }
        return dst;
    }

    /**
     * Apply a Gaussian blur.
     *
     * @param kernelSize desired kernel size; bumped to the next odd number
     *                   because GaussianBlur requires odd dimensions
     */
    public Mat applyBlur(Mat src, int kernelSize) {
        if (src.empty() || kernelSize < 1) return src;
        if (kernelSize % 2 == 0) kernelSize++;
        Mat dst = new Mat();
        Imgproc.GaussianBlur(src, dst, new Size(kernelSize, kernelSize), 0);
        return dst;
    }

    /**
     * Detect edges with the Canny algorithm.
     *
     * @param threshold1 lower hysteresis threshold
     * @param threshold2 upper hysteresis threshold
     * @return single-channel edge map
     */
    public Mat detectEdges(Mat src, double threshold1, double threshold2) {
        if (src.empty()) return src;
        Mat edges = new Mat();
        if (src.channels() > 1) {
            Mat gray = new Mat();
            Imgproc.cvtColor(src, gray, Imgproc.COLOR_BGR2GRAY);
            Imgproc.Canny(gray, edges, threshold1, threshold2);
            gray.release();
        } else {
            // Already grayscale - run Canny directly, no temporary needed.
            Imgproc.Canny(src, edges, threshold1, threshold2);
        }
        return edges;
    }

    /**
     * Adjust brightness and contrast via dst = alpha * src + beta.
     *
     * @param alpha contrast gain (1.0 = unchanged)
     * @param beta  brightness bias (0 = unchanged)
     */
    public Mat adjustBrightnessContrast(Mat src, double alpha, double beta) {
        if (src.empty()) return src;
        Mat dst = new Mat();
        src.convertTo(dst, -1, alpha, beta);   // -1: keep the source depth
        return dst;
    }

    /**
     * Apply a binary threshold (pixels above {@code thresholdValue} become 255).
     */
    public Mat applyThreshold(Mat src, int thresholdValue) {
        if (src.empty()) return src;
        Mat thresholded = new Mat();
        if (src.channels() > 1) {
            Mat gray = new Mat();
            Imgproc.cvtColor(src, gray, Imgproc.COLOR_BGR2GRAY);
            Imgproc.threshold(gray, thresholded, thresholdValue, 255, Imgproc.THRESH_BINARY);
            gray.release();
        } else {
            Imgproc.threshold(src, thresholded, thresholdValue, 255, Imgproc.THRESH_BINARY);
        }
        return thresholded;
    }

    /**
     * Detect faces (green rectangles) and eyes within each face (blue circles).
     * Returns a clone of {@code src} unchanged when the face cascade failed
     * to load; eye marking is skipped when the eye cascade is unavailable.
     */
    public Mat detectFaces(Mat src) {
        if (src.empty() || faceDetector.empty()) return src;
        Mat result = src.clone();
        Mat gray = new Mat();
        // Guard the conversion: cvtColor(BGR2GRAY) throws on 1-channel input.
        if (src.channels() > 1) {
            Imgproc.cvtColor(src, gray, Imgproc.COLOR_BGR2GRAY);
        } else {
            src.copyTo(gray);
        }
        MatOfRect faces = new MatOfRect();
        faceDetector.detectMultiScale(gray, faces, 1.1, 3, 0, new Size(30, 30));
        boolean eyesAvailable = !eyeDetector.empty();
        for (Rect rect : faces.toList()) {
            Imgproc.rectangle(result,
                    new Point(rect.x, rect.y),
                    new Point(rect.x + rect.width, rect.y + rect.height),
                    new Scalar(0, 255, 0), 3);
            if (!eyesAvailable) continue;
            // Restrict the eye search to the face region; eye coordinates
            // come back relative to the ROI, so offset by the face origin.
            Mat faceROI = gray.submat(rect);
            MatOfRect eyes = new MatOfRect();
            eyeDetector.detectMultiScale(faceROI, eyes);
            for (Rect eye : eyes.toList()) {
                Point center = new Point(
                        rect.x + eye.x + eye.width / 2,
                        rect.y + eye.y + eye.height / 2
                );
                int radius = (int) Math.round((eye.width + eye.height) * 0.25);
                Imgproc.circle(result, center, radius, new Scalar(255, 0, 0), 2);
            }
            faceROI.release();
        }
        gray.release();
        return result;
    }

    /**
     * Apply a morphological operation with a rectangular structuring element.
     *
     * @param operation one of the Imgproc.MORPH_* constants
     */
    public Mat applyMorphology(Mat src, int operation, int kernelSize) {
        if (src.empty()) return src;
        Mat dst = new Mat();
        Mat kernel = Imgproc.getStructuringElement(Imgproc.MORPH_RECT,
                new Size(kernelSize, kernelSize));
        Imgproc.morphologyEx(src, dst, operation, kernel);
        kernel.release();
        return dst;
    }

    /**
     * Histogram equalization; color images are equalized per channel.
     * NOTE(review): per-channel BGR equalization shifts hues - equalizing only
     * the luma channel in YCrCb space is usually preferred; kept as-is to
     * preserve behavior.
     */
    public Mat applyHistogramEqualization(Mat src) {
        if (src.empty()) return src;
        Mat dst = new Mat();
        if (src.channels() == 1) {
            Imgproc.equalizeHist(src, dst);
        } else {
            List<Mat> channels = new ArrayList<>();
            Core.split(src, channels);
            for (Mat channel : channels) {
                Imgproc.equalizeHist(channel, channel);
            }
            Core.merge(channels, dst);
            for (Mat channel : channels) {
                channel.release();
            }
        }
        return dst;
    }

    /**
     * Find external contours (via Canny 50/150) and draw them in green.
     */
    public Mat findContours(Mat src) {
        if (src.empty()) return src;
        Mat result = src.clone();
        Mat gray = new Mat();
        if (src.channels() > 1) {
            Imgproc.cvtColor(src, gray, Imgproc.COLOR_BGR2GRAY);
        } else {
            src.copyTo(gray);
        }
        Mat edges = new Mat();
        Imgproc.Canny(gray, edges, 50, 150);
        List<MatOfPoint> contours = new ArrayList<>();
        Mat hierarchy = new Mat();
        Imgproc.findContours(edges, contours, hierarchy,
                Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
        // -1 draws all contours.
        Imgproc.drawContours(result, contours, -1, new Scalar(0, 255, 0), 2);
        gray.release();
        edges.release();
        hierarchy.release();
        return result;
    }
}

Real-time Camera Processing

Camera Capture and Processing

package com.example.opencvapp.camera;
import org.opencv.core.Mat;
import org.opencv.videoio.VideoCapture;
import org.opencv.videoio.Videoio;
import javafx.animation.AnimationTimer;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
public class CameraController {
private VideoCapture camera;
private ImageProcessor imageProcessor;
private boolean isCameraActive = false;
private AnimationTimer cameraTimer;
public CameraController() {
this.imageProcessor = new ImageProcessor();
}
/**
* Start camera capture
*/
public void startCamera(ImageView imageView) {
if (isCameraActive) {
return;
}
camera = new VideoCapture(0); // Use default camera
if (!camera.isOpened()) {
System.err.println("Error: Camera not accessible");
return;
}
// Set camera resolution
camera.set(Videoio.CAP_PROP_FRAME_WIDTH, 640);
camera.set(Videoio.CAP_PROP_FRAME_HEIGHT, 480);
isCameraActive = true;
// Create animation timer for real-time processing
cameraTimer = new AnimationTimer() {
@Override
public void handle(long now) {
if (isCameraActive) {
Mat frame = new Mat();
if (camera.read(frame) && !frame.empty()) {
// Process frame (optional)
Mat processedFrame = processFrame(frame);
// Convert to JavaFX Image and update view
Image image = OpenCVUtils.mat2Image(processedFrame);
javafx.application.Platform.runLater(() -> {
imageView.setImage(image);
});
processedFrame.release();
}
frame.release();
}
}
};
cameraTimer.start();
}
/**
* Stop camera capture
*/
public void stopCamera() {
isCameraActive = false;
if (cameraTimer != null) {
cameraTimer.stop();
}
if (camera != null) {
camera.release();
}
}
/**
* Process individual camera frame
*/
private Mat processFrame(Mat frame) {
// Apply various processing effects
Mat processed = frame.clone();
// Example: Apply face detection in real-time
processed = imageProcessor.detectFaces(processed);
// Add timestamp
String timestamp = java.time.LocalTime.now().toString();
org.opencv.imgproc.Imgproc.putText(
processed, 
timestamp, 
new org.opencv.core.Point(10, 30),
org.opencv.imgproc.Imgproc.FONT_HERSHEY_SIMPLEX,
0.7, 
new Scalar(0, 255, 0), 
2
);
return processed;
}
/**
* Take snapshot from camera
*/
public Mat takeSnapshot() {
if (camera == null || !isCameraActive) {
return new Mat();
}
Mat snapshot = new Mat();
if (camera.read(snapshot) && !snapshot.empty()) {
return snapshot;
}
return new Mat();
}
public boolean isCameraActive() {
return isCameraActive;
}
}

Camera UI Integration

package com.example.opencvapp;
import com.example.opencvapp.camera.CameraController;
import javafx.scene.control.Button;
import javafx.scene.control.ToggleButton;
import javafx.scene.layout.HBox;
public class CameraPanel {
private CameraController cameraController;
private ToggleButton cameraToggle;
private Button snapshotButton;
public CameraPanel(OpenCVApp mainApp) {
this.cameraController = new CameraController();
createCameraControls();
}
private void createCameraControls() {
HBox cameraBox = new HBox(10);
cameraToggle = new ToggleButton("Start Camera");
snapshotButton = new Button("Take Snapshot");
snapshotButton.setDisable(true);
cameraToggle.setOnAction(e -> {
if (cameraToggle.isSelected()) {
// Start camera
cameraController.startCamera(mainApp.getImageView());
cameraToggle.setText("Stop Camera");
snapshotButton.setDisable(false);
} else {
// Stop camera
cameraController.stopCamera();
cameraToggle.setText("Start Camera");
snapshotButton.setDisable(true);
}
});
snapshotButton.setOnAction(e -> {
Mat snapshot = cameraController.takeSnapshot();
if (!snapshot.empty()) {
mainApp.setCurrentMat(snapshot);
mainApp.updateImageView();
}
});
cameraBox.getChildren().addAll(cameraToggle, snapshotButton);
}
public HBox getCameraPanel() {
return cameraBox;
}
public void stopCamera() {
if (cameraController.isCameraActive()) {
cameraController.stopCamera();
cameraToggle.setSelected(false);
cameraToggle.setText("Start Camera");
}
}
}

Advanced Features

Image Filter Gallery

package com.example.opencvapp.filters;

import org.opencv.core.*;
import org.opencv.imgproc.Imgproc;

/**
 * Static artistic filters. All methods expect 8-bit BGR input (as produced
 * by Imgcodecs.imread / VideoCapture) and return a new Mat owned by the
 * caller.
 */
public class ImageFilters {

    private ImageFilters() {
        // Static utility class - no instances.
    }

    /**
     * Apply the standard sepia tone transform.
     */
    public static Mat applySepia(Mat src) {
        if (src.empty()) return src;
        Mat dst = new Mat();
        // Standard sepia matrix, with rows/columns ordered for OpenCV's BGR
        // channel layout (input and output vectors are [B, G, R]):
        //   B' = 0.131B + 0.534G + 0.272R
        //   G' = 0.168B + 0.686G + 0.349R
        //   R' = 0.189B + 0.769G + 0.393R
        // (The original listed the coefficients in RGB order, which applied
        // the blue weights to the red channel and vice versa.)
        Mat kernel = new Mat(3, 3, CvType.CV_32F);
        kernel.put(0, 0, 0.131, 0.534, 0.272);
        kernel.put(1, 0, 0.168, 0.686, 0.349);
        kernel.put(2, 0, 0.189, 0.769, 0.393);
        Imgproc.transform(src, dst, kernel);
        kernel.release();
        return dst;
    }

    /**
     * Pencil-sketch effect via the classic "color dodge" blend:
     * result = gray * 256 / (255 - blur(invert(gray))).
     */
    public static Mat applySketch(Mat src) {
        if (src.empty()) return src;
        Mat gray = new Mat();
        Mat inverted = new Mat();
        Mat blurred = new Mat();
        Mat invertedBlur = new Mat();
        Mat result = new Mat();
        Imgproc.cvtColor(src, gray, Imgproc.COLOR_BGR2GRAY);
        Core.bitwise_not(gray, inverted);
        Imgproc.GaussianBlur(inverted, blurred, new Size(21, 21), 0);
        // The original wrote "255 - blurred", which is not valid Java Mat
        // arithmetic; bitwise_not computes exactly 255 - x for CV_8U.
        Core.bitwise_not(blurred, invertedBlur);
        Core.divide(gray, invertedBlur, result, 256.0);
        gray.release();
        inverted.release();
        blurred.release();
        invertedBlur.release();
        return result;
    }

    /**
     * Oil-painting effect: each pixel takes the average colour of the most
     * populated intensity bucket in its neighbourhood.
     *
     * NOTE: O(width * height * radius^2) with per-pixel JNI calls - fine for
     * small images, slow for large ones. A {@code radius}-wide border of the
     * output is left black.
     *
     * @param radius neighbourhood half-size
     * @param levels number of intensity buckets (higher = finer detail)
     */
    public static Mat applyOilPainting(Mat src, int radius, int levels) {
        if (src.empty() || radius < 1 || levels < 1) return src;
        Mat dst = Mat.zeros(src.size(), src.type());
        for (int y = radius; y < src.rows() - radius; y++) {
            for (int x = radius; x < src.cols() - radius; x++) {
                Rect roi = new Rect(x - radius, y - radius, 2 * radius + 1, 2 * radius + 1);
                Mat neighborhood = new Mat(src, roi);
                Scalar dominantColor = findDominantColor(neighborhood, levels);
                dst.put(y, x, dominantColor.val);
                neighborhood.release();
            }
        }
        return dst;
    }

    /**
     * Classic oil-painting kernel: bucket the neighbourhood's pixels by
     * quantized intensity and return the average colour of the fullest
     * bucket. (The original computed a histogram and returned its peak
     * COUNT as a colour, which produced garbage values.)
     * Assumes an 8-bit neighbourhood (CV_8UCn).
     */
    private static Scalar findDominantColor(Mat neighborhood, int levels) {
        int channels = neighborhood.channels();
        int[] counts = new int[levels];
        double[][] sums = new double[levels][channels];
        byte[] pixel = new byte[channels];
        for (int y = 0; y < neighborhood.rows(); y++) {
            for (int x = 0; x < neighborhood.cols(); x++) {
                neighborhood.get(y, x, pixel);
                double intensity = 0;
                for (int c = 0; c < channels; c++) {
                    intensity += pixel[c] & 0xFF;   // bytes are signed in Java
                }
                intensity /= channels;
                int bin = Math.min(levels - 1, (int) (intensity * levels / 256.0));
                counts[bin]++;
                for (int c = 0; c < channels; c++) {
                    sums[bin][c] += pixel[c] & 0xFF;
                }
            }
        }
        int best = 0;
        for (int i = 1; i < levels; i++) {
            if (counts[i] > counts[best]) best = i;
        }
        double[] color = new double[channels];
        if (counts[best] > 0) {
            for (int c = 0; c < channels; c++) {
                color[c] = sums[best][c] / counts[best];
            }
        }
        return new Scalar(color);
    }

    /**
     * Cartoon effect: bold dark edges over bilateral-filtered (flattened)
     * colour regions.
     */
    public static Mat applyCartoonEffect(Mat src) {
        if (src.empty()) return src;
        // 1. Edge mask: median-smoothed grayscale -> Laplacian -> inverted,
        //    so edges end up dark (0) and flat areas bright (255).
        Mat gray = new Mat();
        Imgproc.cvtColor(src, gray, Imgproc.COLOR_BGR2GRAY);
        Imgproc.medianBlur(gray, gray, 7);
        Mat lap = new Mat();
        Imgproc.Laplacian(gray, lap, CvType.CV_8U, 5);
        Mat edges = new Mat();
        Core.convertScaleAbs(lap, edges);
        Core.bitwise_not(edges, edges);
        // 2. Flatten colour regions with an edge-preserving bilateral filter.
        Mat color = new Mat();
        Imgproc.bilateralFilter(src, color, 9, 300, 300);
        // (The original additionally ran k-means colour quantisation here but
        // never used its output, so that dead - and incorrectly reshaped -
        // step has been removed; the visible result is unchanged.)
        // 3. Keep the smoothed colour only where the edge mask is set.
        Mat cartoon = new Mat();
        Core.bitwise_and(color, color, cartoon, edges);
        gray.release();
        lap.release();
        edges.release();
        color.release();
        return cartoon;
    }
}

CSS Styling

/* styles.css */
/* Dark flat theme for the OpenCV demo app. Colors follow the "Flat UI"
   palette: #2c3e50 / #34495e darks, #3498db blue, #e74c3c red. */
.root {
-fx-font-family: "Segoe UI", Arial, sans-serif;
-fx-base: #2c3e50;
-fx-background: #34495e;
}
/* Primary action buttons (toolbar + panels). */
.button {
-fx-background-color: #3498db;
-fx-text-fill: white;
-fx-background-radius: 5;
-fx-padding: 8 15 8 15;
}
.button:hover {
-fx-background-color: #2980b9;
}
/* Selected state, e.g. the "Stop Camera" toggle - red to signal "active". */
.toggle-button:selected {
-fx-background-color: #e74c3c;
}
/* Slider sub-structure is styled via its internal .track/.thumb nodes. */
.slider .track {
-fx-background-color: #bdc3c7;
}
.slider .thumb {
-fx-background-color: #3498db;
}
/* NOTE(review): ImageView has no default "image-view" style class; add
   imageView.getStyleClass().add("image-view") in code for this to apply. */
.image-view {
-fx-effect: dropshadow(three-pass-box, rgba(0,0,0,0.3), 10, 0, 0, 0);
}
.menu-bar {
-fx-background-color: #2c3e50;
}
.menu-bar .menu {
-fx-text-fill: white;
}
.tool-bar {
-fx-background-color: #34495e;
-fx-padding: 10;
}
/* NOTE(review): VBox/HBox also have no default style classes - these rules
   only apply if "vbox"/"hbox" classes are assigned in code. */
.vbox, .hbox {
-fx-spacing: 10;
-fx-padding: 10;
}
.label {
-fx-text-fill: #ecf0f1;
-fx-font-weight: bold;
}

Build and Deployment

Running the Application

# With Maven
# (the javafx-maven-plugin supplies the JavaFX module path and main class
#  configured in pom.xml)
mvn clean javafx:run
# Or directly with Java
# Notes: the ':' classpath separator is Unix/macOS - use ';' on Windows;
# opencv-4.8.0.jar must sit next to the command or be given a full path,
# and the OpenCV native library must be loadable (the openpnp jar bundles it).
java --module-path /path/to/javafx-sdk/lib \
--add-modules javafx.controls,javafx.fxml \
-cp "target/classes:opencv-4.8.0.jar" \
com.example.opencvapp.OpenCVApp

Conclusion

This JavaFX + OpenCV integration provides:

Key Features:

  • Real-time image processing and computer vision
  • Camera integration for live video processing
  • Advanced image filters and effects
  • Face detection and object recognition
  • Modern JavaFX user interface

Use Cases:

  • Medical imaging applications
  • Security and surveillance systems
  • Photo editing software
  • Industrial quality control
  • Educational tools for computer vision

Performance Tips:

  • Use Mat.clone() sparingly to avoid memory overhead
  • Release Mat objects when no longer needed
  • Process images in background threads
  • Use appropriate image resolutions for the task
  • Cache frequently used resources

This combination leverages Java's strong typing and OpenCV's powerful computer vision capabilities while providing a modern, responsive user interface through JavaFX.

Leave a Reply

Your email address will not be published. Required fields are marked *


Macro Nepal Helper