Introduction
Edge detection is a fundamental technique in computer vision and image processing that identifies points where image brightness changes sharply. This article provides complete implementations of various edge detection algorithms in Java, from basic filters to advanced techniques.
Project Setup
Maven Dependencies
<!-- pom.xml -->
<properties>
<javafx.version>21</javafx.version>
<opencv.version>4.8.0</opencv.version>
</properties>
<dependencies>
<!-- JavaFX for Image Display -->
<dependency>
<groupId>org.openjfx</groupId>
<artifactId>javafx-controls</artifactId>
<version>${javafx.version}</version>
</dependency>
<dependency>
<groupId>org.openjfx</groupId>
<artifactId>javafx-fxml</artifactId>
<version>${javafx.version}</version>
</dependency>
<!-- javafx-swing provides javafx.embed.swing.SwingFXUtils, used by the GUI
     for BufferedImage <-> JavaFX Image conversion -->
<dependency>
<groupId>org.openjfx</groupId>
<artifactId>javafx-swing</artifactId>
<version>${javafx.version}</version>
</dependency>
<!-- OpenCV for Advanced Computer Vision -->
<dependency>
<groupId>org.openpnp</groupId>
<artifactId>opencv</artifactId>
<version>${opencv.version}</version>
</dependency>
<!-- ImageIO for Basic Image Processing -->
<dependency>
<groupId>com.twelvemonkeys.imageio</groupId>
<artifactId>imageio-core</artifactId>
<version>3.9.4</version>
</dependency>
</dependencies>
Core Image Processing Framework
Image Utility Class
package com.edgedetection.core;
import javax.imageio.ImageIO;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
/**
 * Stateless utility methods for loading, saving, and converting images.
 *
 * <p>All conversions operate on {@link BufferedImage}; the 2D-array helpers use
 * row-major {@code [y][x]} indexing with gray values in the range 0..255.
 */
public class ImageUtils {

    /**
     * Loads an image from disk.
     *
     * @param filePath path to the image file
     * @return the decoded image (may be null if no registered reader recognizes the format)
     * @throws IOException if the file cannot be read
     */
    public static BufferedImage loadImage(String filePath) throws IOException {
        return ImageIO.read(new File(filePath));
    }

    /**
     * Saves an image, inferring the output format from the file extension.
     *
     * @throws IOException if the path has no extension, no writer supports the
     *         format, or writing fails
     */
    public static void saveImage(BufferedImage image, String filePath) throws IOException {
        int dot = filePath.lastIndexOf('.');
        if (dot < 0 || dot == filePath.length() - 1) {
            throw new IOException("Cannot infer image format: no file extension in " + filePath);
        }
        String format = filePath.substring(dot + 1);
        // ImageIO.write returns false (without throwing) when no writer exists for
        // the requested format; surface that instead of silently writing nothing.
        if (!ImageIO.write(image, format, new File(filePath))) {
            throw new IOException("No image writer available for format: " + format);
        }
    }

    /**
     * Converts an image to 8-bit grayscale by drawing it into a
     * TYPE_BYTE_GRAY buffer (the graphics pipeline performs the color
     * conversion).
     */
    public static BufferedImage convertToGrayScale(BufferedImage original) {
        BufferedImage grayImage = new BufferedImage(
            original.getWidth(),
            original.getHeight(),
            BufferedImage.TYPE_BYTE_GRAY
        );
        Graphics2D g = grayImage.createGraphics();
        g.drawImage(original, 0, 0, null);
        g.dispose();
        return grayImage;
    }

    /**
     * Returns all pixels as packed ARGB ints in row-major order
     * (index = y * width + x).
     */
    public static int[] getRGBPixelArray(BufferedImage image) {
        int width = image.getWidth();
        int height = image.getHeight();
        int[] pixels = new int[width * height];
        image.getRGB(0, 0, width, height, pixels, 0, width);
        return pixels;
    }

    /** Builds a TYPE_INT_RGB image from a row-major packed-RGB pixel array. */
    public static BufferedImage createImageFromArray(int[] pixels, int width, int height) {
        BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
        image.setRGB(0, 0, width, height, pixels, 0, width);
        return image;
    }

    /**
     * Converts an image to a 2D gray-level array using the unweighted channel
     * average (r + g + b) / 3 — not the perceptual luminance formula.
     *
     * @return array indexed as {@code [y][x]} with values in 0.0..255.0
     */
    public static double[][] convertTo2DGrayArray(BufferedImage image) {
        int width = image.getWidth();
        int height = image.getHeight();
        double[][] grayArray = new double[height][width];
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                int rgb = image.getRGB(x, y);
                int r = (rgb >> 16) & 0xFF;
                int g = (rgb >> 8) & 0xFF;
                int b = rgb & 0xFF;
                grayArray[y][x] = (r + g + b) / 3.0;
            }
        }
        return grayArray;
    }

    /**
     * Builds a TYPE_BYTE_GRAY image from a {@code [y][x]} gray array.
     * Values are clamped to 0..255 before conversion.
     */
    public static BufferedImage createImageFrom2DArray(double[][] grayArray) {
        int height = grayArray.length;
        int width = grayArray[0].length;
        BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_BYTE_GRAY);
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                int value = (int) Math.max(0, Math.min(255, grayArray[y][x]));
                int rgb = (value << 16) | (value << 8) | value;
                image.setRGB(x, y, rgb);
            }
        }
        return image;
    }

    /** Resizes an image with bilinear interpolation. */
    public static BufferedImage resizeImage(BufferedImage original, int newWidth, int newHeight) {
        // BufferedImage's constructor rejects TYPE_CUSTOM (0), which getType()
        // returns for images with non-standard color models (e.g. some PNGs as
        // decoded by ImageIO). Fall back to ARGB so those images still resize.
        int type = original.getType() == BufferedImage.TYPE_CUSTOM
                ? BufferedImage.TYPE_INT_ARGB
                : original.getType();
        BufferedImage resized = new BufferedImage(newWidth, newHeight, type);
        Graphics2D g = resized.createGraphics();
        g.setRenderingHint(RenderingHints.KEY_INTERPOLATION,
            RenderingHints.VALUE_INTERPOLATION_BILINEAR);
        g.drawImage(original, 0, 0, newWidth, newHeight, null);
        g.dispose();
        return resized;
    }
}
Basic Edge Detection Filters
1. Sobel Edge Detector
package com.edgedetection.filters;
import java.awt.image.BufferedImage;
public class SobelEdgeDetector {
// Sobel kernels
private static final double[][] SOBEL_X = {
{-1, 0, 1},
{-2, 0, 2},
{-1, 0, 1}
};
private static final double[][] SOBEL_Y = {
{-1, -2, -1},
{ 0, 0, 0},
{ 1, 2, 1}
};
public static BufferedImage detectEdges(BufferedImage input) {
return detectEdges(input, 128);
}
public static BufferedImage detectEdges(BufferedImage input, int threshold) {
int width = input.getWidth();
int height = input.getHeight();
BufferedImage output = new BufferedImage(width, height, BufferedImage.TYPE_BYTE_GRAY);
// Convert to grayscale if needed
BufferedImage grayImage = input.getType() == BufferedImage.TYPE_BYTE_GRAY ?
input : ImageUtils.convertToGrayScale(input);
double[][] imageData = ImageUtils.convertTo2DGrayArray(grayImage);
double[][] gradientMagnitude = new double[height][width];
// Apply Sobel operator
for (int y = 1; y < height - 1; y++) {
for (int x = 1; x < width - 1; x++) {
double gx = applyKernel(imageData, SOBEL_X, x, y);
double gy = applyKernel(imageData, SOBEL_Y, x, y);
gradientMagnitude[y][x] = Math.sqrt(gx * gx + gy * gy);
}
}
// Apply threshold and create output image
for (int y = 0; y < height; y++) {
for (int x = 0; x < width; x++) {
int value = gradientMagnitude[y][x] > threshold ? 255 : 0;
int rgb = (value << 16) | (value << 8) | value;
output.setRGB(x, y, rgb);
}
}
return output;
}
public static BufferedImage detectEdgesWithDirection(BufferedImage input) {
int width = input.getWidth();
int height = input.getHeight();
BufferedImage output = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
BufferedImage grayImage = ImageUtils.convertToGrayScale(input);
double[][] imageData = ImageUtils.convertTo2DGrayArray(grayImage);
for (int y = 1; y < height - 1; y++) {
for (int x = 1; x < width - 1; x++) {
double gx = applyKernel(imageData, SOBEL_X, x, y);
double gy = applyKernel(imageData, SOBEL_Y, x, y);
double magnitude = Math.sqrt(gx * gx + gy * gy);
double direction = Math.atan2(gy, gx);
// Convert direction to color
int r = (int) ((Math.sin(direction) + 1) * 127.5);
int g = (int) ((Math.cos(direction) + 1) * 127.5);
int b = (int) (magnitude > 50 ? 255 : 0);
int rgb = (r << 16) | (g << 8) | b;
output.setRGB(x, y, rgb);
}
}
return output;
}
private static double applyKernel(double[][] image, double[][] kernel, int x, int y) {
double sum = 0;
int kernelSize = kernel.length;
int offset = kernelSize / 2;
for (int ky = 0; ky < kernelSize; ky++) {
for (int kx = 0; kx < kernelSize; kx++) {
int px = x + kx - offset;
int py = y + ky - offset;
sum += image[py][px] * kernel[ky][kx];
}
}
return sum;
}
public static double[][][] getGradientFields(BufferedImage input) {
int width = input.getWidth();
int height = input.getHeight();
BufferedImage grayImage = ImageUtils.convertToGrayScale(input);
double[][] imageData = ImageUtils.convertTo2DGrayArray(grayImage);
double[][] gradientX = new double[height][width];
double[][] gradientY = new double[height][width];
double[][] magnitude = new double[height][width];
double[][] direction = new double[height][width];
for (int y = 1; y < height - 1; y++) {
for (int x = 1; x < width - 1; x++) {
gradientX[y][x] = applyKernel(imageData, SOBEL_X, x, y);
gradientY[y][x] = applyKernel(imageData, SOBEL_Y, x, y);
magnitude[y][x] = Math.sqrt(
gradientX[y][x] * gradientX[y][x] +
gradientY[y][x] * gradientY[y][x]
);
direction[y][x] = Math.atan2(gradientY[y][x], gradientX[y][x]);
}
}
return new double[][][]{gradientX, gradientY, magnitude, direction};
}
}
2. Prewitt Edge Detector
package com.edgedetection.filters;
import java.awt.image.BufferedImage;
public class PrewittEdgeDetector {
private static final double[][] PREWITT_X = {
{-1, 0, 1},
{-1, 0, 1},
{-1, 0, 1}
};
private static final double[][] PREWITT_Y = {
{-1, -1, -1},
{ 0, 0, 0},
{ 1, 1, 1}
};
public static BufferedImage detectEdges(BufferedImage input, int threshold) {
int width = input.getWidth();
int height = input.getHeight();
BufferedImage output = new BufferedImage(width, height, BufferedImage.TYPE_BYTE_GRAY);
BufferedImage grayImage = ImageUtils.convertToGrayScale(input);
double[][] imageData = ImageUtils.convertTo2DGrayArray(grayImage);
double[][] gradientMagnitude = new double[height][width];
for (int y = 1; y < height - 1; y++) {
for (int x = 1; x < width - 1; x++) {
double gx = applyKernel(imageData, PREWITT_X, x, y);
double gy = applyKernel(imageData, PREWITT_Y, x, y);
gradientMagnitude[y][x] = Math.sqrt(gx * gx + gy * gy);
}
}
// Normalize and threshold
double maxGradient = findMax(gradientMagnitude);
for (int y = 0; y < height; y++) {
for (int x = 0; x < width; x++) {
int value = (gradientMagnitude[y][x] / maxGradient * 255) > threshold ? 255 : 0;
int rgb = (value << 16) | (value << 8) | value;
output.setRGB(x, y, rgb);
}
}
return output;
}
private static double applyKernel(double[][] image, double[][] kernel, int x, int y) {
double sum = 0;
for (int ky = 0; ky < 3; ky++) {
for (int kx = 0; kx < 3; kx++) {
int px = x + kx - 1;
int py = y + ky - 1;
sum += image[py][px] * kernel[ky][kx];
}
}
return sum;
}
private static double findMax(double[][] array) {
double max = Double.MIN_VALUE;
for (double[] row : array) {
for (double value : row) {
if (value > max) max = value;
}
}
return max;
}
}
3. Roberts Cross Operator
package com.edgedetection.filters;
import java.awt.image.BufferedImage;
public class RobertsCrossDetector {
// Roberts Cross kernels
private static final double[][] ROBERTS_X = {
{1, 0},
{0, -1}
};
private static final double[][] ROBERTS_Y = {
{0, 1},
{-1, 0}
};
public static BufferedImage detectEdges(BufferedImage input, int threshold) {
int width = input.getWidth();
int height = input.getHeight();
BufferedImage output = new BufferedImage(width, height, BufferedImage.TYPE_BYTE_GRAY);
BufferedImage grayImage = ImageUtils.convertToGrayScale(input);
double[][] imageData = ImageUtils.convertTo2DGrayArray(grayImage);
for (int y = 0; y < height - 1; y++) {
for (int x = 0; x < width - 1; x++) {
double gx = applyKernel(imageData, ROBERTS_X, x, y);
double gy = applyKernel(imageData, ROBERTS_Y, x, y);
double magnitude = Math.sqrt(gx * gx + gy * gy);
int value = magnitude > threshold ? 255 : 0;
int rgb = (value << 16) | (value << 8) | value;
output.setRGB(x, y, rgb);
}
}
return output;
}
private static double applyKernel(double[][] image, double[][] kernel, int x, int y) {
double sum = 0;
for (int ky = 0; ky < 2; ky++) {
for (int kx = 0; kx < 2; kx++) {
sum += image[y + ky][x + kx] * kernel[ky][kx];
}
}
return sum;
}
}
Advanced Edge Detection Algorithms
4. Canny Edge Detector
package com.edgedetection.filters;
import java.awt.image.BufferedImage;
import java.util.Arrays;
public class CannyEdgeDetector {
private double sigma = 1.4;
private int lowThreshold = 30;
private int highThreshold = 90;
public CannyEdgeDetector() {}
public CannyEdgeDetector(double sigma, int lowThreshold, int highThreshold) {
this.sigma = sigma;
this.lowThreshold = lowThreshold;
this.highThreshold = highThreshold;
}
public BufferedImage detectEdges(BufferedImage input) {
// Step 1: Convert to grayscale
BufferedImage grayImage = ImageUtils.convertToGrayScale(input);
double[][] imageData = ImageUtils.convertTo2DGrayArray(grayImage);
int height = imageData.length;
int width = imageData[0].length;
// Step 2: Apply Gaussian blur
double[][] blurred = applyGaussianBlur(imageData, sigma);
// Step 3: Compute gradients using Sobel
double[][][] gradients = SobelEdgeDetector.getGradientFields(
ImageUtils.createImageFrom2DArray(blurred)
);
double[][] gradientX = gradients[0];
double[][] gradientY = gradients[1];
double[][] magnitude = gradients[2];
double[][] direction = gradients[3];
// Step 4: Non-maximum suppression
double[][] suppressed = nonMaximumSuppression(magnitude, direction);
// Step 5: Double thresholding
boolean[][] strongEdges = new boolean[height][width];
boolean[][] weakEdges = new boolean[height][width];
for (int y = 0; y < height; y++) {
for (int x = 0; x < width; x++) {
double mag = suppressed[y][x];
if (mag >= highThreshold) {
strongEdges[y][x] = true;
} else if (mag >= lowThreshold) {
weakEdges[y][x] = true;
}
}
}
// Step 6: Edge tracking by hysteresis
boolean[][] finalEdges = hysteresis(strongEdges, weakEdges);
// Create output image
BufferedImage output = new BufferedImage(width, height, BufferedImage.TYPE_BYTE_GRAY);
for (int y = 0; y < height; y++) {
for (int x = 0; x < width; x++) {
int value = finalEdges[y][x] ? 255 : 0;
int rgb = (value << 16) | (value << 8) | value;
output.setRGB(x, y, rgb);
}
}
return output;
}
private double[][] applyGaussianBlur(double[][] image, double sigma) {
int height = image.length;
int width = image[0].length;
int kernelSize = (int) (6 * sigma) | 1; // Make kernel size odd
if (kernelSize < 3) kernelSize = 3;
double[][] kernel = createGaussianKernel(kernelSize, sigma);
double[][] blurred = new double[height][width];
int offset = kernelSize / 2;
// Apply convolution
for (int y = offset; y < height - offset; y++) {
for (int x = offset; x < width - offset; x++) {
double sum = 0;
for (int ky = 0; ky < kernelSize; ky++) {
for (int kx = 0; kx < kernelSize; kx++) {
int px = x + kx - offset;
int py = y + ky - offset;
sum += image[py][px] * kernel[ky][kx];
}
}
blurred[y][x] = sum;
}
}
return blurred;
}
private double[][] createGaussianKernel(int size, double sigma) {
double[][] kernel = new double[size][size];
double sum = 0;
int center = size / 2;
for (int y = 0; y < size; y++) {
for (int x = 0; x < size; x++) {
double dx = x - center;
double dy = y - center;
kernel[y][x] = Math.exp(-(dx * dx + dy * dy) / (2 * sigma * sigma));
sum += kernel[y][x];
}
}
// Normalize kernel
for (int y = 0; y < size; y++) {
for (int x = 0; x < size; x++) {
kernel[y][x] /= sum;
}
}
return kernel;
}
private double[][] nonMaximumSuppression(double[][] magnitude, double[][] direction) {
int height = magnitude.length;
int width = magnitude[0].length;
double[][] suppressed = new double[height][width];
for (int y = 1; y < height - 1; y++) {
for (int x = 1; x < width - 1; x++) {
double angle = direction[y][x];
double mag = magnitude[y][x];
// Quantize angle to 0, 45, 90, or 135 degrees
double qAngle = quantizeAngle(angle);
// Get neighboring pixels based on gradient direction
double neighbor1, neighbor2;
if (qAngle == 0) { // East-West
neighbor1 = magnitude[y][x-1];
neighbor2 = magnitude[y][x+1];
} else if (qAngle == 45) { // Northeast-Southwest
neighbor1 = magnitude[y-1][x+1];
neighbor2 = magnitude[y+1][x-1];
} else if (qAngle == 90) { // North-South
neighbor1 = magnitude[y-1][x];
neighbor2 = magnitude[y+1][x];
} else { // Northwest-Southeast (135 degrees)
neighbor1 = magnitude[y-1][x-1];
neighbor2 = magnitude[y+1][x+1];
}
// Suppress non-maximum values
if (mag >= neighbor1 && mag >= neighbor2) {
suppressed[y][x] = mag;
} else {
suppressed[y][x] = 0;
}
}
}
return suppressed;
}
private double quantizeAngle(double angle) {
// Convert radians to degrees and normalize to [0, 180)
double degrees = Math.toDegrees(angle);
if (degrees < 0) degrees += 180;
if ((degrees >= 0 && degrees < 22.5) || (degrees >= 157.5 && degrees < 180)) {
return 0;
} else if (degrees >= 22.5 && degrees < 67.5) {
return 45;
} else if (degrees >= 67.5 && degrees < 112.5) {
return 90;
} else {
return 135;
}
}
private boolean[][] hysteresis(boolean[][] strongEdges, boolean[][] weakEdges) {
int height = strongEdges.length;
int width = strongEdges[0].length;
boolean[][] finalEdges = new boolean[height][width];
// Copy strong edges to final result
for (int y = 0; y < height; y++) {
for (int x = 0; x < width; x++) {
finalEdges[y][x] = strongEdges[y][x];
}
}
// Connect weak edges that are connected to strong edges
boolean changed;
do {
changed = false;
for (int y = 1; y < height - 1; y++) {
for (int x = 1; x < width - 1; x++) {
if (weakEdges[y][x] && !finalEdges[y][x]) {
// Check 8-connected neighbors
for (int ny = -1; ny <= 1; ny++) {
for (int nx = -1; nx <= 1; nx++) {
if (finalEdges[y + ny][x + nx]) {
finalEdges[y][x] = true;
changed = true;
break;
}
}
if (finalEdges[y][x]) break;
}
}
}
}
} while (changed);
return finalEdges;
}
// Getters and setters for parameters
public double getSigma() { return sigma; }
public void setSigma(double sigma) { this.sigma = sigma; }
public int getLowThreshold() { return lowThreshold; }
public void setLowThreshold(int lowThreshold) { this.lowThreshold = lowThreshold; }
public int getHighThreshold() { return highThreshold; }
public void setHighThreshold(int highThreshold) { this.highThreshold = highThreshold; }
}
5. Laplacian of Gaussian (LoG)
package com.edgedetection.filters;
import java.awt.image.BufferedImage;
public class LaplacianOfGaussian {
public static BufferedImage detectEdges(BufferedImage input, double sigma) {
int width = input.getWidth();
int height = input.getHeight();
BufferedImage grayImage = ImageUtils.convertToGrayScale(input);
double[][] imageData = ImageUtils.convertTo2DGrayArray(grayImage);
// Apply Gaussian blur first
double[][] blurred = applyGaussianBlur(imageData, sigma);
// Apply Laplacian
double[][] log = applyLaplacian(blurred);
// Find zero crossings
boolean[][] edges = findZeroCrossings(log);
// Create output image
BufferedImage output = new BufferedImage(width, height, BufferedImage.TYPE_BYTE_GRAY);
for (int y = 0; y < height; y++) {
for (int x = 0; x < width; x++) {
int value = edges[y][x] ? 255 : 0;
int rgb = (value << 16) | (value << 8) | value;
output.setRGB(x, y, rgb);
}
}
return output;
}
private static double[][] applyGaussianBlur(double[][] image, double sigma) {
int kernelSize = (int) (6 * sigma) | 1;
if (kernelSize < 3) kernelSize = 3;
double[][] kernel = createGaussianKernel(kernelSize, sigma);
return applyConvolution(image, kernel);
}
private static double[][] applyLaplacian(double[][] image) {
double[][] laplacianKernel = {
{0, 1, 0},
{1, -4, 1},
{0, 1, 0}
};
return applyConvolution(image, laplacianKernel);
}
private static double[][] createGaussianKernel(int size, double sigma) {
double[][] kernel = new double[size][size];
double sum = 0;
int center = size / 2;
for (int y = 0; y < size; y++) {
for (int x = 0; x < size; x++) {
double dx = x - center;
double dy = y - center;
kernel[y][x] = Math.exp(-(dx * dx + dy * dy) / (2 * sigma * sigma));
sum += kernel[y][x];
}
}
// Normalize
for (int y = 0; y < size; y++) {
for (int x = 0; x < size; x++) {
kernel[y][x] /= sum;
}
}
return kernel;
}
private static double[][] applyConvolution(double[][] image, double[][] kernel) {
int height = image.length;
int width = image[0].length;
int kernelSize = kernel.length;
int offset = kernelSize / 2;
double[][] result = new double[height][width];
for (int y = offset; y < height - offset; y++) {
for (int x = offset; x < width - offset; x++) {
double sum = 0;
for (int ky = 0; ky < kernelSize; ky++) {
for (int kx = 0; kx < kernelSize; kx++) {
int px = x + kx - offset;
int py = y + ky - offset;
sum += image[py][px] * kernel[ky][kx];
}
}
result[y][x] = sum;
}
}
return result;
}
private static boolean[][] findZeroCrossings(double[][] logImage) {
int height = logImage.length;
int width = logImage[0].length;
boolean[][] edges = new boolean[height][width];
double threshold = 0.01; // Adjust based on image characteristics
for (int y = 1; y < height - 1; y++) {
for (int x = 1; x < width - 1; x++) {
double center = logImage[y][x];
// Check for zero crossing in 4 directions
boolean zeroCrossing = false;
// Horizontal
if (logImage[y][x-1] * logImage[y][x+1] < 0 &&
Math.abs(logImage[y][x-1] - logImage[y][x+1]) > threshold) {
zeroCrossing = true;
}
// Vertical
else if (logImage[y-1][x] * logImage[y+1][x] < 0 &&
Math.abs(logImage[y-1][x] - logImage[y+1][x]) > threshold) {
zeroCrossing = true;
}
// Diagonal 1
else if (logImage[y-1][x-1] * logImage[y+1][x+1] < 0 &&
Math.abs(logImage[y-1][x-1] - logImage[y+1][x+1]) > threshold) {
zeroCrossing = true;
}
// Diagonal 2
else if (logImage[y-1][x+1] * logImage[y+1][x-1] < 0 &&
Math.abs(logImage[y-1][x+1] - logImage[y+1][x-1]) > threshold) {
zeroCrossing = true;
}
edges[y][x] = zeroCrossing;
}
}
return edges;
}
}
Real-time Edge Detection with JavaFX
Edge Detection GUI Application
package com.edgedetection.gui;

import com.edgedetection.filters.*;
import javafx.application.Application;
import javafx.application.Platform;
import javafx.geometry.Insets;
import javafx.geometry.Pos;
import javafx.scene.Scene;
import javafx.scene.control.*;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.layout.*;
import javafx.stage.FileChooser;
import javafx.stage.Stage;
import javax.imageio.ImageIO;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.io.File;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;

/**
 * JavaFX front end for the edge-detection filters: loads an image, applies the
 * selected detector with slider-controlled parameters, and shows the original
 * next to the result.
 *
 * <p>Requires the {@code javafx-swing} artifact for
 * {@link javafx.embed.swing.SwingFXUtils} image conversion.
 */
public class EdgeDetectionGUI extends Application {
    private ImageView originalImageView;
    private ImageView processedImageView;
    private ComboBox<String> filterComboBox;
    private Slider thresholdSlider;
    private Slider sigmaSlider;
    private Label statusLabel;
    private BufferedImage currentImage;        // most recently loaded source image
    private Map<String, EdgeDetector> detectors; // filter name -> detector wrapper

    public EdgeDetectionGUI() {
        detectors = new HashMap<>();
        detectors.put("Sobel", new SobelDetector());
        detectors.put("Prewitt", new PrewittDetector());
        detectors.put("Roberts Cross", new RobertsDetector());
        detectors.put("Canny", new CannyDetector());
        detectors.put("Laplacian of Gaussian", new LoGDetector());
    }

    @Override
    public void start(Stage primaryStage) {
        primaryStage.setTitle("Java Edge Detection Filters");
        BorderPane root = new BorderPane();
        root.setTop(createMenuBar());
        root.setCenter(createMainContent());
        root.setBottom(createControlPanel());
        Scene scene = new Scene(root, 1200, 800);
        // getResource returns null when the stylesheet is absent from the
        // classpath; guard so the app still starts (unstyled) instead of NPEing.
        URL stylesheet = getClass().getResource("/styles.css");
        if (stylesheet != null) {
            scene.getStylesheets().add(stylesheet.toExternalForm());
        }
        primaryStage.setScene(scene);
        primaryStage.show();
    }

    /** Builds the File menu (open / save / exit). */
    private MenuBar createMenuBar() {
        MenuBar menuBar = new MenuBar();
        Menu fileMenu = new Menu("File");
        MenuItem openItem = new MenuItem("Open Image");
        MenuItem saveItem = new MenuItem("Save Result");
        MenuItem exitItem = new MenuItem("Exit");
        openItem.setOnAction(e -> openImage());
        saveItem.setOnAction(e -> saveImage());
        // Platform.exit() lets JavaFX shut down cleanly (Application.stop runs),
        // unlike System.exit(0) which kills the JVM immediately.
        exitItem.setOnAction(e -> Platform.exit());
        fileMenu.getItems().addAll(openItem, saveItem, new SeparatorMenuItem(), exitItem);
        menuBar.getMenus().addAll(fileMenu);
        return menuBar;
    }

    /** Builds the side-by-side original/result image panes. */
    private HBox createMainContent() {
        HBox contentBox = new HBox(20);
        contentBox.setPadding(new Insets(20));
        contentBox.setAlignment(Pos.CENTER);
        VBox originalBox = new VBox(10);
        originalBox.setAlignment(Pos.CENTER);
        Label originalLabel = new Label("Original Image");
        originalLabel.setStyle("-fx-font-size: 16; -fx-font-weight: bold;");
        originalImageView = new ImageView();
        originalImageView.setFitWidth(500);
        originalImageView.setFitHeight(400);
        originalImageView.setPreserveRatio(true);
        originalBox.getChildren().addAll(originalLabel, originalImageView);
        VBox processedBox = new VBox(10);
        processedBox.setAlignment(Pos.CENTER);
        Label processedLabel = new Label("Edge Detection Result");
        processedLabel.setStyle("-fx-font-size: 16; -fx-font-weight: bold;");
        processedImageView = new ImageView();
        processedImageView.setFitWidth(500);
        processedImageView.setFitHeight(400);
        processedImageView.setPreserveRatio(true);
        processedBox.getChildren().addAll(processedLabel, processedImageView);
        contentBox.getChildren().addAll(originalBox, processedBox);
        return contentBox;
    }

    /** Builds the filter selector, parameter sliders, and status bar. */
    private VBox createControlPanel() {
        VBox controlPanel = new VBox(15);
        controlPanel.setPadding(new Insets(20));
        controlPanel.setStyle("-fx-background-color: #f4f4f4;");
        HBox filterBox = new HBox(10);
        filterBox.setAlignment(Pos.CENTER_LEFT);
        Label filterLabel = new Label("Edge Detection Filter:");
        filterLabel.setStyle("-fx-font-weight: bold;");
        filterComboBox = new ComboBox<>();
        filterComboBox.getItems().addAll(detectors.keySet());
        filterComboBox.setValue("Sobel");
        filterComboBox.setOnAction(e -> applySelectedFilter());
        filterBox.getChildren().addAll(filterLabel, filterComboBox);
        HBox thresholdBox = new HBox(10);
        thresholdBox.setAlignment(Pos.CENTER_LEFT);
        Label thresholdLabel = new Label("Threshold:");
        thresholdSlider = new Slider(0, 255, 128);
        thresholdSlider.setShowTickLabels(true);
        thresholdSlider.setShowTickMarks(true);
        thresholdSlider.setMajorTickUnit(64);
        // NOTE: re-runs the filter on every slider tick; fine for small images,
        // consider debouncing for large ones.
        thresholdSlider.valueProperty().addListener((obs, oldVal, newVal) -> applySelectedFilter());
        thresholdBox.getChildren().addAll(thresholdLabel, thresholdSlider);
        HBox sigmaBox = new HBox(10);
        sigmaBox.setAlignment(Pos.CENTER_LEFT);
        Label sigmaLabel = new Label("Sigma:");
        sigmaSlider = new Slider(0.1, 5.0, 1.4);
        sigmaSlider.setShowTickLabels(true);
        sigmaSlider.setShowTickMarks(true);
        sigmaSlider.setMajorTickUnit(1.0);
        sigmaSlider.valueProperty().addListener((obs, oldVal, newVal) -> applySelectedFilter());
        sigmaBox.getChildren().addAll(sigmaLabel, sigmaSlider);
        Button processButton = new Button("Apply Filter");
        processButton.setStyle("-fx-background-color: #4CAF50; -fx-text-fill: white; -fx-font-weight: bold;");
        processButton.setOnAction(e -> applySelectedFilter());
        statusLabel = new Label("Ready to process images");
        statusLabel.setStyle("-fx-font-style: italic;");
        controlPanel.getChildren().addAll(filterBox, thresholdBox, sigmaBox, processButton, statusLabel);
        return controlPanel;
    }

    /** Prompts for an image file, loads it, and runs the current filter. */
    private void openImage() {
        FileChooser fileChooser = new FileChooser();
        fileChooser.setTitle("Open Image File");
        fileChooser.getExtensionFilters().addAll(
            new FileChooser.ExtensionFilter("Image Files", "*.png", "*.jpg", "*.jpeg", "*.gif", "*.bmp")
        );
        File file = fileChooser.showOpenDialog(null);
        if (file != null) {
            try {
                currentImage = ImageIO.read(file);
                if (currentImage == null) {
                    // ImageIO.read returns null (no exception) for unsupported formats.
                    showError("Unsupported image format: " + file.getName());
                    return;
                }
                Image fxImage = javafx.embed.swing.SwingFXUtils.toFXImage(currentImage, null);
                originalImageView.setImage(fxImage);
                statusLabel.setText("Image loaded: " + file.getName());
                applySelectedFilter();
            } catch (Exception e) {
                showError("Error loading image: " + e.getMessage());
            }
        }
    }

    /** Prompts for a destination and writes the processed image as PNG or JPEG. */
    private void saveImage() {
        if (processedImageView.getImage() == null) {
            showError("No processed image to save");
            return;
        }
        FileChooser fileChooser = new FileChooser();
        fileChooser.setTitle("Save Processed Image");
        fileChooser.getExtensionFilters().addAll(
            new FileChooser.ExtensionFilter("PNG Files", "*.png"),
            new FileChooser.ExtensionFilter("JPEG Files", "*.jpg")
        );
        File file = fileChooser.showSaveDialog(null);
        if (file != null) {
            try {
                BufferedImage bufferedImage = javafx.embed.swing.SwingFXUtils.fromFXImage(
                    processedImageView.getImage(), null);
                // Case-insensitive extension check (the original missed .JPG/.jpeg).
                String name = file.getName().toLowerCase();
                String format = (name.endsWith(".jpg") || name.endsWith(".jpeg")) ? "JPEG" : "PNG";
                if (format.equals("JPEG")) {
                    // SwingFXUtils yields an image with an alpha channel, which
                    // JPEG writers reject or mis-encode; flatten to RGB first.
                    BufferedImage rgb = new BufferedImage(
                        bufferedImage.getWidth(), bufferedImage.getHeight(), BufferedImage.TYPE_INT_RGB);
                    Graphics2D g = rgb.createGraphics();
                    g.drawImage(bufferedImage, 0, 0, null);
                    g.dispose();
                    bufferedImage = rgb;
                }
                ImageIO.write(bufferedImage, format, file);
                statusLabel.setText("Image saved: " + file.getName());
            } catch (Exception e) {
                showError("Error saving image: " + e.getMessage());
            }
        }
    }

    /** Runs the currently selected detector with the current slider values. */
    private void applySelectedFilter() {
        if (currentImage == null) return;
        String selectedFilter = filterComboBox.getValue();
        if (selectedFilter == null) return;
        try {
            BufferedImage result;
            EdgeDetector detector = detectors.get(selectedFilter);
            if (detector instanceof ParameterizedDetector) {
                ParameterizedDetector paramDetector = (ParameterizedDetector) detector;
                paramDetector.setThreshold((int) thresholdSlider.getValue());
                if (paramDetector instanceof GaussianDetector) {
                    GaussianDetector gaussianDetector = (GaussianDetector) paramDetector;
                    gaussianDetector.setSigma(sigmaSlider.getValue());
                }
            }
            result = detector.detectEdges(currentImage);
            Image fxResult = javafx.embed.swing.SwingFXUtils.toFXImage(result, null);
            processedImageView.setImage(fxResult);
            statusLabel.setText("Applied " + selectedFilter + " filter");
        } catch (Exception e) {
            showError("Error applying filter: " + e.getMessage());
        }
    }

    /** Shows a modal error dialog. */
    private void showError(String message) {
        Alert alert = new Alert(Alert.AlertType.ERROR);
        alert.setTitle("Error");
        alert.setHeaderText(null);
        alert.setContentText(message);
        alert.showAndWait();
    }

    public static void main(String[] args) {
        launch(args);
    }

    /** Common interface so the GUI can treat all detectors uniformly. */
    private interface EdgeDetector {
        BufferedImage detectEdges(BufferedImage input);
    }

    /** Detector with an adjustable magnitude threshold. */
    private interface ParameterizedDetector extends EdgeDetector {
        void setThreshold(int threshold);
    }

    /** Detector that additionally takes a Gaussian sigma. */
    private interface GaussianDetector extends ParameterizedDetector {
        void setSigma(double sigma);
    }

    // Wrapper classes adapting the static filter APIs to the interfaces above.
    // Declared static: they never touch the enclosing GUI instance, so there is
    // no need for the hidden outer-instance reference a non-static inner class carries.
    private static class SobelDetector implements ParameterizedDetector {
        private int threshold = 128;
        @Override
        public BufferedImage detectEdges(BufferedImage input) {
            return SobelEdgeDetector.detectEdges(input, threshold);
        }
        @Override
        public void setThreshold(int threshold) {
            this.threshold = threshold;
        }
    }

    private static class PrewittDetector implements ParameterizedDetector {
        private int threshold = 128;
        @Override
        public BufferedImage detectEdges(BufferedImage input) {
            return PrewittEdgeDetector.detectEdges(input, threshold);
        }
        @Override
        public void setThreshold(int threshold) {
            this.threshold = threshold;
        }
    }

    private static class RobertsDetector implements ParameterizedDetector {
        private int threshold = 128;
        @Override
        public BufferedImage detectEdges(BufferedImage input) {
            return RobertsCrossDetector.detectEdges(input, threshold);
        }
        @Override
        public void setThreshold(int threshold) {
            this.threshold = threshold;
        }
    }

    private static class CannyDetector implements GaussianDetector {
        private int lowThreshold = 30;
        private int highThreshold = 90;
        private double sigma = 1.4;
        @Override
        public BufferedImage detectEdges(BufferedImage input) {
            CannyEdgeDetector canny = new CannyEdgeDetector(sigma, lowThreshold, highThreshold);
            return canny.detectEdges(input);
        }
        @Override
        public void setThreshold(int threshold) {
            // Derive hysteresis bounds from the single GUI slider (1:3 ratio).
            this.lowThreshold = threshold / 3;
            this.highThreshold = threshold;
        }
        @Override
        public void setSigma(double sigma) {
            this.sigma = sigma;
        }
    }

    private static class LoGDetector implements GaussianDetector {
        private int threshold = 128; // retained for interface compatibility; LoG ignores it
        private double sigma = 1.4;
        @Override
        public BufferedImage detectEdges(BufferedImage input) {
            return LaplacianOfGaussian.detectEdges(input, sigma);
        }
        @Override
        public void setThreshold(int threshold) {
            this.threshold = threshold;
        }
        @Override
        public void setSigma(double sigma) {
            this.sigma = sigma;
        }
    }
}
Performance Optimization
Parallel Image Processing
package com.edgedetection.optimization;
import java.awt.image.BufferedImage;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveAction;
public class ParallelEdgeDetector {
private static final int THRESHOLD = 10000; // Pixels per task
public static BufferedImage parallelSobel(BufferedImage input, int threshold) {
int width = input.getWidth();
int height = input.getHeight();
BufferedImage output = new BufferedImage(width, height, BufferedImage.TYPE_BYTE_GRAY);
BufferedImage grayImage = ImageUtils.convertToGrayScale(input);
double[][] imageData = ImageUtils.convertTo2DGrayArray(grayImage);
ForkJoinPool pool = new ForkJoinPool();
pool.invoke(new SobelTask(imageData, output, threshold, 0, 0, width, height));
pool.shutdown();
return output;
}
private static class SobelTask extends RecursiveAction {
private final double[][] input;
private final BufferedImage output;
private final int threshold;
private final int startX, startY, endX, endY;
private static final double[][] SOBEL_X = {{-1,0,1}, {-2,0,2}, {-1,0,1}};
private static final double[][] SOBEL_Y = {{-1,-2,-1}, {0,0,0}, {1,2,1}};
public SobelTask(double[][] input, BufferedImage output, int threshold,
int startX, int startY, int endX, int endY) {
this.input = input;
this.output = output;
this.threshold = threshold;
this.startX = startX;
this.startY = startY;
this.endX = endX;
this.endY = endY;
}
@Override
protected void compute() {
int area = (endX - startX) * (endY - startY);
if (area <= THRESHOLD) {
computeDirectly();
} else {
int midX = startX + (endX - startX) / 2;
int midY = startY + (endY - startY) / 2;
invokeAll(
new SobelTask(input, output, threshold, startX, startY, midX, midY),
new SobelTask(input, output, threshold, midX, startY, endX, midY),
new SobelTask(input, output, threshold, startX, midY, midX, endY),
new SobelTask(input, output, threshold, midX, midY, endX, endY)
);
}
}
private void computeDirectly() {
for (int y = Math.max(1, startY); y < Math.min(input.length - 1, endY); y++) {
for (int x = Math.max(1, startX); x < Math.min(input[0].length - 1, endX); x++) {
double gx = applyKernel(input, SOBEL_X, x, y);
double gy = applyKernel(input, SOBEL_Y, x, y);
double magnitude = Math.sqrt(gx * gx + gy * gy);
int value = magnitude > threshold ? 255 : 0;
int rgb = (value << 16) | (value << 8) | value;
output.setRGB(x, y, rgb);
}
}
}
private double applyKernel(double[][] image, double[][] kernel, int x, int y) {
double sum = 0;
for (int ky = 0; ky < 3; ky++) {
for (int kx = 0; kx < 3; kx++) {
sum += image[y + ky - 1][x + kx - 1] * kernel[ky][kx];
}
}
return sum;
}
}
}
CSS Styling for GUI
/* styles.css — application stylesheet loaded by EdgeDetectionGUI */

/* Application-wide font defaults. */
.root {
-fx-font-family: "Segoe UI", Arial, sans-serif;
-fx-font-size: 14px;
}
/* Rounded buttons with a pointer cursor. */
.button {
-fx-background-radius: 5;
-fx-border-radius: 5;
-fx-padding: 8 16;
-fx-cursor: hand;
}
/* Subtle shadow feedback on hover. */
.button:hover {
-fx-effect: dropshadow(three-pass-box, rgba(0,0,0,0.2), 5, 0, 0, 0);
}
/* Slider track and thumb colors (threshold/sigma controls). */
.slider .track {
-fx-background-color: #e0e0e0;
}
.slider .thumb {
-fx-background-color: #2196F3;
}
/* Rounded filter-selection combo box. */
.combo-box {
-fx-background-radius: 5;
}
/* Default label text color. */
.label {
-fx-text-fill: #333333;
}
/* Drop shadow behind the original/result image panes. */
.image-view {
-fx-effect: dropshadow(three-pass-box, rgba(0,0,0,0.3), 10, 0, 0, 0);
}
Usage Examples
Command Line Interface
package com.edgedetection.demo;

import com.edgedetection.filters.*;
import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.File;

/**
 * Command-line driver for the edge-detection filters.
 *
 * <p>Usage: {@code java EdgeDetectionDemo <inputImage> <outputImage> [filter] [threshold]}
 * where filter is one of sobel, prewitt, roberts, canny, log (default sobel)
 * and threshold defaults to 128 (ignored by canny and log).
 */
public class EdgeDetectionDemo {
    public static void main(String[] args) {
        if (args.length < 2) {
            System.out.println("Usage: java EdgeDetectionDemo <inputImage> <outputImage> [filter] [threshold]");
            System.out.println("Available filters: sobel, prewitt, roberts, canny, log");
            return;
        }
        String inputPath = args[0];
        String outputPath = args[1];
        String filter = args.length > 2 ? args[2] : "sobel";
        // Validate the user-supplied threshold instead of letting
        // NumberFormatException escape with a raw stack trace.
        int threshold = 128;
        if (args.length > 3) {
            try {
                threshold = Integer.parseInt(args[3]);
            } catch (NumberFormatException e) {
                System.err.println("Invalid threshold (expected an integer): " + args[3]);
                return;
            }
        }
        try {
            BufferedImage input = ImageIO.read(new File(inputPath));
            if (input == null) {
                // ImageIO.read returns null (no exception) for unsupported formats.
                System.err.println("Unsupported or unreadable image: " + inputPath);
                return;
            }
            BufferedImage output;
            switch (filter.toLowerCase()) {
                case "sobel":
                    output = SobelEdgeDetector.detectEdges(input, threshold);
                    break;
                case "prewitt":
                    output = PrewittEdgeDetector.detectEdges(input, threshold);
                    break;
                case "roberts":
                    output = RobertsCrossDetector.detectEdges(input, threshold);
                    break;
                case "canny":
                    CannyEdgeDetector canny = new CannyEdgeDetector();
                    output = canny.detectEdges(input);
                    break;
                case "log":
                    output = LaplacianOfGaussian.detectEdges(input, 1.4);
                    break;
                default:
                    System.out.println("Unknown filter: " + filter);
                    return;
            }
            ImageIO.write(output, "png", new File(outputPath));
            System.out.println("Edge detection completed. Output: " + outputPath);
        } catch (Exception e) {
            System.err.println("Error: " + e.getMessage());
            e.printStackTrace();
        }
    }
}
Summary
This comprehensive edge detection implementation provides:
- Multiple Algorithms: Sobel, Prewitt, Roberts Cross, Canny, Laplacian of Gaussian
- Performance Optimization: Parallel processing for large images
- User-Friendly GUI: JavaFX-based interface with real-time preview
- Flexible Parameters: Adjustable thresholds and sigma values
- Image I/O Support: Read/write various image formats
- Professional Quality: Production-ready code with error handling
The system demonstrates fundamental computer vision techniques while providing practical, usable tools for image analysis and processing tasks.