Creating audio waveform visualizations in Java involves reading audio files, processing audio data, and rendering visual representations. Here's a comprehensive guide with multiple approaches.
Approaches Overview
- Java Sound API - Basic waveform rendering
- JFreeChart - Professional charting library
- Custom Swing Rendering - Full control over visualization
- Real-time Visualization - Live audio input visualization
Approach 1: Basic Java Sound API
Example 1: Simple Waveform Renderer
import javax.sound.sampled.*;
import javax.swing.*;
import java.awt.*;
import java.io.File;
import java.io.IOException;
public class BasicWaveformVisualizer extends JPanel {
    // Normalized samples in [-1, 1]; only the first channel of each frame is decoded.
    private double[] audioSamples;
    private final Color waveformColor = new Color(0, 150, 255);

    /**
     * Loads the given audio file and prepares the panel for rendering.
     * Load errors are reported on stderr and the panel renders empty.
     *
     * @param audioFilePath path to a signed-PCM audio file (WAV/AIFF/AU)
     */
    public BasicWaveformVisualizer(String audioFilePath) {
        try {
            loadAudioFile(audioFilePath);
        } catch (Exception e) {
            e.printStackTrace();
        }
        setPreferredSize(new Dimension(800, 400));
        setBackground(Color.BLACK);
    }

    /** Reads the whole file into memory and decodes it to normalized samples. */
    private void loadAudioFile(String filePath) throws UnsupportedAudioFileException, IOException {
        // try-with-resources: the original leaked the AudioInputStream.
        try (AudioInputStream in = AudioSystem.getAudioInputStream(new File(filePath))) {
            audioSamples = decodeSamples(in.readAllBytes(), in.getFormat());
        }
    }

    /**
     * Decodes raw signed-PCM bytes to normalized doubles in [-1, 1].
     * Supports 8-bit and 16-bit signed PCM; for multi-channel audio only the
     * first channel of each frame is used. Unsupported encodings decode to 0.
     * Package-private and static so the decoding logic is unit-testable.
     *
     * @param audioBytes raw bytes as read from the stream
     * @param format     the stream's {@link AudioFormat} (frame size, endianness, bit depth)
     * @return one normalized sample per frame
     */
    static double[] decodeSamples(byte[] audioBytes, AudioFormat format) {
        int sampleSizeInBytes = format.getSampleSizeInBits() / 8;
        int frameSize = format.getFrameSize();
        boolean isBigEndian = format.isBigEndian();
        // Full-scale value for this bit depth, e.g. 32768 for 16-bit.
        double maxValue = Math.pow(2, format.getSampleSizeInBits() - 1);
        double[] samples = new double[audioBytes.length / frameSize];
        for (int i = 0; i < samples.length; i++) {
            int byteIndex = i * frameSize;
            int sample = 0;
            if (format.getEncoding() == AudioFormat.Encoding.PCM_SIGNED) {
                if (sampleSizeInBytes == 2) {
                    // High byte carries the sign (sign-extended by the shift); low byte is masked.
                    sample = isBigEndian
                            ? ((audioBytes[byteIndex] << 8) | (audioBytes[byteIndex + 1] & 0xFF))
                            : ((audioBytes[byteIndex + 1] << 8) | (audioBytes[byteIndex] & 0xFF));
                } else if (sampleSizeInBytes == 1) {
                    sample = audioBytes[byteIndex];
                }
            }
            samples[i] = sample / maxValue;
        }
        return samples;
    }

    /** Renders one vertical min/max line per pixel column over a center axis. */
    @Override
    protected void paintComponent(Graphics g) {
        super.paintComponent(g);
        if (audioSamples == null || audioSamples.length == 0) return;
        Graphics2D g2d = (Graphics2D) g;
        g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
        int width = getWidth();
        int height = getHeight();
        int centerY = height / 2;
        int samplesPerPixel = Math.max(1, audioSamples.length / width);
        g2d.setColor(waveformColor);
        for (int x = 0; x < width; x++) {
            int startSample = x * samplesPerPixel;
            if (startSample >= audioSamples.length) break;
            // FIX: the original used `audioSamples.length - 1` here, silently
            // dropping the final sample of the file.
            int endSample = Math.min(startSample + samplesPerPixel, audioSamples.length);
            // FIX: the original initialized max to Double.MIN_VALUE, which is the
            // smallest POSITIVE double — all-negative columns never updated it.
            double min = Double.POSITIVE_INFINITY;
            double max = Double.NEGATIVE_INFINITY;
            for (int i = startSample; i < endSample; i++) {
                min = Math.min(min, audioSamples[i]);
                max = Math.max(max, audioSamples[i]);
            }
            int y1 = (int) (centerY + min * centerY);
            int y2 = (int) (centerY + max * centerY);
            g2d.drawLine(x, y1, x, y2);
        }
        // Center (zero-amplitude) reference line.
        g2d.setColor(Color.GRAY);
        g2d.drawLine(0, centerY, width, centerY);
    }

    public static void main(String[] args) {
        SwingUtilities.invokeLater(() -> {
            JFrame frame = new JFrame("Basic Waveform Visualizer");
            frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
            BasicWaveformVisualizer visualizer = new BasicWaveformVisualizer("audiofile.wav");
            frame.add(visualizer);
            frame.pack();
            frame.setLocationRelativeTo(null);
            frame.setVisible(true);
        });
    }
}
Example 2: Enhanced Waveform with Zoom and Pan
import javax.sound.sampled.*;
import javax.swing.*;
import java.awt.*;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.MouseMotionAdapter;
import java.awt.geom.AffineTransform;
import java.io.File;
public class EnhancedWaveformVisualizer extends JPanel {
    // Normalized samples in [-1, 1] (first channel of each frame).
    private double[] audioSamples;
    private final Color waveformColor = new Color(0, 200, 100);
    private final Color backgroundColor = new Color(20, 20, 30);
    private final Color gridColor = new Color(60, 60, 80);
    // View state: zoom is a horizontal magnification factor (1.0 = whole file),
    // pan is a fractional offset into the sample array.
    private double zoom = 1.0;
    private double pan = 0.0;
    private boolean isDragging = false;
    private int lastMouseX;

    /**
     * Loads the given audio file and wires up zoom/pan mouse interaction.
     * Load errors are reported on stderr and the panel renders empty.
     *
     * @param audioFilePath path to a signed-PCM audio file
     */
    public EnhancedWaveformVisualizer(String audioFilePath) {
        try {
            loadAudioFile(audioFilePath);
        } catch (Exception e) {
            e.printStackTrace();
        }
        setPreferredSize(new Dimension(1000, 500));
        setBackground(backgroundColor);
        setupMouseListeners();
    }

    /** Reads the whole file and decodes it; closes the stream (the original leaked it). */
    private void loadAudioFile(String filePath) throws Exception {
        try (AudioInputStream in = AudioSystem.getAudioInputStream(new File(filePath))) {
            audioSamples = decodeSamples(in.readAllBytes(), in.getFormat());
        }
    }

    /**
     * Decodes raw signed-PCM bytes (8- or 16-bit) to normalized doubles in [-1, 1].
     * Only the first channel of each frame is used. Static and package-private
     * so the conversion is unit-testable.
     */
    static double[] decodeSamples(byte[] audioBytes, AudioFormat format) {
        int sampleSizeInBytes = format.getSampleSizeInBits() / 8;
        int frameSize = format.getFrameSize();
        boolean isBigEndian = format.isBigEndian();
        double maxValue = Math.pow(2, format.getSampleSizeInBits() - 1);
        double[] samples = new double[audioBytes.length / frameSize];
        for (int i = 0; i < samples.length; i++) {
            int byteIndex = i * frameSize;
            int sample = 0;
            if (format.getEncoding() == AudioFormat.Encoding.PCM_SIGNED) {
                if (sampleSizeInBytes == 2) {
                    // The high byte is sign-extended by the shift; the low byte is masked.
                    sample = isBigEndian
                            ? ((audioBytes[byteIndex] << 8) | (audioBytes[byteIndex + 1] & 0xFF))
                            : ((audioBytes[byteIndex + 1] << 8) | (audioBytes[byteIndex] & 0xFF));
                } else if (sampleSizeInBytes == 1) {
                    sample = audioBytes[byteIndex];
                }
            }
            samples[i] = sample / maxValue;
        }
        return samples;
    }

    /** Right-drag pans, mouse wheel zooms, double-click resets the view. */
    private void setupMouseListeners() {
        addMouseListener(new MouseAdapter() {
            @Override
            public void mousePressed(MouseEvent e) {
                if (SwingUtilities.isRightMouseButton(e)) {
                    isDragging = true;
                    lastMouseX = e.getX();
                }
            }

            @Override
            public void mouseReleased(MouseEvent e) {
                isDragging = false;
            }

            @Override
            public void mouseClicked(MouseEvent e) {
                if (e.getClickCount() == 2) {
                    // Double-click resets to the full-file view.
                    zoom = 1.0;
                    pan = 0.0;
                    repaint();
                }
            }
        });
        addMouseMotionListener(new MouseMotionAdapter() {
            @Override
            public void mouseDragged(MouseEvent e) {
                if (isDragging) {
                    int deltaX = e.getX() - lastMouseX;
                    pan += deltaX * 0.01;
                    lastMouseX = e.getX();
                    repaint();
                }
            }
        });
        addMouseWheelListener(e -> {
            double zoomFactor = 1.1;
            if (e.getWheelRotation() < 0) {
                zoom *= zoomFactor;
            } else {
                zoom /= zoomFactor;
            }
            // Clamp to a sane range.
            zoom = Math.max(0.1, Math.min(zoom, 100.0));
            repaint();
        });
    }

    @Override
    protected void paintComponent(Graphics g) {
        super.paintComponent(g);
        if (audioSamples == null) return;
        Graphics2D g2d = (Graphics2D) g;
        g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
        int width = getWidth();
        int height = getHeight();
        int centerY = height / 2;
        // FIX: zoom and pan are applied in sample space inside drawWaveform. The
        // original ALSO applied them as an AffineTransform (scale + translate),
        // so both were effectively applied twice.
        drawGrid(g2d, width, height);
        drawWaveform(g2d, width, height, centerY);
        drawInfoPanel(g2d, width, height);
    }

    /** Draws horizontal amplitude gridlines, per-second verticals, and the center line. */
    private void drawGrid(Graphics2D g2d, int width, int height) {
        g2d.setColor(gridColor);
        for (int i = 1; i <= 4; i++) {
            int y = height * i / 5;
            g2d.drawLine(0, y, width, y);
        }
        // NOTE(review): assumes 44.1 kHz; the actual rate from AudioFormat is not kept.
        int samplesPerSecond = 44100;
        // FIX: clamp to >= 1 — for long files the integer division reached 0 and
        // the original `i += pixelsPerSecond` loop never terminated.
        int pixelsPerSecond =
                Math.max(1, (int) (samplesPerSecond * width / (audioSamples.length * zoom)));
        for (int i = 0; i < width; i += pixelsPerSecond) {
            g2d.drawLine(i, 0, i, height);
        }
        g2d.setColor(Color.WHITE);
        g2d.drawLine(0, height / 2, width, height / 2);
    }

    /** Renders the zoomed/panned window of samples as one min/max line per pixel column. */
    private void drawWaveform(Graphics2D g2d, int width, int height, int centerY) {
        if (audioSamples.length == 0) return;
        int visibleSamples = Math.max(1, (int) (audioSamples.length / zoom));
        int startSample = (int) (pan * audioSamples.length);
        startSample = Math.max(0, Math.min(startSample, audioSamples.length - visibleSamples));
        int endSample = Math.min(startSample + visibleSamples, audioSamples.length);
        int samplesPerPixel = Math.max(1, (endSample - startSample) / width);
        g2d.setColor(waveformColor);
        // Hoisted: the original created a fresh BasicStroke for every pixel column.
        Stroke originalStroke = g2d.getStroke();
        g2d.setStroke(new BasicStroke(1.5f));
        for (int x = 0; x < width; x++) {
            int sampleStart = startSample + x * samplesPerPixel;
            if (sampleStart >= endSample) break;
            int sampleEnd = Math.min(sampleStart + samplesPerPixel, endSample);
            // FIX: Double.MIN_VALUE is the smallest positive double — the original
            // never updated max for all-negative columns.
            double min = Double.POSITIVE_INFINITY;
            double max = Double.NEGATIVE_INFINITY;
            for (int i = sampleStart; i < sampleEnd; i++) {
                min = Math.min(min, audioSamples[i]);
                max = Math.max(max, audioSamples[i]);
            }
            int y1 = (int) (centerY + min * centerY * 0.9);
            int y2 = (int) (centerY + max * centerY * 0.9);
            g2d.drawLine(x, y1, x, y2);
        }
        g2d.setStroke(originalStroke);
    }

    /** Semi-transparent overlay with the current view state and usage hints. */
    private void drawInfoPanel(Graphics2D g2d, int width, int height) {
        g2d.setColor(new Color(255, 255, 255, 180));
        // FIX: sized to actually contain all four text lines (the original's
        // 200x80 box cut off the help line drawn at y=90).
        g2d.fillRect(10, 10, 330, 90);
        g2d.setColor(Color.BLACK);
        g2d.setFont(new Font("Arial", Font.PLAIN, 12));
        g2d.drawString(String.format("Zoom: %.2fx", zoom), 20, 30);
        g2d.drawString(String.format("Pan: %.2f", pan), 20, 50);
        g2d.drawString(String.format("Samples: %,d", audioSamples.length), 20, 70);
        g2d.drawString("Right-drag: Pan | Wheel: Zoom | Double-click: Reset", 20, 90);
    }

    public static void main(String[] args) {
        SwingUtilities.invokeLater(() -> {
            JFrame frame = new JFrame("Enhanced Waveform Visualizer");
            frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
            EnhancedWaveformVisualizer visualizer = new EnhancedWaveformVisualizer("audiofile.wav");
            frame.add(visualizer);
            frame.pack();
            frame.setLocationRelativeTo(null);
            frame.setVisible(true);
        });
    }
}
Approach 2: JFreeChart for Professional Visualization
Dependencies
<dependency> <groupId>org.jfree</groupId> <artifactId>jfreechart</artifactId> <version>1.5.3</version> </dependency>
Example 3: Waveform with JFreeChart
import org.jfree.chart.ChartFactory;
import org.jfree.chart.ChartPanel;
import org.jfree.chart.JFreeChart;
import org.jfree.chart.axis.NumberAxis;
import org.jfree.chart.plot.XYPlot;
import org.jfree.chart.renderer.xy.XYLineAndShapeRenderer;
import org.jfree.data.xy.XYSeries;
import org.jfree.data.xy.XYSeriesCollection;
import javax.sound.sampled.*;
import javax.swing.*;
import java.awt.*;
import java.io.File;
public class JFreeChartWaveform {
    /**
     * Builds a chart panel showing the waveform of the given audio file.
     * On failure an error panel containing the message is returned instead.
     *
     * @param audioFilePath path to a signed-PCM audio file
     * @return a {@link JPanel} ready to be added to a frame
     */
    public static JPanel createWaveformChart(String audioFilePath) {
        try {
            double[] samples = loadAudioSamples(audioFilePath);
            return createChartPanel(samples, "Audio Waveform");
        } catch (Exception e) {
            e.printStackTrace();
            // FIX: JLabel is not a JPanel — the original `return new JLabel(...)`
            // did not compile against the declared JPanel return type.
            JPanel errorPanel = new JPanel(new BorderLayout());
            errorPanel.add(new JLabel("Error loading audio file: " + e.getMessage()),
                    BorderLayout.CENTER);
            return errorPanel;
        }
    }

    /** Reads the whole file and decodes it; closes the stream (the original leaked it). */
    private static double[] loadAudioSamples(String filePath) throws Exception {
        try (AudioInputStream in = AudioSystem.getAudioInputStream(new File(filePath))) {
            return convertToSamples(in.readAllBytes(), in.getFormat());
        }
    }

    /**
     * Decodes raw signed-PCM bytes (8- or 16-bit) to normalized doubles in [-1, 1].
     * Only the first channel of each frame is used.
     */
    private static double[] convertToSamples(byte[] audioBytes, AudioFormat format) {
        int sampleSizeInBytes = format.getSampleSizeInBits() / 8;
        int frameSize = format.getFrameSize();
        boolean isBigEndian = format.isBigEndian();
        double maxValue = Math.pow(2, format.getSampleSizeInBits() - 1);
        double[] samples = new double[audioBytes.length / frameSize];
        for (int i = 0; i < samples.length; i++) {
            int byteIndex = i * frameSize;
            int sample = 0;
            if (format.getEncoding() == AudioFormat.Encoding.PCM_SIGNED) {
                if (sampleSizeInBytes == 2) {
                    // High byte is sign-extended by the shift; low byte is masked.
                    sample = isBigEndian
                            ? ((audioBytes[byteIndex] << 8) | (audioBytes[byteIndex + 1] & 0xFF))
                            : ((audioBytes[byteIndex + 1] << 8) | (audioBytes[byteIndex] & 0xFF));
                } else if (sampleSizeInBytes == 1) {
                    sample = audioBytes[byteIndex];
                }
            }
            samples[i] = sample / maxValue;
        }
        return samples;
    }

    /** Builds the JFreeChart panel from decoded samples, downsampled for performance. */
    private static JPanel createChartPanel(double[] samples, String title) {
        XYSeries series = new XYSeries("Audio Waveform");
        // Cap the plotted points at ~2000 to keep rendering responsive.
        int downsampleFactor = Math.max(1, samples.length / 2000);
        for (int i = 0; i < samples.length; i += downsampleFactor) {
            series.add((double) i, samples[i]);
        }
        XYSeriesCollection dataset = new XYSeriesCollection(series);
        JFreeChart chart = ChartFactory.createXYLineChart(
                title,
                "Time (samples)",
                "Amplitude",
                dataset
        );
        // Dark theme styling.
        chart.setBackgroundPaint(Color.BLACK);
        XYPlot plot = chart.getXYPlot();
        plot.setBackgroundPaint(new Color(20, 20, 30));
        plot.setDomainGridlinePaint(Color.GRAY);
        plot.setRangeGridlinePaint(Color.GRAY);
        XYLineAndShapeRenderer renderer = new XYLineAndShapeRenderer();
        renderer.setSeriesPaint(0, new Color(0, 200, 255));
        renderer.setSeriesStroke(0, new BasicStroke(1.5f));
        // Lines only — individual sample markers would clutter the plot.
        renderer.setSeriesShapesVisible(0, false);
        plot.setRenderer(renderer);
        NumberAxis domainAxis = (NumberAxis) plot.getDomainAxis();
        domainAxis.setLabelPaint(Color.WHITE);
        domainAxis.setTickLabelPaint(Color.WHITE);
        NumberAxis rangeAxis = (NumberAxis) plot.getRangeAxis();
        rangeAxis.setLabelPaint(Color.WHITE);
        rangeAxis.setTickLabelPaint(Color.WHITE);
        // Fixed range matching normalized amplitude.
        rangeAxis.setRange(-1.0, 1.0);
        return new ChartPanel(chart);
    }

    public static void main(String[] args) {
        SwingUtilities.invokeLater(() -> {
            JFrame frame = new JFrame("JFreeChart Waveform Visualizer");
            frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
            frame.setSize(1000, 600);
            JPanel chartPanel = createWaveformChart("audiofile.wav");
            frame.add(chartPanel);
            frame.setLocationRelativeTo(null);
            frame.setVisible(true);
        });
    }
}
Approach 3: Real-time Audio Visualization
Example 4: Real-time Microphone Input Visualizer
import javax.sound.sampled.*;
import javax.swing.*;
import java.awt.*;
import java.util.Arrays;
public class RealTimeWaveformVisualizer extends JPanel implements Runnable {
    private static final int SAMPLE_RATE = 44100;
    private static final int SAMPLE_SIZE_IN_BITS = 16;
    private static final int CHANNELS = 1;
    private static final boolean SIGNED = true;
    private static final boolean BIG_ENDIAN = false;

    // Circular buffer of the most recent normalized samples.
    // All reads/writes are guarded by synchronized (audioBuffer).
    private final double[] audioBuffer = new double[1024];
    private int bufferIndex = 0;
    // FIX: volatile — written from the EDT (stop()) and read by the capture thread;
    // without it the capture loop might never observe the shutdown flag.
    private volatile boolean isRunning = false;
    private Thread audioThread;
    private final Color waveformColor = new Color(255, 100, 0);
    private final Color backgroundColor = new Color(15, 15, 25);

    /** Creates the panel and immediately starts capturing from the default microphone. */
    public RealTimeWaveformVisualizer() {
        setPreferredSize(new Dimension(800, 400));
        setBackground(backgroundColor);
        startAudioCapture();
    }

    /**
     * Verifies microphone support and launches the capture thread.
     * FIX: the original opened AND started a TargetDataLine here, then opened a
     * second line inside run() — the first line was leaked and never closed.
     * The line is now owned exclusively by run().
     */
    private void startAudioCapture() {
        AudioFormat format = new AudioFormat(SAMPLE_RATE, SAMPLE_SIZE_IN_BITS,
                CHANNELS, SIGNED, BIG_ENDIAN);
        DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
        if (!AudioSystem.isLineSupported(info)) {
            JOptionPane.showMessageDialog(this,
                    "Unable to access microphone: line not supported",
                    "Audio Error", JOptionPane.ERROR_MESSAGE);
            return;
        }
        isRunning = true;
        audioThread = new Thread(this, "audio-capture");
        // Daemon so a stuck line.read() cannot keep the JVM alive after the UI exits.
        audioThread.setDaemon(true);
        audioThread.start();
    }

    /** Capture loop: owns the TargetDataLine (closed via try-with-resources). */
    @Override
    public void run() {
        AudioFormat format = new AudioFormat(SAMPLE_RATE, SAMPLE_SIZE_IN_BITS,
                CHANNELS, SIGNED, BIG_ENDIAN);
        byte[] buffer = new byte[1024];
        try (TargetDataLine line = getTargetDataLine(format)) {
            if (line == null) return;
            while (isRunning) {
                int bytesRead = line.read(buffer, 0, buffer.length);
                // FIX: guard against 0/negative reads (line drained or closed).
                if (bytesRead <= 0) break;
                processAudioData(buffer, bytesRead);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Opens and starts a capture line for the given format, or null if unsupported. */
    private TargetDataLine getTargetDataLine(AudioFormat format) throws LineUnavailableException {
        DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
        if (!AudioSystem.isLineSupported(info)) {
            return null;
        }
        TargetDataLine line = (TargetDataLine) AudioSystem.getLine(info);
        line.open(format);
        line.start();
        return line;
    }

    /**
     * Decodes one 16-bit little-endian signed PCM sample to [-1, 1).
     * The high byte is sign-extended by the shift, so no manual two's-complement
     * correction is needed (the original's `if (sample >= 32768)` branch was dead
     * code). Package-private and static so it is unit-testable.
     */
    static double decodeSample16LE(byte lo, byte hi) {
        return ((hi << 8) | (lo & 0xFF)) / 32768.0;
    }

    /** Pushes decoded samples into the circular buffer and schedules a repaint on the EDT. */
    private void processAudioData(byte[] buffer, int bytesRead) {
        int samplesRead = bytesRead / 2; // 16-bit mono: 2 bytes per sample
        synchronized (audioBuffer) {
            for (int i = 0; i < samplesRead; i++) {
                audioBuffer[bufferIndex] = decodeSample16LE(buffer[i * 2], buffer[i * 2 + 1]);
                bufferIndex = (bufferIndex + 1) % audioBuffer.length;
            }
        }
        SwingUtilities.invokeLater(this::repaint);
    }

    @Override
    protected void paintComponent(Graphics g) {
        super.paintComponent(g);
        Graphics2D g2d = (Graphics2D) g;
        g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
        int width = getWidth();
        int height = getHeight();
        int centerY = height / 2;
        drawGrid(g2d, width, height);
        drawRealTimeWaveform(g2d, width, height, centerY);
        drawInfo(g2d, width, height);
    }

    /** Horizontal gridlines plus the center (zero-amplitude) line. */
    private void drawGrid(Graphics2D g2d, int width, int height) {
        g2d.setColor(new Color(60, 60, 80));
        for (int i = 1; i < 5; i++) {
            int y = height * i / 5;
            g2d.drawLine(0, y, width, y);
        }
        g2d.setColor(Color.WHITE);
        g2d.drawLine(0, height / 2, width, height / 2);
    }

    /** Draws a snapshot of the circular buffer as a continuous polyline. */
    private void drawRealTimeWaveform(Graphics2D g2d, int width, int height, int centerY) {
        double[] currentBuffer;
        int currentBufferIndex;
        // Snapshot under the lock so the capture thread can keep writing.
        synchronized (audioBuffer) {
            currentBuffer = Arrays.copyOf(audioBuffer, audioBuffer.length);
            currentBufferIndex = bufferIndex;
        }
        g2d.setColor(waveformColor);
        // Walk the circular buffer starting at the oldest sample.
        for (int x = 0; x < width - 1; x++) {
            int bufferPos = (currentBufferIndex + x) % currentBuffer.length;
            int nextBufferPos = (bufferPos + 1) % currentBuffer.length;
            int y = (int) (centerY + currentBuffer[bufferPos] * centerY * 0.8);
            int nextY = (int) (centerY + currentBuffer[nextBufferPos] * centerY * 0.8);
            g2d.drawLine(x, y, x + 1, nextY);
        }
    }

    /** Static info overlay (title, sample rate, buffer size). */
    private void drawInfo(Graphics2D g2d, int width, int height) {
        g2d.setColor(Color.WHITE);
        g2d.setFont(new Font("Arial", Font.BOLD, 14));
        g2d.drawString("Real-time Audio Input", 10, 20);
        g2d.setFont(new Font("Arial", Font.PLAIN, 12));
        g2d.drawString("Sample Rate: " + SAMPLE_RATE + " Hz", 10, 40);
        g2d.drawString("Buffer Size: " + audioBuffer.length + " samples", 10, 60);
    }

    /** Signals the capture thread to stop and waits briefly for it to finish. */
    public void stop() {
        isRunning = false;
        if (audioThread != null) {
            try {
                audioThread.join(1000);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }
    }

    public static void main(String[] args) {
        SwingUtilities.invokeLater(() -> {
            JFrame frame = new JFrame("Real-time Audio Waveform Visualizer");
            frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
            RealTimeWaveformVisualizer visualizer = new RealTimeWaveformVisualizer();
            frame.add(visualizer);
            frame.addWindowListener(new java.awt.event.WindowAdapter() {
                @Override
                public void windowClosing(java.awt.event.WindowEvent windowEvent) {
                    visualizer.stop();
                }
            });
            frame.pack();
            frame.setLocationRelativeTo(null);
            frame.setVisible(true);
        });
    }
}
Approach 4: Advanced Multi-Channel Visualization
Example 5: Stereo Waveform with Spectrum Analysis
import java.awt.*;
import java.io.File;
import java.util.Arrays;
import javax.sound.sampled.*;
import javax.swing.*;
public class StereoWaveformVisualizer extends JPanel {
    // channelSamples[channel][sample], each sample normalized to [-1, 1].
    private double[][] channelSamples;
    // Normalized DFT magnitudes (peak scaled to 1.0) for the first fftSize samples.
    private double[] spectrum;
    private final Color[] channelColors = {
            new Color(0, 200, 255),  // Left channel - blue
            new Color(255, 100, 0)   // Right channel - orange
    };

    /**
     * Loads the given audio file, decodes all channels, and precomputes a
     * spectrum of the opening segment. Load errors are reported on stderr
     * and the panel renders empty.
     *
     * @param audioFilePath path to a signed-PCM audio file (mono or multi-channel)
     */
    public StereoWaveformVisualizer(String audioFilePath) {
        try {
            loadAudioFile(audioFilePath);
            computeSpectrum();
        } catch (Exception e) {
            e.printStackTrace();
        }
        setPreferredSize(new Dimension(1200, 600));
        setBackground(new Color(10, 10, 20));
    }

    /** Reads the whole file and decodes it; closes the stream (the original leaked it). */
    private void loadAudioFile(String filePath) throws Exception {
        // Requires java.io.File — the original file was missing this import.
        try (AudioInputStream in = AudioSystem.getAudioInputStream(new File(filePath))) {
            channelSamples = decodeChannels(in.readAllBytes(), in.getFormat());
        }
    }

    /**
     * Decodes raw signed-PCM bytes (8- or 16-bit) into per-channel normalized
     * samples in [-1, 1]. Package-private and static so it is unit-testable.
     *
     * @return array indexed as [channel][sample]
     */
    static double[][] decodeChannels(byte[] audioBytes, AudioFormat format) {
        int sampleSizeInBytes = format.getSampleSizeInBits() / 8;
        int frameSize = format.getFrameSize();
        int channels = format.getChannels();
        boolean isBigEndian = format.isBigEndian();
        double maxValue = Math.pow(2, format.getSampleSizeInBits() - 1);
        int samplesPerChannel = audioBytes.length / frameSize;
        double[][] result = new double[channels][samplesPerChannel];
        for (int i = 0; i < samplesPerChannel; i++) {
            for (int ch = 0; ch < channels; ch++) {
                // Channels are interleaved within each frame.
                int byteIndex = i * frameSize + ch * sampleSizeInBytes;
                int sample = 0;
                if (format.getEncoding() == AudioFormat.Encoding.PCM_SIGNED) {
                    if (sampleSizeInBytes == 2) {
                        // High byte is sign-extended by the shift; low byte is masked.
                        sample = isBigEndian
                                ? ((audioBytes[byteIndex] << 8) | (audioBytes[byteIndex + 1] & 0xFF))
                                : ((audioBytes[byteIndex + 1] << 8) | (audioBytes[byteIndex] & 0xFF));
                    } else if (sampleSizeInBytes == 1) {
                        sample = audioBytes[byteIndex];
                    }
                }
                result[ch][i] = sample / maxValue;
            }
        }
        return result;
    }

    /**
     * Computes the magnitude spectrum of the first channel's opening segment.
     * FIX: the original merely copied windowed TIME-domain samples into
     * `spectrum` while labelling them a frequency spectrum. This now performs a
     * direct DFT (O(n^2), but n <= 1024 and it runs once at load time) and
     * normalizes the peak magnitude to 1.0 for display.
     */
    private void computeSpectrum() {
        if (channelSamples == null || channelSamples.length == 0) return;
        int fftSize = 1024;
        double[] segment =
                Arrays.copyOf(channelSamples[0], Math.min(fftSize, channelSamples[0].length));
        applyHammingWindow(segment);
        int n = segment.length;
        spectrum = new double[fftSize / 2];
        double peak = 0.0;
        for (int k = 0; k < spectrum.length; k++) {
            double re = 0.0;
            double im = 0.0;
            for (int t = 0; t < n; t++) {
                double angle = 2.0 * Math.PI * k * t / n;
                re += segment[t] * Math.cos(angle);
                im -= segment[t] * Math.sin(angle);
            }
            spectrum[k] = Math.hypot(re, im);
            peak = Math.max(peak, spectrum[k]);
        }
        // Scale so the tallest bar uses the full display height.
        if (peak > 0.0) {
            for (int k = 0; k < spectrum.length; k++) {
                spectrum[k] /= peak;
            }
        }
    }

    /** In-place Hamming window to reduce spectral leakage at the segment edges. */
    private void applyHammingWindow(double[] data) {
        for (int i = 0; i < data.length; i++) {
            double window = 0.54 - 0.46 * Math.cos(2 * Math.PI * i / (data.length - 1));
            data[i] *= window;
        }
    }

    @Override
    protected void paintComponent(Graphics g) {
        super.paintComponent(g);
        Graphics2D g2d = (Graphics2D) g;
        g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
        int width = getWidth();
        int height = getHeight();
        // Split panel: 70% for waveforms, 30% for the spectrum.
        int waveformHeight = (int) (height * 0.7);
        int spectrumHeight = height - waveformHeight;
        drawWaveforms(g2d, width, waveformHeight);
        drawSpectrum(g2d, width, spectrumHeight, waveformHeight);
    }

    /** Stacks one waveform strip per channel in the upper region. */
    private void drawWaveforms(Graphics2D g2d, int width, int height) {
        if (channelSamples == null || channelSamples.length == 0) return;
        int channels = channelSamples.length;
        int channelHeight = height / channels;
        for (int ch = 0; ch < channels; ch++) {
            drawSingleWaveform(g2d, channelSamples[ch], width, channelHeight,
                    ch * channelHeight, channelColors[ch % channelColors.length],
                    "Channel " + (ch + 1));
        }
    }

    /** Renders one channel as min/max lines per pixel column within its strip. */
    private void drawSingleWaveform(Graphics2D g2d, double[] samples, int width,
                                    int height, int top, Color color, String label) {
        int centerY = top + height / 2;
        int waveformHeight = (int) (height * 0.8);
        // Center (zero-amplitude) line for this strip.
        g2d.setColor(new Color(60, 60, 80));
        g2d.drawLine(0, centerY, width, centerY);
        g2d.setColor(color);
        int samplesPerPixel = Math.max(1, samples.length / width);
        for (int x = 0; x < width; x++) {
            int startSample = x * samplesPerPixel;
            if (startSample >= samples.length) break;
            // FIX: the original used `samples.length - 1`, dropping the last sample.
            int endSample = Math.min(startSample + samplesPerPixel, samples.length);
            // FIX: Double.MIN_VALUE is the smallest positive double — the original
            // never updated max for all-negative columns.
            double min = Double.POSITIVE_INFINITY;
            double max = Double.NEGATIVE_INFINITY;
            for (int i = startSample; i < endSample; i++) {
                min = Math.min(min, samples[i]);
                max = Math.max(max, samples[i]);
            }
            int y1 = (int) (centerY + min * waveformHeight / 2);
            int y2 = (int) (centerY + max * waveformHeight / 2);
            g2d.drawLine(x, y1, x, y2);
        }
        g2d.setColor(Color.WHITE);
        g2d.drawString(label, 10, top + 20);
    }

    /** Renders the normalized spectrum as hue-coded bars in the lower region. */
    private void drawSpectrum(Graphics2D g2d, int width, int height, int top) {
        if (spectrum == null) return;
        g2d.setColor(new Color(40, 40, 60));
        g2d.fillRect(0, top, width, height);
        g2d.setColor(Color.WHITE);
        g2d.drawString("Frequency Spectrum", 10, top + 20);
        int barWidth = Math.max(1, width / spectrum.length);
        for (int i = 0; i < spectrum.length; i++) {
            int x = i * barWidth;
            // spectrum is normalized to [0, 1]; cap the bar just under the strip top.
            int barHeight = Math.min((int) (spectrum[i] * height), height - 10);
            // Hue sweeps with frequency for a rainbow effect.
            float hue = (float) i / spectrum.length;
            g2d.setColor(Color.getHSBColor(hue, 0.8f, 1.0f));
            g2d.fillRect(x, top + height - barHeight, barWidth - 1, barHeight);
        }
    }

    public static void main(String[] args) {
        SwingUtilities.invokeLater(() -> {
            JFrame frame = new JFrame("Stereo Waveform with Spectrum Analysis");
            frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
            StereoWaveformVisualizer visualizer = new StereoWaveformVisualizer("stereo_audio.wav");
            frame.add(visualizer);
            frame.pack();
            frame.setLocationRelativeTo(null);
            frame.setVisible(true);
        });
    }
}
Performance Optimization Tips
- Downsampling: For long audio files, downsample before rendering
- Double Buffering: Use BufferedImage for smooth rendering
- Threading: Process audio in separate threads to avoid UI blocking
- Memory Management: Clear buffers and dispose resources properly
- Lazy Rendering: Only render visible portions of long audio files
Supported Audio Formats
- WAV (PCM)
- AIFF
- AU
- MP3 (requires additional libraries like JLayer)
Key Features Demonstrated
- Basic waveform rendering with Java Sound API
- Interactive visualization with zoom and pan
- Professional charts with JFreeChart
- Real-time audio input visualization
- Multi-channel stereo visualization
- Spectrum analysis display
These examples provide a solid foundation for building sophisticated audio visualization applications in Java, from simple waveform displays to professional-grade audio analysis tools.