FileChannel for Memory-Mapped Files in Java

Overview

Memory-mapped files allow you to map a region of a file directly into memory, enabling efficient file I/O operations. Java provides this capability through the FileChannel class and its map() method, which creates a MappedByteBuffer that represents the memory-mapped region.

Key Benefits

  • High Performance: Avoids per-read/write system calls and intermediate buffer copies once the region is mapped
  • Direct Memory Access: File data accessed as if it were in memory
  • OS Managed: Operating system handles paging and caching
  • Shared Memory: Multiple processes can map the same file

Core Classes

  • FileChannel: Channel for reading, writing, mapping files
  • MappedByteBuffer: Buffer representing memory-mapped file region
  • RandomAccessFile / FileChannel.open(): The usual ways to obtain a mappable channel (channels from FileInputStream/FileOutputStream are read-only or write-only, respectively)

Basic Usage

1. Creating Memory-Mapped Files

import java.io.RandomAccessFile;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
public class MemoryMappedFileExample {
    // workWithBuffer needs 4 (int) + 8 (double) + 20 (string) = 32 bytes;
    // map a little headroom beyond that.
    private static final int MIN_MAP_SIZE = 64;

    public static void main(String[] args) throws Exception {
        // Method 1: Using RandomAccessFile
        createWithRandomAccessFile();
        // Method 2: Using FileChannel.open()
        createWithFileChannelOpen();
    }

    private static void createWithRandomAccessFile() throws Exception {
        try (RandomAccessFile file = new RandomAccessFile("example.dat", "rw");
             FileChannel channel = file.getChannel()) {
            // BUG FIX: a freshly created file has size 0, so mapping exactly
            // channel.size() bytes yielded an empty buffer and the first put()
            // threw BufferOverflowException. In READ_WRITE mode map() grows
            // the file, so we can safely request at least MIN_MAP_SIZE bytes.
            MappedByteBuffer buffer = channel.map(
                FileChannel.MapMode.READ_WRITE,          // Read-write access
                0,                                       // Starting position
                Math.max(channel.size(), MIN_MAP_SIZE)   // Size to map
            );
            workWithBuffer(buffer);
        }
    }

    private static void createWithFileChannelOpen() throws Exception {
        Path path = Paths.get("example2.dat");
        try (FileChannel channel = FileChannel.open(
                path,
                StandardOpenOption.READ,
                StandardOpenOption.WRITE,
                StandardOpenOption.CREATE)) {
            // Map a fixed 1MB region; map() extends the new file as needed.
            MappedByteBuffer buffer = channel.map(
                FileChannel.MapMode.READ_WRITE,
                0,
                1024 * 1024  // Map 1MB
            );
            workWithBuffer(buffer);
        }
    }

    // Writes an int, a double and a 20-byte string, then reads them back.
    private static void workWithBuffer(MappedByteBuffer buffer) {
        buffer.putInt(12345);
        buffer.putDouble(3.14159);
        // Explicit charset so the byte count (20 for this ASCII string) is
        // deterministic across platforms.
        buffer.put("Hello Memory Mapping".getBytes(StandardCharsets.UTF_8));
        // flip() switches from writing to reading: limit = position, position = 0
        buffer.flip();
        int intValue = buffer.getInt();
        double doubleValue = buffer.getDouble();
        byte[] stringBytes = new byte[20];
        buffer.get(stringBytes);
        String stringValue = new String(stringBytes, StandardCharsets.UTF_8).trim();
        System.out.printf("Read values: %d, %.5f, %s%n",
            intValue, doubleValue, stringValue);
    }
}

Map Modes

public class MapModesExample {
    /**
     * Demonstrates the three FileChannel map modes (READ_ONLY, READ_WRITE,
     * PRIVATE) against a small scratch file named "testfile.dat".
     */
    public static void demonstrateMapModes() throws Exception {
        Path filePath = Paths.get("testfile.dat");
        seedFile(filePath);
        readOnlyDemo(filePath);
        readWriteDemo(filePath);
        privateDemo(filePath);
    }

    // Creates the file and writes the initial payload the demos operate on.
    private static void seedFile(Path filePath) throws Exception {
        try (FileChannel channel = FileChannel.open(filePath,
                StandardOpenOption.CREATE,
                StandardOpenOption.READ,
                StandardOpenOption.WRITE)) {
            ByteBuffer seed = ByteBuffer.allocate(100);
            seed.put("Initial file content".getBytes());
            seed.flip();
            channel.write(seed);
        }
    }

    // READ_ONLY: reads succeed; any put() would throw ReadOnlyBufferException.
    private static void readOnlyDemo(Path filePath) throws Exception {
        try (FileChannel channel = FileChannel.open(filePath, StandardOpenOption.READ)) {
            MappedByteBuffer mapped = channel.map(
                FileChannel.MapMode.READ_ONLY, 0, channel.size());
            byte[] data = new byte[20];
            mapped.get(data);
            System.out.println("Read: " + new String(data));
            // mapped.put((byte) 1); // Would throw ReadOnlyBufferException
        }
    }

    // READ_WRITE: modifications through the buffer reach the file itself.
    private static void readWriteDemo(Path filePath) throws Exception {
        try (FileChannel channel = FileChannel.open(filePath,
                StandardOpenOption.READ, StandardOpenOption.WRITE)) {
            MappedByteBuffer mapped = channel.map(
                FileChannel.MapMode.READ_WRITE, 0, channel.size());
            mapped.position(0);
            mapped.put("Modified content".getBytes());
        }
    }

    // PRIVATE: copy-on-write — changes stay local to this mapping and are
    // never propagated to the file on disk.
    private static void privateDemo(Path filePath) throws Exception {
        try (FileChannel channel = FileChannel.open(filePath,
                StandardOpenOption.READ, StandardOpenOption.WRITE)) {
            MappedByteBuffer mapped = channel.map(
                FileChannel.MapMode.PRIVATE, 0, channel.size());
            mapped.position(0);
            mapped.put("Private modification".getBytes());
            // File on disk remains unchanged
        }
    }
}

Practical Examples

Example 1: High-Performance File Copy

public class MemoryMappedFileCopy {
    /**
     * Copies {@code sourcePath} to {@code destPath} by mapping both files
     * and doing a single bulk buffer transfer.
     *
     * @throws Exception on any I/O failure (e.g. missing source file)
     */
    public static void copyFile(String sourcePath, String destPath) throws Exception {
        Path source = Paths.get(sourcePath);
        Path destination = Paths.get(destPath);
        try (FileChannel sourceChannel = FileChannel.open(source, StandardOpenOption.READ);
             FileChannel destChannel = FileChannel.open(destination,
                 StandardOpenOption.CREATE,
                 StandardOpenOption.READ,
                 StandardOpenOption.WRITE)) {
            long size = sourceChannel.size();
            // BUG FIX: OPEN with CREATE does not truncate. If destPath already
            // existed and was larger than the source, stale bytes beyond
            // `size` survived the copy. Truncate first so the destination
            // ends up byte-identical to the source.
            destChannel.truncate(size);
            if (size == 0) {
                return; // empty source: nothing to map or copy
            }
            // Map source read-only and destination read-write (map() extends
            // the destination file to `size` bytes).
            MappedByteBuffer sourceBuffer = sourceChannel.map(
                FileChannel.MapMode.READ_ONLY, 0, size);
            MappedByteBuffer destBuffer = destChannel.map(
                FileChannel.MapMode.READ_WRITE, 0, size);
            // Perform the copy in one bulk put
            destBuffer.put(sourceBuffer);
            // Force changes to disk
            destBuffer.force();
        }
    }

    public static void main(String[] args) throws Exception {
        long startTime = System.currentTimeMillis();
        copyFile("largefile.dat", "copy.dat");
        long endTime = System.currentTimeMillis();
        System.out.printf("File copied in %d ms%n", endTime - startTime);
    }
}

Example 2: Shared Memory Database-like Structure

public class MemoryMappedDatabase {
    private final MappedByteBuffer buffer;
    private final FileChannel channel;

    private static final int HEADER_SIZE = 1024; // bytes
    private static final int RECORD_SIZE = 128;  // bytes
    private static final int MAX_RECORDS = 1000;
    // Per-record layout: 8-byte timestamp + 4-byte length prefix + payload.
    private static final int RECORD_OVERHEAD = Long.BYTES + Integer.BYTES;

    /**
     * Opens (or creates) the database file and maps the full fixed-size
     * region (header plus record area) into memory.
     */
    public MemoryMappedDatabase(String filePath) throws Exception {
        Path path = Paths.get(filePath);
        this.channel = FileChannel.open(path,
            StandardOpenOption.CREATE,
            StandardOpenOption.READ,
            StandardOpenOption.WRITE);
        long fileSize = HEADER_SIZE + (long) RECORD_SIZE * MAX_RECORDS;
        this.buffer = channel.map(
            FileChannel.MapMode.READ_WRITE,
            0,
            fileSize
        );
        initializeHeader();
    }

    // Header layout: int version at offset 0, int record count at offset 4.
    // A freshly mapped (newly created) file is zero-filled, so version == 0
    // means "uninitialized".
    private void initializeHeader() {
        if (buffer.getInt(0) == 0) {
            buffer.putInt(0, 1);     // Version
            buffer.putInt(4, 0);     // Record count
        }
    }

    /**
     * Appends one record (UTF-8 payload plus creation timestamp).
     *
     * @throws IllegalStateException    if the database is full
     * @throws IllegalArgumentException if the encoded payload does not fit
     *                                  in one fixed-size record slot
     */
    public void addRecord(String data) {
        int recordCount = buffer.getInt(4);
        if (recordCount >= MAX_RECORDS) {
            throw new IllegalStateException("Database full");
        }
        byte[] dataBytes = data.getBytes(java.nio.charset.StandardCharsets.UTF_8);
        // BUG FIX: the original wrote unbounded payloads; anything longer than
        // RECORD_SIZE - 12 bytes silently corrupted the next record slot.
        if (RECORD_OVERHEAD + dataBytes.length > RECORD_SIZE) {
            throw new IllegalArgumentException(
                "Record too large: " + dataBytes.length + " bytes");
        }
        int recordOffset = HEADER_SIZE + recordCount * RECORD_SIZE;
        // Write record
        buffer.position(recordOffset);
        buffer.putLong(System.currentTimeMillis()); // timestamp
        buffer.putInt(dataBytes.length);            // data length
        buffer.put(dataBytes);                      // actual data
        // Update record count
        buffer.putInt(4, recordCount + 1);
        // Force to disk
        buffer.force();
    }

    /**
     * Reads every record back as a "[HH:MM:SS] payload" string.
     */
    public List<String> readAllRecords() {
        List<String> records = new ArrayList<>();
        int recordCount = buffer.getInt(4);
        for (int i = 0; i < recordCount; i++) {
            int recordOffset = HEADER_SIZE + i * RECORD_SIZE;
            buffer.position(recordOffset);
            long timestamp = buffer.getLong();
            int dataLength = buffer.getInt();
            byte[] dataBytes = new byte[dataLength];
            buffer.get(dataBytes);
            // BUG FIX: the original passed an Instant to %tT, which throws
            // IllegalFormatConversionException (Instant has no HOUR_OF_DAY
            // field). %tT accepts an epoch-millis long directly.
            String record = String.format("[%tT] %s", timestamp,
                new String(dataBytes, java.nio.charset.StandardCharsets.UTF_8));
            records.add(record);
        }
        return records;
    }

    /** Closes the underlying channel; the mapping is released when GC'd. */
    public void close() throws Exception {
        channel.close();
    }

    public static void main(String[] args) throws Exception {
        MemoryMappedDatabase db = new MemoryMappedDatabase("database.dat");
        // Add some records
        db.addRecord("First record");
        db.addRecord("Second record");
        db.addRecord("Third record");
        // Read all records
        List<String> records = db.readAllRecords();
        records.forEach(System.out::println);
        db.close();
    }
}

Example 3: Concurrent Access with Read-Write Locks

public class ConcurrentMemoryMappedFile {
    private final MappedByteBuffer buffer;
    private final FileChannel channel;
    private final ReadWriteLock lock = new ReentrantReadWriteLock();

    /**
     * Opens (or creates) {@code filePath} and maps {@code size} bytes
     * read-write for lock-guarded shared access.
     */
    public ConcurrentMemoryMappedFile(String filePath, long size) throws Exception {
        Path path = Paths.get(filePath);
        this.channel = FileChannel.open(path,
            StandardOpenOption.CREATE,
            StandardOpenOption.READ,
            StandardOpenOption.WRITE);
        this.buffer = channel.map(FileChannel.MapMode.READ_WRITE, 0, size);
    }

    /** Writes {@code data} at {@code position} and flushes it to disk. */
    public void writeData(int position, byte[] data) {
        lock.writeLock().lock();
        try {
            // Use a duplicate so the shared buffer's position is never
            // mutated; duplicates share content but have independent cursors.
            ByteBuffer view = buffer.duplicate();
            view.position(position);
            view.put(data);
            buffer.force(); // Ensure data is written to disk
        } finally {
            lock.writeLock().unlock();
        }
    }

    /** Reads {@code length} bytes starting at {@code position}. */
    public byte[] readData(int position, int length) {
        lock.readLock().lock();
        try {
            // BUG FIX: the original called buffer.position(...) under the
            // *read* lock. Read locks admit many threads at once, so
            // concurrent readers raced on the shared buffer's cursor and
            // could return each other's data. A per-call duplicate has an
            // independent position over the same mapped memory.
            ByteBuffer view = buffer.duplicate();
            view.position(position);
            byte[] data = new byte[length];
            view.get(data);
            return data;
        } finally {
            lock.readLock().unlock();
        }
    }

    /** Increments the 4-byte int at {@code position} under the write lock. */
    public void atomicIncrement(int position) {
        lock.writeLock().lock();
        try {
            // Absolute get/put: no shared-cursor mutation needed at all.
            int value = buffer.getInt(position);
            buffer.putInt(position, value + 1);
            buffer.force();
        } finally {
            lock.writeLock().unlock();
        }
    }

    /** Closes the channel; the mapping stays valid until GC'd. */
    public void close() throws Exception {
        channel.close();
    }
}
// Usage example
class ConcurrentExample {
    /**
     * Drives ConcurrentMemoryMappedFile from several worker threads: each
     * task bumps the shared counter at offset 0 and writes its own greeting
     * into a per-thread slot.
     */
    public static void main(String[] args) throws Exception {
        ConcurrentMemoryMappedFile sharedFile =
            new ConcurrentMemoryMappedFile("shared.dat", 1024);
        ExecutorService pool = Executors.newFixedThreadPool(4);
        for (int id = 0; id < 10; id++) {
            final int worker = id; // effectively-final copy for the lambda
            pool.submit(() -> {
                sharedFile.atomicIncrement(0); // Safely increment counter
                sharedFile.writeData(4 + worker * 10,
                    ("Hello from thread " + worker).getBytes());
            });
        }
        pool.shutdown();
        pool.awaitTermination(1, TimeUnit.MINUTES);
        sharedFile.close();
    }
}

Advanced Techniques

1. Handling Large Files with Multiple Mappings

public class LargeFileHandler {
    // A single MappedByteBuffer cannot exceed Integer.MAX_VALUE bytes, so
    // bigger files must be mapped as a sequence of windows.
    private static final long MAX_MAP_SIZE = Integer.MAX_VALUE; // ~2GB

    /**
     * Processes an arbitrarily large file through successive read-only
     * mappings of at most MAX_MAP_SIZE bytes each.
     */
    public void processLargeFile(String filePath) throws Exception {
        Path path = Paths.get(filePath);
        try (FileChannel channel = FileChannel.open(path, StandardOpenOption.READ)) {
            long fileSize = channel.size();
            for (long offset = 0; offset < fileSize; ) {
                long window = Math.min(MAX_MAP_SIZE, fileSize - offset);
                MappedByteBuffer chunk = channel.map(
                    FileChannel.MapMode.READ_ONLY, offset, window);
                processBuffer(chunk, offset);
                offset += window;
            }
        }
    }

    // Consumes one mapped window; fileOffset is where this window begins
    // within the whole file.
    private void processBuffer(MappedByteBuffer buffer, long fileOffset) {
        while (buffer.hasRemaining()) {
            byte b = buffer.get();
            // Process byte...
        }
    }
}

2. Memory-Mapped File with ByteBuffer Views

public class StructuredMemoryMap {
    private final MappedByteBuffer buffer;
    private final IntBuffer intView;
    private final LongBuffer longView;
    private final CharBuffer charView;

    /**
     * Maps {@code size} bytes of {@code filePath} read-write and exposes
     * typed views over the same mapped region. The channel is closed as soon
     * as the mapping exists; a mapping remains valid after its channel is
     * closed.
     */
    public StructuredMemoryMap(String filePath, long size) throws Exception {
        Path path = Paths.get(filePath);
        try (FileChannel channel = FileChannel.open(path,
                StandardOpenOption.CREATE,
                StandardOpenOption.READ,
                StandardOpenOption.WRITE)) {
            buffer = channel.map(FileChannel.MapMode.READ_WRITE, 0, size);
            // The views alias the same bytes; each view's index is in units
            // of its element size (4 bytes/int, 8 bytes/long, 2 bytes/char).
            intView = buffer.asIntBuffer();
            longView = buffer.asLongBuffer();
            charView = buffer.asCharBuffer();
        }
    }

    /** Writes sample values through each typed view, then flushes to disk. */
    public void writeStructuredData() {
        intView.put(0, 42);          // int index 0  -> bytes 0-3
        intView.put(1, 100);         // int index 1  -> bytes 4-7
        longView.put(2, 123456789L); // long index 2 -> bytes 16-23 (clear of the ints)
        charView.put(20, 'H');       // char index 20 -> bytes 40-41
        charView.put(21, 'i');       // char index 21 -> bytes 42-43
        buffer.force();
    }

    /** Prints back the values written by writeStructuredData(). */
    public void readStructuredData() {
        System.out.println("First int: " + intView.get(0));
        System.out.println("Second int: " + intView.get(1));
        System.out.println("Long: " + longView.get(2));
        System.out.println("Chars: " + charView.get(20) + charView.get(21));
    }
}

Performance Considerations

1. Benchmark Comparison

public class IOPerformanceBenchmark {
    // Both benchmarks perform the same number of sequential writes.
    private static final int WRITE_COUNT = 1000;

    /** Times WRITE_COUNT sequential writes through a plain FileOutputStream. */
    public static void testTraditionalIO(String filePath, byte[] data) throws Exception {
        long begin = System.nanoTime();
        try (FileOutputStream out = new FileOutputStream(filePath)) {
            for (int n = 0; n < WRITE_COUNT; n++) {
                out.write(data);
            }
        }
        System.out.printf("Traditional I/O: %d ns%n", System.nanoTime() - begin);
    }

    /** Times WRITE_COUNT sequential writes through a memory-mapped buffer. */
    public static void testMemoryMappedIO(String filePath, byte[] data) throws Exception {
        long begin = System.nanoTime();
        try (RandomAccessFile raf = new RandomAccessFile(filePath, "rw");
             FileChannel channel = raf.getChannel()) {
            MappedByteBuffer mapped = channel.map(
                FileChannel.MapMode.READ_WRITE, 0, data.length * (long) WRITE_COUNT);
            for (int n = 0; n < WRITE_COUNT; n++) {
                mapped.put(data);
            }
            mapped.force(); // include the flush cost in the measurement
        }
        System.out.printf("Memory Mapped I/O: %d ns%n", System.nanoTime() - begin);
    }

    /** Runs both benchmarks with an identical 1KB payload. */
    public static void main(String[] args) throws Exception {
        byte[] payload = new byte[1024]; // 1KB
        Arrays.fill(payload, (byte) 1);
        testTraditionalIO("traditional.dat", payload);
        testMemoryMappedIO("mapped.dat", payload);
    }
}

Best Practices

  1. Always close channels in try-with-resources or finally blocks
  2. Use force() sparingly as it's expensive
  3. Handle large files by mapping portions at a time
  4. Consider memory usage when mapping large files
  5. Use appropriate map modes (READ_ONLY, READ_WRITE, PRIVATE)
  6. Be aware of platform limitations on maximum mapping size

Common Pitfalls

  1. Not calling force() when persistence is required
  2. Mapping files larger than available memory
  3. Forgetting to handle byte order in multi-platform environments
  4. Not considering concurrent access in multi-threaded applications

Memory-mapped files provide exceptional performance for file I/O operations and are particularly useful for large files, random access patterns, and shared memory scenarios.

Leave a Reply

Your email address will not be published. Required fields are marked *


Macro Nepal Helper