Infrastructure as Code with Terraform in Java

Overview

Infrastructure as Code (IaC) with Terraform in Java enables developers to programmatically define, provision, and manage cloud infrastructure using Java code. This approach combines Terraform's powerful infrastructure management capabilities with Java's type safety and ecosystem.

Architecture Components

1. Terraform Java SDK Integration

/**
 * Thin wrapper around the Terraform CLI: builds the command line, runs the
 * process, and captures exit code plus stdout/stderr into a TerraformResult.
 */
public class TerraformJavaIntegration {

    /** Subcommands that accept (and need) -auto-approve for non-interactive runs. */
    private static final Set<String> AUTO_APPROVE_COMMANDS = Set.of("apply", "destroy");

    private final ProcessBuilder processBuilder;
    private final String terraformPath;

    public TerraformJavaIntegration(String terraformPath) {
        this.terraformPath = terraformPath;
        this.processBuilder = new ProcessBuilder();
    }

    /**
     * Runs a Terraform subcommand and captures its output.
     *
     * @param workspace directory passed via -chdir, or null for the current directory
     * @param command   Terraform subcommand (init, plan, apply, ...)
     * @param args      extra arguments appended after the subcommand
     * @return exit code and captured stdout/stderr
     * @throws TerraformException if the process cannot be started or the wait is interrupted
     */
    public TerraformResult executeCommand(String workspace, String command, String... args)
            throws TerraformException {
        try {
            List<String> commandParts = new ArrayList<>();
            commandParts.add(terraformPath);
            if (workspace != null) {
                commandParts.add("-chdir=" + workspace);
            }
            commandParts.add(command);
            // BUG FIX: -auto-approve used to be appended to EVERY command, which
            // makes init/plan/output/show fail with "unknown flag". Only apply and
            // destroy accept it; add it before positional args (e.g. a plan file).
            if (AUTO_APPROVE_COMMANDS.contains(command)) {
                commandParts.add("-auto-approve");
            }
            commandParts.addAll(Arrays.asList(args));
            processBuilder.command(commandParts);
            Process process = processBuilder.start();
            String output = captureOutput(process.getInputStream());
            String error = captureOutput(process.getErrorStream());
            int exitCode = process.waitFor();
            return new TerraformResult(exitCode, output, error);
        } catch (InterruptedException e) {
            // Preserve the interrupt status for callers higher up the stack.
            Thread.currentThread().interrupt();
            throw new TerraformException("Interrupted while executing Terraform command", e);
        } catch (IOException e) {
            throw new TerraformException("Failed to execute Terraform command", e);
        }
    }

    /** Drains a process stream as UTF-8 text, joining lines with '\n'. */
    private String captureOutput(InputStream inputStream) throws IOException {
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(inputStream, java.nio.charset.StandardCharsets.UTF_8))) {
            return reader.lines().collect(Collectors.joining("\n"));
        }
    }
}

2. Type-Safe Terraform Configuration Builder

/**
 * Fluent builder that collects Terraform configuration fragments (variables,
 * providers, resources, modules, outputs) and renders them as HCL-like text.
 *
 * NOTE(review): the renderer emits "key = { ... }" attribute syntax for every
 * section; canonical Terraform block syntax (resource "type" "name" { ... })
 * differs — confirm downstream tooling accepts this dialect.
 */
public class TerraformConfigBuilder {
    // LinkedHashMap/ArrayList keep declaration order stable in the rendered output.
    private final Map<String, Object> variables;
    private final List<TerraformResource> resources;
    private final List<TerraformProvider> providers;
    private final List<TerraformModule> modules;
    private final Map<String, Object> outputs;

    public TerraformConfigBuilder() {
        this.variables = new LinkedHashMap<>();
        this.resources = new ArrayList<>();
        this.providers = new ArrayList<>();
        this.modules = new ArrayList<>();
        this.outputs = new LinkedHashMap<>();
    }

    /** Declares a variable with a default value. */
    public TerraformConfigBuilder variable(String name, Object defaultValue) {
        variables.put(name, Map.of("default", defaultValue));
        return this;
    }

    /** Declares a variable with a description and a default value. */
    public TerraformConfigBuilder variable(String name, String description, Object defaultValue) {
        variables.put(name, Map.of(
                "description", description,
                "default", defaultValue
        ));
        return this;
    }

    /** Registers a provider; alias may be null for the default provider instance. */
    public TerraformConfigBuilder provider(String type, String alias, Map<String, Object> config) {
        providers.add(new TerraformProvider(type, alias, config));
        return this;
    }

    /** Registers a resource of the given type under the given logical name. */
    public TerraformConfigBuilder resource(String type, String name, Map<String, Object> config) {
        resources.add(new TerraformResource(type, name, config));
        return this;
    }

    /** Registers a module call with its source address and input values. */
    public TerraformConfigBuilder module(String name, String source, Map<String, Object> inputs) {
        modules.add(new TerraformModule(name, source, inputs));
        return this;
    }

    /** Declares an output value with a description. */
    public TerraformConfigBuilder output(String name, String description, Object value) {
        outputs.put(name, Map.of(
                "description", description,
                "value", value
        ));
        return this;
    }

    /**
     * Renders everything registered so far into one configuration string.
     * Empty sections are omitted entirely.
     */
    public String build() {
        Map<String, Object> root = new LinkedHashMap<>();
        if (!variables.isEmpty()) {
            root.put("variable", variables);
        }
        if (!providers.isEmpty()) {
            Map<String, Object> providersMap = new LinkedHashMap<>();
            for (TerraformProvider provider : providers) {
                String key = provider.getType();
                if (provider.getAlias() != null) {
                    key += "." + provider.getAlias();
                }
                providersMap.put(key, provider.getConfig());
            }
            root.put("provider", providersMap);
        }
        if (!resources.isEmpty()) {
            Map<String, Object> resourcesMap = new LinkedHashMap<>();
            for (TerraformResource resource : resources) {
                String key = resource.getType() + "." + resource.getName();
                resourcesMap.put(key, resource.getConfig());
            }
            root.put("resource", resourcesMap);
        }
        if (!modules.isEmpty()) {
            Map<String, Object> modulesMap = new LinkedHashMap<>();
            for (TerraformModule module : modules) {
                // BUG FIX: Map.of(...) rejects null values and threw a
                // NullPointerException whenever a module had no version — and the
                // module(name, source, inputs) method above never sets one. Build
                // the entry by hand and omit absent fields.
                Map<String, Object> moduleEntry = new LinkedHashMap<>();
                moduleEntry.put("source", module.getSource());
                if (module.getVersion() != null) {
                    moduleEntry.put("version", module.getVersion());
                }
                moduleEntry.put("inputs", module.getInputs());
                modulesMap.put(module.getName(), moduleEntry);
            }
            root.put("module", modulesMap);
        }
        if (!outputs.isEmpty()) {
            root.put("output", outputs);
        }
        return toHCL(root);
    }

    /** Renders the top-level sections one after another. */
    private String toHCL(Map<String, Object> config) {
        StringBuilder hcl = new StringBuilder();
        for (Map.Entry<String, Object> entry : config.entrySet()) {
            hcl.append(entry.getKey()).append(" ").append(toHCLValue(entry.getValue(), 0));
        }
        return hcl.toString();
    }

    /** Dispatches on the runtime type to the matching HCL fragment renderer. */
    private String toHCLValue(Object value, int indent) {
        if (value instanceof Map) {
            return toHCLMap((Map<?, ?>) value, indent);
        } else if (value instanceof List) {
            return toHCLList((List<?>) value, indent);
        } else if (value instanceof String) {
            // BUG FIX: embedded quotes/backslashes previously produced malformed
            // output; escape them so the rendered string stays well-formed.
            String escaped = ((String) value).replace("\\", "\\\\").replace("\"", "\\\"");
            return "\"" + escaped + "\"";
        } else {
            return String.valueOf(value); // numbers, booleans, null
        }
    }

    /** Renders a map as a brace-delimited block of "key = value" lines. */
    private String toHCLMap(Map<?, ?> map, int indent) {
        StringBuilder sb = new StringBuilder("{\n");
        String indentStr = "  ".repeat(indent + 1);
        for (Map.Entry<?, ?> entry : map.entrySet()) {
            sb.append(indentStr)
                    .append(entry.getKey())
                    .append(" = ")
                    .append(toHCLValue(entry.getValue(), indent + 1))
                    .append("\n");
        }
        sb.append("  ".repeat(indent)).append("}");
        return sb.toString();
    }

    /** Renders a list as a bracket-delimited block, one element per line. */
    private String toHCLList(List<?> list, int indent) {
        if (list.isEmpty()) return "[]";
        StringBuilder sb = new StringBuilder("[\n");
        String indentStr = "  ".repeat(indent + 1);
        for (Object item : list) {
            sb.append(indentStr)
                    .append(toHCLValue(item, indent + 1))
                    .append(",\n");
        }
        sb.append("  ".repeat(indent)).append("]");
        return sb.toString();
    }
}

AWS Infrastructure Builder

1. AWS VPC Configuration

/**
 * Fluent builder for an AWS VPC stack (VPC, subnets, gateways, route tables,
 * security groups) on top of TerraformConfigBuilder. Subnet resources are
 * named subnet_1, subnet_2, ... in creation order.
 */
public class AwsVpcBuilder {
    private final TerraformConfigBuilder configBuilder;
    private final String environment;
    private final String region;
    // Track the logical names of created subnets so build() can emit outputs
    // that reference resources that actually exist.
    private final List<String> publicSubnetNames = new ArrayList<>();
    private final List<String> privateSubnetNames = new ArrayList<>();

    public AwsVpcBuilder(String environment, String region) {
        this.configBuilder = new TerraformConfigBuilder();
        this.environment = environment;
        this.region = region;
        initializeProvider();
    }

    /** Registers the AWS provider with default tags shared by all resources. */
    private void initializeProvider() {
        configBuilder.provider("aws", "primary", Map.of(
                "region", region,
                "default_tags", Map.of(
                        "Environment", environment,
                        "Project", "JavaTerraformDemo"
                )
        ));
    }

    /** Adds the VPC resource (logical name "main"). */
    public AwsVpcBuilder withVpc(String cidrBlock, boolean enableDnsHostnames) {
        configBuilder.resource("aws_vpc", "main", Map.of(
                "cidr_block", cidrBlock,
                "enable_dns_hostnames", enableDnsHostnames,
                "tags", Map.of("Name", "vpc-" + environment)
        ));
        return this;
    }

    /**
     * Adds one subnet per config, named subnet_1..subnet_N, placed in
     * "<region><azSuffix>". Names are recorded for the build() outputs.
     */
    public AwsVpcBuilder withSubnets(List<SubnetConfig> subnetConfigs) {
        for (int i = 0; i < subnetConfigs.size(); i++) {
            SubnetConfig config = subnetConfigs.get(i);
            String subnetName = "subnet_" + (i + 1);
            String availabilityZone = region + config.getAzSuffix();
            configBuilder.resource("aws_subnet", subnetName, Map.of(
                    "vpc_id", "${aws_vpc.main.id}",
                    "cidr_block", config.getCidrBlock(),
                    "availability_zone", availabilityZone,
                    "map_public_ip_on_launch", config.isPublic(),
                    "tags", Map.of(
                            "Name", "subnet-" + (i + 1) + "-" + environment,
                            "Type", config.isPublic() ? "public" : "private"
                    )
            ));
            if (config.isPublic()) {
                publicSubnetNames.add(subnetName);
            } else {
                privateSubnetNames.add(subnetName);
            }
        }
        return this;
    }

    /** Adds an internet gateway attached to the VPC. */
    public AwsVpcBuilder withInternetGateway() {
        configBuilder.resource("aws_internet_gateway", "main", Map.of(
                "vpc_id", "${aws_vpc.main.id}",
                "tags", Map.of("Name", "igw-" + environment)
        ));
        return this;
    }

    /** Adds an Elastic IP plus a NAT gateway in the named subnet. */
    public AwsVpcBuilder withNatGateway(String subnetName) {
        configBuilder.resource("aws_eip", "nat", Map.of(
                "domain", "vpc",
                "tags", Map.of("Name", "eip-nat-" + environment)
        ));
        configBuilder.resource("aws_nat_gateway", "main", Map.of(
                "allocation_id", "${aws_eip.nat.id}",
                "subnet_id", "${aws_subnet." + subnetName + ".id}",
                "tags", Map.of("Name", "nat-" + environment)
        ));
        return this;
    }

    /**
     * Adds route tables with default routes (via internet and/or NAT gateway)
     * and associates them with their subnets.
     */
    public AwsVpcBuilder withRouteTables(List<RouteTableConfig> routeTableConfigs) {
        for (RouteTableConfig config : routeTableConfigs) {
            String rtName = "rt_" + config.getName();
            configBuilder.resource("aws_route_table", rtName, Map.of(
                    "vpc_id", "${aws_vpc.main.id}",
                    "tags", Map.of("Name", "rt-" + config.getName() + "-" + environment)
            ));
            // Default route through the internet gateway, if configured.
            if (config.getGatewayId() != null) {
                configBuilder.resource("aws_route", rtName + "_route", Map.of(
                        "route_table_id", "${aws_route_table." + rtName + ".id}",
                        "destination_cidr_block", "0.0.0.0/0",
                        "gateway_id", config.getGatewayId()
                ));
            }
            // Default route through the NAT gateway, if configured.
            if (config.getNatGatewayId() != null) {
                configBuilder.resource("aws_route", rtName + "_nat_route", Map.of(
                        "route_table_id", "${aws_route_table." + rtName + ".id}",
                        "destination_cidr_block", "0.0.0.0/0",
                        "nat_gateway_id", config.getNatGatewayId()
                ));
            }
            for (String subnet : config.getAssociatedSubnets()) {
                configBuilder.resource("aws_route_table_association",
                        rtName + "_assoc_" + subnet, Map.of(
                        "subnet_id", "${aws_subnet." + subnet + ".id}",
                        "route_table_id", "${aws_route_table." + rtName + ".id}"
                ));
            }
        }
        return this;
    }

    /**
     * Adds security groups with the given ingress rules and an allow-all
     * egress rule (the common default for outbound traffic).
     */
    public AwsVpcBuilder withSecurityGroups(List<SecurityGroupConfig> securityGroupConfigs) {
        for (SecurityGroupConfig config : securityGroupConfigs) {
            List<Map<String, Object>> ingressRules = config.getIngressRules().stream()
                    .map(rule -> Map.<String, Object>of(
                            "description", rule.getDescription(),
                            "from_port", rule.getFromPort(),
                            "to_port", rule.getToPort(),
                            "protocol", rule.getProtocol(),
                            "cidr_blocks", rule.getCidrBlocks()
                    ))
                    .collect(Collectors.toList());
            List<Map<String, Object>> egressRules = List.of(Map.of(
                    "from_port", 0,
                    "to_port", 0,
                    "protocol", "-1",
                    "cidr_blocks", List.of("0.0.0.0/0")
            ));
            configBuilder.resource("aws_security_group", config.getName(), Map.of(
                    "name", config.getName(),
                    "description", config.getDescription(),
                    "vpc_id", "${aws_vpc.main.id}",
                    "ingress", ingressRules,
                    "egress", egressRules,
                    "tags", Map.of("Name", config.getName() + "-" + environment)
            ));
        }
        return this;
    }

    /**
     * Adds the standard outputs and renders the configuration.
     * BUG FIX: the subnet-id outputs previously referenced
     * aws_subnet.public.* / aws_subnet.private.*, but subnets are created as
     * subnet_1, subnet_2, ... — those outputs pointed at resources that never
     * exist. They now list the actually-created subnet ids.
     */
    public String build() {
        configBuilder.output("vpc_id", "VPC ID", "${aws_vpc.main.id}")
                .output("vpc_cidr", "VPC CIDR", "${aws_vpc.main.cidr_block}")
                .output("public_subnet_ids", "Public Subnet IDs",
                        subnetIdRefs(publicSubnetNames))
                .output("private_subnet_ids", "Private Subnet IDs",
                        subnetIdRefs(privateSubnetNames));
        return configBuilder.build();
    }

    /** Maps logical subnet names to "${aws_subnet.<name>.id}" references. */
    private static List<String> subnetIdRefs(List<String> names) {
        return names.stream()
                .map(n -> "${aws_subnet." + n + ".id}")
                .collect(Collectors.toList());
    }

    // Configuration classes

    /** Immutable description of one subnet: CIDR, AZ suffix, public flag. */
    public static class SubnetConfig {
        private final String cidrBlock;
        private final char azSuffix;
        private final boolean isPublic;

        public SubnetConfig(String cidrBlock, char azSuffix, boolean isPublic) {
            this.cidrBlock = cidrBlock;
            this.azSuffix = azSuffix;
            this.isPublic = isPublic;
        }

        public String getCidrBlock() { return cidrBlock; }
        public char getAzSuffix() { return azSuffix; }
        public boolean isPublic() { return isPublic; }
    }

    /** Route table description: optional IGW/NAT default routes plus subnet associations. */
    public static class RouteTableConfig {
        private final String name;
        private String gatewayId;
        private String natGatewayId;
        private final List<String> associatedSubnets;

        public RouteTableConfig(String name, List<String> associatedSubnets) {
            this.name = name;
            this.associatedSubnets = associatedSubnets;
        }

        /** Routes 0.0.0.0/0 through the internet gateway created by withInternetGateway(). */
        public RouteTableConfig withInternetGateway() {
            this.gatewayId = "${aws_internet_gateway.main.id}";
            return this;
        }

        /** Routes 0.0.0.0/0 through the NAT gateway created by withNatGateway(). */
        public RouteTableConfig withNatGateway() {
            this.natGatewayId = "${aws_nat_gateway.main.id}";
            return this;
        }

        public String getName() { return name; }
        public String getGatewayId() { return gatewayId; }
        public String getNatGatewayId() { return natGatewayId; }
        public List<String> getAssociatedSubnets() { return associatedSubnets; }
    }

    /** Security group description: name, description and ingress rules. */
    public static class SecurityGroupConfig {
        private final String name;
        private final String description;
        private final List<IngressRule> ingressRules;

        public SecurityGroupConfig(String name, String description) {
            this.name = name;
            this.description = description;
            this.ingressRules = new ArrayList<>();
        }

        public SecurityGroupConfig addIngressRule(IngressRule rule) {
            this.ingressRules.add(rule);
            return this;
        }

        public String getName() { return name; }
        public String getDescription() { return description; }
        public List<IngressRule> getIngressRules() { return ingressRules; }
    }

    /** One inbound rule: port range, protocol and allowed CIDR blocks. */
    public static class IngressRule {
        private final String description;
        private final int fromPort;
        private final int toPort;
        private final String protocol;
        private final List<String> cidrBlocks;

        public IngressRule(String description, int fromPort, int toPort,
                String protocol, List<String> cidrBlocks) {
            this.description = description;
            this.fromPort = fromPort;
            this.toPort = toPort;
            this.protocol = protocol;
            this.cidrBlocks = cidrBlocks;
        }

        public String getDescription() { return description; }
        public int getFromPort() { return fromPort; }
        public int getToPort() { return toPort; }
        public String getProtocol() { return protocol; }
        public List<String> getCidrBlocks() { return cidrBlocks; }
    }
}

2. ECS Cluster and Service Builder

/**
 * Fluent builder for an ECS-on-Fargate stack (cluster, task definition,
 * service, IAM roles, log group) on top of TerraformConfigBuilder.
 */
public class EcsBuilder {
    // ObjectMapper is thread-safe once configured and expensive to create;
    // previously a new instance was allocated per jsonEncode() call.
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    private final TerraformConfigBuilder configBuilder;
    private final String environment;
    private final String region;

    public EcsBuilder(String environment, String region) {
        this.configBuilder = new TerraformConfigBuilder();
        this.environment = environment;
        this.region = region;
        initializeProvider();
    }

    private void initializeProvider() {
        configBuilder.provider("aws", "primary", Map.of("region", region));
    }

    /** Adds an ECS cluster ("<clusterName>-<environment>") with Container Insights on. */
    public EcsBuilder withEcsCluster(String clusterName) {
        configBuilder.resource("aws_ecs_cluster", "main", Map.of(
                "name", clusterName + "-" + environment,
                "setting", List.of(Map.of(
                        "name", "containerInsights",
                        "value", "enabled"
                )),
                "tags", Map.of("Environment", environment)
        ));
        return this;
    }

    /**
     * Adds a Fargate task definition wrapping the given container, with
     * awslogs log configuration writing to "/ecs/<family>".
     */
    public EcsBuilder withTaskDefinition(String family, ContainerDefinition containerDef) {
        Map<String, Object> container = Map.of(
                "name", containerDef.getName(),
                "image", containerDef.getImage(),
                "cpu", containerDef.getCpu(),
                "memory", containerDef.getMemory(),
                "essential", true,
                "portMappings", containerDef.getPortMappings().stream()
                        .map(pm -> Map.of(
                                "containerPort", pm.getContainerPort(),
                                "hostPort", pm.getHostPort(),
                                "protocol", pm.getProtocol()
                        ))
                        .collect(Collectors.toList()),
                // ECS expects environment as a list of {name, value} objects.
                "environment", containerDef.getEnvironment().entrySet().stream()
                        .map(entry -> Map.of("name", entry.getKey(), "value", entry.getValue()))
                        .collect(Collectors.toList()),
                "logConfiguration", Map.of(
                        "logDriver", "awslogs",
                        "options", Map.of(
                                "awslogs-group", "/ecs/" + family,
                                "awslogs-region", region,
                                "awslogs-stream-prefix", "ecs"
                        )
                )
        );
        configBuilder.resource("aws_ecs_task_definition", "app", Map.of(
                "family", family,
                "network_mode", "awsvpc",
                "requires_compatibilities", List.of("FARGATE"),
                "cpu", containerDef.getCpu(),
                "memory", containerDef.getMemory(),
                "execution_role_arn", "${aws_iam_role.ecs_task_execution_role.arn}",
                "task_role_arn", "${aws_iam_role.ecs_task_role.arn}",
                // container_definitions must be a JSON string, not nested HCL.
                "container_definitions", jsonEncode(List.of(container))
        ));
        return this;
    }

    /**
     * Adds the ECS service wired to the cluster and task definition.
     * NOTE(review): clusterName is unused — the service references
     * ${aws_ecs_cluster.main.id} directly. Kept for interface compatibility;
     * confirm whether it can be dropped in a future revision.
     */
    public EcsBuilder withEcsService(String serviceName, String clusterName,
            ServiceConfig serviceConfig) {
        configBuilder.resource("aws_ecs_service", "main", Map.of(
                "name", serviceName,
                "cluster", "${aws_ecs_cluster.main.id}",
                "task_definition", "${aws_ecs_task_definition.app.arn}",
                "desired_count", serviceConfig.getDesiredCount(),
                "launch_type", "FARGATE",
                "network_configuration", Map.of(
                        "subnets", serviceConfig.getSubnetIds(),
                        "security_groups", serviceConfig.getSecurityGroupIds(),
                        "assign_public_ip", serviceConfig.isAssignPublicIp()
                ),
                "load_balancer", serviceConfig.getLoadBalancers().stream()
                        .map(lb -> Map.of(
                                "target_group_arn", lb.getTargetGroupArn(),
                                "container_name", lb.getContainerName(),
                                "container_port", lb.getContainerPort()
                        ))
                        .collect(Collectors.toList()),
                "tags", Map.of("Environment", environment)
        ));
        return this;
    }

    /** Adds the ECS task execution role (+ managed policy attachment) and the task role. */
    public EcsBuilder withIamRoles() {
        configBuilder.resource("aws_iam_role", "ecs_task_execution_role", Map.of(
                "name", "ecsTaskExecutionRole-" + environment,
                "assume_role_policy", jsonEncode(Map.of(
                        "Version", "2012-10-17",
                        "Statement", List.of(Map.of(
                                "Action", "sts:AssumeRole",
                                "Effect", "Allow",
                                "Principal", Map.of("Service", "ecs-tasks.amazonaws.com")
                        ))
                ))
        ));
        configBuilder.resource("aws_iam_role_policy_attachment",
                "ecs_task_execution_role_policy", Map.of(
                "role", "${aws_iam_role.ecs_task_execution_role.name}",
                "policy_arn", "arn:aws:iam::aws:policy/service-role/AmazonECSTaskExecutionRolePolicy"
        ));
        configBuilder.resource("aws_iam_role", "ecs_task_role", Map.of(
                "name", "ecsTaskRole-" + environment,
                "assume_role_policy", jsonEncode(Map.of(
                        "Version", "2012-10-17",
                        "Statement", List.of(Map.of(
                                "Action", "sts:AssumeRole",
                                "Effect", "Allow",
                                "Principal", Map.of("Service", "ecs-tasks.amazonaws.com")
                        ))
                ))
        ));
        return this;
    }

    /** Adds a CloudWatch log group "/ecs/<name>" with 30-day retention. */
    public EcsBuilder withCloudWatchLogGroup(String logGroupName) {
        configBuilder.resource("aws_cloudwatch_log_group", "ecs", Map.of(
                "name", "/ecs/" + logGroupName,
                "retention_in_days", 30,
                "tags", Map.of("Environment", environment)
        ));
        return this;
    }

    /** Serializes a value to a JSON string for embedding inside the config. */
    private String jsonEncode(Object object) {
        try {
            return OBJECT_MAPPER.writeValueAsString(object);
        } catch (JsonProcessingException e) {
            throw new IllegalStateException("Failed to encode JSON", e);
        }
    }

    /** Adds the standard outputs and renders the configuration. */
    public String build() {
        configBuilder.output("ecs_cluster_name", "ECS Cluster Name",
                        "${aws_ecs_cluster.main.name}")
                .output("ecs_service_name", "ECS Service Name",
                        "${aws_ecs_service.main.name}")
                .output("task_definition_arn", "Task Definition ARN",
                        "${aws_ecs_task_definition.app.arn}");
        return configBuilder.build();
    }

    // Configuration classes

    /** One container: image, sizing, port mappings and environment variables. */
    public static class ContainerDefinition {
        private final String name;
        private final String image;
        private final int cpu;
        private final int memory;
        private final List<PortMapping> portMappings;
        private final Map<String, String> environment;

        public ContainerDefinition(String name, String image, int cpu, int memory) {
            this.name = name;
            this.image = image;
            this.cpu = cpu;
            this.memory = memory;
            this.portMappings = new ArrayList<>();
            this.environment = new HashMap<>();
        }

        public ContainerDefinition addPortMapping(PortMapping portMapping) {
            this.portMappings.add(portMapping);
            return this;
        }

        public ContainerDefinition addEnvironment(String key, String value) {
            this.environment.put(key, value);
            return this;
        }

        public String getName() { return name; }
        public String getImage() { return image; }
        public int getCpu() { return cpu; }
        public int getMemory() { return memory; }
        public List<PortMapping> getPortMappings() { return portMappings; }
        public Map<String, String> getEnvironment() { return environment; }
    }

    /** Container/host port pair with a protocol ("tcp"/"udp"). */
    public static class PortMapping {
        private final int containerPort;
        private final int hostPort;
        private final String protocol;

        public PortMapping(int containerPort, int hostPort, String protocol) {
            this.containerPort = containerPort;
            this.hostPort = hostPort;
            this.protocol = protocol;
        }

        public int getContainerPort() { return containerPort; }
        public int getHostPort() { return hostPort; }
        public String getProtocol() { return protocol; }
    }

    /** Service-level settings: scaling, networking and load balancers. */
    public static class ServiceConfig {
        private final int desiredCount;
        private final List<String> subnetIds;
        private final List<String> securityGroupIds;
        private final boolean assignPublicIp;
        private final List<LoadBalancer> loadBalancers;

        public ServiceConfig(int desiredCount, List<String> subnetIds,
                List<String> securityGroupIds, boolean assignPublicIp) {
            this.desiredCount = desiredCount;
            this.subnetIds = subnetIds;
            this.securityGroupIds = securityGroupIds;
            this.assignPublicIp = assignPublicIp;
            this.loadBalancers = new ArrayList<>();
        }

        public ServiceConfig addLoadBalancer(LoadBalancer loadBalancer) {
            this.loadBalancers.add(loadBalancer);
            return this;
        }

        public int getDesiredCount() { return desiredCount; }
        public List<String> getSubnetIds() { return subnetIds; }
        public List<String> getSecurityGroupIds() { return securityGroupIds; }
        public boolean isAssignPublicIp() { return assignPublicIp; }
        public List<LoadBalancer> getLoadBalancers() { return loadBalancers; }
    }

    /** Attachment of one target group to one container port. */
    public static class LoadBalancer {
        private final String targetGroupArn;
        private final String containerName;
        private final int containerPort;

        public LoadBalancer(String targetGroupArn, String containerName, int containerPort) {
            this.targetGroupArn = targetGroupArn;
            this.containerName = containerName;
            this.containerPort = containerPort;
        }

        public String getTargetGroupArn() { return targetGroupArn; }
        public String getContainerName() { return containerName; }
        public int getContainerPort() { return containerPort; }
    }
}

Terraform Lifecycle Management

1. Terraform Workspace Manager

@Component
/**
 * Creates Terraform workspace directories on disk and drives the plan /
 * apply / destroy / output lifecycle through TerraformJavaIntegration.
 */
public class TerraformWorkspaceManager {
    private final TerraformJavaIntegration terraform;
    private final String baseDirectory;
    private final ObjectMapper objectMapper;

    public TerraformWorkspaceManager(TerraformJavaIntegration terraform,
            String baseDirectory) {
        this.terraform = terraform;
        this.baseDirectory = baseDirectory;
        this.objectMapper = new ObjectMapper();
    }

    /**
     * Base directory under which workspaces live.
     * BUG FIX: this getter was missing even though
     * InfrastructureDeploymentService calls it to rebuild Workspace handles.
     */
    public String getBaseDirectory() {
        return baseDirectory;
    }

    /**
     * Creates the workspace directory, writes main.tf and terraform.tfvars,
     * and runs `terraform init` in it.
     *
     * @throws TerraformException on I/O failure or a non-zero init exit code
     */
    public Workspace createWorkspace(String workspaceName, String config)
            throws TerraformException {
        Path workspacePath = Paths.get(baseDirectory, workspaceName);
        try {
            Files.createDirectories(workspacePath);
            // Files.writeString always writes UTF-8; the previous
            // String.getBytes() used the platform default charset.
            Files.writeString(workspacePath.resolve("main.tf"), config);
            Map<String, Object> tfvars = Map.of(
                    "environment", workspaceName,
                    "region", "us-east-1"
            );
            Files.writeString(workspacePath.resolve("terraform.tfvars"), toHCL(tfvars));
            TerraformResult initResult = terraform.executeCommand(
                    workspacePath.toString(), "init");
            if (initResult.getExitCode() != 0) {
                throw new TerraformException("Terraform init failed: " +
                        initResult.getError());
            }
            return new Workspace(workspaceName, workspacePath.toString());
        } catch (IOException e) {
            throw new TerraformException("Failed to create workspace", e);
        }
    }

    /** Runs `terraform plan -out tfplan` in the workspace. */
    public TerraformResult plan(Workspace workspace) throws TerraformException {
        return terraform.executeCommand(workspace.getPath(), "plan",
                "-out", "tfplan");
    }

    /** Applies the saved plan file produced by plan(). */
    public TerraformResult apply(Workspace workspace) throws TerraformException {
        return terraform.executeCommand(workspace.getPath(), "apply", "tfplan");
    }

    /** Destroys all resources in the workspace. */
    public TerraformResult destroy(Workspace workspace) throws TerraformException {
        return terraform.executeCommand(workspace.getPath(), "destroy");
    }

    /** Reads `terraform output -json` and parses it into a map. */
    public Map<String, Object> getOutputs(Workspace workspace) throws TerraformException {
        TerraformResult result = terraform.executeCommand(workspace.getPath(), "output", "-json");
        if (result.getExitCode() != 0) {
            throw new TerraformException("Failed to get outputs: " + result.getError());
        }
        try {
            // Jackson's untyped Map read is inherently unchecked.
            @SuppressWarnings("unchecked")
            Map<String, Object> outputs = objectMapper.readValue(result.getOutput(), Map.class);
            return outputs;
        } catch (JsonProcessingException e) {
            throw new TerraformException("Failed to parse outputs", e);
        }
    }

    /** Reads `terraform show -json` and parses it into a WorkspaceState. */
    public WorkspaceState getState(Workspace workspace) throws TerraformException {
        TerraformResult result = terraform.executeCommand(workspace.getPath(), "show", "-json");
        if (result.getExitCode() != 0) {
            throw new TerraformException("Failed to get state: " + result.getError());
        }
        try {
            return objectMapper.readValue(result.getOutput(), WorkspaceState.class);
        } catch (JsonProcessingException e) {
            throw new TerraformException("Failed to parse state", e);
        }
    }

    /** Renders a flat map as simple `key = "value"` tfvars lines. */
    private String toHCL(Map<String, Object> map) {
        return map.entrySet().stream()
                .map(entry -> entry.getKey() + " = \"" + entry.getValue() + "\"")
                .collect(Collectors.joining("\n"));
    }
}

2. Infrastructure Deployment Service

@Service
/**
 * Orchestrates deploy / destroy / status workflows over
 * TerraformWorkspaceManager, persisting every state transition to
 * DeploymentHistoryRepository for auditability.
 */
public class InfrastructureDeploymentService {
    private final TerraformWorkspaceManager workspaceManager;
    private final DeploymentHistoryRepository historyRepository;
    private final ExecutorService executorService;

    public InfrastructureDeploymentService(TerraformWorkspaceManager workspaceManager,
            DeploymentHistoryRepository historyRepository) {
        this.workspaceManager = workspaceManager;
        this.historyRepository = historyRepository;
        this.executorService = Executors.newFixedThreadPool(3);
    }

    /**
     * Stops the internal executor. BUG FIX: the pool was never shut down, so
     * its non-daemon threads leaked and could keep the JVM alive. Wire this to
     * the container's shutdown (e.g. a @PreDestroy method or destroyMethod).
     */
    public void shutdown() {
        executorService.shutdown();
    }

    /**
     * Deploys infrastructure asynchronously: create workspace, plan, apply,
     * collect outputs. Every phase transition is persisted.
     *
     * NOTE(review): @Async plus CompletableFuture.supplyAsync dispatches twice
     * (Spring's async executor, then this pool) — confirm which executor is
     * intended before simplifying.
     */
    @Async
    public CompletableFuture<DeploymentResult> deployInfrastructure(
            DeploymentRequest request) {
        return CompletableFuture.supplyAsync(() -> {
            DeploymentHistory history = createDeploymentHistory(request);
            try {
                Workspace workspace = workspaceManager.createWorkspace(
                        request.getWorkspaceName(), request.getConfiguration());
                // Plan phase.
                history.setStatus(DeploymentStatus.PLANNING);
                historyRepository.save(history);
                TerraformResult planResult = workspaceManager.plan(workspace);
                history.setPlanOutput(planResult.getOutput());
                if (planResult.getExitCode() != 0) {
                    history.setStatus(DeploymentStatus.FAILED);
                    history.setErrorMessage(planResult.getError());
                    historyRepository.save(history);
                    return new DeploymentResult(false, "Plan failed", history);
                }
                // Apply phase.
                history.setStatus(DeploymentStatus.APPLYING);
                historyRepository.save(history);
                TerraformResult applyResult = workspaceManager.apply(workspace);
                history.setApplyOutput(applyResult.getOutput());
                if (applyResult.getExitCode() != 0) {
                    history.setStatus(DeploymentStatus.FAILED);
                    history.setErrorMessage(applyResult.getError());
                    historyRepository.save(history);
                    return new DeploymentResult(false, "Apply failed", history);
                }
                // Success: record outputs.
                Map<String, Object> outputs = workspaceManager.getOutputs(workspace);
                history.setOutputs(outputs);
                history.setStatus(DeploymentStatus.COMPLETED);
                historyRepository.save(history);
                return new DeploymentResult(true, "Deployment completed", history);
            } catch (TerraformException e) {
                history.setStatus(DeploymentStatus.FAILED);
                history.setErrorMessage(e.getMessage());
                historyRepository.save(history);
                return new DeploymentResult(false, e.getMessage(), history);
            }
        }, executorService);
    }

    /** Destroys a workspace's infrastructure synchronously, persisting progress. */
    public DeploymentResult destroyInfrastructure(String workspaceName) {
        DeploymentHistory history = createDestructionHistory(workspaceName);
        try {
            Workspace workspace = new Workspace(workspaceName,
                    Paths.get(workspaceManager.getBaseDirectory(), workspaceName).toString());
            history.setStatus(DeploymentStatus.DESTROYING);
            historyRepository.save(history);
            TerraformResult destroyResult = workspaceManager.destroy(workspace);
            history.setDestroyOutput(destroyResult.getOutput());
            if (destroyResult.getExitCode() != 0) {
                history.setStatus(DeploymentStatus.FAILED);
                history.setErrorMessage(destroyResult.getError());
                historyRepository.save(history);
                return new DeploymentResult(false, "Destruction failed", history);
            }
            history.setStatus(DeploymentStatus.DESTROYED);
            historyRepository.save(history);
            return new DeploymentResult(true, "Infrastructure destroyed", history);
        } catch (TerraformException e) {
            history.setStatus(DeploymentStatus.FAILED);
            history.setErrorMessage(e.getMessage());
            historyRepository.save(history);
            return new DeploymentResult(false, e.getMessage(), history);
        }
    }

    /** Reads current state and outputs for a workspace; errors become a status. */
    public InfrastructureStatus getInfrastructureStatus(String workspaceName) {
        try {
            Workspace workspace = new Workspace(workspaceName,
                    Paths.get(workspaceManager.getBaseDirectory(), workspaceName).toString());
            WorkspaceState state = workspaceManager.getState(workspace);
            Map<String, Object> outputs = workspaceManager.getOutputs(workspace);
            return new InfrastructureStatus(workspaceName, state, outputs);
        } catch (TerraformException e) {
            return new InfrastructureStatus(workspaceName, e.getMessage());
        }
    }

    /** Persists the initial REQUESTED record for a deployment. */
    private DeploymentHistory createDeploymentHistory(DeploymentRequest request) {
        DeploymentHistory history = new DeploymentHistory();
        history.setWorkspaceName(request.getWorkspaceName());
        history.setConfiguration(request.getConfiguration());
        history.setRequestedBy(request.getRequestedBy());
        history.setStatus(DeploymentStatus.REQUESTED);
        history.setCreatedAt(Instant.now());
        return historyRepository.save(history);
    }

    /** Persists the initial DESTROY_REQUESTED record for a destruction. */
    private DeploymentHistory createDestructionHistory(String workspaceName) {
        DeploymentHistory history = new DeploymentHistory();
        history.setWorkspaceName(workspaceName);
        history.setStatus(DeploymentStatus.DESTROY_REQUESTED);
        history.setCreatedAt(Instant.now());
        return historyRepository.save(history);
    }
}

Advanced Features

1. Dynamic Module Registry

@Component
/**
 * In-memory registry of Terraform modules keyed by "name:version".
 * Thread-safe via ConcurrentHashMap.
 */
public class TerraformModuleRegistry {
    private final Map<String, TerraformModule> modules;
    // NOTE(review): gitService is injected but never used here — presumably
    // reserved for fetching module sources; confirm or remove.
    private final GitService gitService;

    public TerraformModuleRegistry(GitService gitService) {
        this.modules = new ConcurrentHashMap<>();
        this.gitService = gitService;
        initializeDefaultModules();
    }

    /** Seeds the registry with the community AWS VPC and ECS modules. */
    private void initializeDefaultModules() {
        registerModule(new TerraformModule("aws-vpc", "1.0.0",
                "git::https://github.com/terraform-aws-modules/terraform-aws-vpc.git",
                Map.of(
                        "name", "string",
                        "cidr", "string",
                        "azs", "list"
                )));
        registerModule(new TerraformModule("aws-ecs", "1.0.0",
                "git::https://github.com/terraform-aws-modules/terraform-aws-ecs.git",
                Map.of(
                        "cluster_name", "string",
                        "container_insights", "bool"
                )));
    }

    /** Registers (or replaces) a module under "name:version". */
    public void registerModule(TerraformModule module) {
        modules.put(module.getName() + ":" + module.getVersion(), module);
    }

    /** Looks up a module by exact name and version; null if absent. */
    public TerraformModule getModule(String name, String version) {
        return modules.get(name + ":" + version);
    }

    /** Substring-matches the query against module names and descriptions. */
    public List<TerraformModule> searchModules(String query) {
        // BUG FIX: getDescription() can be null (the default modules are
        // registered without one), which previously threw an NPE here.
        return modules.values().stream()
                .filter(module -> module.getName().contains(query)
                        || (module.getDescription() != null
                                && module.getDescription().contains(query)))
                .collect(Collectors.toList());
    }

    /**
     * Renders a module-call configuration for the given registered module.
     *
     * @throws IllegalArgumentException if the module/version is not registered
     */
    public String generateModuleUsage(String moduleName, String version,
            Map<String, Object> inputs) {
        TerraformModule module = getModule(moduleName, version);
        if (module == null) {
            throw new IllegalArgumentException("Module not found: " + moduleName);
        }
        TerraformConfigBuilder builder = new TerraformConfigBuilder();
        builder.module(moduleName, module.getSource(), inputs);
        return builder.build();
    }

    /** Repoints a registered module's source (e.g. to a local checkout). No-op if absent. */
    public void updateModuleSource(String moduleName, String version, String localPath) {
        TerraformModule module = getModule(moduleName, version);
        if (module != null) {
            module.setSource(localPath);
        }
    }
}

2. State Management and Locking

@Component
/**
 * Generates backend configuration for remote Terraform state and delegates
 * workspace locking to a LockManager.
 */
public class TerraformStateManager {
    private final BackendConfig backendConfig;
    private final LockManager lockManager;

    public TerraformStateManager(BackendConfig backendConfig, LockManager lockManager) {
        this.backendConfig = backendConfig;
        this.lockManager = lockManager;
    }

    /**
     * Renders a `terraform { backend "<type>" { ... } }` stanza for the
     * configured backend type and the given workspace.
     *
     * @throws IllegalStateException if the configured backend type is unknown
     */
    public String generateBackendConfig(String workspaceName) {
        // Named 'settings' to avoid shadowing the backendConfig field, which
        // the original local variable did.
        Map<String, Object> settings = new LinkedHashMap<>();
        String backendType;
        switch (backendConfig.getType()) {
            case S3:
                backendType = "s3";
                settings.put("bucket", backendConfig.getBucketName());
                settings.put("key", "terraform/" + workspaceName + "/state.tfstate");
                settings.put("region", backendConfig.getRegion());
                settings.put("dynamodb_table", backendConfig.getLockTable());
                settings.put("encrypt", true);
                break;
            case AZURERM:
                backendType = "azurerm";
                settings.put("resource_group_name", backendConfig.getResourceGroup());
                settings.put("storage_account_name", backendConfig.getStorageAccount());
                settings.put("container_name", backendConfig.getContainerName());
                settings.put("key", "terraform/" + workspaceName + "/state.tfstate");
                break;
            case GCS:
                backendType = "gcs";
                settings.put("bucket", backendConfig.getBucketName());
                settings.put("prefix", "terraform/" + workspaceName);
                break;
            default:
                throw new IllegalStateException(
                        "Unsupported backend type: " + backendConfig.getType());
        }
        return toHCL(backendType, settings);
    }

    /**
     * Renders backend settings as HCL.
     * BUG FIX: the original called toHCL(...) but never defined it anywhere
     * in this class, so the class did not compile.
     */
    private String toHCL(String backendType, Map<String, Object> settings) {
        StringBuilder hcl = new StringBuilder();
        hcl.append("terraform {\n");
        hcl.append("  backend \"").append(backendType).append("\" {\n");
        for (Map.Entry<String, Object> entry : settings.entrySet()) {
            Object value = entry.getValue();
            String rendered = value instanceof String
                    ? "\"" + value + "\""
                    : String.valueOf(value);
            hcl.append("    ").append(entry.getKey()).append(" = ").append(rendered).append("\n");
        }
        hcl.append("  }\n");
        hcl.append("}\n");
        return hcl.toString();
    }

    /** Acquires a 30-minute lease on the workspace for the given operation. */
    public boolean acquireLock(String workspaceName, String operationId) {
        return lockManager.acquireLock(workspaceName, operationId, Duration.ofMinutes(30));
    }

    /** Releases the lease held by the given operation. */
    public void releaseLock(String workspaceName, String operationId) {
        lockManager.releaseLock(workspaceName, operationId);
    }

    /** True if any operation currently holds the workspace lock. */
    public boolean isLocked(String workspaceName) {
        return lockManager.isLocked(workspaceName);
    }

    /** Lock metadata for the workspace, if one is held. */
    public Optional<LockInfo> getLockInfo(String workspaceName) {
        return lockManager.getLockInfo(workspaceName);
    }

    /**
     * Migrates state between backends.
     * TODO: not implemented — would shell out to `terraform init -migrate-state`.
     */
    public void migrateState(String workspaceName, String fromBackend, String toBackend) {
        // Intentionally unimplemented; see TODO above.
    }

    /**
     * Creates a state snapshot for rollback.
     * TODO: returns a placeholder; versioned storage is not implemented yet.
     */
    public StateVersion createStateSnapshot(String workspaceName) {
        return new StateVersion(workspaceName, Instant.now(), "snapshot-id");
    }

    /**
     * Rolls back state to a specific version.
     * TODO: stub — always reports success without doing anything.
     */
    public boolean rollbackToVersion(String workspaceName, String versionId) {
        return true;
    }
}

Testing Framework

1. Terraform Configuration Testing

@ExtendWith(MockitoExtension.class)
/**
 * Unit tests for the generated Terraform configuration text: assert that the
 * rendered output mentions the expected resource types and values.
 */
public class TerraformConfigurationTest {

    @Test
    public void testVpcConfigurationGeneration() {
        // A minimal VPC with one public and one private subnet plus an IGW.
        var subnets = Arrays.asList(
                new AwsVpcBuilder.SubnetConfig("10.0.1.0/24", 'a', true),
                new AwsVpcBuilder.SubnetConfig("10.0.2.0/24", 'b', false));
        var rendered = new AwsVpcBuilder("test", "us-east-1")
                .withVpc("10.0.0.0/16", true)
                .withSubnets(subnets)
                .withInternetGateway()
                .build();
        // The rendered text must reference the VPC, its CIDR, and the subnets.
        assertThat(rendered).contains("aws_vpc");
        assertThat(rendered).contains("10.0.0.0/16");
        assertThat(rendered).contains("aws_subnet");
    }

    @Test
    public void testEcsConfigurationGeneration() {
        // One nginx container serving port 80 with a single env variable.
        var container = new EcsBuilder.ContainerDefinition("app", "nginx:latest", 256, 512)
                .addPortMapping(new EcsBuilder.PortMapping(80, 80, "tcp"))
                .addEnvironment("ENV", "test");
        // Two-task Fargate service spread over two subnets.
        var service = new EcsBuilder.ServiceConfig(
                2,
                Arrays.asList("subnet-123", "subnet-456"),
                Arrays.asList("sg-123"),
                true);
        var rendered = new EcsBuilder("test", "us-east-1")
                .withEcsCluster("test-cluster")
                .withTaskDefinition("app", container)
                .withEcsService("app-service", "test-cluster", service)
                .withIamRoles()
                .build();
        assertThat(rendered).contains("aws_ecs_cluster");
        assertThat(rendered).contains("aws_ecs_task_definition");
        assertThat(rendered).contains("nginx:latest");
    }
}
/**
 * Integration test that drives the workspace manager end-to-end (plan only).
 *
 * <p>NOTE(review): this test shells out to a real {@code terraform} binary and
 * downloads the AWS provider during init — it will fail offline or without
 * terraform on PATH. TODO confirm it is tagged/excluded appropriately in CI.
 */
@SpringBootTest
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
public class TerraformIntegrationTest {
// Workspace manager under test, wired from the Spring context.
@Autowired
private TerraformWorkspaceManager workspaceManager;
// NOTE(review): tempDir is never referenced in this class — either use it as
// the workspace base directory or remove the field. TODO confirm intent.
@TempDir
Path tempDir;
@Test
public void testTerraformPlanAndApply() throws Exception {
// Minimal self-contained HCL: AWS provider plus a single S3 bucket resource.
// Create simple Terraform configuration
String config = """
terraform {
required_providers {
aws = {
source  = "hashicorp/aws"
version = "~> 4.0"
}
}
}
provider "aws" {
region = "us-east-1"
}
resource "aws_s3_bucket" "test" {
bucket = "test-bucket-12345"
}
""";
Workspace workspace = workspaceManager.createWorkspace("test-workspace", config);
// Test plan
// Exit code 0 = plan succeeded; output should mention the planned bucket.
TerraformResult planResult = workspaceManager.plan(workspace);
assertThat(planResult.getExitCode()).isEqualTo(0);
assertThat(planResult.getOutput()).contains("aws_s3_bucket");
// Note: Actual apply would create real resources, so we might skip in unit tests
}
}

Spring Boot Integration

1. Auto-Configuration

/**
 * Spring Boot auto-configuration wiring the Terraform integration beans.
 *
 * <p>Every bean is guarded by {@code @ConditionalOnMissingBean}, so an
 * application can override any of them by declaring its own bean of the
 * same type.
 */
@Configuration
@EnableConfigurationProperties(TerraformProperties.class)
public class TerraformAutoConfiguration {
/** Low-level process wrapper around the configured terraform binary. */
@Bean
@ConditionalOnMissingBean
public TerraformJavaIntegration terraformJavaIntegration(TerraformProperties properties) {
return new TerraformJavaIntegration(properties.getTerraformPath());
}
/** Manages per-workspace directories rooted at the configured base dir. */
@Bean
@ConditionalOnMissingBean
public TerraformWorkspaceManager terraformWorkspaceManager(
TerraformJavaIntegration terraformIntegration,
TerraformProperties properties) {
return new TerraformWorkspaceManager(
terraformIntegration, properties.getWorkspaceBaseDir());
}
/** High-level deploy/destroy service; records runs in the history repository. */
@Bean
@ConditionalOnMissingBean
public InfrastructureDeploymentService infrastructureDeploymentService(
TerraformWorkspaceManager workspaceManager,
DeploymentHistoryRepository historyRepository) {
return new InfrastructureDeploymentService(workspaceManager, historyRepository);
}
/** Registry of reusable Terraform modules fetched via Git. */
@Bean
@ConditionalOnMissingBean
public TerraformModuleRegistry terraformModuleRegistry(GitService gitService) {
return new TerraformModuleRegistry(gitService);
}
}
/**
 * Externalized configuration bound from the {@code terraform.*} prefix.
 *
 * <p>Fix: the class only had "// Getters and setters" placeholder comments.
 * Spring's relaxed binding for non-constructor-bound
 * {@code @ConfigurationProperties} classes requires real getters/setters,
 * so the accessors are now implemented.
 */
@ConfigurationProperties(prefix = "terraform")
public class TerraformProperties {
    /** Path to the terraform executable; defaults to the one on PATH. */
    private String terraformPath = "terraform";
    /** Root directory under which per-workspace directories are created. */
    private String workspaceBaseDir = "./terraform-workspaces";
    /** Remote state backend settings; defaults to a local backend. */
    private Backend backend = new Backend();
    /** Extra environment variables passed to terraform processes. */
    private Map<String, String> environmentVariables = new HashMap<>();

    public String getTerraformPath() { return terraformPath; }
    public void setTerraformPath(String terraformPath) { this.terraformPath = terraformPath; }

    public String getWorkspaceBaseDir() { return workspaceBaseDir; }
    public void setWorkspaceBaseDir(String workspaceBaseDir) { this.workspaceBaseDir = workspaceBaseDir; }

    public Backend getBackend() { return backend; }
    public void setBackend(Backend backend) { this.backend = backend; }

    public Map<String, String> getEnvironmentVariables() { return environmentVariables; }
    public void setEnvironmentVariables(Map<String, String> environmentVariables) { this.environmentVariables = environmentVariables; }

    /** Remote state backend settings (S3-oriented; LOCAL needs none of them). */
    public static class Backend {
        private BackendType type = BackendType.LOCAL;
        private String bucketName;
        private String region;
        private String lockTable;

        public BackendType getType() { return type; }
        public void setType(BackendType type) { this.type = type; }

        public String getBucketName() { return bucketName; }
        public void setBucketName(String bucketName) { this.bucketName = bucketName; }

        public String getRegion() { return region; }
        public void setRegion(String region) { this.region = region; }

        public String getLockTable() { return lockTable; }
        public void setLockTable(String lockTable) { this.lockTable = lockTable; }
    }

    /** Supported remote state backend types. */
    public enum BackendType {
        LOCAL, S3, AZURERM, GCS
    }
}

2. REST API Controllers

/**
 * REST endpoints for deploying, inspecting, and destroying infrastructure,
 * and for browsing the Terraform module registry.
 */
@RestController
@RequestMapping("/api/infrastructure")
@Validated
public class InfrastructureController {

    private final InfrastructureDeploymentService deploymentService;
    private final TerraformModuleRegistry moduleRegistry;

    public InfrastructureController(InfrastructureDeploymentService deploymentService,
            TerraformModuleRegistry moduleRegistry) {
        this.deploymentService = deploymentService;
        this.moduleRegistry = moduleRegistry;
    }

    /** Kicks off an asynchronous deployment and returns 202 Accepted. */
    @PostMapping("/deploy")
    public ResponseEntity<DeploymentResponse> deployInfrastructure(
            @Valid @RequestBody DeploymentRequest request) {
        CompletableFuture<DeploymentResult> pendingDeployment =
            deploymentService.deployInfrastructure(request);
        DeploymentResponse body = new DeploymentResponse(pendingDeployment, "Deployment initiated");
        return ResponseEntity.accepted().body(body);
    }

    /** Lists registered modules; narrows by the optional search term. */
    @GetMapping("/modules")
    public ResponseEntity<List<TerraformModule>> listModules(
            @RequestParam(required = false) String search) {
        List<TerraformModule> result;
        if (search == null) {
            result = new ArrayList<>(moduleRegistry.getAllModules());
        } else {
            result = moduleRegistry.searchModules(search);
        }
        return ResponseEntity.ok(result);
    }

    /** Renders an HCL usage snippet for a registered module. */
    @PostMapping("/modules/generate")
    public ResponseEntity<ModuleGenerationResponse> generateModuleConfiguration(
            @Valid @RequestBody ModuleGenerationRequest request) {
        String configuration = moduleRegistry.generateModuleUsage(
            request.getModuleName(), request.getVersion(), request.getInputs());
        return ResponseEntity.ok(new ModuleGenerationResponse(configuration));
    }

    /** Returns the current infrastructure status for a workspace. */
    @GetMapping("/{workspaceName}/status")
    public ResponseEntity<InfrastructureStatus> getInfrastructureStatus(
            @PathVariable String workspaceName) {
        return ResponseEntity.ok(deploymentService.getInfrastructureStatus(workspaceName));
    }

    /** Destroys all infrastructure in the workspace (synchronous). */
    @DeleteMapping("/{workspaceName}")
    public ResponseEntity<DeploymentResult> destroyInfrastructure(
            @PathVariable String workspaceName) {
        return ResponseEntity.ok(deploymentService.destroyInfrastructure(workspaceName));
    }
}
// DTO classes
/**
 * Request body for {@code POST /api/infrastructure/deploy}.
 *
 * <p>Fix: the "// Getters and setters" placeholder left the class without
 * accessors, so JSON deserialization and bean validation could not reach the
 * fields; real getters/setters are now implemented.
 */
public class DeploymentRequest {
    /** Target workspace; must be non-blank. */
    @NotBlank
    private String workspaceName;
    /** Terraform configuration (HCL) to deploy; must be non-blank. */
    @NotBlank
    private String configuration;
    /** Optional identity of the requester, for audit history. */
    private String requestedBy;
    /** Terraform input variables; defaults to empty. */
    private Map<String, Object> variables = new HashMap<>();

    public String getWorkspaceName() { return workspaceName; }
    public void setWorkspaceName(String workspaceName) { this.workspaceName = workspaceName; }

    public String getConfiguration() { return configuration; }
    public void setConfiguration(String configuration) { this.configuration = configuration; }

    public String getRequestedBy() { return requestedBy; }
    public void setRequestedBy(String requestedBy) { this.requestedBy = requestedBy; }

    public Map<String, Object> getVariables() { return variables; }
    public void setVariables(Map<String, Object> variables) { this.variables = variables; }
}
/**
 * Response body for an accepted deployment request.
 *
 * <p>Fix: the "// Getters" placeholder left the class with no accessors, so
 * JSON serialization would have produced an empty object; real getters are
 * now implemented.
 */
public class DeploymentResponse {
    private final String message;
    private final String deploymentId;
    private final Instant timestamp;

    /**
     * @param future async handle to the running deployment.
     *     NOTE(review): accepted for API compatibility but not retained — the
     *     generated deploymentId is not linked to it, so callers cannot look
     *     the deployment up later. TODO confirm tracking is handled elsewhere.
     * @param message human-readable status message
     */
    public DeploymentResponse(CompletableFuture<DeploymentResult> future, String message) {
        this.message = message;
        this.deploymentId = UUID.randomUUID().toString();
        this.timestamp = Instant.now();
    }

    public String getMessage() { return message; }

    public String getDeploymentId() { return deploymentId; }

    public Instant getTimestamp() { return timestamp; }
}

Security and Best Practices

1. Security Hardening

/**
 * Pre-deployment security checks and output masking for Terraform configs.
 */
@Component
public class TerraformSecurityManager {

    /**
     * Validates a configuration against basic security rules.
     *
     * @param config Terraform configuration text (HCL)
     * @throws SecurityException if sensitive data, dangerous resources, or
     *     overly permissive IAM policies are detected
     */
    public void validateConfiguration(String config) throws SecurityException {
        // Check for sensitive data exposure
        if (containsSensitiveData(config)) {
            throw new SecurityException("Configuration contains potentially sensitive data");
        }
        // Check for dangerous resources
        if (containsDangerousResources(config)) {
            throw new SecurityException("Configuration contains dangerous resources");
        }
        // Validate IAM policies are not too permissive
        if (hasOverlyPermissiveIam(config)) {
            throw new SecurityException("Configuration contains overly permissive IAM policies");
        }
    }

    /**
     * Strips comments and blank lines that might carry sensitive information.
     * NOTE(review): the "#.*" removal also affects '#' characters inside
     * string literals — acceptable for sanitization, but lossy. TODO confirm.
     */
    public String sanitizeConfiguration(String config) {
        // Remove comments that might contain sensitive information
        config = config.replaceAll("#.*", "");
        // Remove empty lines
        config = config.replaceAll("\\n\\s*\\n", "\n");
        return config;
    }

    /**
     * Returns a copy of the outputs with password/secret values masked.
     *
     * @param outputs terraform output name → value
     * @return name → stringified value, masked where the key looks sensitive
     */
    public Map<String, String> maskSensitiveOutputs(Map<String, Object> outputs) {
        return outputs.entrySet().stream()
            .collect(Collectors.toMap(
                Map.Entry::getKey,
                entry -> maskSensitiveValue(entry.getKey(), entry.getValue())
            ));
    }

    /**
     * Substring scan for sensitive-looking words.
     * NOTE(review): "key" is extremely broad — it matches legitimate HCL such
     * as a backend "key" attribute, so this will reject many valid configs.
     * Behavior preserved; TODO confirm whether the pattern list should narrow.
     */
    private boolean containsSensitiveData(String config) {
        String[] sensitivePatterns = {
            "password", "secret", "key", "token", "credential"
        };
        return Arrays.stream(sensitivePatterns)
            .anyMatch(pattern -> config.toLowerCase().contains(pattern));
    }

    /**
     * Scans for known-dangerous resource patterns.
     *
     * <p>Bug fix: {@code String.matches()} anchors the pattern to the ENTIRE
     * input, and {@code .} does not cross newlines — so on any real multi-line
     * config these patterns could never match and the check was dead. We now
     * embed the DOTALL flag and wrap the pattern so it matches anywhere.
     */
    private boolean containsDangerousResources(String config) {
        String[] dangerousResources = {
            "aws_iam_user_policy_attachment.*\\*",
            "aws_security_group_rule.*0.0.0.0/0"
        };
        return Arrays.stream(dangerousResources)
            .anyMatch(pattern -> config.matches("(?s).*" + pattern + ".*"));
    }

    /**
     * Detects wildcard-resource Allow policies.
     *
     * <p>Bug fix: the original exact-substring checks missed JSON written with
     * normal spacing (e.g. {@code "Effect": "Allow"}). Whitespace is stripped
     * before comparing so formatting no longer defeats the check.
     */
    private boolean hasOverlyPermissiveIam(String config) {
        String compact = config.replaceAll("\\s", "");
        return compact.contains("\"Effect\":\"Allow\"") &&
            compact.contains("\"Resource\":\"*\"");
    }

    /** Masks the value when the key names a password or secret. */
    private String maskSensitiveValue(String key, Object value) {
        if (key.toLowerCase().contains("password") ||
                key.toLowerCase().contains("secret")) {
            return "***MASKED***";
        }
        return String.valueOf(value);
    }
}

Conclusion

Infrastructure as Code with Terraform in Java provides:

  1. Type Safety: Compile-time checking of infrastructure configurations
  2. Reusability: Modular, reusable infrastructure components
  3. IDE Support: Full IDE support with autocomplete and refactoring
  4. Testing: Unit and integration testing for infrastructure code
  5. Integration: Seamless integration with existing Java ecosystems

Key benefits:

  • Consistency: Ensure consistent infrastructure across environments
  • Version Control: Track infrastructure changes in Git
  • Collaboration: Enable team collaboration on infrastructure
  • Automation: Automate infrastructure deployment and management
  • Security: Implement security best practices programmatically

This approach bridges the gap between infrastructure management and application development, enabling teams to manage their entire stack using familiar Java tools and practices.

Leave a Reply

Your email address will not be published. Required fields are marked *


Macro Nepal Helper