Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 25 additions & 1 deletion build.gradle.kts
Original file line number Diff line number Diff line change
Expand Up @@ -74,8 +74,15 @@ dependencies {

// Test
testImplementation("org.junit.jupiter:junit-jupiter-engine:6.0.3")
testImplementation("org.junit.platform:junit-platform-launcher:1.14.4")
testImplementation("org.junit.platform:junit-platform-launcher:2.0.3")
testImplementation("org.wiremock:wiremock:3.13.2")

// Testcontainers
testImplementation(platform("org.testcontainers:testcontainers-bom:1.21.3"))
testImplementation("org.testcontainers:junit-jupiter")
testImplementation("org.testcontainers:testcontainers")
// SLF4J 2.x binding so Testcontainers debug logs are visible (log4j-slf4j-impl targets 1.x)
testImplementation("org.apache.logging.log4j:log4j-slf4j2-impl:$log4jVersion")
}

// ---------------------------------------------------------------------------
Expand Down Expand Up @@ -180,6 +187,23 @@ tasks.withType<Test> {
useJUnitPlatform()
}

// Exclude Docker-dependent end-to-end tests (tagged "e2e") from the default
// `test` task; they run separately via the `e2eTest` task so `./gradlew test`
// stays runnable on machines without Docker.
tasks.named<Test>("test") {
useJUnitPlatform { excludeTags("e2e") }
}

// Dedicated task for Docker-backed (Testcontainers) end-to-end tests.
// Invoke explicitly with `./gradlew e2eTest`; the default `test` task skips these.
val e2eTest by tasks.registering(Test::class) {
description = "Runs end-to-end tests that require Docker (via Testcontainers)"
group = "verification"
// Runs only tests annotated with @Tag("e2e").
useJUnitPlatform { includeTags("e2e") }
jvmArgs("-Djava.net.preferIPv4Stack=true", "-Djava.security.egd=file:/dev/./urandom")
maxHeapSize = "1g"
// Reuse the regular `test` source set's compiled classes and runtime classpath
// rather than defining a separate source set for e2e tests.
testClassesDirs = sourceSets["test"].output.classesDirs
classpath = sourceSets["test"].runtimeClasspath
// docker-java reads API version from the "api.version" system property (not env var).
// Docker Desktop 4.71+ requires >= 1.40; docker-java defaults to 1.32 without this.
jvmArgs("-Dapi.version=1.47")
}

tasks.withType<JavaCompile> {
options.compilerArgs.addAll(listOf("-Xlint:deprecation", "-Xlint:unchecked"))
options.isFork = true
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,10 +17,14 @@
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.io.*;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Vector;

public class MonitoringImportProcessor {

Expand All @@ -46,7 +50,7 @@ public MonitoringImportProcessor(MonitoringImportConfig config, MonitoringImport
checkForExtractTemplates();
}

public void exec(Vector<File> files) {
public void exec(List<File> files) {

try {
for (File file : files) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,10 @@
*/
package co.elastic.support.monitoring;

import co.elastic.support.diagnostics.commands.CheckElasticsearchVersion;
import co.elastic.support.Constants;
import co.elastic.support.diagnostics.DiagnosticException;
import co.elastic.support.diagnostics.commands.CheckElasticsearchVersion;
import co.elastic.support.rest.ElasticRestClientService;
import co.elastic.support.Constants;
import co.elastic.support.rest.RestClient;
import co.elastic.support.util.ArchiveUtils;
import co.elastic.support.util.JsonYamlUtils;
Expand All @@ -21,11 +21,12 @@
import org.semver4j.Semver;

import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Vector;

public class MonitoringImportService extends ElasticRestClientService {
private Logger logger = LogManager.getLogger(MonitoringImportService.class);
private final Logger logger = LogManager.getLogger(MonitoringImportService.class);

void execImport(MonitoringImportInputs inputs) throws DiagnosticException {
Map<String, Object> configMap = JsonYamlUtils.readYamlFromClasspath(Constants.DIAG_CONFIG, true);
Expand Down Expand Up @@ -65,11 +66,9 @@ void execImport(MonitoringImportInputs inputs) throws DiagnosticException {
}
}

private Vector<File> getDirectoryEntries(String dir) {
private List<File> getDirectoryEntries(String dir) {
File targetDir = new File(dir);
Vector<File> files = new Vector<>();
files.addAll(FileUtils.listFiles(targetDir, null, true));
return files;
return new ArrayList<>(FileUtils.listFiles(targetDir, null, true));
}

private RestClient getClient(MonitoringImportInputs inputs, MonitoringImportConfig config){
Expand Down
32 changes: 0 additions & 32 deletions src/main/java/co/elastic/support/scrub/ScrubConfig.java

This file was deleted.

126 changes: 18 additions & 108 deletions src/main/java/co/elastic/support/scrub/ScrubProcessor.java
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,15 @@
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.util.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
Expand All @@ -28,18 +36,16 @@ public class ScrubProcessor {
private static Map<String, Object> scrubConfig;

private static ConcurrentHashMap<Integer, Integer> ipv4 = new ConcurrentHashMap<>();
private static Vector<String> autoScrub = new Vector<>();
private static Vector<String> globalExclude = new Vector<>();
private static Vector<String> remove = new Vector<>();
private static Vector<ScrubTokenEntry> tokens = new Vector<>();
private static List<String> autoScrub = new ArrayList<>();
private static List<String> globalExclude = new ArrayList<>();
private static List<String> remove = new ArrayList<>();
private static List<ScrubTokenEntry> tokens = new ArrayList<>();
private static ConcurrentHashMap<String, String> clusterInfoCache = new ConcurrentHashMap<>();
private static ConcurrentHashMap<String, String> tokenCache = new ConcurrentHashMap<>();
private static ConcurrentHashMap<String, String> ipv4TokenCache = new ConcurrentHashMap<>();
private static ConcurrentHashMap<String, String> ipv6TokenCache = new ConcurrentHashMap<>();
private static ConcurrentHashMap<String, String> macTokenCache = new ConcurrentHashMap<>();



public ScrubProcessor(String nodes) throws DiagnosticException {
this();

Expand Down Expand Up @@ -111,36 +117,29 @@ private void initScrubTokens() {
tokens.add(new ScrubTokenEntry(tkn, inc, exc));
}
if (tokens.isEmpty()) {
if (tokens.size() == 0) {
logger.info(Constants.CONSOLE, "Scrubbing was enabled but no tokens were defined. Bypassing custom token processing.");
}
logger.info(Constants.CONSOLE, "Scrubbing was enabled but no tokens were defined. Bypassing custom token processing.");
}

logger.debug(tokens);

}

/**
 * Populates the {@code ipv4} octet substitution table used when scrubbing
 * IPv4 addresses. Each possible octet value (0-255) is mapped to a distinct
 * random value drawn from [300, 555] — a range that cannot occur in a real
 * octet, so scrubbed output is visibly synthetic.
 * NOTE(review): assumes downstream consumers tolerate out-of-range octet
 * values in the substituted addresses — confirm.
 */
private void initIpv4() {
    Random random = new Random();
    // 256 distinct ints from [300, 556): exactly a shuffled subset covering
    // one replacement value per possible octet.
    int[] vals = random.ints(300, 556).distinct().limit(256).toArray();
    for (int i = 0; i < 256; i++) {
        ipv4.put(i, vals[i]);
    }
    // Fix: dropped the unused local `int key = 0;` (dead code) and the
    // intermediate IntStream variable.
}

public boolean isMatch(Vector<String> regexs, String entry) {
/**
 * Reports whether the entry matches at least one of the supplied regular
 * expressions. Matching is whole-string, per {@link String#matches(String)},
 * and short-circuits on the first expression that matches.
 *
 * @param regexs regular expressions to test, in order
 * @param entry  the candidate string
 * @return {@code true} if any expression matches the entire entry
 */
public boolean isMatch(List<String> regexs, String entry) {
    return regexs.stream().anyMatch(entry::matches);
}

public boolean isRemove(String entry) {
Expand All @@ -151,40 +150,6 @@ public boolean isExclude(String entry) {
return isMatch(globalExclude, entry);
}

public String scrubIPv4(String input) {

StringBuffer newIp = new StringBuffer();
String[] ipSegments = input.split("\\.");
for (int i = 0; i < 4; i++) {
int set = Integer.parseInt(ipSegments[i]);
if (!ipv4.containsKey(set)) {
logger.info("Error converting ip segment {} from address: {}", Integer.toString(set));
throw new RuntimeException("Error scrubbing IP Addresses");
}
int replace = ipv4.get(set);
newIp.append(replace);
if (i < 3) {
newIp.append(".");
}
}
return newIp.toString();
}

public String scrubIPv6(String input) {

String[] ipSegments = input.split(":");
int sz = ipSegments.length;
StringBuilder newIp = new StringBuilder();

for (int i = 0; i < sz; i++) {
newIp.append(generateToken(ipSegments[i]));
if (i < (sz - 1)) {
newIp.append(":");
}
}
return newIp.toString();
}

public String generateToken(String token) {

if (StringUtils.isEmpty(token)) {
Expand Down Expand Up @@ -251,71 +216,21 @@ public String processContentWithTokens(String content, String entry) {
logger.debug("Entry: {} - Pattern:{} Found:{} Replacement: {}", entry, token.pattern.toString(), hit, replacement);
content = content.replaceAll(hit, replacement );
}

/* while (matcher.find()) {
String group = matcher.group();
String replacement = tokenCache.computeIfAbsent(group, k -> generateToken(k));
logger.debug("Entry: {} - Pattern:{} Found:{} Replacement: {}", entry, token.pattern.toString(), group, replacement);
content = content.replaceFirst(group, replacement);
}*/



}
return content;
}

public String processMacddresses(String content) {

/* Pattern pattern = Pattern.compile(Constants.MacAddrRegex);
Matcher matcher = pattern.matcher(content);

while(matcher.find()){
String group = matcher.group();
content.replaceAll(group, "XX:XX:XX:XX:XX:XX");
}*/
content = processTokens(content, macTokenCache, Constants.MacAddrRegex, tokenGen);

return content;

return processTokens(content, macTokenCache, Constants.MacAddrRegex, tokenGen);
}

private String processIpv4Addresses(String content) {

/* Pattern pattern = Pattern.compile(Constants.IPv4Regex);
Matcher matcher = pattern.matcher(content);
Set<String> ips = new HashSet<>();
while(matcher.find()){
ips.add(matcher.group());
}
for(String ip: ips){
String replacement = ipv4TokenCache.computeIfAbsent(ip, k -> scrubIPv4(k));
content = content.replaceAll(ip, replacement );
}*/
content = processTokens(content, ipv4TokenCache, Constants.IPv4Regex, ipv4Gen);


return content;

return processTokens(content, ipv4TokenCache, Constants.IPv4Regex, ipv4Gen);
}


private String processIpv6Addresses(String content) {

/* Pattern pattern = Pattern.compile(Constants.IPv6Regex);
Matcher matcher = pattern.matcher(content);

Set<String> ips = new HashSet<>();
while(matcher.find()){
ips.add(matcher.group());
}
for(String ip: ips){
String replacement = ipv6TokenCache.computeIfAbsent(ip, k -> scrubIPv6(k));
content = content.replaceAll(ip, replacement );
}*/
content = processTokens(content, ipv6TokenCache, Constants.IPv6Regex, ipv6Gen);

return content;
return processTokens(content, ipv6TokenCache, Constants.IPv6Regex, ipv6Gen);
}

private String processTokens(String content, Map<String, String> cache, String regexString, TokenGenerator generator){
Expand All @@ -332,7 +247,6 @@ private String processTokens(String content, Map<String, String> cache, String r
}

return content;

}


Expand All @@ -343,7 +257,6 @@ private String processClusterArtifacts(String input) {
}

return content;

}

public String processAutoscrub(String input) {
Expand Down Expand Up @@ -409,6 +322,3 @@ public String generate(String input) {
};

}



Loading
Loading