14 changes: 13 additions & 1 deletion api/src/main/java/io/kafbat/ui/config/ClustersProperties.java
@@ -243,11 +243,23 @@ public static class NgramProperties {
@NoArgsConstructor
@AllArgsConstructor
public static class ClusterFtsProperties {
-  boolean enabled = false;
+  boolean enabled = true;
+  boolean defaultEnabled = false;
NgramProperties schemas = new NgramProperties(1, 4);
NgramProperties consumers = new NgramProperties(1, 4);
NgramProperties connect = new NgramProperties(1, 4);
NgramProperties acl = new NgramProperties(1, 4);

+  public boolean use(Boolean request) {
+    if (enabled) {
+      if (Boolean.TRUE.equals(request)) {
+        return true;
+      } else if (request == null && defaultEnabled) {
+        return true;
+      }
+    }
+    return false;
+  }
}

@PostConstruct
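The new `use(Boolean request)` helper resolves the per-request FTS flag against the cluster configuration: an explicit `fts=true` wins whenever the feature is enabled, an absent flag falls back to `defaultEnabled`, and anything else disables FTS. A minimal sketch of that resolution, assuming the Lombok `@AllArgsConstructor` takes the fields in declaration order (the `NgramProperties` arguments are placeholders):

```java
import io.kafbat.ui.config.ClustersProperties.ClusterFtsProperties;
import io.kafbat.ui.config.ClustersProperties.NgramProperties;

class FtsResolutionSketch {
  public static void main(String[] args) {
    NgramProperties ngram = new NgramProperties(1, 4);
    // enabled = true, defaultEnabled = false
    ClusterFtsProperties fts = new ClusterFtsProperties(true, false, ngram, ngram, ngram, ngram);

    System.out.println(fts.use(Boolean.TRUE));   // true:  explicit opt-in
    System.out.println(fts.use(Boolean.FALSE));  // false: explicit opt-out
    System.out.println(fts.use(null));           // false: no flag and defaultEnabled is false
  }
}
```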
api/src/main/java/io/kafbat/ui/controller/AclsController.java
@@ -25,7 +25,7 @@

@RestController
@RequiredArgsConstructor
-public class AclsController extends AbstractController implements AclsApi, McpTool {
+public class AclsController extends AbstractController implements AclsApi, McpTool {

private final AclsService aclsService;

@@ -69,6 +69,7 @@ public Mono<ResponseEntity<Flux<KafkaAclDTO>>> listAcls(String clusterName,
String resourceName,
KafkaAclNamePatternTypeDTO namePatternTypeDto,
String search,
+ Boolean fts,
ServerWebExchange exchange) {
AccessContext context = AccessContext.builder()
.cluster(clusterName)
@@ -89,7 +90,7 @@ public Mono<ResponseEntity<Flux<KafkaAclDTO>>> listAcls(String clusterName,
return validateAccess(context).then(
Mono.just(
ResponseEntity.ok(
- aclsService.listAcls(getCluster(clusterName), filter, search)
+ aclsService.listAcls(getCluster(clusterName), filter, search, fts)
.map(ClusterMapper::toKafkaAclDto)))
).doOnEach(sig -> audit(context, sig));
}
api/src/main/java/io/kafbat/ui/controller/ConsumerGroupsController.java
@@ -128,6 +128,7 @@ public Mono<ResponseEntity<ConsumerGroupsPageResponseDTO>> getConsumerGroupsPage
String search,
ConsumerGroupOrderingDTO orderBy,
SortOrderDTO sortOrderDto,
+ Boolean fts,
ServerWebExchange exchange) {

var context = AccessContext.builder()
api/src/main/java/io/kafbat/ui/controller/KafkaConnectController.java
@@ -129,6 +129,7 @@ public Mono<ResponseEntity<Flux<FullConnectorInfoDTO>>> getAllConnectors(
String search,
ConnectorColumnsToSortDTO orderBy,
SortOrderDTO sortOrder,
+ Boolean fts,
ServerWebExchange exchange
) {
var context = AccessContext.builder()
@@ -140,7 +141,7 @@
? getConnectorsComparator(orderBy)
: getConnectorsComparator(orderBy).reversed();

- Flux<FullConnectorInfoDTO> job = kafkaConnectService.getAllConnectors(getCluster(clusterName), search)
+ Flux<FullConnectorInfoDTO> job = kafkaConnectService.getAllConnectors(getCluster(clusterName), search, fts)
.filterWhen(dto -> accessControlService.isConnectAccessible(dto.getConnect(), clusterName))
.sort(comparator);

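Each listing endpoint now takes the flag as a nullable `Boolean fts` query parameter, so clients that never send it keep the configured default behavior. A hypothetical request-side check (the endpoint path and `WebTestClient` wiring are assumptions, not part of this diff):

```java
import org.springframework.test.web.reactive.server.WebTestClient;

class FtsQueryParamSketch {
  public static void main(String[] args) {
    WebTestClient client = WebTestClient.bindToServer()
        .baseUrl("http://localhost:8080")
        .build();

    // Omitting ?fts= leaves the decision to the cluster's defaultEnabled setting.
    client.get()
        .uri("/api/clusters/{clusterName}/connectors?search=sink&fts=true", "local")
        .exchange()
        .expectStatus().isOk();
  }
}
```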
api/src/main/java/io/kafbat/ui/controller/SchemasController.java
@@ -217,13 +217,15 @@ public Mono<ResponseEntity<SchemaSubjectsResponseDTO>> getSchemas(String cluster
@Valid String search,
SchemaColumnsToSortDTO orderBy,
SortOrderDTO sortOrder,
+ Boolean fts,
ServerWebExchange serverWebExchange) {
var context = AccessContext.builder()
.cluster(clusterName)
.operationName("getSchemas")
.build();

- ClustersProperties.ClusterFtsProperties fts = clustersProperties.getFts();
+ ClustersProperties.ClusterFtsProperties ftsProperties = clustersProperties.getFts();
+ boolean useFts = ftsProperties.use(fts);

return schemaRegistryService
.getAllSubjectNames(getCluster(clusterName))
@@ -234,7 +236,7 @@ public Mono<ResponseEntity<SchemaSubjectsResponseDTO>> getSchemas(String cluster
int pageSize = perPage != null && perPage > 0 ? perPage : DEFAULT_PAGE_SIZE;
int subjectToSkip = ((pageNum != null && pageNum > 0 ? pageNum : 1) - 1) * pageSize;

- SchemasFilter filter = new SchemasFilter(subjects, fts.isEnabled(), fts.getSchemas());
+ SchemasFilter filter = new SchemasFilter(subjects, useFts, ftsProperties.getSchemas());
List<String> filteredSubjects = new ArrayList<>(filter.find(search));

var totalPages = (filteredSubjects.size() / pageSize)
api/src/main/java/io/kafbat/ui/controller/TopicsController.java
@@ -174,20 +174,22 @@ public Mono<ResponseEntity<TopicsResponseDTO>> getTopics(String clusterName,
@Valid String search,
@Valid TopicColumnsToSortDTO orderBy,
@Valid SortOrderDTO sortOrder,
+ Boolean fts,
ServerWebExchange exchange) {

AccessContext context = AccessContext.builder()
.cluster(clusterName)
.operationName("getTopics")
.build();

- return topicsService.getTopicsForPagination(getCluster(clusterName), search, showInternal)
+ return topicsService.getTopicsForPagination(getCluster(clusterName), search, showInternal, fts)
.flatMap(topics -> accessControlService.filterViewableTopics(topics, clusterName))
.flatMap(topics -> {
int pageSize = perPage != null && perPage > 0 ? perPage : DEFAULT_PAGE_SIZE;
var topicsToSkip = ((page != null && page > 0 ? page : 1) - 1) * pageSize;
- ClustersProperties.ClusterFtsProperties fts = clustersProperties.getFts();
- Comparator<InternalTopic> comparatorForTopic = getComparatorForTopic(orderBy, fts.isEnabled());
+ ClustersProperties.ClusterFtsProperties ftsProperties = clustersProperties.getFts();
+ boolean useFts = ftsProperties.use(fts);
+ Comparator<InternalTopic> comparatorForTopic = getComparatorForTopic(orderBy, useFts);
var comparator = sortOrder == null || !sortOrder.equals(SortOrderDTO.DESC)
? comparatorForTopic : comparatorForTopic.reversed();

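Note the rename from `fts` to `ftsProperties` in the two controllers above: the old local variable would otherwise be shadowed by the new `Boolean fts` method parameter. The resolution step every endpoint performs boils down to the same two lines; a sketch with the field wiring assumed:

```java
import io.kafbat.ui.config.ClustersProperties;

class FtsFlagResolution {
  private final ClustersProperties clustersProperties;

  FtsFlagResolution(ClustersProperties clustersProperties) {
    this.clustersProperties = clustersProperties;
  }

  boolean resolve(Boolean requestFlag) {
    // requestFlag is null when the client omits ?fts=
    ClustersProperties.ClusterFtsProperties ftsProperties = clustersProperties.getFts();
    return ftsProperties.use(requestFlag);
  }
}
```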
4 changes: 3 additions & 1 deletion api/src/main/java/io/kafbat/ui/model/ClusterFeature.java
@@ -8,5 +8,7 @@ public enum ClusterFeature {
KAFKA_ACL_VIEW,
KAFKA_ACL_EDIT,
CLIENT_QUOTA_MANAGEMENT,
- GRAPHS_ENABLED
+ GRAPHS_ENABLED,
+ FTS_ENABLED,
+ FTS_DEFAULT_ENABLED
}
23 changes: 23 additions & 0 deletions api/src/main/java/io/kafbat/ui/serdes/BuiltInSerde.java
@@ -1,7 +1,9 @@
package io.kafbat.ui.serdes;

import io.kafbat.ui.serde.api.PropertyResolver;
+ import io.kafbat.ui.serde.api.SchemaDescription;
import io.kafbat.ui.serde.api.Serde;
+ import java.util.Optional;

public interface BuiltInSerde extends Serde {

@@ -24,4 +26,25 @@ default void configure(PropertyResolver serdeProperties,
PropertyResolver kafkaClusterProperties,
PropertyResolver globalProperties) {
}

+  @Override
+  default boolean canSerialize(String topic, Serde.Target type) {
+    return false;
+  }
+
+  @Override
+  default Serde.Serializer serializer(String topic, Serde.Target type) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  default Optional<SchemaDescription> getSchema(String topic, Serde.Target type) {
+    return Optional.empty();
+  }
+
+  @Override
+  default Optional<String> getDescription() {
+    return Optional.empty();
+  }

}
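With these defaults in place, a read-only serde only has to supply its `NAME`, `canDeserialize`, and `deserializer`; serialization and schema/description lookups fall back to the defaults above. A minimal sketch (the `UppercaseSerde` is illustrative, not part of the PR):

```java
import io.kafbat.ui.serde.api.DeserializeResult;
import io.kafbat.ui.serdes.BuiltInSerde;
import java.util.Map;

// Read-only serde relying entirely on the new BuiltInSerde defaults.
class UppercaseSerde implements BuiltInSerde {
  public static final String NAME = "Uppercase";

  @Override
  public boolean canDeserialize(String topic, Target type) {
    return true;
  }

  @Override
  public Deserializer deserializer(String topic, Target type) {
    return (headers, bytes) ->
        new DeserializeResult(new String(bytes).toUpperCase(),
            DeserializeResult.Type.STRING, Map.of());
  }
}
```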
4 changes: 2 additions & 2 deletions api/src/main/java/io/kafbat/ui/serdes/ClusterSerdes.java
@@ -64,12 +64,12 @@ public Stream<SerdeInstance> all() {

public SerdeInstance suggestSerdeForSerialize(String topic, Serde.Target type) {
return findSerdeByPatternsOrDefault(topic, type, s -> s.canSerialize(topic, type))
-        .orElse(serdes.get(StringSerde.name()));
+        .orElse(serdes.get(StringSerde.NAME));
}

public SerdeInstance suggestSerdeForDeserialize(String topic, Serde.Target type) {
return findSerdeByPatternsOrDefault(topic, type, s -> s.canDeserialize(topic, type))
-        .orElse(serdes.get(StringSerde.name()));
+        .orElse(serdes.get(StringSerde.NAME));
}

@Override
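One practical motivation for replacing the static `name()` methods with `NAME` constants: a `static final String` initialized from a literal is a compile-time constant, so it can appear where Java requires one, such as switch labels or annotation attributes. A hypothetical dispatch, not from the PR:

```java
import io.kafbat.ui.serdes.builtin.Base64Serde;
import io.kafbat.ui.serdes.builtin.StringSerde;

class SerdeNameSwitch {
  static String describe(String serdeName) {
    // name() calls could not be used as case labels; constants can.
    return switch (serdeName) {
      case StringSerde.NAME -> "plain UTF-8 strings";
      case Base64Serde.NAME -> "base64-encoded bytes";
      default -> "another serde";
    };
  }
}
```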
62 changes: 43 additions & 19 deletions api/src/main/java/io/kafbat/ui/serdes/SerdesInitializer.java
@@ -20,6 +20,9 @@
import io.kafbat.ui.serdes.builtin.UInt32Serde;
import io.kafbat.ui.serdes.builtin.UInt64Serde;
import io.kafbat.ui.serdes.builtin.UuidBinarySerde;
+ import io.kafbat.ui.serdes.builtin.mm2.CheckpointSerde;
+ import io.kafbat.ui.serdes.builtin.mm2.HeartbeatSerde;
+ import io.kafbat.ui.serdes.builtin.mm2.OffsetSyncSerde;
import io.kafbat.ui.serdes.builtin.sr.SchemaRegistrySerde;
import java.util.LinkedHashMap;
import java.util.Map;
@@ -39,18 +42,23 @@ public class SerdesInitializer {
public SerdesInitializer() {
this(
ImmutableMap.<String, Class<? extends BuiltInSerde>>builder()
-            .put(StringSerde.name(), StringSerde.class)
-            .put(SchemaRegistrySerde.name(), SchemaRegistrySerde.class)
-            .put(ProtobufFileSerde.name(), ProtobufFileSerde.class)
-            .put(Int32Serde.name(), Int32Serde.class)
-            .put(Int64Serde.name(), Int64Serde.class)
-            .put(UInt32Serde.name(), UInt32Serde.class)
-            .put(UInt64Serde.name(), UInt64Serde.class)
-            .put(AvroEmbeddedSerde.name(), AvroEmbeddedSerde.class)
-            .put(Base64Serde.name(), Base64Serde.class)
-            .put(HexSerde.name(), HexSerde.class)
-            .put(UuidBinarySerde.name(), UuidBinarySerde.class)
-            .put(ProtobufRawSerde.name(), ProtobufRawSerde.class)
+            .put(StringSerde.NAME, StringSerde.class)
+            .put(SchemaRegistrySerde.NAME, SchemaRegistrySerde.class)
+            .put(ProtobufFileSerde.NAME, ProtobufFileSerde.class)
+            .put(Int32Serde.NAME, Int32Serde.class)
+            .put(Int64Serde.NAME, Int64Serde.class)
+            .put(UInt32Serde.NAME, UInt32Serde.class)
+            .put(UInt64Serde.NAME, UInt64Serde.class)
+            .put(AvroEmbeddedSerde.NAME, AvroEmbeddedSerde.class)
+            .put(Base64Serde.NAME, Base64Serde.class)
+            .put(HexSerde.NAME, HexSerde.class)
+            .put(UuidBinarySerde.NAME, UuidBinarySerde.class)
+            .put(ProtobufRawSerde.NAME, ProtobufRawSerde.class)
+
+            // mm2 serdes
+            .put(HeartbeatSerde.NAME, HeartbeatSerde.class)
+            .put(OffsetSyncSerde.NAME, OffsetSyncSerde.class)
+            .put(CheckpointSerde.NAME, CheckpointSerde.class)
.build(),
new CustomSerdeLoader()
);
@@ -131,8 +139,8 @@ public ClusterSerdes init(Environment env,
.orElse(null),
Optional.ofNullable(clusterProperties.getDefaultValueSerde())
.map(name -> Preconditions.checkNotNull(registeredSerdes.get(name), "Default value serde not found"))
-            .or(() -> Optional.ofNullable(registeredSerdes.get(SchemaRegistrySerde.name())))
-            .or(() -> Optional.ofNullable(registeredSerdes.get(ProtobufFileSerde.name())))
+            .or(() -> Optional.ofNullable(registeredSerdes.get(SchemaRegistrySerde.NAME)))
+            .or(() -> Optional.ofNullable(registeredSerdes.get(ProtobufFileSerde.NAME)))
.orElse(null),
createFallbackSerde()
);
@@ -142,15 +150,16 @@
 * Registers serdes that should only be used for specific (hard-coded) topics, like ConsumerOffsetsSerde.
*/
private void registerTopicRelatedSerde(Map<String, SerdeInstance> serdes) {
-    registerConsumerOffsetsSerde(serdes);
+    serdes.putAll(consumerOffsetsSerde());
+    serdes.putAll(mirrorMakerSerdes());
}

-  private void registerConsumerOffsetsSerde(Map<String, SerdeInstance> serdes) {
+  private Map<String, SerdeInstance> consumerOffsetsSerde() {
var pattern = Pattern.compile(ConsumerOffsetsSerde.TOPIC);
-    serdes.put(
-        ConsumerOffsetsSerde.name(),
+    return Map.of(
+        ConsumerOffsetsSerde.NAME,
new SerdeInstance(
-            ConsumerOffsetsSerde.name(),
+            ConsumerOffsetsSerde.NAME,
new ConsumerOffsetsSerde(),
pattern,
pattern,
@@ -159,6 +168,21 @@ private void registerConsumerOffsetsSerde(Map<String, SerdeInstance> serdes) {
);
}

+  private Map<String, SerdeInstance> mirrorMakerSerdes() {
+    return Map.of(
+        HeartbeatSerde.NAME,
+        mirrorSerde(HeartbeatSerde.NAME, HeartbeatSerde.TOPIC_NAME_PATTERN, new HeartbeatSerde()),
+        OffsetSyncSerde.NAME,
+        mirrorSerde(OffsetSyncSerde.NAME, OffsetSyncSerde.TOPIC_NAME_PATTERN, new OffsetSyncSerde()),
+        CheckpointSerde.NAME,
+        mirrorSerde(CheckpointSerde.NAME, CheckpointSerde.TOPIC_NAME_PATTERN, new CheckpointSerde())
+    );
+  }
+
+  private SerdeInstance mirrorSerde(String name, Pattern pattern, BuiltInSerde serde) {
+    return new SerdeInstance(name, serde, pattern, pattern, null);
+  }

private SerdeInstance createFallbackSerde() {
StringSerde serde = new StringSerde();
serde.configure(PropertyResolverImpl.empty(), PropertyResolverImpl.empty(), PropertyResolverImpl.empty());
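Since `mirrorSerde(...)` registers each MM2 serde with the same topic-name pattern for keys and values, serde suggestion can resolve them by topic name alone. A sketch, assuming a `ClusterSerdes` built by `SerdesInitializer.init(...)` and MirrorMaker 2's default `heartbeats` topic:

```java
import io.kafbat.ui.serde.api.Serde;
import io.kafbat.ui.serdes.ClusterSerdes;
import io.kafbat.ui.serdes.SerdeInstance;

class Mm2SerdeLookupSketch {
  static SerdeInstance heartbeatSerde(ClusterSerdes serdes) {
    // Falls back to StringSerde if no registered pattern matches the topic.
    return serdes.suggestSerdeForDeserialize("heartbeats", Serde.Target.VALUE);
  }
}
```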
api/src/main/java/io/kafbat/ui/serdes/builtin/AvroEmbeddedSerde.java
@@ -13,31 +13,13 @@
import org.apache.avro.generic.GenericDatumReader;

public class AvroEmbeddedSerde implements BuiltInSerde {

-  public static String name() {
-    return "Avro (Embedded)";
-  }
-
-  @Override
-  public Optional<String> getDescription() {
-    return Optional.empty();
-  }
-
-  @Override
-  public Optional<SchemaDescription> getSchema(String topic, Target type) {
-    return Optional.empty();
-  }
+  public static final String NAME = "Avro (Embedded)";

@Override
public boolean canDeserialize(String topic, Target type) {
return true;
}

-  @Override
-  public boolean canSerialize(String topic, Target type) {
-    return false;
-  }

@Override
public Serializer serializer(String topic, Target type) {
throw new IllegalStateException();
17 changes: 1 addition & 16 deletions api/src/main/java/io/kafbat/ui/serdes/builtin/Base64Serde.java
@@ -1,28 +1,13 @@
package io.kafbat.ui.serdes.builtin;

import io.kafbat.ui.serde.api.DeserializeResult;
- import io.kafbat.ui.serde.api.SchemaDescription;
import io.kafbat.ui.serde.api.Serde;
import io.kafbat.ui.serdes.BuiltInSerde;
import java.util.Base64;
import java.util.Map;
- import java.util.Optional;

public class Base64Serde implements BuiltInSerde {

-  public static String name() {
-    return "Base64";
-  }
-
-  @Override
-  public Optional<String> getDescription() {
-    return Optional.empty();
-  }
-
-  @Override
-  public Optional<SchemaDescription> getSchema(String topic, Serde.Target type) {
-    return Optional.empty();
-  }
+  public static final String NAME = "Base64";

@Override
public boolean canDeserialize(String topic, Serde.Target type) {