@@ -1,5 +1,6 @@
package com.hubspot.rosetta.internal;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.io.SegmentedStringWriter;
@@ -10,19 +11,91 @@
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.introspect.AnnotatedMember;
import com.fasterxml.jackson.databind.introspect.JacksonAnnotationIntrospector;
import com.fasterxml.jackson.databind.ser.std.NonTypedScalarSerializerBase;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import java.io.IOException;
import java.io.Writer;
import java.lang.reflect.AnnotatedElement;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;

abstract class ContextualStoredAsJsonSerializer<T>
extends NonTypedScalarSerializerBase<T> {

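// Configured mapper copies cached per (base mapper, inclusion) pair so each
// combination is only copied once.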
private static final ConcurrentHashMap<InclusionCacheKey, ObjectMapper> MAPPER_CACHE =
new ConcurrentHashMap<>();

private final BeanProperty property;
private final JsonInclude.Include inclusion;

ContextualStoredAsJsonSerializer(Class<T> t, BeanProperty property) {
super(t);
this.property = property;
this.inclusion = findInclusion(property);
}

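// Reads an explicit @JsonInclude from the annotated field or method; returns null
// when the property is absent, unannotated, or declared as USE_DEFAULTS.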
private static JsonInclude.Include findInclusion(BeanProperty property) {
if (property == null) {
return null;
}
AnnotatedMember member = property.getMember();
if (member != null) {
AnnotatedElement annotated = member.getAnnotated();
if (annotated != null) {
JsonInclude annotation = annotated.getAnnotation(JsonInclude.class);
if (
annotation != null && annotation.value() != JsonInclude.Include.USE_DEFAULTS
) {
return annotation.value();
}
}
}
return null;
}

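// Returns the base mapper unchanged when no @JsonInclude override was found,
// otherwise a cached copy configured with the annotated inclusion.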
private ObjectMapper getConfiguredMapper(ObjectMapper baseMapper) {
if (inclusion == null) {
return baseMapper;
}
return MAPPER_CACHE.computeIfAbsent(
new InclusionCacheKey(baseMapper, inclusion),
key -> {
ObjectMapper nestedMapper = baseMapper.copy();
nestedMapper.setAnnotationIntrospector(new JacksonAnnotationIntrospector());
nestedMapper.setSerializationInclusion(key.inclusion);
return nestedMapper;
}
);
}

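// Cache key pairing the base mapper's identity hash with the requested inclusion.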
private static class InclusionCacheKey {

final int mapperIdentity;
final JsonInclude.Include inclusion;

InclusionCacheKey(ObjectMapper mapper, JsonInclude.Include inclusion) {
this.mapperIdentity = System.identityHashCode(mapper);
this.inclusion = inclusion;
}

@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof InclusionCacheKey)) {
return false;
}
InclusionCacheKey that = (InclusionCacheKey) o;
return mapperIdentity == that.mapperIdentity && inclusion == that.inclusion;
}

@Override
public int hashCode() {
return Objects.hash(mapperIdentity, inclusion);
}
}

protected void serializeAsBytes(
@@ -56,6 +129,10 @@ private byte[] serializeToBytes(
ObjectMapper mapper,
SerializerProvider provider
) throws IOException {
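// With an explicit @JsonInclude override, write through the mapper directly so the
// configured inclusion is applied, rather than taking the provider-based path below.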
if (inclusion != null) {
return mapper.writeValueAsBytes(value);
}

try (ByteArrayBuilder array = new ByteArrayBuilder(new BufferRecycler())) {
if (trySerialzieToArray(value, mapper, provider, array)) {
byte[] result = array.toByteArray();
@@ -64,7 +141,6 @@ private byte[] serializeToBytes(
}
}

// fallback on old behavior
return mapper.writeValueAsBytes(value);
}

@@ -73,13 +149,16 @@ private String serializeToString(
ObjectMapper mapper,
SerializerProvider provider
) throws IOException {
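// As in serializeToBytes: an explicit inclusion override serializes straight
// through the mapper.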
if (inclusion != null) {
return mapper.writeValueAsString(value);
}

try (SegmentedStringWriter sw = new SegmentedStringWriter(new BufferRecycler())) {
if (trySerializeToWriter(value, mapper, provider, sw)) {
return sw.getAndClear();
}
}

// fallback on old behavior
JsonNode tree = mapper.valueToTree(value);
if (tree.isNull()) {
return tree.asText();
@@ -136,6 +215,7 @@ private boolean trySerializeToGenerator(
}

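// Resolves the generator's codec and, when an inclusion override is present,
// swaps in the configured copy.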
private ObjectMapper getMapper(JsonGenerator generator) {
return (ObjectMapper) generator.getCodec();
ObjectMapper baseMapper = (ObjectMapper) generator.getCodec();
return getConfiguredMapper(baseMapper);
}
}
@@ -2,8 +2,10 @@

import static org.assertj.core.api.Assertions.assertThat;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.BinaryNode;
import com.fasterxml.jackson.databind.node.NullNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
@@ -31,9 +33,11 @@
import com.hubspot.rosetta.beans.StoredAsJsonListTypeInfoBean.ConcreteStoredAsJsonList;
import com.hubspot.rosetta.beans.StoredAsJsonTypeInfoBean;
import com.hubspot.rosetta.beans.StoredAsJsonTypeInfoBean.ConcreteStoredAsJsonTypeInfo;
import com.hubspot.rosetta.internal.RosettaModule;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
@@ -785,4 +789,81 @@ public void testDeserializingStoredAsJsonPrivateField() throws Exception {
.getMapper()
.readValue(node.toString(), FieldBeanStoredAsJson.class);
}

@Test
public void itIncludesAllFieldsByDefaultWithRosettaMapper()
throws JsonProcessingException {
// Verify that the default Rosetta.getMapper() still includes all fields
// including nulls and empty collections (standard DAO behavior)
InnerBeanWithList bean = new InnerBeanWithList();
bean.values = Collections.emptyList();
bean.name = null;

String json = Rosetta.getMapper().writeValueAsString(bean);

// Both fields should be present even though one is null and one is empty
assertThat(json).contains("\"name\":null");
assertThat(json).contains("\"values\":[]");
}

@Test
public void itIgnoresMapperLevelInclusionForStoredAsJsonFields()
throws JsonProcessingException {
ObjectMapper mapper = new ObjectMapper()
.registerModule(new RosettaModule())
.setSerializationInclusion(JsonInclude.Include.NON_EMPTY);

BeanWithListStoredAsJsonNoAnnotation bean =
new BeanWithListStoredAsJsonNoAnnotation();
bean.inner = new InnerBeanWithList();
bean.inner.values = Collections.emptyList();
bean.inner.name = "test";

JsonNode node = mapper.valueToTree(bean);

assertThat(node.get("inner").isTextual()).isTrue();
String innerJson = node.get("inner").textValue();
assertThat(innerJson)
.as("Mapper-level NON_EMPTY should NOT affect @StoredAsJson without @JsonInclude")
.contains("\"values\":[]");
}

@Test
public void itRespectsJsonIncludeAnnotationOnStoredAsJsonField()
throws JsonProcessingException {
ObjectMapper mapper = new ObjectMapper().registerModule(new RosettaModule());

BeanWithListStoredAsJson bean = new BeanWithListStoredAsJson();
bean.inner = new InnerBeanWithList();
bean.inner.values = Collections.emptyList();
bean.inner.name = "test";

JsonNode node = mapper.valueToTree(bean);

assertThat(node.get("inner").isTextual()).isTrue();
String innerJson = node.get("inner").textValue();
assertThat(innerJson).contains("\"name\":\"test\"");
assertThat(innerJson)
.as("@JsonInclude(NON_EMPTY) should exclude empty list in @StoredAsJson field")
.doesNotContain("values");
}

public static class BeanWithListStoredAsJson {

@StoredAsJson
@JsonInclude(JsonInclude.Include.NON_EMPTY)
public InnerBeanWithList inner;
}

public static class BeanWithListStoredAsJsonNoAnnotation {

@StoredAsJson
public InnerBeanWithList inner;
}

public static class InnerBeanWithList {

public String name;
public List<String> values;
}
}