diff --git a/.gitignore b/.gitignore index 6686aac..88e1334 100644 --- a/.gitignore +++ b/.gitignore @@ -18,5 +18,6 @@ dist # misc .DS_Store - - +.java-version +nohup.out +webui.log diff --git a/kafka-dse-core/pom.xml b/kafka-dse-core/pom.xml index 09835b8..5855420 100644 --- a/kafka-dse-core/pom.xml +++ b/kafka-dse-core/pom.xml @@ -1,67 +1,56 @@ - - - 4.0.0 - kafka-dse-core - + kafka-dse-core - DAO, BEAN - - - com.datastax - kafka-dse-example - 6.7-SNAPSHOT - - - - - - - com.datastax.dse - dse-java-driver-core - - - com.datastax.dse - dse-java-driver-mapping - - - com.datastax.dse - dse-java-driver-extras - - - com.datastax.dse - dse-java-driver-graph - - - - - com.fasterxml.jackson.core - jackson-core - - - com.fasterxml.jackson.core - jackson-annotations - - - com.fasterxml.jackson.core - jackson-databind - - - - ch.qos.logback - logback-classic - - - ch.qos.logback - logback-core - - - org.springframework - spring-context - - - - - + + 4.0.0 + kafka-dse-core + + kafka-dse-core + DAO, BEAN + + com.datastax + kafka-dse-example + 6.7-SNAPSHOT + + + + + com.datastax.dse + dse-java-driver-core + + + com.datastax.dse + dse-java-driver-reactor + + + com.datastax.oss + java-driver-core + + + com.datastax.oss + java-driver-query-builder + + + + com.fasterxml.jackson.core + jackson-core + + + com.fasterxml.jackson.core + jackson-annotations + + + com.fasterxml.jackson.core + jackson-databind + + + ch.qos.logback + logback-classic + + + ch.qos.logback + logback-core + + + org.springframework + spring-context + + diff --git a/kafka-dse-core/src/main/java/com/datastax/demo/conf/DseConfiguration.java b/kafka-dse-core/src/main/java/com/datastax/demo/conf/DseConfiguration.java index dfed163..ba5d236 100644 --- a/kafka-dse-core/src/main/java/com/datastax/demo/conf/DseConfiguration.java +++ b/kafka-dse-core/src/main/java/com/datastax/demo/conf/DseConfiguration.java @@ -1,130 +1,98 @@ package com.datastax.demo.conf; +import com.datastax.dse.driver.api.reactor.ReactorDseSession; +import com.datastax.dse.driver.api.reactor.ReactorDseSessionBuilder; +import com.datastax.dse.driver.internal.core.auth.DsePlainTextAuthProvider; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.querybuilder.SchemaBuilder; +import com.datastax.oss.driver.internal.core.config.typesafe.DefaultDriverConfigLoader; +import com.datastax.oss.driver.internal.core.config.typesafe.DefaultDriverConfigLoaderBuilder; +import java.net.InetSocketAddress; import java.util.List; -import java.util.Optional; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; +import org.springframework.util.StopWatch; +import org.springframework.util.StringUtils; -import com.datastax.demo.domain.LongToTimeStampCodec; -import com.datastax.driver.core.AuthProvider; -import com.datastax.driver.core.CodecRegistry; -import com.datastax.driver.core.ConsistencyLevel; -import com.datastax.driver.core.QueryOptions; -import com.datastax.driver.core.exceptions.InvalidQueryException; -import com.datastax.driver.core.schemabuilder.SchemaBuilder; -import com.datastax.driver.dse.DseCluster.Builder; -import com.datastax.driver.dse.DseSession; -import com.datastax.driver.dse.auth.DsePlainTextAuthProvider; -import 
com.datastax.driver.mapping.DefaultPropertyMapper; -import com.datastax.driver.mapping.MappingConfiguration; -import com.datastax.driver.mapping.MappingManager; -import com.datastax.driver.mapping.PropertyMapper; -import com.datastax.driver.mapping.PropertyTransienceStrategy; -import com.google.common.collect.ImmutableMap; - -/** - * Connectivity to DSE (cassandra, graph, search). - */ +/** Connectivity to DSE (cassandra, graph, search). */ @Configuration public class DseConfiguration { - /** Internal logger. */ - private static final Logger LOGGER = LoggerFactory.getLogger(DseConfiguration.class); - - @Value("#{'${dse.contactPoints}'.split(',')}") - public List < String > contactPoints; - - @Value("${dse.port: 9042}") - public int port; - - @Value("${dse.keyspace: system}") - public String keyspace; - - @Value("${dse.username}") - public Optional < String > dseUsername; - - @Value("${dse.password}") - public Optional < String > dsePassword; - - @Value("${dse.localdc : dc1}") - public String localDc; - - @Bean - public DseSession dseSession() { - long top = System.currentTimeMillis(); - LOGGER.info("Initializing connection to DSE Cluster"); - - Builder clusterConfig = new Builder(); - LOGGER.info(" + Contact Points : {}" , contactPoints); - contactPoints.stream().forEach(clusterConfig::addContactPoint); - LOGGER.info(" + Listening Port : {}", port); - clusterConfig.withPort(port); - - if (dseUsername.isPresent() && dsePassword.isPresent() && dseUsername.get().length() > 0) { - AuthProvider cassandraAuthProvider = new DsePlainTextAuthProvider(dseUsername.get(), dsePassword.get()); - clusterConfig.withAuthProvider(cassandraAuthProvider); - LOGGER.info(" + With username : {}", dseUsername.get()); - } - - // OPTIONS - clusterConfig.withQueryOptions( - new QueryOptions().setConsistencyLevel(ConsistencyLevel.QUORUM)); - - // Long <-> Timestamp - clusterConfig.withCodecRegistry(new CodecRegistry().register(new LongToTimeStampCodec())); - - try { - // First Connect without Keyspace (to create if needed) - DseSession tmpSession = null; - try { - tmpSession = clusterConfig.build().connect(); - tmpSession.execute(SchemaBuilder.createKeyspace(keyspace) - .ifNotExists().with() - .replication(ImmutableMap.of("class", "SimpleStrategy", "replication_factor", 1))); - LOGGER.info(" + Creating keyspace '{}' (if needed)", keyspace); - } finally { - if (tmpSession != null) { - tmpSession.close(); - } - } - - // Real Connection now - DseSession dseSession = clusterConfig.build().connect(keyspace); - LOGGER.info(" + Connection established to DSE Cluster \\_0_/ in {} millis.", System.currentTimeMillis() - top); - return dseSession; - } catch(InvalidQueryException iqe) { - LOGGER.error("\n-----------------------------------------\n\n" - + "Keyspace '{}' seems does not exist. \nPlease update 'application.yml' with correct keyspace name or create one with:\n\n" - + " create keyspace {} WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1}; \n\nI will create the " - + "tables I need after that.\n-----------------------------------------", - keyspace, keyspace); - throw new IllegalStateException("", iqe); - } + /** Internal logger. 
*/ + private static final Logger LOGGER = LoggerFactory.getLogger(DseConfiguration.class); + + @Value("#{'${dse.contactPoints}'.split(',')}") + private List<String> contactPoints; + + @Value("${dse.port: 9042}") + private int port; + + @Value( + "#{T(com.datastax.oss.driver.api.core.CqlIdentifier).fromInternal('${dse.keyspace: demo_kafka}')}") + public CqlIdentifier keyspace; + + @Value("${dse.username}") + private String dseUsername; + + @Value("${dse.password}") + private String dsePassword; + + @Value("${dse.localdc: dc1}") + private String localDc; + + @Bean + public ReactorDseSession dseSession() { + + LOGGER.info("Initializing connection to DSE Cluster"); + LOGGER.info("Contact Points : {}", contactPoints); + LOGGER.info("Listening Port : {}", port); + LOGGER.info("Local DC : {}", localDc); + LOGGER.info("Keyspace : {}", keyspace); + + StopWatch stopWatch = new StopWatch(); + stopWatch.start(); + + ReactorDseSessionBuilder sessionBuilder = + new ReactorDseSessionBuilder().withLocalDatacenter(localDc); + + contactPoints + .stream() + .map(cp -> InetSocketAddress.createUnresolved(cp, port)) + .forEach(sessionBuilder::addContactPoint); + + DefaultDriverConfigLoaderBuilder configLoaderBuilder = + DefaultDriverConfigLoader.builder() + .withString(DefaultDriverOption.REQUEST_CONSISTENCY, "QUORUM"); + + if (!StringUtils.isEmpty(dseUsername) && !StringUtils.isEmpty(dsePassword)) { + LOGGER.info("Username : {}", dseUsername); + configLoaderBuilder + .withString( + DefaultDriverOption.AUTH_PROVIDER_CLASS, DsePlainTextAuthProvider.class.getName()) + .withString(DefaultDriverOption.AUTH_PROVIDER_USER_NAME, dseUsername) + .withString(DefaultDriverOption.AUTH_PROVIDER_PASSWORD, dsePassword); + } + + sessionBuilder.withConfigLoader(configLoaderBuilder.build()); + + // First Connect without Keyspace (to create it if needed) + try (ReactorDseSession tempSession = sessionBuilder.build()) { + LOGGER.info("Creating keyspace {} (if needed)", keyspace); + SimpleStatement createKeyspace = + SchemaBuilder.createKeyspace(keyspace).ifNotExists().withSimpleStrategy(1).build(); + tempSession.execute(createKeyspace); + } + + // Now create the actual session; no try-with-resources here, the returned bean must stay open + ReactorDseSession dseSession = sessionBuilder.withKeyspace(keyspace).build(); + stopWatch.stop(); + LOGGER.info("Connection established to DSE Cluster \\_0_/ in {}.", stopWatch.prettyPrint()); + return dseSession; - - /** - * Use to create mapper and perform ORM on top of Cassandra tables. - * - * @param session - * current dse session. - * @return - * mapper - */ - @Bean - public MappingManager mappingManager(DseSession session) { - // Do not map all fields, only the annotated ones with @Column or @Fields - PropertyMapper propertyMapper = new DefaultPropertyMapper() - .setPropertyTransienceStrategy(PropertyTransienceStrategy.OPT_IN); - // Build configuration from mapping - MappingConfiguration configuration = MappingConfiguration.builder() - .withPropertyMapper(propertyMapper) - .build(); - // Sample Manager with advance configuration - return new MappingManager(session, configuration); - } - + } }
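Since dseSession() is now the single entry point to DSE for the other modules, it is worth sanity-checking the wiring early. Below is a minimal smoke-test sketch (the test class is hypothetical and not part of this change; it assumes a reachable DSE node and spring-boot-starter-test on the classpath):

import static org.junit.jupiter.api.Assertions.assertNotNull;

import com.datastax.dse.driver.api.reactor.ReactorDseSession;
import com.datastax.oss.driver.api.core.cql.Row;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;

@SpringBootTest
class DseConfigurationSmokeTest {

  @Autowired private ReactorDseSession dseSession;

  @Test
  void sessionIsOpenAndBoundToKeyspace() {
    // system.local always exists, so this round-trip proves connectivity.
    Row row = dseSession.execute("SELECT release_version FROM system.local").one();
    assertNotNull(row);
    assertNotNull(row.getString("release_version"));
  }
}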
diff --git a/kafka-dse-core/src/main/java/com/datastax/demo/conf/DseConstants.java b/kafka-dse-core/src/main/java/com/datastax/demo/conf/DseConstants.java index f468e7c..6d3f96f 100644 --- a/kafka-dse-core/src/main/java/com/datastax/demo/conf/DseConstants.java +++ b/kafka-dse-core/src/main/java/com/datastax/demo/conf/DseConstants.java @@ -1,5 +1,7 @@ package com.datastax.demo.conf; +import com.datastax.oss.driver.api.core.CqlIdentifier; + /** * Constants in DSE-DB Tables. * @@ -7,18 +9,24 @@ */ public interface DseConstants { - /** Table Names in Keyspace (Columns are defined in Beans). */ - String STOCKS_MINUTE = "stocks_by_min"; - String STOCKS_HOUR = "stocks_by_hour"; - String STOCKS_DAY = "stocks_by_day"; - - String STOCKS_TICKS = "stocks_ticks"; - String STOCKS_INFOS = "stocks_infos"; - - String TICKER_COL_EXCHANGE = "exchange"; - String TICKER_COL_INDUSTRY = "industry"; - String TICKER_COL_NAME = "name"; - String TICKER_COL_SYMBOL = "symbol"; - + // Table names + + CqlIdentifier STOCKS_MINUTE = CqlIdentifier.fromCql("stocks_by_min"); + CqlIdentifier STOCKS_HOUR = CqlIdentifier.fromCql("stocks_by_hour"); + CqlIdentifier STOCKS_TICKS = CqlIdentifier.fromCql("stocks_ticks"); + CqlIdentifier STOCKS_INFOS = CqlIdentifier.fromCql("stocks_infos"); + + // Column names + + CqlIdentifier EXCHANGE = CqlIdentifier.fromCql("exchange"); + CqlIdentifier NAME = CqlIdentifier.fromCql("name"); + CqlIdentifier INDUSTRY = CqlIdentifier.fromCql("industry"); + CqlIdentifier SYMBOL = CqlIdentifier.fromCql("symbol"); + CqlIdentifier VALUE_DATE = CqlIdentifier.fromCql("value_date"); + CqlIdentifier VALUE = CqlIdentifier.fromCql("value"); + CqlIdentifier OPEN = CqlIdentifier.fromCql("open"); + CqlIdentifier CLOSE = CqlIdentifier.fromCql("close"); + CqlIdentifier HIGH = CqlIdentifier.fromCql("high"); + CqlIdentifier LOW = CqlIdentifier.fromCql("low"); + CqlIdentifier VOLUME = CqlIdentifier.fromCql("volume"); } - \ No newline at end of file
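These shared CqlIdentifier constants let every module build statements without raw string literals. As an illustration (a hypothetical helper, not part of this change), a read of the latest ticks for one symbol with the 4.x query builder could look like:

import static com.datastax.demo.conf.DseConstants.STOCKS_TICKS;
import static com.datastax.demo.conf.DseConstants.SYMBOL;
import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.bindMarker;
import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.selectFrom;

import com.datastax.oss.driver.api.core.cql.SimpleStatement;

class StockTickQueries {

  // SELECT * FROM stocks_ticks WHERE symbol = ? LIMIT 100
  static final SimpleStatement FIND_LAST_100_TICKS =
      selectFrom(STOCKS_TICKS)
          .all()
          .whereColumn(SYMBOL)
          .isEqualTo(bindMarker())
          .limit(100)
          .build();
}

Prepared once and bound per request, a statement like this stays consistent with the identifiers above even if a column is ever renamed in one place.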
diff --git a/kafka-dse-core/src/main/java/com/datastax/demo/domain/LongToTimeStampCodec.java b/kafka-dse-core/src/main/java/com/datastax/demo/domain/LongToTimeStampCodec.java deleted file mode 100644 index 0326103..0000000 --- a/kafka-dse-core/src/main/java/com/datastax/demo/domain/LongToTimeStampCodec.java +++ /dev/null @@ -1,36 +0,0 @@ -package com.datastax.demo.domain; - -import java.util.Date; - -import com.datastax.driver.core.TypeCodec; -import com.datastax.driver.extras.codecs.MappingCodec; - -/** - * Column expect a blob, attribute is a String, we need a codec here for conversion. - * - * In CQL you would be able to use textAsBlob(). - * - * @author DataStax evangelist team. - */ -public class LongToTimeStampCodec extends MappingCodec<Long, Date> { - - /** - * Default charset will be UTF8. - */ - public LongToTimeStampCodec() { - super(TypeCodec.timestamp(), Long.class); - } - - /** {@inheritDoc} */ - @Override - protected Long deserialize(Date value) { - return value.getTime(); - } - - /** {@inheritDoc} */ - @Override - protected Date serialize(Long value) { - return new Date(value); - } - -} \ No newline at end of file diff --git a/kafka-dse-core/src/main/java/com/datastax/demo/domain/Stock.java b/kafka-dse-core/src/main/java/com/datastax/demo/domain/Stock.java index e4dd5d8..a185000 100644 --- a/kafka-dse-core/src/main/java/com/datastax/demo/domain/Stock.java +++ b/kafka-dse-core/src/main/java/com/datastax/demo/domain/Stock.java @@ -1,200 +1,236 @@ package com.datastax.demo.domain; -import java.io.Serializable; -import java.util.Date; - -import com.datastax.driver.mapping.annotations.ClusteringColumn; -import com.datastax.driver.mapping.annotations.Column; -import com.datastax.driver.mapping.annotations.PartitionKey; +import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonFormat; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.Serializable; +import java.time.Instant; +import java.util.Objects; -/** - * POJO Representing stock from Alpha Vantage. - */ +/** POJO Representing stock from Alpha Vantage. */ public class Stock implements Serializable { - /** Serial. */ - private static final long serialVersionUID = -5240591446495279713L; - - /** Stock symbol. */ - @PartitionKey - private String symbol; - - /** timestamp. */ - @ClusteringColumn - @Column(name="value_date") - @JsonFormat(shape=JsonFormat.Shape.STRING, pattern="yyyy-MM-dd'T'HH:mm:ss") - private Date valueDate; - - /** value at begining of period. */ - @Column - private double open; - - /** value at end of period. */ - @Column - private double close; - - /** low value. */ - @Column - private double low; - - /** high value. */ - @Column - private double high; - - /** number exchanged. */ - @Column - private long volume; - - /** - * Default constructor (unmarshalling) - */ - public Stock() {} - - /** - * Copy constructor (specialization) - */ - public Stock(Stock parent) { - this.valueDate = parent.getValueDate(); - this.high = parent.getHigh(); - this.low = parent.getLow(); - this.open = parent.getOpen(); - this.close = parent.getClose(); - this.volume = parent.getVolume(); - this.symbol = parent.getSymbol(); - } - - /** - * Getter accessor for attribute 'valueDate'. - * - * @return - * current value of 'valueDate' - */ - public Date getValueDate() { - return valueDate; - } - - /** - * Setter accessor for attribute 'valueDate'. - * @param valueDate - * new value for 'valueDate ' - */ - public void setValueDate(Date valueDate) { - this.valueDate = valueDate; - } - - /** - * Getter accessor for attribute 'open'. - * - * @return - * current value of 'open' - */ - public double getOpen() { - return open; + /** Serial. */ + private static final long serialVersionUID = -5240591446495279713L; + + /** Stock symbol. */ + private String symbol; + + /** timestamp. */ + @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd'T'HH:mm:ss") + private Instant valueDate; + + /** value at beginning of period. */ + private double open; + + /** value at end of period. */ + private double close; + + /** low value. */ + private double low; + + /** high value. */ + private double high; + + /** volume exchanged. 
*/ + private long volume; + + @JsonCreator + public Stock( + @JsonProperty("symbol") String symbol, + @JsonProperty("valueDate") Instant valueDate, + @JsonProperty("open") double open, + @JsonProperty("close") double close, + @JsonProperty("low") double low, + @JsonProperty("high") double high, + @JsonProperty("volume") long volume) { + this.symbol = symbol; + this.valueDate = valueDate; + this.open = open; + this.close = close; + this.low = low; + this.high = high; + this.volume = volume; + } + + /** Copy constructor (specialization) */ + public Stock(Stock toCopy) { + this.symbol = toCopy.getSymbol(); + this.valueDate = toCopy.getValueDate(); + this.open = toCopy.getOpen(); + this.close = toCopy.getClose(); + this.high = toCopy.getHigh(); + this.low = toCopy.getLow(); + this.volume = toCopy.getVolume(); + } + + /** + * Getter accessor for attribute 'valueDate'. + * + * @return current value of 'valueDate' + */ + public Instant getValueDate() { + return valueDate; + } + + /** + * Setter accessor for attribute 'valueDate'. + * + * @param valueDate new value for 'valueDate ' + */ + public void setValueDate(Instant valueDate) { + this.valueDate = valueDate; + } + + /** + * Getter accessor for attribute 'open'. + * + * @return current value of 'open' + */ + public double getOpen() { + return open; + } + + /** + * Setter accessor for attribute 'open'. + * + * @param open new value for 'open ' + */ + public void setOpen(double open) { + this.open = open; + } + + /** + * Getter accessor for attribute 'close'. + * + * @return current value of 'close' + */ + public double getClose() { + return close; + } + + /** + * Setter accessor for attribute 'close'. + * + * @param close new value for 'close ' + */ + public void setClose(double close) { + this.close = close; + } + + /** + * Getter accessor for attribute 'low'. + * + * @return current value of 'low' + */ + public double getLow() { + return low; + } + + /** + * Setter accessor for attribute 'low'. + * + * @param low new value for 'low ' + */ + public void setLow(double low) { + this.low = low; + } + + /** + * Getter accessor for attribute 'high'. + * + * @return current value of 'high' + */ + public double getHigh() { + return high; + } + + /** + * Setter accessor for attribute 'high'. + * + * @param high new value for 'high ' + */ + public void setHigh(double high) { + this.high = high; + } + + /** + * Getter accessor for attribute 'volume'. + * + * @return current value of 'volume' + */ + public long getVolume() { + return volume; + } + + /** + * Setter accessor for attribute 'volume'. + * + * @param volume new value for 'volume ' + */ + public void setVolume(long volume) { + this.volume = volume; + } + + /** + * Getter accessor for attribute 'symbol'. + * + * @return current value of 'symbol' + */ + public String getSymbol() { + return symbol; + } + + /** + * Setter accessor for attribute 'symbol'. + * + * @param symbol new value for 'symbol ' + */ + public void setSymbol(String symbol) { + this.symbol = symbol; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; } - - /** - * Setter accessor for attribute 'open'. - * @param open - * new value for 'open ' - */ - public void setOpen(double open) { - this.open = open; - } - - /** - * Getter accessor for attribute 'close'. - * - * @return - * current value of 'close' - */ - public double getClose() { - return close; - } - - /** - * Setter accessor for attribute 'close'. 
- * @param close - * new value for 'close ' - */ - public void setClose(double close) { - this.close = close; - } - - /** - * Getter accessor for attribute 'low'. - * - * @return - * current value of 'low' - */ - public double getLow() { - return low; - } - - /** - * Setter accessor for attribute 'low'. - * @param low - * new value for 'low ' - */ - public void setLow(double low) { - this.low = low; - } - - /** - * Getter accessor for attribute 'high'. - * - * @return - * current value of 'high' - */ - public double getHigh() { - return high; - } - - /** - * Setter accessor for attribute 'high'. - * @param high - * new value for 'high ' - */ - public void setHigh(double high) { - this.high = high; - } - - /** - * Getter accessor for attribute 'volume'. - * - * @return - * current value of 'volume' - */ - public long getVolume() { - return volume; - } - - /** - * Setter accessor for attribute 'volume'. - * @param volume - * new value for 'volume ' - */ - public void setVolume(long volume) { - this.volume = volume; - } - - /** - * Getter accessor for attribute 'symbol'. - * - * @return - * current value of 'symbol' - */ - public String getSymbol() { - return symbol; - } - - /** - * Setter accessor for attribute 'symbol'. - * @param symbol - * new value for 'symbol ' - */ - public void setSymbol(String symbol) { - this.symbol = symbol; + if (o == null || getClass() != o.getClass()) { + return false; } - + Stock stock = (Stock) o; + return Double.compare(stock.open, open) == 0 + && Double.compare(stock.close, close) == 0 + && Double.compare(stock.low, low) == 0 + && Double.compare(stock.high, high) == 0 + && volume == stock.volume + && symbol.equals(stock.symbol) + && valueDate.equals(stock.valueDate); + } + + @Override + public int hashCode() { + return Objects.hash(symbol, valueDate, open, close, low, high, volume); + } + + @Override + public String toString() { + return "Stock{" + + "symbol='" + + symbol + + '\'' + + ", valueDate=" + + valueDate + + ", open=" + + open + + ", close=" + + close + + ", low=" + + low + + ", high=" + + high + + ", volume=" + + volume + + '}'; + } } diff --git a/kafka-dse-core/src/main/java/com/datastax/demo/domain/Stock1Hour.java b/kafka-dse-core/src/main/java/com/datastax/demo/domain/Stock1Hour.java deleted file mode 100644 index 0e6bd51..0000000 --- a/kafka-dse-core/src/main/java/com/datastax/demo/domain/Stock1Hour.java +++ /dev/null @@ -1,20 +0,0 @@ -package com.datastax.demo.domain; - -import com.datastax.demo.conf.DseConstants; -import com.datastax.driver.mapping.annotations.Table; - -/** - * Bean to save into table for minutes aggregation. - */ -@Table(name=DseConstants.STOCKS_HOUR) -public class Stock1Hour extends Stock { - - /** Specialization for a dedicated table. */ - private static final long serialVersionUID = 6789940996895471880L; - - /** Specialization. */ - public Stock1Hour(Stock parent) { - super(parent); - } - -} diff --git a/kafka-dse-core/src/main/java/com/datastax/demo/domain/Stock1Min.java b/kafka-dse-core/src/main/java/com/datastax/demo/domain/Stock1Min.java deleted file mode 100644 index fc016a0..0000000 --- a/kafka-dse-core/src/main/java/com/datastax/demo/domain/Stock1Min.java +++ /dev/null @@ -1,20 +0,0 @@ -package com.datastax.demo.domain; - -import com.datastax.demo.conf.DseConstants; -import com.datastax.driver.mapping.annotations.Table; - -/** - * Bean to save into table for minutes aggregation. - */ -@Table(name=DseConstants.STOCKS_MINUTE) -public class Stock1Min extends Stock { - - /** Specialization for a dedicated table. 
*/ - private static final long serialVersionUID = 6789940996895471880L; - - /** Specialization. */ - public Stock1Min(Stock parent) { - super(parent); - } - -} diff --git a/kafka-dse-core/src/main/java/com/datastax/demo/domain/StockInfo.java b/kafka-dse-core/src/main/java/com/datastax/demo/domain/StockInfo.java index a2132ea..f35a897 100644 --- a/kafka-dse-core/src/main/java/com/datastax/demo/domain/StockInfo.java +++ b/kafka-dse-core/src/main/java/com/datastax/demo/domain/StockInfo.java @@ -1,118 +1,147 @@ package com.datastax.demo.domain; -import static com.datastax.demo.conf.DseConstants.STOCKS_INFOS; - +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; import java.io.Serializable; +import java.util.Objects; -import com.datastax.driver.mapping.annotations.ClusteringColumn; -import com.datastax.driver.mapping.annotations.Column; -import com.datastax.driver.mapping.annotations.PartitionKey; -import com.datastax.driver.mapping.annotations.Table; - -/** - * Value for Ticks. - */ -@Table(name=STOCKS_INFOS) +/** Value for Ticks. */ public class StockInfo implements Serializable { - - /** serial. */ - private static final long serialVersionUID = 5806346188526710465L; - - /** value. */ - @PartitionKey - private String exchange; - - /** Value Date. */ - @ClusteringColumn - private String name; - - /** code. */ - @Column - private String symbol; - - /** value. */ - @Column - private String industry; - - /** - * Default Constructor - */ - public StockInfo() {} - - /** - * Getter accessor for attribute 'symbol'. - * - * @return - * current value of 'symbol' - */ - public String getSymbol() { - return symbol; - } - /** - * Setter accessor for attribute 'symbol'. - * @param symbol - * new value for 'symbol ' - */ - public void setSymbol(String symbol) { - this.symbol = symbol; - } + /** serial. */ + private static final long serialVersionUID = 5806346188526710465L; - /** - * Getter accessor for attribute 'name'. - * - * @return - * current value of 'name' - */ - public String getName() { - return name; - } + /** value. */ + private String exchange; - /** - * Setter accessor for attribute 'name'. - * @param name - * new value for 'name ' - */ - public void setName(String name) { - this.name = name; - } + /** Value Date. */ + private String name; - /** - * Getter accessor for attribute 'industry'. - * - * @return - * current value of 'industry' - */ - public String getIndustry() { - return industry; - } + /** code. */ + private String symbol; - /** - * Setter accessor for attribute 'industry'. - * @param industry - * new value for 'industry ' - */ - public void setIndustry(String industry) { - this.industry = industry; - } + /** value. */ + private String industry; - /** - * Getter accessor for attribute 'exchange'. - * - * @return - * current value of 'exchange' - */ - public String getExchange() { - return exchange; - } + @JsonCreator + public StockInfo( + @JsonProperty("exchange") String exchange, + @JsonProperty("name") String name, + @JsonProperty("symbol") String symbol, + @JsonProperty("industry") String industry) { + this.exchange = exchange; + this.name = name; + this.symbol = symbol; + this.industry = industry; + } + + /** + * Getter accessor for attribute 'symbol'. + * + * @return current value of 'symbol' + */ + public String getSymbol() { + return symbol; + } - /** - * Setter accessor for attribute 'exchange'. 
- * @param exchange - * new value for 'exchange ' - */ - public void setExchange(String exchange) { - this.exchange = exchange; + /** + * Setter accessor for attribute 'symbol'. + * + * @param symbol new value for 'symbol ' + */ + public void setSymbol(String symbol) { + this.symbol = symbol; + } + + /** + * Getter accessor for attribute 'name'. + * + * @return current value of 'name' + */ + public String getName() { + return name; + } + + /** + * Setter accessor for attribute 'name'. + * + * @param name new value for 'name ' + */ + public void setName(String name) { + this.name = name; + } + + /** + * Getter accessor for attribute 'industry'. + * + * @return current value of 'industry' + */ + public String getIndustry() { + return industry; + } + + /** + * Setter accessor for attribute 'industry'. + * + * @param industry new value for 'industry ' + */ + public void setIndustry(String industry) { + this.industry = industry; + } + + /** + * Getter accessor for attribute 'exchange'. + * + * @return current value of 'exchange' + */ + public String getExchange() { + return exchange; + } + + /** + * Setter accessor for attribute 'exchange'. + * + * @param exchange new value for 'exchange ' + */ + public void setExchange(String exchange) { + this.exchange = exchange; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; } - -} \ No newline at end of file + StockInfo stockInfo = (StockInfo) o; + return exchange.equals(stockInfo.exchange) + && name.equals(stockInfo.name) + && symbol.equals(stockInfo.symbol) + && industry.equals(stockInfo.industry); + } + + @Override + public int hashCode() { + return Objects.hash(exchange, name, symbol, industry); + } + + @Override + public String toString() { + return "StockInfo{" + + "exchange='" + + exchange + + '\'' + + ", name='" + + name + + '\'' + + ", symbol='" + + symbol + + '\'' + + ", industry='" + + industry + + '\'' + + '}'; + } +} diff --git a/kafka-dse-core/src/main/java/com/datastax/demo/domain/StockTick.java b/kafka-dse-core/src/main/java/com/datastax/demo/domain/StockTick.java index f7d9d8b..fd08bde 100644 --- a/kafka-dse-core/src/main/java/com/datastax/demo/domain/StockTick.java +++ b/kafka-dse-core/src/main/java/com/datastax/demo/domain/StockTick.java @@ -1,111 +1,120 @@ package com.datastax.demo.domain; -import static com.datastax.demo.conf.DseConstants.STOCKS_TICKS; - +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; import java.io.Serializable; +import java.time.Instant; +import java.util.Objects; -import com.datastax.driver.mapping.annotations.ClusteringColumn; -import com.datastax.driver.mapping.annotations.Column; -import com.datastax.driver.mapping.annotations.PartitionKey; -import com.datastax.driver.mapping.annotations.Table; - -/** - * Value for Ticks. - */ -@Table(name=STOCKS_TICKS) +/** Value for Ticks. */ public class StockTick implements Serializable { - - /** serial. */ - private static final long serialVersionUID = 5806346188526710465L; - - /** code. */ - @PartitionKey - private String symbol; - - /** Value Date. */ - @ClusteringColumn - private long valueDate; - - /** value. */ - @Column - private double value; - - /** - * Default Constructor - */ - public StockTick() {} - - /** - * Constructor with parameters. - */ - public StockTick(String tickSymbol, double value) { - this(tickSymbol, value, System.currentTimeMillis()); - } - - /** - * Constructor with parameters. 
- */ - public StockTick(String tickSymbol, double value, long valueDate) { - this.symbol = tickSymbol; - this.value = value; - this.valueDate = valueDate; - } - - /** - * Getter accessor for attribute 'value'. - * - * @return - * current value of 'value' - */ - public double getValue() { - return value; - } - /** - * Setter accessor for attribute 'value'. - * @param value - * new value for 'value ' - */ - public void setValue(double value) { - this.value = value; - } + /** serial. */ + private static final long serialVersionUID = 5806346188526710465L; - /** - * Getter accessor for attribute 'valueDate'. - * - * @return - * current value of 'valueDate' - */ - public long getValueDate() { - return valueDate; - } + /** code. */ + private String symbol; - /** - * Setter accessor for attribute 'valueDate'. - * @param valueDate - * new value for 'valueDate ' - */ - public void setValueDate(long valueDate) { - this.valueDate = valueDate; - } + /** Value Date. */ + private Instant valueDate; - /** - * Getter accessor for attribute 'symbol'. - * - * @return - * current value of 'symbol' - */ - public String getSymbol() { - return symbol; - } + /** value. */ + private double value; + + /** Constructor with parameters. */ + @JsonCreator + public StockTick( + @JsonProperty("symbol") String symbol, + @JsonProperty("valueDate") Instant valueDate, + @JsonProperty("value") double value) { + this.symbol = symbol; + this.value = value; + this.valueDate = valueDate; + } - /** - * Setter accessor for attribute 'symbol'. - * @param symbol - * new value for 'symbol ' - */ - public void setSymbol(String symbol) { - this.symbol = symbol; + /** + * Getter accessor for attribute 'value'. + * + * @return current value of 'value' + */ + public double getValue() { + return value; + } + + /** + * Setter accessor for attribute 'value'. + * + * @param value new value for 'value ' + */ + public void setValue(double value) { + this.value = value; + } + + /** + * Getter accessor for attribute 'valueDate'. + * + * @return current value of 'valueDate' + */ + public Instant getValueDate() { + return valueDate; + } + + /** + * Setter accessor for attribute 'valueDate'. + * + * @param valueDate new value for 'valueDate ' + */ + public void setValueDate(Instant valueDate) { + this.valueDate = valueDate; + } + + /** + * Getter accessor for attribute 'symbol'. + * + * @return current value of 'symbol' + */ + public String getSymbol() { + return symbol; + } + + /** + * Setter accessor for attribute 'symbol'. 
+ * + * @param symbol new value for 'symbol ' + */ + public void setSymbol(String symbol) { + this.symbol = symbol; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; } - -} \ No newline at end of file + StockTick stockTick = (StockTick) o; + return Double.compare(stockTick.value, value) == 0 + && symbol.equals(stockTick.symbol) + && valueDate.equals(stockTick.valueDate); + } + + @Override + public int hashCode() { + return Objects.hash(symbol, valueDate, value); + } + + @Override + public String toString() { + return "StockTick{" + + "symbol='" + + symbol + + '\'' + + ", valueDate=" + + valueDate + + ", value=" + + value + + '}'; + } } diff --git a/kafka-dse-core/src/main/java/com/datastax/demo/domain/package-info.java b/kafka-dse-core/src/main/java/com/datastax/demo/domain/package-info.java new file mode 100644 index 0000000..3fc24dd --- /dev/null +++ b/kafka-dse-core/src/main/java/com/datastax/demo/domain/package-info.java @@ -0,0 +1,6 @@ +@NonNullApi +@NonNullFields +package com.datastax.demo.domain; + +import org.springframework.lang.NonNullApi; +import org.springframework.lang.NonNullFields;
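With the driver mapping annotations gone, these domain classes are plain Jackson beans built through their @JsonCreator constructors, and valueDate is now carried as java.time.Instant. A minimal round-trip sketch (it assumes jackson-datatype-jsr310 is on the classpath, which Jackson needs to handle Instant; the demo class itself is hypothetical):

import com.datastax.demo.domain.StockTick;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import java.time.Instant;

public class StockTickJsonDemo {
  public static void main(String[] args) throws Exception {
    // Without JavaTimeModule, Jackson cannot construct the Instant parameter.
    ObjectMapper mapper = new ObjectMapper().registerModule(new JavaTimeModule());
    String json = mapper.writeValueAsString(new StockTick("IBM", Instant.now(), 120.5));
    StockTick back = mapper.readValue(json, StockTick.class);
    System.out.println(json + " -> " + back);
  }
}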
diff --git a/kafka-dse-core/src/main/java/com/datastax/demo/utils/FileUtils.java b/kafka-dse-core/src/main/java/com/datastax/demo/utils/FileUtils.java deleted file mode 100644 index c9a431f..0000000 --- a/kafka-dse-core/src/main/java/com/datastax/demo/utils/FileUtils.java +++ /dev/null @@ -1,44 +0,0 @@ -package com.datastax.demo.utils; - -import java.io.File; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.util.List; -import java.util.Scanner; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -/** - * utility class to parse files. - */ -public class FileUtils { - - /** Hide default.*/ - private FileUtils() {} - - /** - * Load file content as a String. - */ - public static String readFileIntoString(String filename) { - try (Scanner s = new Scanner(new File(filename))) { - return s.useDelimiter("\\Z").next(); - } catch (FileNotFoundException e) { - throw new IllegalArgumentException("Cannot find the file", e); - } - } - - /** - * Load each line as a row. - */ - public static List<String> readFileIntoList(String filename) { - try (Stream<String> lines = Files.lines(Paths.get(filename))) { - return lines.collect(Collectors.toList()); - } catch (IOException e1) { - throw new IllegalArgumentException("Cannot read file", e1); - } - } - - -} diff --git a/kafka-dse-core/src/main/resources/cql/create_schema.cql b/kafka-dse-core/src/main/resources/cql/create_schema.cql index 6512cd5..af3a637 100644 --- a/kafka-dse-core/src/main/resources/cql/create_schema.cql +++ b/kafka-dse-core/src/main/resources/cql/create_schema.cql @@ -4,7 +4,7 @@ use demo_sdc; CREATE TABLE stocks_ticks ( symbol text, - valueDate timestamp, + value_date timestamp, value double, - PRIMARY KEY (symbol, valueDate) - ) WITH CLUSTERING ORDER BY (valueDate DESC); + PRIMARY KEY (symbol, value_date) + ) WITH CLUSTERING ORDER BY (value_date DESC); diff --git a/kafka-dse-producer/pom.xml b/kafka-dse-producer/pom.xml index 1df5ede..017f19d 100644 --- a/kafka-dse-producer/pom.xml +++ b/kafka-dse-producer/pom.xml @@ -1,161 +1,157 @@ - - - 4.0.0 - kafka-dse-producer - + kafka-dse-producer - DAO, BEAN - - - com.datastax - kafka-dse-example - 6.7-SNAPSHOT - - - - - - - com.github.signaflo - timeseries - 0.4 - - - - - - com.datastax - kafka-dse-core - ${project.version} - - - - com.fasterxml.jackson.dataformat - jackson-dataformat-csv - - - - - org.springframework.boot - spring-boot-starter-web - - - - org.springframework.boot - spring-boot-starter-actuator - - - de.codecentric - spring-boot-admin-starter-server - 2.0.2 - - - de.codecentric - spring-boot-admin-starter-client - 2.0.2 - - - org.springframework.boot - spring-boot-starter-security - - - org.jolokia - jolokia-core - - - - - org.apache.camel - camel-spring-boot-starter - - - org.apache.camel - camel-stream-starter - - - org.apache.camel - camel-kafka - - - - - org.apache.kafka - connect-api - - - org.apache.kafka - connect-json - - - io.confluent - kafka-avro-serializer - - - io.netty - netty-all - - - org.patriques - alphavantage4j - 1.2 - - - - - org.junit.platform - junit-platform-launcher - - - org.junit.platform - junit-platform-runner - - - org.junit.platform - junit-platform-console-standalone - - - org.junit.jupiter - junit-jupiter-engine - - - org.junit.jupiter - junit-jupiter-params - - - org.springframework.boot - spring-boot-starter-test - test - - - - - - - org.springframework.boot - spring-boot-maven-plugin - ${spring-boot.version} - - com.datastax.demo.ProducerApplication - - - - - repackage - - - - - - - - - - alphavantage - alphavantage4 - https://dl.bintray.com/patriques82/maven - - - + + 4.0.0 + kafka-dse-producer + + kafka-dse-producer + DAO, BEAN + + com.datastax + kafka-dse-example + 6.7-SNAPSHOT + + + + + com.github.signaflo + timeseries + 0.4 + + + + com.datastax + kafka-dse-core + ${project.version} + + + + com.datastax.dse + dse-java-driver-core + + + com.datastax.oss + java-driver-core + + + com.datastax.oss + java-driver-query-builder + + + com.fasterxml.jackson.dataformat + jackson-dataformat-csv + + + + org.springframework.boot + spring-boot-starter-web + + + org.springframework.boot + spring-boot-starter-actuator + + + de.codecentric + spring-boot-admin-starter-server + 2.0.2 + + + de.codecentric + spring-boot-admin-starter-client + 2.0.2 + + + org.springframework.boot + spring-boot-starter-security + + + org.jolokia + jolokia-core + + + + org.apache.camel + camel-spring-boot-starter + + + org.apache.camel + camel-stream-starter + + + org.apache.camel + camel-kafka + + + + org.apache.kafka + connect-api + + + org.apache.kafka + connect-json + + + io.confluent + kafka-avro-serializer + + + io.netty + 
netty-all + + + org.patriques + alphavantage4j + 1.2 + + + org.junit.jupiter + junit-jupiter-api + test + + + org.junit.jupiter + junit-jupiter-engine + test + + + org.junit.jupiter + junit-jupiter-params + test + + + org.springframework.boot + spring-boot-starter-test + test + + + junit + junit + + + + + + + + org.springframework.boot + spring-boot-maven-plugin + ${spring-boot.version} + + com.datastax.demo.ProducerApplication + + + + + repackage + + + + + + + + + alphavantage + alphavantage4 + https://dl.bintray.com/patriques82/maven + + diff --git a/kafka-dse-producer/src/main/java/com/datastax/demo/ProducerApplication.java b/kafka-dse-producer/src/main/java/com/datastax/demo/ProducerApplication.java index 023588b..d95c19d 100755 --- a/kafka-dse-producer/src/main/java/com/datastax/demo/ProducerApplication.java +++ b/kafka-dse-producer/src/main/java/com/datastax/demo/ProducerApplication.java @@ -1,26 +1,20 @@ package com.datastax.demo; +import de.codecentric.boot.admin.server.config.EnableAdminServer; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.EnableAutoConfiguration; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.context.annotation.ComponentScan; -import de.codecentric.boot.admin.server.config.EnableAdminServer; - -/** - * Main class for CannysEngine. - */ +/** Main class for CannysEngine. */ @SpringBootApplication -@ComponentScan(basePackages="com.datastax.demo") +@ComponentScan(basePackages = "com.datastax.demo") @EnableAutoConfiguration @EnableAdminServer public class ProducerApplication { - /** - * A main method to start this application. - */ - public static void main(String[] args) { - SpringApplication.run(ProducerApplication.class, args); - } - + /** A main method to start this application. */ + public static void main(String[] args) { + SpringApplication.run(ProducerApplication.class, args); + } } diff --git a/kafka-dse-producer/src/main/java/com/datastax/demo/ProducerRoutes.java b/kafka-dse-producer/src/main/java/com/datastax/demo/ProducerRoutes.java index 88729bd..3e2f51d 100644 --- a/kafka-dse-producer/src/main/java/com/datastax/demo/ProducerRoutes.java +++ b/kafka-dse-producer/src/main/java/com/datastax/demo/ProducerRoutes.java @@ -1,44 +1,35 @@ package com.datastax.demo; +import com.datastax.demo.route.StockTicksConsumer; +import com.datastax.demo.route.StockTicksProducer; import org.apache.camel.builder.RouteBuilder; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; -import com.datastax.demo.route.StockTicksConsumer; -import com.datastax.demo.route.StockTicksProducer; - -/** - * Apache Camel trigger processors with routes. - */ +/** Apache Camel trigger processors with routes. */ @Component public class ProducerRoutes extends RouteBuilder { - // Producers - - @Autowired - private StockTicksProducer stockTickProducerAvro; - - @Autowired - private StockTicksConsumer stockTickConsumerAvro; - - /** {@inheritDoc} */ - @Override - public void configure() throws Exception { - - // ALPHAVANYAGE REST API ==> KAFKA (ticks, 1Min, 1hour) - from("timer:ticks?fixedRate=true&period={{alphavantage.pollingPeriod.ticks}}") - .routeId("ticks_Alpha2Kafka") - .process(stockTickProducerAvro) - .end(); - - // ALPHAVANYAGE KAFKA => DSE (TMP) - from("timer:ticksConsumer?" 
- + "fixedRate=true" - + "&period={{alphavantage.pollingPeriod.ticks}}") - .routeId("ticks_Kafka2Dse") - .process(stockTickConsumerAvro) - .end(); - - } - -} \ No newline at end of file + // Producers + + @Autowired private StockTicksProducer stockTickProducerAvro; + + @Autowired private StockTicksConsumer stockTickConsumerAvro; + + /** {@inheritDoc} */ + @Override + public void configure() { + + // ALPHAVANYAGE REST API ==> KAFKA (ticks, 1Min, 1hour) + from("timer:ticks?fixedRate=true&period={{alphavantage.pollingPeriod.ticks}}") + .routeId("ticks_Alpha2Kafka") + .process(stockTickProducerAvro) + .end(); + + // ALPHAVANYAGE KAFKA => DSE (TMP) + from("timer:ticksConsumer?" + "fixedRate=true" + "&period={{alphavantage.pollingPeriod.ticks}}") + .routeId("ticks_Kafka2Dse") + .process(stockTickConsumerAvro) + .end(); + } +} diff --git a/kafka-dse-producer/src/main/java/com/datastax/demo/conf/ProducerConfiguration.java b/kafka-dse-producer/src/main/java/com/datastax/demo/conf/ProducerConfiguration.java index a531d7d..bcbfc91 100644 --- a/kafka-dse-producer/src/main/java/com/datastax/demo/conf/ProducerConfiguration.java +++ b/kafka-dse-producer/src/main/java/com/datastax/demo/conf/ProducerConfiguration.java @@ -8,8 +8,8 @@ import static org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG; import static org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG; +import com.fasterxml.jackson.databind.JsonNode; import java.util.Properties; - import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.common.serialization.StringDeserializer; @@ -20,39 +20,35 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import com.fasterxml.jackson.databind.JsonNode; - @Configuration public class ProducerConfiguration { - - @Value("${kafka.server}") - private String kafkaServer; - - @Value("${kafka.ack}") - private String producerAck; - - @Value("${kafka.group}") - private String consumerGroup; - - @Bean("producer.json") - public KafkaProducer jsonProducer() { - Properties props = new Properties(); - props.put(BOOTSTRAP_SERVERS_CONFIG, kafkaServer); - props.put(KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); - props.put(VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class.getName()); - props.put(ACKS_CONFIG, producerAck); - return new KafkaProducer(props); - } - - @Bean("consumer.json") - public KafkaConsumer jsonConsumer() { - Properties props = new Properties(); - props.put(BOOTSTRAP_SERVERS_CONFIG, kafkaServer); - props.put(GROUP_ID_CONFIG, consumerGroup); - props.put(KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); - props.put(VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class.getName()); - return new KafkaConsumer(props); - } - - + + @Value("${kafka.server}") + private String kafkaServer; + + @Value("${kafka.ack}") + private String producerAck; + + @Value("${kafka.group}") + private String consumerGroup; + + @Bean("producer.json") + public KafkaProducer jsonProducer() { + Properties props = new Properties(); + props.put(BOOTSTRAP_SERVERS_CONFIG, kafkaServer); + props.put(KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + props.put(VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class.getName()); + props.put(ACKS_CONFIG, producerAck); + return new KafkaProducer<>(props); + } + + @Bean("consumer.json") + public KafkaConsumer jsonConsumer() { + Properties props = new 
diff --git a/kafka-dse-producer/src/main/java/com/datastax/demo/conf/ProducerConfiguration.java b/kafka-dse-producer/src/main/java/com/datastax/demo/conf/ProducerConfiguration.java index a531d7d..bcbfc91 100644 --- a/kafka-dse-producer/src/main/java/com/datastax/demo/conf/ProducerConfiguration.java +++ b/kafka-dse-producer/src/main/java/com/datastax/demo/conf/ProducerConfiguration.java @@ -8,8 +8,8 @@ import static org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG; import static org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG; +import com.fasterxml.jackson.databind.JsonNode; import java.util.Properties; - import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.common.serialization.StringDeserializer; @@ -20,39 +20,35 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import com.fasterxml.jackson.databind.JsonNode; - @Configuration public class ProducerConfiguration { - - @Value("${kafka.server}") - private String kafkaServer; - - @Value("${kafka.ack}") - private String producerAck; - - @Value("${kafka.group}") - private String consumerGroup; - - @Bean("producer.json") - public KafkaProducer jsonProducer() { - Properties props = new Properties(); - props.put(BOOTSTRAP_SERVERS_CONFIG, kafkaServer); - props.put(KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); - props.put(VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class.getName()); - props.put(ACKS_CONFIG, producerAck); - return new KafkaProducer(props); - } - - @Bean("consumer.json") - public KafkaConsumer jsonConsumer() { - Properties props = new Properties(); - props.put(BOOTSTRAP_SERVERS_CONFIG, kafkaServer); - props.put(GROUP_ID_CONFIG, consumerGroup); - props.put(KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); - props.put(VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class.getName()); - return new KafkaConsumer(props); - } - - + + @Value("${kafka.server}") + private String kafkaServer; + + @Value("${kafka.ack}") + private String producerAck; + + @Value("${kafka.group}") + private String consumerGroup; + + @Bean("producer.json") + public KafkaProducer<String, JsonNode> jsonProducer() { + Properties props = new Properties(); + props.put(BOOTSTRAP_SERVERS_CONFIG, kafkaServer); + props.put(KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + props.put(VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class.getName()); + props.put(ACKS_CONFIG, producerAck); + return new KafkaProducer<>(props); + } + + @Bean("consumer.json") + public KafkaConsumer<String, JsonNode> jsonConsumer() { + Properties props = new Properties(); + props.put(BOOTSTRAP_SERVERS_CONFIG, kafkaServer); + props.put(GROUP_ID_CONFIG, consumerGroup); + props.put(KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + props.put(VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class.getName()); + return new KafkaConsumer<>(props); + } } diff --git a/kafka-dse-producer/src/main/java/com/datastax/demo/conf/SecurityConf.java b/kafka-dse-producer/src/main/java/com/datastax/demo/conf/SecurityConf.java index 7ffeffa..6cf51ef 100644 --- a/kafka-dse-producer/src/main/java/com/datastax/demo/conf/SecurityConf.java +++ b/kafka-dse-producer/src/main/java/com/datastax/demo/conf/SecurityConf.java @@ -4,16 +4,13 @@ import org.springframework.security.config.annotation.web.builders.HttpSecurity; import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter; -/** - * SpringBoot Admin Console. - */ +/** SpringBoot Admin Console. */ @Configuration public class SecurityConf extends WebSecurityConfigurerAdapter { - - /** {@inheritDoc} */ - @Override - protected void configure(HttpSecurity http) throws Exception { - http.authorizeRequests().anyRequest().permitAll().and().csrf().disable(); - } -} \ No newline at end of file + /** {@inheritDoc} */ + @Override + protected void configure(HttpSecurity http) throws Exception { + http.authorizeRequests().anyRequest().permitAll().and().csrf().disable(); + } } diff --git a/kafka-dse-producer/src/main/java/com/datastax/demo/dao/AlphaVantageDao.java b/kafka-dse-producer/src/main/java/com/datastax/demo/dao/AlphaVantageDao.java index 6cfabca..6f71e49 100644 --- a/kafka-dse-producer/src/main/java/com/datastax/demo/dao/AlphaVantageDao.java +++ b/kafka-dse-producer/src/main/java/com/datastax/demo/dao/AlphaVantageDao.java @@ -1,12 +1,11 @@ package com.datastax.demo.dao; +import com.datastax.demo.domain.Stock; +import com.datastax.demo.domain.StockTick; import java.time.ZoneId; -import java.util.Date; import java.util.Set; import java.util.stream.Stream; - import javax.annotation.PostConstruct; - import org.patriques.AlphaVantageConnector; import org.patriques.BatchStockQuotes; import org.patriques.TimeSeries; @@ -20,78 +19,79 @@ import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Repository; -import com.datastax.demo.domain.Stock; -import com.datastax.demo.domain.StockTick; - @Repository public class AlphaVantageDao { - - /** Internal logger. */ - private static final Logger LOGGER = LoggerFactory.getLogger(AlphaVantageDao.class); - - @Value("${alphavantage.apiKey}") - protected String apiKey; - - @Value("${alphavantage.timeout: 30000 }") - protected int apiTimeout; - - /** Time series. */ - private TimeSeries stockTimeSeries; - - /** Initialize connector. 
*/ - private BatchStockQuotes clientStockApi; - - @PostConstruct - public void initAlphaVantageConnector() { - AlphaVantageConnector connector = new AlphaVantageConnector(apiKey, apiTimeout); - stockTimeSeries = new TimeSeries(connector); - clientStockApi = new BatchStockQuotes(connector); - } - - public Stream < StockTick > getCurrentStockTicks(Set symbols) { - try { - BatchStockQuotesResponse response = clientStockApi.quote(symbols.toArray(new String[]{})); - return response.getStockQuotes().stream().map(this::mapStockQuoteAsStockTick); - } catch (RuntimeException re) { - LOGGER.error("Cannot get data."); - } - return Stream.empty(); - } - - public Stream < Stock > getLastXStocks1Min(String symbol, int nbValue) { - return getStocks(Interval.ONE_MIN, symbol, nbValue); - } - - public Stream < Stock > getLastXStocks1Hour(String symbol, int nbValue) { - return getStocks(Interval.SIXTY_MIN, symbol, nbValue); - } - - private Stream < Stock > getStocks(Interval interval , String symbol, int nbValue) { - OutputSize outputsize = (nbValue > 100 ) ? OutputSize.FULL : OutputSize.COMPACT; - try { - return stockTimeSeries.intraDay(symbol, interval, outputsize) - .getStockData() - .stream() - .limit(nbValue) - .map(this::mapStockDataAsStockTick); - } catch(RuntimeException re) {} - return Stream.empty(); - } - - private StockTick mapStockQuoteAsStockTick(StockQuote q) { - //long real = q.getTimestamp().atZone(ZoneId.systemDefault()).toInstant().toEpochMilli(); - return new StockTick(q.getSymbol(), q.getPrice(), System.currentTimeMillis()); + + /** Internal logger. */ + private static final Logger LOGGER = LoggerFactory.getLogger(AlphaVantageDao.class); + + @Value("${alphavantage.apiKey}") + protected String apiKey; + + @Value("${alphavantage.timeout: 30000 }") + protected int apiTimeout; + + /** Time series. */ + private TimeSeries stockTimeSeries; + + /** Initialize connector. */ + private BatchStockQuotes clientStockApi; + + @PostConstruct + public void initAlphaVantageConnector() { + AlphaVantageConnector connector = new AlphaVantageConnector(apiKey, apiTimeout); + stockTimeSeries = new TimeSeries(connector); + clientStockApi = new BatchStockQuotes(connector); + } + + public Stream<StockTick> getCurrentStockTicks(Set<String> symbols) { + try { + BatchStockQuotesResponse response = clientStockApi.quote(symbols.toArray(new String[] {})); + return response.getStockQuotes().stream().map(AlphaVantageDao::mapStockQuoteAsStockTick); + } catch (RuntimeException re) { + LOGGER.error("Cannot get data.", re); } + return Stream.empty(); + } + + public Stream<Stock> getLastXStocks1Min(String symbol, int nbValue) { + return getStocks(Interval.ONE_MIN, symbol, nbValue); + } + + public Stream<Stock> getLastXStocks1Hour(String symbol, int nbValue) { + return getStocks(Interval.SIXTY_MIN, symbol, nbValue); + } + + private Stream<Stock> getStocks(Interval interval, String symbol, int nbValue) { + OutputSize outputsize = (nbValue > 100) ? 
OutputSize.FULL : OutputSize.COMPACT; + try { + return stockTimeSeries + .intraDay(symbol, interval, outputsize) + .getStockData() + .stream() + .limit(nbValue) + .map(item -> mapStockDataAsStock(symbol, item)); + } catch (RuntimeException re) { + LOGGER.error("Cannot get stocks.", re); } - + return Stream.empty(); + } + + public static StockTick mapStockQuoteAsStockTick(StockQuote quote) { + return new StockTick( + quote.getSymbol(), + quote.getTimestamp().atZone(ZoneId.systemDefault()).toInstant(), + quote.getPrice()); + } + + public static Stock mapStockDataAsStock(String symbol, StockData item) { + return new Stock( + symbol, + item.getDateTime().atZone(ZoneId.systemDefault()).toInstant(), + item.getOpen(), + item.getClose(), + item.getLow(), + item.getHigh(), + item.getVolume()); + } } diff --git a/kafka-dse-producer/src/main/java/com/datastax/demo/dao/CsvDao.java b/kafka-dse-producer/src/main/java/com/datastax/demo/dao/CsvDao.java index d2771ae..8b34d9b 100644 --- a/kafka-dse-producer/src/main/java/com/datastax/demo/dao/CsvDao.java +++ b/kafka-dse-producer/src/main/java/com/datastax/demo/dao/CsvDao.java @@ -3,37 +3,38 @@ import static java.util.Spliterators.spliteratorUnknownSize; import static java.util.stream.StreamSupport.stream; +import com.datastax.demo.domain.StockInfo; +import com.fasterxml.jackson.dataformat.csv.CsvMapper; +import com.fasterxml.jackson.dataformat.csv.CsvSchema; import java.io.File; import java.io.IOException; import java.util.Spliterator; import java.util.stream.Stream; - import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Repository; -import com.datastax.demo.domain.StockInfo; -import com.fasterxml.jackson.dataformat.csv.CsvMapper; -import com.fasterxml.jackson.dataformat.csv.CsvSchema; - @Repository public class CsvDao { - - @Value("${csvStocksMetadata}") - protected String csvFileName; - - /** - * Init table stocks_infos (used in home page of webUI). - * - */ - public Stream< StockInfo > readStockInfosFromCsv() { - try { - return stream(spliteratorUnknownSize(new CsvMapper().readerFor(StockInfo.class) - .with(CsvSchema.emptySchema().withHeader() - .withColumnSeparator(CsvSchema.DEFAULT_COLUMN_SEPARATOR)) - .readValues(new File(csvFileName)), Spliterator.ORDERED), false); - } catch (IOException e) { - throw new IllegalArgumentException("Cannot filled table"); - } - } + @Value("${csvStocksMetadata}") + private String csvFileName; + + /** Init table stocks_infos (used in home page of webUI). 
*/ + public Stream<StockInfo> readStockInfosFromCsv() { + try { + return stream( + spliteratorUnknownSize( + new CsvMapper() + .readerFor(StockInfo.class) + .with( + CsvSchema.emptySchema() + .withHeader() + .withColumnSeparator(CsvSchema.DEFAULT_COLUMN_SEPARATOR)) + .readValues(new File(csvFileName)), + Spliterator.ORDERED), + false); + } catch (IOException e) { + throw new IllegalArgumentException("Cannot read file: " + csvFileName, e); + } + } }
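Because CsvDao builds its schema from the file's header row, the CSV must start with the column names matching StockInfo's @JsonProperty names (exchange, name, symbol, industry). A minimal parsing sketch (the sample row is invented for illustration):

import com.datastax.demo.domain.StockInfo;
import com.fasterxml.jackson.databind.MappingIterator;
import com.fasterxml.jackson.dataformat.csv.CsvMapper;
import com.fasterxml.jackson.dataformat.csv.CsvSchema;

public class CsvFormatDemo {
  public static void main(String[] args) throws Exception {
    // The first row names the columns, exactly as CsvSchema.emptySchema().withHeader() expects.
    String csv = "exchange,name,symbol,industry\nNYSE,International Business Machines,IBM,Technology\n";
    MappingIterator<StockInfo> rows =
        new CsvMapper()
            .readerFor(StockInfo.class)
            .with(CsvSchema.emptySchema().withHeader())
            .readValues(csv);
    System.out.println(rows.next()); // StockInfo{exchange='NYSE', ...}
  }
}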
diff --git a/kafka-dse-producer/src/main/java/com/datastax/demo/dao/DseDao.java b/kafka-dse-producer/src/main/java/com/datastax/demo/dao/DseDao.java
index a717b86..86937a0 100644
--- a/kafka-dse-producer/src/main/java/com/datastax/demo/dao/DseDao.java
+++ b/kafka-dse-producer/src/main/java/com/datastax/demo/dao/DseDao.java
@@ -1,147 +1,222 @@
 package com.datastax.demo.dao;
-import static com.datastax.driver.core.querybuilder.QueryBuilder.eq;
-import static com.datastax.driver.core.querybuilder.QueryBuilder.select;
+import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.bindMarker;
+import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.insertInto;
+import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal;
+import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.selectFrom;
+import static com.datastax.oss.driver.api.querybuilder.SchemaBuilder.createTable;
+import static com.datastax.oss.driver.api.querybuilder.relation.Relation.column;
+import com.datastax.demo.conf.DseConstants;
+import com.datastax.demo.domain.Stock;
+import com.datastax.demo.domain.StockInfo;
+import com.datastax.demo.domain.StockTick;
+import com.datastax.dse.driver.api.core.DseSession;
+import com.datastax.oss.driver.api.core.CqlIdentifier;
+import com.datastax.oss.driver.api.core.cql.PreparedStatement;
+import com.datastax.oss.driver.api.core.metadata.schema.ClusteringOrder;
+import com.datastax.oss.driver.api.core.type.DataTypes;
 import java.util.Set;
+import java.util.concurrent.CompletionStage;
 import java.util.stream.Collectors;
-
 import javax.annotation.PostConstruct;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Repository;
-import com.datastax.demo.conf.DseConstants;
-import com.datastax.demo.domain.Stock;
-import com.datastax.demo.domain.Stock1Hour;
-import com.datastax.demo.domain.Stock1Min;
-import com.datastax.demo.domain.StockInfo;
-import com.datastax.demo.domain.StockTick;
-import com.datastax.driver.core.DataType;
-import com.datastax.driver.core.schemabuilder.SchemaBuilder;
-import com.datastax.driver.core.schemabuilder.SchemaBuilder.Direction;
-import com.datastax.driver.dse.DseSession;
-import com.datastax.driver.mapping.Mapper;
-import com.datastax.driver.mapping.MappingManager;
-
 @Repository
 public class DseDao implements DseConstants {
-
-  /** Internal logger. */
-  private static final Logger LOGGER = LoggerFactory.getLogger(DseDao.class);
-
-  /** Hold Connectivity to DSE. */
-  @Autowired
-  DseSession dseSession;
-
-  /** Hold Connectivity to DSE. */
-  @Autowired
-  CsvDao csvDao;
-
-  /** Hold Driver Mapper to implement ORM with Cassandra. */
-  @Autowired
-  MappingManager mappingManager;
-
-  /** Mapper. */
-  Mapper stockTicksMapper;
-
-  /** Mapper. */
-  Mapper stockInfoMapper;
-
-  /** Mapper. */
-  Mapper stock1minMapper;
-
-  /** Mapper. */
-  Mapper stock1hourMapper;
-
-  @PostConstruct
-  public void createTableifNotExist() {
-    // Metadata (Home page for webUI)
-    dseSession.execute(SchemaBuilder.createTable(STOCKS_INFOS)
-        .ifNotExists()
-        .addPartitionKey("exchange", DataType.text())
-        .addClusteringColumn("name", DataType.text())
-        .addColumn("industry", DataType.text())
-        .addColumn("symbol", DataType.text())
-        .withOptions()
-        .clusteringOrder("name", Direction.ASC)
-        .buildInternal());
-    LOGGER.info(" + Table '{}' created in keyspace '{}' (if needed)", STOCKS_INFOS, dseSession.getLoggedKeyspace());
-
-    // Random ticks where seed is last AlphaVantage
-    dseSession.execute(SchemaBuilder.createTable(STOCKS_TICKS)
-        .ifNotExists()
-        .addPartitionKey("symbol", DataType.text())
-        .addClusteringColumn("valueDate", DataType.timestamp())
-        .addColumn("value", DataType.cdouble())
-        .withOptions()
-        .clusteringOrder("valueDate", Direction.DESC)
-        .buildInternal());
-    LOGGER.info(" + Table '{}' created in keyspace '{}' (if needed)", STOCKS_TICKS, dseSession.getLoggedKeyspace());
-
-    // Create tables for histograms
-    createTableStocksIntervalIfNotExist(STOCKS_MINUTE);
-    LOGGER.info(" + Table '{}' created in keyspace '{}' (if needed)", STOCKS_MINUTE, dseSession.getLoggedKeyspace());
-
-    createTableStocksIntervalIfNotExist(STOCKS_HOUR);
-    LOGGER.info(" + Table '{}' created in keyspace '{}' (if needed)", STOCKS_HOUR, dseSession.getLoggedKeyspace());
-
-    // Init Mappers
-    stockTicksMapper = mappingManager.mapper(StockTick.class);
-    stockInfoMapper = mappingManager.mapper(StockInfo.class);
-
-    // Load CSV and fill table 'stocks_infos'
-    csvDao.readStockInfosFromCsv().forEach(this::saveStockInfo);
-    LOGGER.info(" + Table '{}' filled with symbols found in CSV.", STOCKS_INFOS);
-    LOGGER.info("Connection successfully established to DSE and schema has been created.", STOCKS_INFOS);
-  }
-
-  /**
-   * Creation of tables stocks_by*
-   */
-  private void createTableStocksIntervalIfNotExist(String tableName) {
-    dseSession.execute(SchemaBuilder.createTable(tableName)
-        .ifNotExists()
-        .addPartitionKey("symbol", DataType.text())
-        .addClusteringColumn("value_date", DataType.timestamp())
-        .addColumn("open", DataType.cdouble())
-        .addColumn("close", DataType.cdouble())
-        .addColumn("high", DataType.cdouble())
-        .addColumn("low", DataType.cdouble())
-        .addColumn("volume", DataType.bigint())
-        .withOptions()
-        .clusteringOrder("value_date", Direction.DESC)
-        .buildInternal());
-  }
-
-  public void saveTicker(StockTick tick) {
-    dseSession.executeAsync(stockTicksMapper.saveQuery(tick));
-  }
-
-  public void saveStock1Min(Stock quote) {
-    dseSession.executeAsync(stock1minMapper.saveQuery(new Stock1Min(quote)));
-  }
-
-  public void saveStock1Hour(Stock quote) {
-    dseSession.executeAsync(stock1hourMapper.saveQuery(new Stock1Hour(quote)));
-  }
-
-  public void saveStock1Day(Stock quote) {
-    dseSession.executeAsync(stock1hourMapper.saveQuery(new Stock1Hour(quote)));
-  }
-
-  public void saveStockInfo(StockInfo ti) {
-    dseSession.executeAsync(stockInfoMapper.saveQuery(ti));
-  }
-
-  public Set < String > getSymbolsNYSE() {
-    return dseSession.execute(
-        select("symbol").from(STOCKS_INFOS).where(eq("exchange", "NYSE")))
-        .all().stream()
-        .map(row->row.getString("symbol"))
-        .collect(Collectors.toSet());
-  }
+  /** Internal logger. */
+  private static final Logger LOGGER = LoggerFactory.getLogger(DseDao.class);
+
+  /** Hold Connectivity to DSE. */
+  @Autowired private DseSession dseSession;
+
+  /** Hold Connectivity to DSE.
*/ + @Autowired private CsvDao csvDao; + + private PreparedStatement insertIntoStockInfos; + private PreparedStatement insertIntoStockTicks; + private PreparedStatement insertIntoStocksMinute; + private PreparedStatement insertIntoStocksHour; + + @PostConstruct + public void createOrUpdateSchema() { + createTableStockInfosIfNotExists(); + createTableStockTicksIfNotExists(); + // Create tables for histograms + createTableStocksIntervalIfNotExists(STOCKS_MINUTE); + createTableStocksIntervalIfNotExists(STOCKS_HOUR); + prepareStatements(); + populateTableStockInfos(); + LOGGER.info("Connection established to DSE and schema successfully created or updated."); + } + + /** Metadata table (Home page for webUI) */ + private void createTableStockInfosIfNotExists() { + dseSession.execute( + createTable(STOCKS_INFOS) + .ifNotExists() + .withPartitionKey(EXCHANGE, DataTypes.TEXT) + .withClusteringColumn(NAME, DataTypes.TEXT) + .withColumn(INDUSTRY, DataTypes.TEXT) + .withColumn(SYMBOL, DataTypes.TEXT) + .withClusteringOrder(NAME, ClusteringOrder.ASC) + .build()); + LOGGER.info( + " + Table {} created in keyspace {} (if needed)", STOCKS_INFOS, dseSession.getKeyspace()); + } + + /** Random ticks where seed is last AlphaVantage */ + private void createTableStockTicksIfNotExists() { + dseSession.execute( + createTable(STOCKS_TICKS) + .ifNotExists() + .withPartitionKey(SYMBOL, DataTypes.TEXT) + .withClusteringColumn(VALUE_DATE, DataTypes.TIMESTAMP) + .withColumn(VALUE, DataTypes.DOUBLE) + .withClusteringOrder(VALUE_DATE, ClusteringOrder.DESC) + .build()); + LOGGER.info( + " + Table {} created in keyspace {} (if needed)", STOCKS_TICKS, dseSession.getKeyspace()); + } + + /** + * Creation of tables stocks_by* + * + * @param table the table name to create. + */ + private void createTableStocksIntervalIfNotExists(CqlIdentifier table) { + dseSession.execute( + createTable(table) + .ifNotExists() + .withPartitionKey(SYMBOL, DataTypes.TEXT) + .withClusteringColumn(VALUE_DATE, DataTypes.TIMESTAMP) + .withColumn(OPEN, DataTypes.DOUBLE) + .withColumn(CLOSE, DataTypes.DOUBLE) + .withColumn(HIGH, DataTypes.DOUBLE) + .withColumn(LOW, DataTypes.DOUBLE) + .withColumn(VOLUME, DataTypes.BIGINT) + .withClusteringOrder(VALUE_DATE, ClusteringOrder.DESC) + .build()); + LOGGER.info(" + Table {} created in keyspace {} (if needed)", table, dseSession.getKeyspace()); + } + + private void prepareStatements() { + insertIntoStockInfos = + dseSession.prepare( + insertInto(STOCKS_INFOS) + .value(EXCHANGE, bindMarker(EXCHANGE)) + .value(NAME, bindMarker(NAME)) + .value(INDUSTRY, bindMarker(INDUSTRY)) + .value(SYMBOL, bindMarker(SYMBOL)) + .build()); + insertIntoStockTicks = + dseSession.prepare( + insertInto(STOCKS_TICKS) + .value(SYMBOL, bindMarker(SYMBOL)) + .value(VALUE_DATE, bindMarker(VALUE_DATE)) + .value(VALUE, bindMarker(VALUE)) + .build()); + insertIntoStocksMinute = + dseSession.prepare( + insertInto(STOCKS_MINUTE) + .value(SYMBOL, bindMarker(SYMBOL)) + .value(VALUE_DATE, bindMarker(VALUE_DATE)) + .value(OPEN, bindMarker(OPEN)) + .value(CLOSE, bindMarker(CLOSE)) + .value(HIGH, bindMarker(HIGH)) + .value(LOW, bindMarker(LOW)) + .value(VOLUME, bindMarker(VOLUME)) + .build()); + insertIntoStocksHour = + dseSession.prepare( + insertInto(STOCKS_HOUR) + .value(SYMBOL, bindMarker(SYMBOL)) + .value(VALUE_DATE, bindMarker(VALUE_DATE)) + .value(OPEN, bindMarker(OPEN)) + .value(CLOSE, bindMarker(CLOSE)) + .value(HIGH, bindMarker(HIGH)) + .value(LOW, bindMarker(LOW)) + .value(VOLUME, bindMarker(VOLUME)) + .build()); + } + + /** Load CSV and 
fill table 'stocks_infos' */
+  private void populateTableStockInfos() {
+    csvDao.readStockInfosFromCsv().forEach(this::saveStockInfoAsync);
+    LOGGER.info(" + Table {} filled with symbols found in CSV.", STOCKS_INFOS);
+  }
+
+  public CompletionStage<StockTick> saveTickerAsync(StockTick tick) {
+    return dseSession
+        .executeAsync(
+            insertIntoStockTicks
+                .boundStatementBuilder()
+                .setString(SYMBOL, tick.getSymbol())
+                .setInstant(VALUE_DATE, tick.getValueDate())
+                .setDouble(VALUE, tick.getValue())
+                .build())
+        .thenApply(rs -> tick);
+  }
+
+  public CompletionStage<Stock> saveStock1MinAsync(Stock quote) {
+    return dseSession
+        .executeAsync(
+            insertIntoStocksMinute
+                .boundStatementBuilder()
+                .setString(SYMBOL, quote.getSymbol())
+                .setInstant(VALUE_DATE, quote.getValueDate())
+                .setDouble(OPEN, quote.getOpen())
+                .setDouble(CLOSE, quote.getClose())
+                .setDouble(HIGH, quote.getHigh())
+                .setDouble(LOW, quote.getLow())
+                .setLong(VOLUME, quote.getVolume())
+                .build())
+        .thenApply(rs -> quote);
+  }
+
+  public CompletionStage<Stock> saveStock1HourAsync(Stock quote) {
+    return dseSession
+        .executeAsync(
+            insertIntoStocksHour
+                .boundStatementBuilder()
+                .setString(SYMBOL, quote.getSymbol())
+                .setInstant(VALUE_DATE, quote.getValueDate())
+                .setDouble(OPEN, quote.getOpen())
+                .setDouble(CLOSE, quote.getClose())
+                .setDouble(HIGH, quote.getHigh())
+                .setDouble(LOW, quote.getLow())
+                .setLong(VOLUME, quote.getVolume())
+                .build())
+        .thenApply(rs -> quote);
+  }
+
+  public CompletionStage<StockInfo> saveStockInfoAsync(StockInfo info) {
+    return dseSession
+        .executeAsync(
+            insertIntoStockInfos
+                .boundStatementBuilder()
+                .setString(EXCHANGE, info.getExchange())
+                .setString(NAME, info.getName())
+                .setString(INDUSTRY, info.getIndustry())
+                .setString(SYMBOL, info.getSymbol())
+                .build())
+        .thenApply(rs -> info);
+  }
+
+  public Set<String> getSymbolsNYSE() {
+    return dseSession
+        .execute(
+            selectFrom(STOCKS_INFOS)
+                .column(SYMBOL)
+                .where(column(EXCHANGE).isEqualTo(literal("NYSE")))
+                .build())
+        .all()
+        .stream()
+        .map(row -> row.getString(SYMBOL))
+        .collect(Collectors.toSet());
+  }
 }
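
Note: DseDao now prepares each INSERT once at startup and binds values by name through boundStatementBuilder(), so every write is non-blocking and completes with the saved entity. A hypothetical caller sketch (it reuses the three-argument StockTick constructor shown in AlphaVantageDao; the join() is only there to keep the demo deterministic):

    import com.datastax.demo.dao.DseDao;
    import com.datastax.demo.domain.StockTick;
    import java.time.Instant;

    class SaveTickDemo {
      // Would live inside any Spring bean with an injected DseDao.
      static void persistSampleTick(DseDao dseDao) {
        StockTick tick = new StockTick("VLO", Instant.now(), 85.66);
        dseDao
            .saveTickerAsync(tick)
            .whenComplete(
                (saved, error) -> {
                  if (error != null) {
                    System.err.println("Write failed for " + tick.getSymbol() + ": " + error);
                  } else {
                    System.out.println("Persisted tick for " + saved.getSymbol());
                  }
                })
            .toCompletableFuture()
            .join(); // demo only: production code would stay asynchronous
      }
    }
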
diff --git a/kafka-dse-producer/src/main/java/com/datastax/demo/dao/KafkaDao.java b/kafka-dse-producer/src/main/java/com/datastax/demo/dao/KafkaDao.java
index 9572167..57d3e8a 100644
--- a/kafka-dse-producer/src/main/java/com/datastax/demo/dao/KafkaDao.java
+++ b/kafka-dse-producer/src/main/java/com/datastax/demo/dao/KafkaDao.java
@@ -1,27 +1,25 @@
 package com.datastax.demo.dao;
+import com.fasterxml.jackson.databind.JsonNode;
 import org.apache.kafka.clients.producer.KafkaProducer;
 import org.apache.kafka.clients.producer.ProducerRecord;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.stereotype.Repository;
-import com.fasterxml.jackson.databind.JsonNode;
-
 @Repository
 public class KafkaDao {
-
-  @Autowired
-  @Qualifier("producer.json")
-  protected KafkaProducer jsonProducer;
-
-  /**
-   * JSON PRODUCER.
-   *
-   * @param jsonMsg
-   */
-  public void sendJsonMessage(ProducerRecord jsonMsg) {
-    System.out.println(jsonMsg.topic());
-    jsonProducer.send(jsonMsg);
-  }
+
+  @Autowired
+  @Qualifier("producer.json")
+  private KafkaProducer<String, JsonNode> jsonProducer;
+
+  /**
+   * JSON PRODUCER.
+   *
+   * @param jsonMsg the message to send.
+   */
+  public void sendJsonMessage(ProducerRecord<String, JsonNode> jsonMsg) {
+    jsonProducer.send(jsonMsg);
+  }
 }
diff --git a/kafka-dse-producer/src/main/java/com/datastax/demo/route/ErrorHandlerProcessor.java b/kafka-dse-producer/src/main/java/com/datastax/demo/route/ErrorHandlerProcessor.java
index 8172b35..ca0e205 100644
--- a/kafka-dse-producer/src/main/java/com/datastax/demo/route/ErrorHandlerProcessor.java
+++ b/kafka-dse-producer/src/main/java/com/datastax/demo/route/ErrorHandlerProcessor.java
@@ -7,34 +7,27 @@
 import org.slf4j.LoggerFactory;
 import org.springframework.stereotype.Component;
-/**
- * Custom Behaviour to handle Error.
- */
+/** Custom Behaviour to handle Error. */
 @Component
 public class ErrorHandlerProcessor extends RouteBuilder implements Processor {
-  /** logger. */
-  private static Logger logger = LoggerFactory.getLogger(ErrorHandlerProcessor.class);
+  /** logger. */
+  private static final Logger LOGGER = LoggerFactory.getLogger(ErrorHandlerProcessor.class);
-  /** {@inheritDoc} */
-  public void configure() throws Exception {
-    errorHandler(deadLetterChannel("seda:errors"));
-    from("seda:errors").bean(this);
-  }
+  /** {@inheritDoc} */
+  public void configure() {
+    errorHandler(deadLetterChannel("seda:errors"));
+    from("seda:errors").bean(this);
+  }
-  /**
-   *
-   * @param exchange
-   *      current camel exchange
-   * @throws Exception
-   */
-  public void process(Exchange exchange) throws Exception {
-    Exception cause = exchange.getProperty(Exchange.EXCEPTION_CAUGHT, Exception.class);
-    if (cause != null) {
-      logger.error("A technical error has occurred: ", cause);
-    }
-    logger.error("ExchangeiD" + exchange.getExchangeId());
-    logger.error("Incoming" + exchange.getFromRouteId());
-  }
-
+  /** @param exchange current camel exchange */
+  @Override
+  public void process(Exchange exchange) {
+    Exception cause = exchange.getProperty(Exchange.EXCEPTION_CAUGHT, Exception.class);
+    if (cause != null) {
+      LOGGER.error("A technical error has occurred: ", cause);
+    }
+    LOGGER.info("ExchangeID: {}", exchange.getExchangeId());
+    LOGGER.info("Incoming route: {}", exchange.getFromRouteId());
+  }
 }
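
Note: deadLetterChannel("seda:errors") means any exchange whose processing fails is rerouted to that queue, where the bean above logs it. A minimal sketch of a route guarded the same way, with redeliveries added (the endpoint URIs are hypothetical; the builder methods are standard Camel DSL):

    import org.apache.camel.builder.RouteBuilder;

    public class GuardedRoute extends RouteBuilder {
      @Override
      public void configure() {
        // Retry three times with a one-second pause, then park the exchange on seda:errors.
        errorHandler(
            deadLetterChannel("seda:errors").maximumRedeliveries(3).redeliveryDelay(1000));
        from("timer:ticks?period=5000").to("bean:stockTicks.producer");
      }
    }
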
*/ @Component("stockTicks.consumer") public class StockTicksConsumer implements Processor { - - /** Internal logger. */ - private static final Logger LOGGER = LoggerFactory.getLogger(StockTicksConsumer.class); - - /** Json Jackson parser. */ - private static final ObjectMapper JACKSON_MAPPER = new ObjectMapper(); - - @Autowired - @Qualifier("consumer.json") - private KafkaConsumer kafkaConsumer; - - @Value("${kafka.topics.ticks}") - private String topicTicks; - - @Autowired - private DseDao dseDao; - - @PostConstruct - public void init() { - LOGGER.info("Start consuming events from topic '{}' ..", topicTicks); - kafkaConsumer.subscribe(Collections.singletonList(topicTicks)); - } - - /** {@inheritDoc} */ - public void process(Exchange exchange) throws Exception { - StreamSupport.stream(kafkaConsumer.poll(100).spliterator(), false) - .map(this::mapAsStockData) - .filter(Optional::isPresent) - .map(Optional::get) - .forEach(dseDao::saveTicker); - } - - /** - * Skip invalid messages. - * - * @param msg - * @return - */ - public Optional mapAsStockData(ConsumerRecord msg) { - Optional result = Optional.empty(); - try { - StockTick tick = JACKSON_MAPPER.treeToValue(msg.value(), StockTick.class); - result = Optional.of(tick); - } catch (JsonProcessingException e) { - LOGGER.warn("Message " + msg.value().asText() + " cannot be processed"); - } - return result; + + /** Internal logger. */ + private static final Logger LOGGER = LoggerFactory.getLogger(StockTicksConsumer.class); + + /** Json Jackson parser. */ + private static final ObjectMapper JACKSON_MAPPER = new ObjectMapper(); + + @Autowired + @Qualifier("consumer.json") + private KafkaConsumer kafkaConsumer; + + @Value("${kafka.topics.ticks}") + private String topicTicks; + + @Autowired private DseDao dseDao; + + @PostConstruct + public void init() { + LOGGER.info("Start consuming events from topic '{}' ..", topicTicks); + kafkaConsumer.subscribe(Collections.singletonList(topicTicks)); + } + + /** {@inheritDoc} */ + public void process(Exchange exchange) throws Exception { + StreamSupport.stream(kafkaConsumer.poll(100).spliterator(), false) + .map(this::mapAsStockData) + .filter(Optional::isPresent) + .map(Optional::get) + .forEach(dseDao::saveTickerAsync); + } + + /** + * Skip invalid messages. + * + * @param msg the received message. + * @return the stock tick object, or empty if the object could not be created. 
+   */
+  public Optional<StockTick> mapAsStockData(ConsumerRecord<String, JsonNode> msg) {
+    Optional<StockTick> result = Optional.empty();
+    try {
+      StockTick tick = JACKSON_MAPPER.treeToValue(msg.value(), StockTick.class);
+      result = Optional.of(tick);
+    } catch (JsonProcessingException e) {
+      LOGGER.warn("Message " + msg.value().asText() + " cannot be processed");
+    }
+    return result;
+  }
 }
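
Note: the pipeline above leans on Jackson's tree model: the producer serializes with valueToTree, the consumer rebuilds the bean with treeToValue and simply skips records that fail. A standalone round trip of that idiom (the Tick bean below is a stand-in for StockTick, with the date kept as epoch millis to avoid the java.time module question):

    import com.fasterxml.jackson.core.JsonProcessingException;
    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class TreeRoundTrip {
      public static class Tick { // stand-in for the demo's StockTick
        public String symbol;
        public long valueDate; // epoch millis
        public double value;
      }

      public static void main(String[] args) throws JsonProcessingException {
        ObjectMapper mapper = new ObjectMapper();
        Tick tick = new Tick();
        tick.symbol = "VLO";
        tick.valueDate = 1550244068123L;
        tick.value = 85.66;
        JsonNode node = mapper.valueToTree(tick); // producer side
        Tick back = mapper.treeToValue(node, Tick.class); // consumer side
        System.out.println(node + " -> " + back.symbol);
      }
    }
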
({} items)", symbols.size()); - - initialStockPrices = alphaVantageDao.getCurrentStockTicks(symbols) - .collect(Collectors.toMap(StockTick::getSymbol, Function.identity())); - LOGGER.info("Stocks initial prices retrieved from alphaVantage REST API."); - } - - /** {@inheritDoc} */ - @Override - public void process(Exchange exchange) throws Exception { - LOGGER.info("Pushing '{}' stocks ticks to Kafka topic '{}'", initialStockPrices.size(), topicTicks); - initialStockPrices.values().stream() - // Map to Avro Message - .map(this::mapAsProducerRecord) - // Send to Kafka - .forEach(kafkaDao::sendJsonMessage); - } - - public ProducerRecord mapAsProducerRecord(StockTick sTick) { - sTick.setValue(createRandomValue(sTick.getValue())); - sTick.setValueDate(System.currentTimeMillis()); - JsonNode jsonValue = JACKSON_MAPPER.valueToTree(sTick); - return new ProducerRecord(topicTicks, sTick.getSymbol(), jsonValue); - } - - /** - * Randomly making the stock evolving with random - */ - private double createRandomValue(double lastValue) { - double up = Math.random() * 2; - double percentMove = (Math.random() * 1.0) / 100; - if (up < 1) { - lastValue -= percentMove*lastValue; - } else { - lastValue += percentMove*lastValue; - } - return lastValue; + + /** Json Jackson parser. */ + private static final ObjectMapper JACKSON_MAPPER = new ObjectMapper(); + + /** Internal logger. */ + private final Logger LOGGER = LoggerFactory.getLogger(getClass().getName()); + + @Autowired protected DseDao dseDao; + + @Autowired protected AlphaVantageDao alphaVantageDao; + + @Autowired protected KafkaDao kafkaDao; + + @Value("${alphavantage.waitTime: 100 }") + protected int apiWaitTime; + + @Value("${kafka.topics.ticks}") + private String topicTicks; + + /** Ask to get prices. */ + private Map initialStockPrices = null; + + /** Initialize connection to API AlphaVantage */ + @PostConstruct + public void init() { + // Symbols in CSV FILE. + Set symbols = dseDao.getSymbolsNYSE(); + LOGGER.info("Symbols list retrieved from DSE. 
({} items)", symbols.size()); + + initialStockPrices = + alphaVantageDao + .getCurrentStockTicks(symbols) + .collect(Collectors.toMap(StockTick::getSymbol, Function.identity())); + LOGGER.info("Stocks initial prices retrieved from alphaVantage REST API."); + } + + /** {@inheritDoc} */ + @Override + public void process(Exchange exchange) throws Exception { + LOGGER.info( + "Pushing '{}' stocks ticks to Kafka topic '{}'", initialStockPrices.size(), topicTicks); + initialStockPrices + .values() + .stream() + // Map to Avro Message + .map(this::mapAsProducerRecord) + // Send to Kafka + .forEach(kafkaDao::sendJsonMessage); + } + + private ProducerRecord mapAsProducerRecord(StockTick sTick) { + sTick.setValue(createRandomValue(sTick.getValue())); + sTick.setValueDate(Instant.now()); + JsonNode jsonValue = JACKSON_MAPPER.valueToTree(sTick); + return new ProducerRecord<>(topicTicks, sTick.getSymbol(), jsonValue); + } + + /** Randomly making the stock evolving with random */ + private double createRandomValue(double lastValue) { + double up = Math.random() * 2; + double percentMove = (Math.random() * 1.0) / 100; + if (up < 1) { + lastValue -= percentMove * lastValue; + } else { + lastValue += percentMove * lastValue; } + return lastValue; + } } diff --git a/kafka-dse-producer/src/main/resources/logback.xml b/kafka-dse-producer/src/main/resources/logback.xml index ccf1c93..c1d9e35 100644 --- a/kafka-dse-producer/src/main/resources/logback.xml +++ b/kafka-dse-producer/src/main/resources/logback.xml @@ -1,20 +1,17 @@ + - - - - %d{HH:mm:ss.SSS} %magenta(%-5level) %cyan(%-45logger) : %msg%n - - - - - - - - - - - - - - - \ No newline at end of file + + + %d{HH:mm:ss.SSS} %magenta(%-5level) %cyan(%-45logger) : %msg%n + + + + + + + + + + + + diff --git a/kafka-dse-producer/src/test/java/com/datastax/demo/test/LoadAlphaVantageDataJob.java b/kafka-dse-producer/src/test/java/com/datastax/demo/test/LoadAlphaVantageDataJob.java index 3d42344..593a2d0 100644 --- a/kafka-dse-producer/src/test/java/com/datastax/demo/test/LoadAlphaVantageDataJob.java +++ b/kafka-dse-producer/src/test/java/com/datastax/demo/test/LoadAlphaVantageDataJob.java @@ -1,14 +1,12 @@ package com.datastax.demo.test; -import java.time.ZoneId; -import java.util.Date; - -import org.junit.Ignore; +import com.datastax.demo.conf.DseConfiguration; +import com.datastax.demo.dao.AlphaVantageDao; +import com.datastax.demo.dao.DseDao; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; -import org.junit.platform.runner.JUnitPlatform; -import org.junit.runner.RunWith; import org.patriques.AlphaVantageConnector; import org.patriques.BatchStockQuotes; import org.patriques.TimeSeries; @@ -16,83 +14,59 @@ import org.patriques.input.timeseries.OutputSize; import org.patriques.output.quote.BatchStockQuotesResponse; import org.patriques.output.quote.data.StockQuote; -import org.patriques.output.timeseries.data.StockData; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.TestPropertySource; import org.springframework.test.context.junit.jupiter.SpringExtension; -import com.datastax.demo.conf.DseConfiguration; -import com.datastax.demo.dao.DseDao; -import com.datastax.demo.domain.Stock; - -@RunWith(JUnitPlatform.class) @ExtendWith(SpringExtension.class) 
-@TestPropertySource(locations="/config-test.properties") -@ContextConfiguration(classes= {DseConfiguration.class, DseDao.class}) -@Ignore -public class LoadAlphaVantageDataJob { - - @Value("${alphavantage.apiKey}") - private String apiKey; - - @Value("${alphavantage.timeout}") - private int apiTimeout; - - @Autowired - private DseDao dseDao; - - private TimeSeries stockTimeSeries; - - @Test - public void runStocks() { - AlphaVantageConnector apiConnector = new AlphaVantageConnector("2HWDTH7BA7FRBP76", apiTimeout); - BatchStockQuotes bsq = new BatchStockQuotes(apiConnector); - BatchStockQuotesResponse res = bsq.quote(dseDao.getSymbolsNYSE().toArray(new String[] {})); - for (StockQuote sq : res.getStockQuotes()) { - System.out.println(sq.getSymbol() + "-" + sq.getTimestamp() + "-" + sq.getPrice()); - } - } - - @Test - @DisplayName("Test CSV Parson") - public void load1MinData() throws Exception { - AlphaVantageConnector apiConnector = new AlphaVantageConnector("2HWDTH7BA7FRBP76", apiTimeout); - stockTimeSeries = new TimeSeries(apiConnector); - for(String symbol : dseDao.getSymbolsNYSE()) { - System.out.println("Grabbing ... " + symbol); - Thread.sleep(1000); - try { - stockTimeSeries.intraDay(symbol, Interval.ONE_MIN, OutputSize.FULL) - .getStockData().stream() - .map(item -> mapToTick(symbol, item)) - .forEach(dseDao::saveStock1Min); - - /*stockTimeSeries.intraDay(symbol, Interval.SIXTY_MIN, OutputSize.FULL) - .getStockData().stream() - .map(item -> mapToTick(symbol, item)) - .forEach(dseDao::saveStock1Hour);*/ - } catch(RuntimeException error) { - System.out.println("Error FOR " + symbol + " " + error.getMessage()); - } - - } - } - - private Stock mapToTick(String symbol, StockData item) { - Stock tick = new Stock(); - tick.setSymbol(symbol); - tick.setClose(item.getClose()); - tick.setOpen(item.getOpen()); - tick.setLow(item.getLow()); - tick.setHigh(item.getHigh()); - tick.setVolume(item.getVolume()); - tick.setValueDate(Date.from(item.getDateTime().atZone(ZoneId.systemDefault()).toInstant())); - return tick; +@TestPropertySource(locations = "/config-test.properties") +@ContextConfiguration(classes = {DseConfiguration.class, DseDao.class}) +@Disabled +class LoadAlphaVantageDataJob { + + @Value("${alphavantage.apiKey}") + private String apiKey; + + @Value("${alphavantage.timeout}") + private int apiTimeout; + + @Autowired private DseDao dseDao; + + @Test + void runStocks() { + AlphaVantageConnector apiConnector = new AlphaVantageConnector("2HWDTH7BA7FRBP76", apiTimeout); + BatchStockQuotes bsq = new BatchStockQuotes(apiConnector); + BatchStockQuotesResponse res = bsq.quote(dseDao.getSymbolsNYSE().toArray(new String[] {})); + for (StockQuote sq : res.getStockQuotes()) { + System.out.println(sq.getSymbol() + "-" + sq.getTimestamp() + "-" + sq.getPrice()); } - - - + } + + @Test + @DisplayName("Test CSV Parson") + void load1MinData() throws Exception { + AlphaVantageConnector apiConnector = new AlphaVantageConnector("2HWDTH7BA7FRBP76", apiTimeout); + TimeSeries stockTimeSeries = new TimeSeries(apiConnector); + for (String symbol : dseDao.getSymbolsNYSE()) { + System.out.println("Grabbing ... 
" + symbol); + Thread.sleep(1000); + try { + stockTimeSeries + .intraDay(symbol, Interval.ONE_MIN, OutputSize.FULL) + .getStockData() + .stream() + .map(item -> AlphaVantageDao.mapStockDataAsStock(symbol, item)) + .forEach(dseDao::saveStock1MinAsync); + /*stockTimeSeries.intraDay(symbol, Interval.SIXTY_MIN, OutputSize.FULL) + .getStockData().stream() + .map(item -> mapToTick(symbol, item)) + .forEach(dseDao::saveStock1HourAsync);*/ + } catch (RuntimeException error) { + System.out.println("Error FOR " + symbol + " " + error.getMessage()); + } + } + } } diff --git a/kafka-dse-producer/src/test/java/com/datastax/demo/test/TestSendMessage.java b/kafka-dse-producer/src/test/java/com/datastax/demo/test/TestSendMessage.java index 1ad4d8b..256dde3 100644 --- a/kafka-dse-producer/src/test/java/com/datastax/demo/test/TestSendMessage.java +++ b/kafka-dse-producer/src/test/java/com/datastax/demo/test/TestSendMessage.java @@ -4,20 +4,23 @@ import static org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG; import static org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG; +import com.datastax.demo.conf.ProducerConfiguration; +import com.datastax.demo.dao.KafkaDao; +import com.datastax.demo.domain.StockTick; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import java.util.Collections; import java.util.Properties; import java.util.stream.StreamSupport; - import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.common.serialization.StringSerializer; -import org.junit.Ignore; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; -import org.junit.platform.runner.JUnitPlatform; -import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; @@ -25,96 +28,85 @@ import org.springframework.test.context.TestPropertySource; import org.springframework.test.context.junit.jupiter.SpringExtension; -import com.datastax.demo.conf.ProducerConfiguration; -import com.datastax.demo.dao.KafkaDao; -import com.datastax.demo.domain.StockTick; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; - /** - * - * LOCAL: - * kafka-topics --zookeeper localhost:2181 --list - * kafka-console-consumer --topic stocks-ticks --bootstrap-server localhost:9092 - * kafka-topics --zookeeper localhost:2181 --delete --topic stocks-ticks - * - * O - * List available Topics : - * /opt/kafka/bin/kafka-topics.sh --zookeeper zookeeper:2181 --list - * - * Consumer topic stock-ticks : - * /opt/kafka/bin/kafka-console-consumer.sh —-topic testTopic --zookeeper zookeeper:2181 - * - * Create messages in Kafka : - * /opt/kafka/bin/kafka-console-producer.sh --broker-list localhost:9092 --topic testTopic - * - * + * LOCAL: kafka-topics --zookeeper localhost:2181 --list kafka-console-consumer --topic stocks-ticks + * --bootstrap-server localhost:9092 kafka-topics --zookeeper localhost:2181 --delete --topic + * stocks-ticks + * + *

O List available Topics : /opt/kafka/bin/kafka-topics.sh --zookeeper zookeeper:2181 --list + * + *

Consumer topic stock-ticks : /opt/kafka/bin/kafka-console-consumer.sh —-topic testTopic + * --zookeeper zookeeper:2181 + * + *

Create messages in Kafka : /opt/kafka/bin/kafka-console-producer.sh --broker-list + * localhost:9092 --topic testTopic + * * @author cedricklunven */ -@RunWith(JUnitPlatform.class) @ExtendWith(SpringExtension.class) -@TestPropertySource(locations="/config-test.properties") -@ContextConfiguration(classes= {KafkaDao.class, ProducerConfiguration.class}) -@Ignore +@TestPropertySource(locations = "/config-test.properties") +@ContextConfiguration(classes = {KafkaDao.class, ProducerConfiguration.class}) +@Disabled public class TestSendMessage { - /** Json Jackson parser. */ - protected static final ObjectMapper JACKSON_MAPPER = new ObjectMapper(); - - @Value("${kafka.topics.ticks}") - private String topicTicks; - - @Autowired - @Qualifier("producer.json") - protected KafkaProducer jsonProducer; - - @Autowired - @Qualifier("consumer.json") - private KafkaConsumer kafkaConsumer; - - @Test - public void sendMessage() throws InterruptedException { - StockTick sampleTick = new StockTick(); - sampleTick.setSymbol("MST"); - sampleTick.setValue(10.0); - sampleTick.setValueDate(System.currentTimeMillis()); - - Properties props = new Properties(); - props.put(BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); - props.put(KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); - props.put(VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); - KafkaProducer p = new KafkaProducer<>(props); - p.send(new ProducerRecord("stocks-ticks", "VLO", "{\"symbol\":\"VLO\",\"valueDate\":1550244068123,\"value\":85.66046453803746}")); - p.close(); - - /* Send - JsonNode jsonValue = JACKSON_MAPPER.valueToTree(sampleTick); - for (int i=0;i<7;i++) { - jsonProducer.send(new ProducerRecord(topicTicks, - sampleTick.getSymbol(), jsonValue)); - System.out.println("Message sent to " + topicTicks); - } - jsonProducer.close();*/ - } - - @Test - public void receiveMessage() throws InterruptedException { - // Subscription - kafkaConsumer.subscribe(Collections.singletonList("stocks-ticks")); - System.out.println("Subscription Started to " + topicTicks); - StreamSupport.stream(kafkaConsumer.poll(100).spliterator(), false) - .map(this::mapAsStockData) - .forEach(tick -> System.out.println(tick.getSymbol())); - kafkaConsumer.close(); - } - - public StockTick mapAsStockData(ConsumerRecord msg) { - try { - return JACKSON_MAPPER.treeToValue(msg.value(), StockTick.class); - } catch (JsonProcessingException e) { - throw new IllegalArgumentException("Cannot map nack as StockData"); - } + /** Json Jackson parser. 
 */
+  private static final ObjectMapper JACKSON_MAPPER = new ObjectMapper();
+
+  @Value("${kafka.topics.ticks}")
+  private String topicTicks;
+
+  @Autowired
+  @Qualifier("producer.json")
+  protected KafkaProducer<String, JsonNode> jsonProducer;
+
+  @Autowired
+  @Qualifier("consumer.json")
+  private KafkaConsumer<String, JsonNode> kafkaConsumer;
+
+  @Test
+  void sendMessage() {
+
+    Properties props = new Properties();
+    props.put(BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
+    props.put(KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
+    props.put(VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
+    KafkaProducer<String, String> p = new KafkaProducer<>(props);
+    p.send(
+        new ProducerRecord<>(
+            "stocks-ticks",
+            "VLO",
+            "{\"symbol\":\"VLO\",\"valueDate\":1550244068123,\"value\":85.66046453803746}"));
+    p.close();
+
+    /*
+    // Send
+    StockTick sampleTick = new StockTick("MST", Instant.now(), 10.0);
+    JsonNode jsonValue = JACKSON_MAPPER.valueToTree(sampleTick);
+    for (int i = 0; i < 7; i++) {
+      jsonProducer.send(new ProducerRecord(topicTicks,
+          sampleTick.getSymbol(), jsonValue));
+      System.out.println("Message sent to " + topicTicks);
+    }
+    jsonProducer.close();
+    */
+  }
+
+  @Test
+  void receiveMessage() {
+    // Subscription
+    kafkaConsumer.subscribe(Collections.singletonList("stocks-ticks"));
+    System.out.println("Subscription Started to " + topicTicks);
+    StreamSupport.stream(kafkaConsumer.poll(100).spliterator(), false)
+        .map(this::mapAsStockData)
+        .forEach(tick -> System.out.println(tick.getSymbol()));
+    kafkaConsumer.close();
+  }
+
+  StockTick mapAsStockData(ConsumerRecord<String, JsonNode> msg) {
+    try {
+      return JACKSON_MAPPER.treeToValue(msg.value(), StockTick.class);
+    } catch (JsonProcessingException e) {
+      throw new IllegalArgumentException("Cannot map message as StockTick", e);
    }
+  }
 }
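
Note: after running sendMessage above, the quickest check is the kafka-console-consumer command from the class javadoc. A minimal Java equivalent of that check (broker address, group id and plain String deserialization are assumptions for a local setup):

    import java.util.Collections;
    import java.util.Properties;
    import org.apache.kafka.clients.consumer.ConsumerConfig;
    import org.apache.kafka.clients.consumer.KafkaConsumer;
    import org.apache.kafka.common.serialization.StringDeserializer;

    public class TicksPeek {
      public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "ticks-peek");
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
          consumer.subscribe(Collections.singletonList("stocks-ticks"));
          consumer.poll(1000).forEach(r -> System.out.println(r.key() + " -> " + r.value()));
        }
      }
    }
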
"/config-test.properties") +@ContextConfiguration(classes = {DseConfiguration.class, DseDao.class}) +class TimeSeriesMachineLearning { - @Test - public void testForecastTimeSeries() throws IOException { - // Create a timeSeries from Data in DSE - // OffsetDateTime startingDate = OffsetDateTime.of(LocalDateTime.of(2018, 11, 19, 0, 0), ZoneOffset.ofHours(0)); - // double[] myDoubles = {2, 2, 3}; - // TimeSeries timeSeries = TimeSeries.from(TimePeriod.oneHour(), startingDate, myDoubles); - TimeSeries timeSeries = TestData.livestock; - ArimaOrder modelOrder = ArimaOrder.order(0, 1, 1, 0, 1, 1); - Arima model = Arima.model(timeSeries, modelOrder); - System.out.println(model.aic()); // Get and display the model AIC - System.out.println(model.coefficients()); // Get and display the estimated coefficients - System.out.println(java.util.Arrays.toString(model.stdErrors())); - plot(model.predictionErrors()); - System.in.read(); - } + @Test + void testForecastTimeSeries() throws IOException { + // Create a timeSeries from Data in DSE + // OffsetDateTime startingDate = OffsetDateTime.of(LocalDateTime.of(2018, 11, 19, 0, 0), + // ZoneOffset.ofHours(0)); + // double[] myDoubles = {2, 2, 3}; + // TimeSeries timeSeries = TimeSeries.from(TimePeriod.oneHour(), startingDate, myDoubles); + TimeSeries timeSeries = TestData.livestock; + ArimaOrder modelOrder = ArimaOrder.order(0, 1, 1, 0, 1, 1); + Arima model = Arima.model(timeSeries, modelOrder); + System.out.println(model.aic()); // Get and display the model AIC + System.out.println(model.coefficients()); // Get and display the estimated coefficients + System.out.println(java.util.Arrays.toString(model.stdErrors())); + plot(model.predictionErrors()); + System.in.read(); + } } diff --git a/kafka-dse-webui/nohup.out b/kafka-dse-webui/nohup.out deleted file mode 100644 index ad7f1b8..0000000 --- a/kafka-dse-webui/nohup.out +++ /dev/null @@ -1,503 +0,0 @@ -[INFO] Scanning for projects... -[INFO] -[INFO] --------------------< com.datastax:kafka-dse-webui >-------------------- -[INFO] Building + kafka-dse-webui 6.7-SNAPSHOT -[INFO] --------------------------------[ jar ]--------------------------------- -[INFO] -[INFO] >>> spring-boot-maven-plugin:2.0.3.RELEASE:run (default-cli) > test-compile @ kafka-dse-webui >>> -[INFO] -[INFO] --- maven-resources-plugin:2.6:resources (default-resources) @ kafka-dse-webui --- -[WARNING] Using platform encoding (UTF-8 actually) to copy filtered resources, i.e. build is platform dependent! -[INFO] Copying 37 resources -[INFO] -[INFO] --- maven-compiler-plugin:3.7.0:compile (default-compile) @ kafka-dse-webui --- -[INFO] Nothing to compile - all classes are up to date -[INFO] -[INFO] --- maven-resources-plugin:2.6:testResources (default-testResources) @ kafka-dse-webui --- -[WARNING] Using platform encoding (UTF-8 actually) to copy filtered resources, i.e. build is platform dependent! -[INFO] Copying 2 resources -[INFO] -[INFO] --- maven-compiler-plugin:3.7.0:testCompile (default-testCompile) @ kafka-dse-webui --- -[INFO] Changes detected - recompiling the module! -[WARNING] File encoding has not been set, using platform encoding UTF-8, i.e. build is platform dependent! 
-[INFO] Compiling 1 source file to /Users/cedricklunven/dev/WORKSPACES/kafka-dse/kafka-dse-webui/target/test-classes -[INFO] -[INFO] <<< spring-boot-maven-plugin:2.0.3.RELEASE:run (default-cli) < test-compile @ kafka-dse-webui <<< -[INFO] -[INFO] -[INFO] --- spring-boot-maven-plugin:2.0.3.RELEASE:run (default-cli) @ kafka-dse-webui --- -[INFO] Attaching agents: [] - __ __ _ _____ -/ / /\ \ \___| |__ /\ /\ \_ \ -\ \/ \/ / _ \ '_ \/ / \ \ / /\/ - \ /\ / __/ |_) \ \_/ /\/ /_ - \/ \/ \___|_.__/ \___/\____/ - - Read Tick in DSE in realtime as Reactive - -17:37:12.188 INFO  com.datastax.demo.WebUiApplication  : Starting WebUiApplication on clunhost with PID 94085 (/Users/cedricklunven/dev/WORKSPACES/kafka-dse/kafka-dse-webui/target/classes started by cedricklunven in /Users/cedricklunven/dev/WORKSPACES/kafka-dse/kafka-dse-webui) -17:37:12.191 DEBUG com.datastax.demo.WebUiApplication  : Running with Spring Boot v2.0.3.RELEASE, Spring v5.0.8.RELEASE -17:37:12.192 INFO  com.datastax.demo.WebUiApplication  : No active profile set, falling back to default profiles: default -17:37:15.084 INFO  com.datastax.demo.conf.DseConfiguration  : Initializing connection to DSE Cluster -17:37:15.089 INFO  com.datastax.demo.conf.DseConfiguration  : + Contact Points : [127.0.0.1] -17:37:15.092 INFO  com.datastax.demo.conf.DseConfiguration  : + Listening Port : 9042 -17:37:15.565 INFO  com.datastax.demo.conf.DseConfiguration  : + Creating keyspace 'demo_kafka' (if needed) -17:37:15.652 INFO  com.datastax.demo.conf.DseConfiguration  : + Connection established to DSE Cluster \_0_/ in 568 millis. -17:37:17.643 INFO  com.datastax.demo.WebUiApplication  : Started WebUiApplication in 5.832 seconds (JVM running for 6.405) -17:40:09.932 ERROR com.datastax.driver.core.ControlConnection  : [Control connection] Cannot connect to any host, scheduling retry in 1000 milliseconds -17:40:09.932 ERROR com.datastax.driver.core.ControlConnection  : [Control connection] Cannot connect to any host, scheduling retry in 1000 milliseconds -17:40:10.938 ERROR com.datastax.driver.core.ControlConnection  : [Control connection] Cannot connect to any host, scheduling retry in 2000 milliseconds -17:40:10.938 ERROR com.datastax.driver.core.ControlConnection  : [Control connection] Cannot connect to any host, scheduling retry in 2000 milliseconds -17:40:12.214 ERROR org.springframework.boot.autoconfigure.web.reactive.error.DefaultErrorWebExceptionHandler : Failed to handle request [GET http://192.168.1.82:8082/tickers/stream/symbol/AIG] -org.springframework.data.cassandra.CassandraConnectionFailureException: Query; CQL [SELECT * FROM stocks_ticks WHERE symbol = ? 
LIMIT 100]; All host(s) tried for query failed (no host was tried); nested exception is com.datastax.driver.core.exceptions.NoHostAvailableException: All host(s) tried for query failed (no host was tried) - at org.springframework.data.cassandra.core.cql.CassandraExceptionTranslator.translate(CassandraExceptionTranslator.java:155) - at org.springframework.data.cassandra.core.cql.ReactiveCassandraAccessor.translate(ReactiveCassandraAccessor.java:149) - at org.springframework.data.cassandra.core.cql.ReactiveCqlTemplate.lambda$translateException$18(ReactiveCqlTemplate.java:754) - at reactor.core.publisher.Flux.lambda$onErrorMap$24(Flux.java:5424) - at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onError(FluxOnErrorResume.java:88) - at reactor.core.publisher.FluxOnAssembly$OnAssemblySubscriber.onError(FluxOnAssembly.java:455) - at reactor.core.publisher.FluxOnAssembly$OnAssemblySubscriber.onError(FluxOnAssembly.java:455) - at reactor.core.publisher.MonoFlatMapMany$FlatMapManyMain.onError(MonoFlatMapMany.java:193) - at reactor.core.publisher.FluxOnAssembly$OnAssemblySubscriber.onError(FluxOnAssembly.java:455) - at reactor.core.publisher.MonoCreate$DefaultMonoSink.error(MonoCreate.java:166) - at org.springframework.data.cassandra.core.cql.session.DefaultBridgedReactiveSession.lambda$adaptFuture$2(DefaultBridgedReactiveSession.java:238) - at com.google.common.util.concurrent.AbstractFuture.executeListener(AbstractFuture.java:817) - at com.google.common.util.concurrent.AbstractFuture.addListener(AbstractFuture.java:595) - at com.google.common.util.concurrent.AbstractFuture$TrustedFuture.addListener(AbstractFuture.java:96) - at org.springframework.data.cassandra.core.cql.session.DefaultBridgedReactiveSession.adaptFuture(DefaultBridgedReactiveSession.java:231) - at org.springframework.data.cassandra.core.cql.session.DefaultBridgedReactiveSession.lambda$execute$0(DefaultBridgedReactiveSession.java:171) - at reactor.core.publisher.MonoCreate.subscribe(MonoCreate.java:54) - at reactor.core.publisher.MonoOnAssembly.subscribe(MonoOnAssembly.java:76) - at reactor.core.publisher.MonoFlatMapMany.subscribe(MonoFlatMapMany.java:49) - at reactor.core.publisher.FluxOnAssembly.subscribe(FluxOnAssembly.java:252) - at reactor.core.publisher.FluxDefer.subscribe(FluxDefer.java:55) - at reactor.core.publisher.FluxOnAssembly.subscribe(FluxOnAssembly.java:252) - at reactor.core.publisher.FluxOnErrorResume.subscribe(FluxOnErrorResume.java:47) - at reactor.core.publisher.FluxOnAssembly.subscribe(FluxOnAssembly.java:252) - at reactor.core.publisher.FluxFlatMap.subscribe(FluxFlatMap.java:97) - at reactor.core.publisher.FluxOnAssembly.subscribe(FluxOnAssembly.java:252) - at reactor.core.publisher.FluxMapFuseable.subscribe(FluxMapFuseable.java:63) - at reactor.core.publisher.FluxOnAssembly.subscribe(FluxOnAssembly.java:252) - at org.springframework.http.server.reactive.ChannelSendOperator.subscribe(ChannelSendOperator.java:74) - at reactor.core.publisher.MonoOnErrorResume.subscribe(MonoOnErrorResume.java:44) - at reactor.core.publisher.MonoOnAssembly.subscribe(MonoOnAssembly.java:76) - at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:150) - at reactor.core.publisher.FluxOnAssembly$OnAssemblySubscriber.onNext(FluxOnAssembly.java:450) - at reactor.core.publisher.Operators$MonoSubscriber.complete(Operators.java:1083) - at reactor.core.publisher.MonoFlatMap$FlatMapInner.onNext(MonoFlatMap.java:241) - at reactor.core.publisher.FluxOnAssembly$OnAssemblySubscriber.onNext(FluxOnAssembly.java:450) 
- at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onNext(FluxOnErrorResume.java:73) - at reactor.core.publisher.FluxOnAssembly$OnAssemblySubscriber.onNext(FluxOnAssembly.java:450) - at reactor.core.publisher.FluxPeekFuseable$PeekFuseableSubscriber.onNext(FluxPeekFuseable.java:198) - at reactor.core.publisher.FluxOnAssembly$OnAssemblySubscriber.onNext(FluxOnAssembly.java:450) - at reactor.core.publisher.FluxPeekFuseable$PeekFuseableSubscriber.onNext(FluxPeekFuseable.java:198) - at reactor.core.publisher.FluxOnAssembly$OnAssemblySubscriber.onNext(FluxOnAssembly.java:450) - at reactor.core.publisher.Operators$MonoSubscriber.complete(Operators.java:1083) - at reactor.core.publisher.MonoIgnoreThen$ThenAcceptInner.onNext(MonoIgnoreThen.java:295) - at reactor.core.publisher.FluxOnAssembly$OnAssemblySubscriber.onNext(FluxOnAssembly.java:450) - at reactor.core.publisher.FluxOnAssembly$OnAssemblySubscriber.onNext(FluxOnAssembly.java:450) - at reactor.core.publisher.Operators$MonoSubscriber.complete(Operators.java:1083) - at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:144) - at reactor.core.publisher.FluxOnAssembly$OnAssemblySubscriber.onNext(FluxOnAssembly.java:450) - at reactor.core.publisher.Operators$MonoSubscriber.complete(Operators.java:1083) - at reactor.core.publisher.MonoZip$ZipCoordinator.signal(MonoZip.java:247) - at reactor.core.publisher.MonoZip$ZipInner.onNext(MonoZip.java:329) - at reactor.core.publisher.FluxOnAssembly$OnAssemblySubscriber.onNext(FluxOnAssembly.java:450) - at reactor.core.publisher.FluxPeekFuseable$PeekFuseableSubscriber.onNext(FluxPeekFuseable.java:198) - at reactor.core.publisher.FluxOnAssembly$OnAssemblySubscriber.onNext(FluxOnAssembly.java:450) - at reactor.core.publisher.FluxDefaultIfEmpty$DefaultIfEmptySubscriber.onNext(FluxDefaultIfEmpty.java:92) - at reactor.core.publisher.FluxOnAssembly$OnAssemblySubscriber.onNext(FluxOnAssembly.java:450) - at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onNext(FluxSwitchIfEmpty.java:67) - at reactor.core.publisher.FluxOnAssembly$OnAssemblySubscriber.onNext(FluxOnAssembly.java:450) - at reactor.core.publisher.FluxMap$MapSubscriber.onNext(FluxMap.java:108) - at reactor.core.publisher.FluxOnAssembly$OnAssemblySubscriber.onNext(FluxOnAssembly.java:450) - at reactor.core.publisher.Operators$ScalarSubscription.request(Operators.java:1640) - at reactor.core.publisher.FluxOnAssembly$OnAssemblySubscriber.request(FluxOnAssembly.java:532) - at reactor.core.publisher.FluxMap$MapSubscriber.request(FluxMap.java:149) - at reactor.core.publisher.FluxOnAssembly$OnAssemblySubscriber.request(FluxOnAssembly.java:532) - at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.set(Operators.java:1454) - at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.onSubscribe(Operators.java:1328) - at reactor.core.publisher.FluxOnAssembly$OnAssemblySubscriber.onSubscribe(FluxOnAssembly.java:516) - at reactor.core.publisher.FluxMap$MapSubscriber.onSubscribe(FluxMap.java:86) - at reactor.core.publisher.FluxOnAssembly$OnAssemblySubscriber.onSubscribe(FluxOnAssembly.java:516) - at reactor.core.publisher.MonoJust.subscribe(MonoJust.java:54) - at reactor.core.publisher.MonoCallableOnAssembly.subscribe(MonoCallableOnAssembly.java:82) - at reactor.core.publisher.MonoMap.subscribe(MonoMap.java:55) - at reactor.core.publisher.MonoOnAssembly.subscribe(MonoOnAssembly.java:76) - at reactor.core.publisher.MonoSwitchIfEmpty.subscribe(MonoSwitchIfEmpty.java:44) - at 
reactor.core.publisher.MonoOnAssembly.subscribe(MonoOnAssembly.java:76)
-	at reactor.core.publisher.MonoDefaultIfEmpty.subscribe(MonoDefaultIfEmpty.java:37)
-	at reactor.core.publisher.MonoPeekFuseable.subscribe(MonoPeekFuseable.java:74)
[... roughly 150 repeated reactor-core subscribe/onNext/onSubscribe assembly frames elided ...]
-	at reactor.ipc.netty.channel.ChannelOperations.applyHandler(ChannelOperations.java:380)
-	at reactor.ipc.netty.http.server.HttpServerOperations.onHandlerStart(HttpServerOperations.java:398)
-	at io.netty.util.concurrent.AbstractEventExecutor.safeExecute(AbstractEventExecutor.java:163)
-	at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:404)
-	at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:465)
-	at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:884)
-	at java.lang.Thread.run(Thread.java:748)
-	Suppressed: reactor.core.publisher.FluxOnAssembly$OnAssemblyException:
-Assembly trace from producer [reactor.core.publisher.MonoError] :
-	reactor.core.publisher.Mono.error(Mono.java:252)
-	reactor.core.publisher.Flux.lambda$onErrorMap$24(Flux.java:5424)
-	reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onError(FluxOnErrorResume.java:88)
-	reactor.core.publisher.MonoFlatMapMany$FlatMapManyMain.onError(MonoFlatMapMany.java:193)
-	reactor.core.publisher.MonoCreate$DefaultMonoSink.error(MonoCreate.java:166)
-	org.springframework.data.cassandra.core.cql.session.DefaultBridgedReactiveSession.lambda$adaptFuture$2(DefaultBridgedReactiveSession.java:238)
-	com.google.common.util.concurrent.AbstractFuture.executeListener(AbstractFuture.java:817)
-	com.google.common.util.concurrent.AbstractFuture.addListener(AbstractFuture.java:595)
-	com.google.common.util.concurrent.AbstractFuture$TrustedFuture.addListener(AbstractFuture.java:96)
-	org.springframework.data.cassandra.core.cql.session.DefaultBridgedReactiveSession.adaptFuture(DefaultBridgedReactiveSession.java:231)
-	org.springframework.data.cassandra.core.cql.session.DefaultBridgedReactiveSession.lambda$execute$0(DefaultBridgedReactiveSession.java:171)
-	reactor.core.publisher.MonoCreate.subscribe(MonoCreate.java:54)
-	reactor.core.publisher.MonoFlatMapMany.subscribe(MonoFlatMapMany.java:49)
[... roughly 100 repeated reactor-core assembly frames elided ...]
-	reactor.ipc.netty.channel.ChannelOperations.applyHandler(ChannelOperations.java:380)
-	reactor.ipc.netty.http.server.HttpServerOperations.onHandlerStart(HttpServerOperations.java:398)
-	io.netty.util.concurrent.AbstractEventExecutor.safeExecute(AbstractEventExecutor.java:163)
-	io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:404)
-	io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:465)
-	io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:884)
-Error has been observed by the following operator(s):
-	|_	MonoCreate$DefaultMonoSink.error(DefaultBridgedReactiveSession.java:238)
-	|_	Flux.onErrorMap(ReactiveCqlTemplate.java:368)
-	|_	Flux.flatMap(TickerController.java:47)
-	|_	Flux.map(ServerSentEventHttpMessageWriter.java:117)
-	|_	Mono.flatMap(DispatcherHandler.java:177)
-	|_	Mono.onErrorResume(DispatcherHandler.java:177)
-	|_	Mono.flatMap(DispatcherHandler.java:161)
-	|_	Mono.defer(DefaultWebFilterChain.java:72)
-	|_	Mono.doAfterSuccessOrError(HttpTraceWebFilter.java:99)
-	|_	Mono.flatMap(HttpTraceWebFilter.java:82)
-	|_	Mono.defer(DefaultWebFilterChain.java:72)
-	|_	Mono.doOnSuccess(MetricsWebFilter.java:64)
-	|_	Mono.doOnError(MetricsWebFilter.java:64)
-	|_	Mono.compose(MetricsWebFilter.java:58)
-	|_	Mono.defer(DefaultWebFilterChain.java:72)
-
-Caused by: com.datastax.driver.core.exceptions.NoHostAvailableException: All host(s) tried for query failed (no host was tried)
-	at com.datastax.driver.core.RequestHandler.reportNoMoreHosts(RequestHandler.java:223)
-	at com.datastax.driver.core.RequestHandler.access$1200(RequestHandler.java:41)
-	at com.datastax.driver.core.RequestHandler$SpeculativeExecution.findNextHostAndQuery(RequestHandler.java:309)
-	at com.datastax.driver.core.RequestHandler.startNewExecution(RequestHandler.java:111)
-	at com.datastax.driver.core.RequestHandler.sendRequest(RequestHandler.java:91)
-	at com.datastax.driver.core.SessionManager.executeAsync(SessionManager.java:124)
-	at com.datastax.driver.dse.DefaultDseSession.executeAsync(DefaultDseSession.java:223)
-	at org.springframework.data.cassandra.core.cql.session.DefaultBridgedReactiveSession.lambda$execute$0(DefaultBridgedReactiveSession.java:166)
-	... 173 common frames omitted
-	Suppressed: reactor.core.publisher.FluxOnAssembly$OnAssemblyException:
-Assembly trace from producer [reactor.core.publisher.MonoCreate] :
-	reactor.core.publisher.Mono.create(Mono.java:172)
-	org.springframework.data.cassandra.core.cql.session.DefaultBridgedReactiveSession.execute(DefaultBridgedReactiveSession.java:159)
-	org.springframework.data.cassandra.core.cql.ReactiveCqlTemplate.lambda$query$3(ReactiveCqlTemplate.java:367)
-	org.springframework.data.cassandra.core.cql.ReactiveCqlTemplate.lambda$createFlux$15(ReactiveCqlTemplate.java:710)
-	reactor.core.publisher.FluxDefer.subscribe(FluxDefer.java:47)
[... roughly 90 repeated reactor-core assembly frames elided ...]
-	reactor.ipc.netty.channel.ChannelOperations.applyHandler(ChannelOperations.java:380)
-	reactor.ipc.netty.http.server.HttpServerOperations.onHandlerStart(HttpServerOperations.java:398)
-	io.netty.util.concurrent.AbstractEventExecutor.safeExecute(AbstractEventExecutor.java:163)
-	io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:404)
-	io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:465)
-	io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:884)
-Error has been observed by the following operator(s):
-	|_	Mono.create(DefaultBridgedReactiveSession.java:159)
-	|_	Mono.flatMapMany(ReactiveCqlTemplate.java:367)
-	|_	Flux.defer(ReactiveCqlTemplate.java:710)
-
-17:40:12.940 ERROR com.datastax.driver.core.ControlConnection  : [Control connection] Cannot connect to any host, scheduling retry in 4000 milliseconds
-17:40:12.940 ERROR com.datastax.driver.core.ControlConnection  : [Control connection] Cannot connect to any host, scheduling retry in 4000 milliseconds
-17:40:16.943 ERROR com.datastax.driver.core.ControlConnection  : [Control connection] Cannot connect to any host, scheduling retry in 8000 milliseconds
-17:40:16.944 ERROR com.datastax.driver.core.ControlConnection  : [Control connection] Cannot connect to any host, scheduling retry in 8000 milliseconds
-17:40:24.949 ERROR com.datastax.driver.core.ControlConnection  : [Control connection] Cannot connect to any host, scheduling retry in 16000 milliseconds
-17:40:24.949 ERROR com.datastax.driver.core.ControlConnection  : [Control connection] Cannot connect to any host, scheduling retry in 16000 milliseconds
-17:40:40.950 ERROR com.datastax.driver.core.ControlConnection  : [Control connection] Cannot connect to any host, scheduling retry in 32000 milliseconds
-17:40:40.950 ERROR com.datastax.driver.core.ControlConnection  : [Control connection] Cannot connect to any host, scheduling retry in 32000 milliseconds
-17:41:12.953 ERROR com.datastax.driver.core.ControlConnection  : [Control connection] Cannot connect to any host, scheduling retry in 64000 milliseconds
-17:41:12.953 ERROR com.datastax.driver.core.ControlConnection  : [Control connection] Cannot connect to any host, scheduling retry in 64000 milliseconds
-Exception in thread "File Watcher" java.lang.IllegalArgumentException: Folder must not be a file
-	at org.springframework.util.Assert.isTrue(Assert.java:116)
-	at org.springframework.boot.devtools.filewatch.FolderSnapshot.<init>(FolderSnapshot.java:55)
-	at org.springframework.boot.devtools.filewatch.FileSystemWatcher$Watcher.getCurrentSnapshots(FileSystemWatcher.java:285)
-	at org.springframework.boot.devtools.filewatch.FileSystemWatcher$Watcher.scan(FileSystemWatcher.java:258)
-	at org.springframework.boot.devtools.filewatch.FileSystemWatcher$Watcher.run(FileSystemWatcher.java:243)
-	at java.lang.Thread.run(Thread.java:748)
diff --git a/kafka-dse-webui/pom.xml b/kafka-dse-webui/pom.xml
index e079993..87a7b2d 100644
--- a/kafka-dse-webui/pom.xml
+++ b/kafka-dse-webui/pom.xml
@@ -1,130 +1,115 @@
- 
- 
- 4.0.0
- 
kafka-dse-webui - + kafka-dse-webui - DAO, BEAN - - - com.datastax - kafka-dse-example - 6.7-SNAPSHOT - - - - 4.0.0-beta1 - - - - - - - com.datastax - kafka-dse-core - ${project.version} - - - - - com.datastax.oss - java-driver-core - ${dse-java-driver4.version} - - - com.datastax.oss - java-driver-query-builder - ${dse-java-driver4.version} - - - - - org.springframework.boot - spring-boot-starter-data-cassandra-reactive - - - org.springframework.boot - spring-boot-starter-webflux - - - org.springframework.boot - spring-boot-starter-thymeleaf - - - de.codecentric - spring-boot-admin-starter-client - 2.0.2 - - - org.springframework.boot - spring-boot-starter-actuator - - - - org.springframework.boot - spring-boot-devtools - true - - - - - org.webjars - bootstrap - 3.3.7 - runtime - - - org.webjars - highcharts - 5.0.8 - runtime - - - - - org.junit.platform - junit-platform-launcher - - - org.junit.platform - junit-platform-runner - - - org.junit.platform - junit-platform-console-standalone - - - org.junit.jupiter - junit-jupiter-engine - - - org.junit.jupiter - junit-jupiter-params - - - org.springframework.boot - spring-boot-starter-test - test - - - - - - - org.springframework.boot - spring-boot-maven-plugin - ${spring-boot.version} - - - - repackage - - - - - - - + + 4.0.0 + kafka-dse-webui + + kafka-dse-webui + DAO, BEAN + + com.datastax + kafka-dse-example + 6.7-SNAPSHOT + + + + + com.datastax + kafka-dse-core + ${project.version} + + + + com.datastax.dse + dse-java-driver-core + + + com.datastax.dse + dse-java-driver-reactor + + + com.datastax.oss + java-driver-core + + + com.datastax.oss + java-driver-query-builder + + + + org.springframework.boot + spring-boot-starter-webflux + + + org.springframework.boot + spring-boot-starter-thymeleaf + + + de.codecentric + spring-boot-admin-starter-client + 2.0.2 + + + org.springframework.boot + spring-boot-starter-actuator + + + org.springframework.boot + spring-boot-devtools + true + + + + org.webjars + bootstrap + 3.3.7 + runtime + + + org.webjars + highcharts + 5.0.8 + runtime + + + org.junit.jupiter + junit-jupiter-api + test + + + org.junit.jupiter + junit-jupiter-engine + test + + + org.junit.jupiter + junit-jupiter-params + test + + + org.springframework.boot + spring-boot-starter-test + test + + + junit + junit + + + + + + + + org.springframework.boot + spring-boot-maven-plugin + ${spring-boot.version} + + + + repackage + + + + + + diff --git a/kafka-dse-webui/src/main/java/com/datastax/demo/WebUiApplication.java b/kafka-dse-webui/src/main/java/com/datastax/demo/WebUiApplication.java index b9ab566..0e700e2 100644 --- a/kafka-dse-webui/src/main/java/com/datastax/demo/WebUiApplication.java +++ b/kafka-dse-webui/src/main/java/com/datastax/demo/WebUiApplication.java @@ -6,7 +6,7 @@ @SpringBootApplication public class WebUiApplication { - public static void main(String[] args) { - SpringApplication.run(WebUiApplication.class, args); - } + public static void main(String[] args) { + SpringApplication.run(WebUiApplication.class, args); + } } diff --git a/kafka-dse-webui/src/main/java/com/datastax/demo/conf/WebUiConfiguration.java b/kafka-dse-webui/src/main/java/com/datastax/demo/conf/WebUiConfiguration.java index 046821e..c88f15a 100644 --- a/kafka-dse-webui/src/main/java/com/datastax/demo/conf/WebUiConfiguration.java +++ b/kafka-dse-webui/src/main/java/com/datastax/demo/conf/WebUiConfiguration.java @@ -1,37 +1,33 @@ package com.datastax.demo.conf; +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import 
com.fasterxml.jackson.databind.ObjectMapper; import javax.annotation.PostConstruct; - import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.thymeleaf.spring5.SpringTemplateEngine; import org.thymeleaf.templateresolver.ClassLoaderTemplateResolver; -import com.fasterxml.jackson.annotation.JsonInclude.Include; -import com.fasterxml.jackson.databind.ObjectMapper; - @Configuration public class WebUiConfiguration { - @Autowired - private SpringTemplateEngine templateEngine; + @Autowired private SpringTemplateEngine templateEngine; + + @PostConstruct + public void extension() { + ClassLoaderTemplateResolver templateResolver = new ClassLoaderTemplateResolver(); + templateResolver.setTemplateMode("XHTML"); + templateResolver.setPrefix("views/view-"); + templateResolver.setSuffix(".html"); + templateResolver.setCacheTTLMs(3600000L); + templateEngine.addTemplateResolver(templateResolver); + } - @PostConstruct - public void extension() { - ClassLoaderTemplateResolver templateResolver = new ClassLoaderTemplateResolver(); - templateResolver.setTemplateMode("XHTML"); - templateResolver.setPrefix("views/view-"); - templateResolver.setSuffix(".html"); - templateResolver.setCacheTTLMs(3600000L); - templateEngine.addTemplateResolver(templateResolver); - } - - @Bean - public ObjectMapper customizeJacksonConfiguration() { - ObjectMapper mapper = new ObjectMapper(); - mapper.setSerializationInclusion(Include.NON_NULL); - return mapper; - } - + @Bean + public ObjectMapper customizeJacksonConfiguration() { + ObjectMapper mapper = new ObjectMapper(); + mapper.setSerializationInclusion(Include.NON_NULL); + return mapper; + } } diff --git a/kafka-dse-webui/src/main/java/com/datastax/demo/controller/ChartController.java b/kafka-dse-webui/src/main/java/com/datastax/demo/controller/ChartController.java index ebd7cb5..bcc30dd 100644 --- a/kafka-dse-webui/src/main/java/com/datastax/demo/controller/ChartController.java +++ b/kafka-dse-webui/src/main/java/com/datastax/demo/controller/ChartController.java @@ -7,17 +7,16 @@ /** * Home page - * - * List all ticker meta data from table 'ticker_info'. - * This tabke has been loaded by producer at startup with lines of the CSV. + * + *

List all ticker meta data from table 'ticker_info'. This table has been loaded by producer at + * startup with lines of the CSV. */ @Controller public class ChartController { - - @GetMapping("/chart/{symbol}") - public String get(Model model, @PathVariable("symbol") String symbol) throws Exception { - model.addAttribute("symbol", symbol); - return "chart"; - } + @GetMapping("/chart/{symbol}") + public String get(Model model, @PathVariable("symbol") String symbol) throws Exception { + model.addAttribute("symbol", symbol); + return "chart"; + } } diff --git a/kafka-dse-webui/src/main/java/com/datastax/demo/controller/HomeController.java b/kafka-dse-webui/src/main/java/com/datastax/demo/controller/HomeController.java index e7105c2..d2be5f8 100644 --- a/kafka-dse-webui/src/main/java/com/datastax/demo/controller/HomeController.java +++ b/kafka-dse-webui/src/main/java/com/datastax/demo/controller/HomeController.java @@ -1,29 +1,28 @@ package com.datastax.demo.controller; +import com.datastax.demo.dao.DseDao; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.web.bind.annotation.GetMapping; import org.thymeleaf.spring5.context.webflux.ReactiveDataDriverContextVariable; -import com.datastax.demo.dsedriver.RepositoryDseReactiveTicks; - /** * Home page - * - * List all ticker meta data from table 'ticker_info'. - * This tabke has been loaded by producer at startup with lines of the CSV. + * + *

List all ticker meta data from table 'ticker_info'. This table has been loaded by producer at + * startup with lines of the CSV. */ @Controller public class HomeController { - - @Autowired - private RepositoryDseReactiveTicks dseReactiveRepo; - - @GetMapping("/") - public String home(Model model) throws Exception { - model.addAttribute("tickerInfoList", new ReactiveDataDriverContextVariable( dseReactiveRepo.findAllTickerInfos(), 1)); - return "home"; - } + @Autowired private DseDao dseDao; + + @GetMapping("/") + public String home(Model model) { + model.addAttribute( + "tickerInfoList", + new ReactiveDataDriverContextVariable(dseDao.findStockInfosByExchange("NYSE"), 1)); + return "home"; + } } diff --git a/kafka-dse-webui/src/main/java/com/datastax/demo/controller/MultiChartController.java b/kafka-dse-webui/src/main/java/com/datastax/demo/controller/MultiChartController.java index 05f592d..ea6af2e 100644 --- a/kafka-dse-webui/src/main/java/com/datastax/demo/controller/MultiChartController.java +++ b/kafka-dse-webui/src/main/java/com/datastax/demo/controller/MultiChartController.java @@ -6,16 +6,15 @@ /** * Home page - * - * List all ticker meta data from table 'ticker_info'. - * This tabke has been loaded by producer at startup with lines of the CSV. + * + *

List all ticker meta data from table 'ticker_info'. This table has been loaded by producer at
+ * startup with lines of the CSV.
  */
 @Controller
 public class MultiChartController {
-
-  @GetMapping("/multichart")
-  public String get(Model model) throws Exception {
-    return "multichart";
-  }
+  @GetMapping("/multichart")
+  public String get(Model model) {
+    return "multichart";
+  }
 }
diff --git a/kafka-dse-webui/src/main/java/com/datastax/demo/controller/TickerController.java b/kafka-dse-webui/src/main/java/com/datastax/demo/controller/TickerController.java
index 10d7fe7..b156a20 100644
--- a/kafka-dse-webui/src/main/java/com/datastax/demo/controller/TickerController.java
+++ b/kafka-dse-webui/src/main/java/com/datastax/demo/controller/TickerController.java
@@ -1,59 +1,40 @@
 package com.datastax.demo.controller;
 
-import java.util.HashMap;
+import com.datastax.demo.dao.DseDao;
+import com.datastax.demo.domain.StockTick;
 import java.util.Map;
-
+import java.util.concurrent.ConcurrentHashMap;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.http.MediaType;
 import org.springframework.stereotype.Controller;
 import org.springframework.web.bind.annotation.GetMapping;
 import org.springframework.web.bind.annotation.PathVariable;
 import org.springframework.web.bind.annotation.ResponseBody;
-
-import com.datastax.demo.domain.StockTick;
-import com.datastax.demo.springdata.TickSpringDataRepository;
-
 import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
 
-/**
- * Service providing informations for UI.
- */
+/** Service providing information for the UI. */
 @Controller
 public class TickerController {
-
-  /** Map. */
-  private Map < String, Flux > symbolsFlux = new HashMap<>();
-
-  @Autowired
-  private TickSpringDataRepository springRepo;
-
-  @GetMapping(path = "/tickers/streams", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
-  @ResponseBody
-  public Flux fetchTickerSpringData() {
-    return springRepo.findAllLastSymbols().flatMap(tickData -> {
-      StockTick t = new StockTick();
-      t.setSymbol(tickData.getTickDataKey().getSymbol());
-      t.setValue(tickData.getValue());
-      t.setValueDate(tickData.getTickDataKey().getMyDate().getTime());
-      return Mono.just(t);
-    });
-  }
-
-  @GetMapping(path = "/tickers/stream/symbol/{symbol}", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
-  @ResponseBody
-  public Flux fetchTickerSpringData(@PathVariable("symbol") String symbol) {
-    if (!symbolsFlux.containsKey(symbol)) {
-      Flux < StockTick > springFeed = springRepo.findBySymbol(symbol).flatMap(tickData -> {
-        StockTick t = new StockTick();
-        t.setSymbol(tickData.getTickDataKey().getSymbol());
-        t.setValue(tickData.getValue());
-        t.setValueDate(tickData.getTickDataKey().getMyDate().getTime());
-        return Mono.just(t);
-      });
-      symbolsFlux.put(symbol, springFeed);
-    }
-    return symbolsFlux.get(symbol);
-  }
-}
\ No newline at end of file
+  /** Per-symbol cache of tick streams. */
+  // FIXME use Spring Cache
+  private Map<String, Flux<StockTick>> ticksBySymbolCache = new ConcurrentHashMap<>();
+
+  @Autowired private DseDao dseDao;
+
+  @GetMapping(path = "/tickers/streams", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
+  @ResponseBody
+  public Flux<StockTick> fetchLastTicks() {
+    return dseDao.findFirst500StockTicks();
+  }
+
+  @GetMapping(
+      path = "/tickers/stream/symbol/{symbol}",
+      produces = MediaType.TEXT_EVENT_STREAM_VALUE
+  )
+  @ResponseBody
+  public Flux<StockTick> fetchLastTicks(@PathVariable("symbol") String symbol) {
+    // Cache the Flux per symbol so concurrent SSE clients share a single query.
+    return ticksBySymbolCache.computeIfAbsent(
+        symbol, s -> dseDao.findFirst100StockTicksBySymbol(s).cache());
+  }
+}
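Aside on the controller above: the Flux.cache() call is what lets many SSE subscribers to the same symbol share one underlying driver query instead of each triggering their own. A self-contained Reactor sketch of that operator's behaviour (illustrative only, not part of this change; class name and values are invented):

import reactor.core.publisher.Flux;

public class CacheOperatorDemo {
  public static void main(String[] args) {
    Flux<Integer> shared =
        Flux.range(1, 3)
            .doOnSubscribe(s -> System.out.println("querying upstream..."))
            .cache(); // replays the same elements to every late subscriber

    shared.subscribe(v -> System.out.println("first subscriber: " + v));
    // No second "querying upstream..." line: cached elements are replayed.
    shared.subscribe(v -> System.out.println("second subscriber: " + v));
  }
}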
diff --git a/kafka-dse-webui/src/main/java/com/datastax/demo/dao/DseDao.java b/kafka-dse-webui/src/main/java/com/datastax/demo/dao/DseDao.java
new file mode 100644
index 0000000..ac7e81c
--- /dev/null
+++ b/kafka-dse-webui/src/main/java/com/datastax/demo/dao/DseDao.java
@@ -0,0 +1,191 @@
+package com.datastax.demo.dao;
+
+import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.bindMarker;
+import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal;
+import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.selectFrom;
+import static com.datastax.oss.driver.api.querybuilder.relation.Relation.column;
+
+import com.datastax.demo.conf.DseConstants;
+import com.datastax.demo.domain.Stock;
+import com.datastax.demo.domain.StockInfo;
+import com.datastax.demo.domain.StockTick;
+import com.datastax.dse.driver.api.reactor.ReactorDseSession;
+import com.datastax.oss.driver.api.core.cql.BoundStatement;
+import com.datastax.oss.driver.api.core.cql.PreparedStatement;
+import com.datastax.oss.driver.api.core.cql.Row;
+import com.datastax.oss.driver.api.core.cql.SimpleStatement;
+import java.time.Instant;
+import java.util.Objects;
+import javax.annotation.PostConstruct;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Repository;
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+
+@Repository
+public class DseDao implements DseConstants {
+
+  private static final SimpleStatement FIND_STOCK_INFO_BY_ID =
+      selectFrom(STOCKS_INFOS)
+          .columns(EXCHANGE, NAME, INDUSTRY, SYMBOL)
+          .where(
+              column(EXCHANGE).isEqualTo(bindMarker(EXCHANGE)),
+              column(NAME).isEqualTo(bindMarker(NAME)))
+          .build();
+
+  private static final SimpleStatement FIND_STOCK_INFOS_BY_EXCHANGE =
+      selectFrom(STOCKS_INFOS)
+          .columns(EXCHANGE, NAME, INDUSTRY, SYMBOL)
+          .where(column(EXCHANGE).isEqualTo(bindMarker(EXCHANGE)))
+          .build();
+
+  private static final SimpleStatement FIND_STOCK_TICK_BY_ID =
+      selectFrom(STOCKS_TICKS)
+          .columns(SYMBOL, VALUE_DATE, VALUE)
+          .where(
+              column(SYMBOL).isEqualTo(bindMarker(SYMBOL)),
+              column(VALUE_DATE).isEqualTo(bindMarker(VALUE_DATE)))
+          .build();
+
+  private static final SimpleStatement FIND_FIRST_100_STOCK_TICKS_BY_SYMBOL =
+      selectFrom(STOCKS_TICKS)
+          .columns(SYMBOL, VALUE_DATE, VALUE)
+          .where(column(SYMBOL).isEqualTo(bindMarker(SYMBOL)))
+          .limit(100)
+          .build();
+
+  private static final SimpleStatement FIND_FIRST_500_STOCK_TICKS =
+      selectFrom(STOCKS_TICKS)
+          .columns(SYMBOL, VALUE_DATE, VALUE)
+          .where(
+              column(SYMBOL)
+                  .in(
+                      literal("BAC"),
+                      literal("DVMT"),
+                      literal("DIS"),
+                      literal("IBM"),
+                      literal("WMT")))
+          .limit(500)
+          .build();
+
+  private static final SimpleStatement FIND_STOCK_MINUTE_BY_ID =
+      selectFrom(STOCKS_MINUTE)
+          .columns(SYMBOL, VALUE_DATE, OPEN, CLOSE, HIGH, LOW, VOLUME)
+          .where(
+              column(SYMBOL).isEqualTo(bindMarker(SYMBOL)),
+              column(VALUE_DATE).isEqualTo(bindMarker(VALUE_DATE)))
+          .build();
+
+  private static final SimpleStatement FIND_STOCK_HOUR_BY_ID =
+      selectFrom(STOCKS_HOUR)
+          .columns(SYMBOL, VALUE_DATE, OPEN, CLOSE, HIGH, LOW, VOLUME)
+          .where(
+              column(SYMBOL).isEqualTo(bindMarker(SYMBOL)),
+              column(VALUE_DATE).isEqualTo(bindMarker(VALUE_DATE)))
+          .build();
+
+  /** Hold Connectivity to DSE. */
+  @Autowired protected ReactorDseSession dseSession;
+
+  private PreparedStatement findStockInfoById;
+  private PreparedStatement findStockInfosByExchange;
+  private PreparedStatement findStockTickById;
+  private PreparedStatement findFirst100StockTicksBySymbol;
+  private PreparedStatement findFirst500StockTicks;
+  private PreparedStatement findStockMinuteById;
+  private PreparedStatement findStockHourById;
+
+  /** Prepare all statements once, at startup; each query then only binds values. */
+  @PostConstruct
+  public void prepareStatements() {
+    findStockInfoById = dseSession.prepare(FIND_STOCK_INFO_BY_ID);
+    findStockInfosByExchange = dseSession.prepare(FIND_STOCK_INFOS_BY_EXCHANGE);
+    findStockTickById = dseSession.prepare(FIND_STOCK_TICK_BY_ID);
+    findFirst100StockTicksBySymbol = dseSession.prepare(FIND_FIRST_100_STOCK_TICKS_BY_SYMBOL);
+    findFirst500StockTicks = dseSession.prepare(FIND_FIRST_500_STOCK_TICKS);
+    findStockMinuteById = dseSession.prepare(FIND_STOCK_MINUTE_BY_ID);
+    findStockHourById = dseSession.prepare(FIND_STOCK_HOUR_BY_ID);
+  }
+
+  public Mono<StockInfo> findStockInfoById(String exchange, String name) {
+    BoundStatement statement =
+        findStockInfoById
+            .boundStatementBuilder()
+            .setString(EXCHANGE, exchange)
+            .setString(NAME, name)
+            .build();
+    return dseSession.executeReactive(statement).map(DseDao::mapRowToStockInfo).singleOrEmpty();
+  }
+
+  public Flux<StockInfo> findStockInfosByExchange(String exchange) {
+    BoundStatement statement =
+        findStockInfosByExchange.boundStatementBuilder().setString(EXCHANGE, exchange).build();
+    return dseSession.executeReactive(statement).map(DseDao::mapRowToStockInfo);
+  }
+
+  public Mono<StockTick> findStockTickById(String symbol, Instant valueDate) {
+    BoundStatement statement =
+        findStockTickById
+            .boundStatementBuilder()
+            .setString(SYMBOL, symbol)
+            .setInstant(VALUE_DATE, valueDate)
+            .build();
+    return dseSession.executeReactive(statement).map(DseDao::mapRowToStockTick).singleOrEmpty();
+  }
+
+  public Flux<StockTick> findFirst100StockTicksBySymbol(String symbol) {
+    BoundStatement statement =
+        findFirst100StockTicksBySymbol.boundStatementBuilder().setString(SYMBOL, symbol).build();
+    return dseSession.executeReactive(statement).map(DseDao::mapRowToStockTick);
+  }
+
+  public Flux<StockTick> findFirst500StockTicks() {
+    // FIND_FIRST_500_STOCK_TICKS has no bind markers: symbols and limit are inlined.
+    BoundStatement statement = findFirst500StockTicks.bind();
+    return dseSession.executeReactive(statement).map(DseDao::mapRowToStockTick);
+  }
+
+  public Mono<Stock> findStockMinuteById(String symbol, Instant valueDate) {
+    BoundStatement statement =
+        findStockMinuteById
+            .boundStatementBuilder()
+            .setString(SYMBOL, symbol)
+            .setInstant(VALUE_DATE, valueDate)
+            .build();
+    return dseSession.executeReactive(statement).map(DseDao::mapRowToStock).singleOrEmpty();
+  }
+
+  public Mono<Stock> findStockHourById(String symbol, Instant valueDate) {
+    BoundStatement statement =
+        findStockHourById
+            .boundStatementBuilder()
+            .setString(SYMBOL, symbol)
+            .setInstant(VALUE_DATE, valueDate)
+            .build();
+    return dseSession.executeReactive(statement).map(DseDao::mapRowToStock).singleOrEmpty();
+  }
+
+  private static StockTick mapRowToStockTick(Row row) {
+    return new StockTick(
+        Objects.requireNonNull(row.getString(SYMBOL)),
+        Objects.requireNonNull(row.getInstant(VALUE_DATE)),
+        row.getDouble(VALUE));
+  }
+
+  private static Stock mapRowToStock(Row row) {
+    return new Stock(
+        Objects.requireNonNull(row.getString(SYMBOL)),
+        Objects.requireNonNull(row.getInstant(VALUE_DATE)),
+        row.getDouble(OPEN),
+        row.getDouble(CLOSE),
+        row.getDouble(LOW),
+        row.getDouble(HIGH),
+        row.getLong(VOLUME));
+  }
+
+  private static StockInfo mapRowToStockInfo(Row row) {
+    return new StockInfo(
+        Objects.requireNonNull(row.getString(EXCHANGE)),
+        Objects.requireNonNull(row.getString(NAME)),
+        Objects.requireNonNull(row.getString(SYMBOL)),
+        Objects.requireNonNull(row.getString(INDUSTRY)));
+  }
+}
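For completeness, a minimal way this DAO might be exercised end to end (a hypothetical smoke-test class, not part of this change; it assumes StockTick exposes getSymbol() and getValue() getters, and uses blockLast() only to keep the demo synchronous):

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;

@Component
public class DseDaoSmokeRunner implements CommandLineRunner {

  @Autowired private DseDao dseDao;

  @Override
  public void run(String... args) {
    // Stream the first 100 ticks for one symbol and print them as they arrive.
    dseDao
        .findFirst100StockTicksBySymbol("IBM")
        .doOnNext(tick -> System.out.println(tick.getSymbol() + " -> " + tick.getValue()))
        .blockLast(); // block only for this demo; real callers stay reactive
  }
}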
diff --git a/kafka-dse-webui/src/main/java/com/datastax/demo/dsedriver/DseReactiveDaoSupport.java b/kafka-dse-webui/src/main/java/com/datastax/demo/dsedriver/DseReactiveDaoSupport.java
deleted file mode 100644
index c427442..0000000
--- a/kafka-dse-webui/src/main/java/com/datastax/demo/dsedriver/DseReactiveDaoSupport.java
+++ /dev/null
@@ -1,162 +0,0 @@
-package com.datastax.demo.dsedriver;
-
-import static reactor.core.publisher.Mono.fromFuture;
-
-import java.util.concurrent.CompletableFuture;
-import java.util.concurrent.ExecutionException;
-
-import org.apache.tinkerpop.gremlin.structure.T;
-import org.springframework.beans.factory.annotation.Autowired;
-
-import com.datastax.demo.conf.DseConstants;
-import com.datastax.demo.domain.StockTick;
-import com.datastax.driver.core.ResultSet;
-import com.datastax.driver.core.querybuilder.QueryBuilder;
-import com.datastax.driver.core.querybuilder.Select;
-import com.datastax.driver.dse.DseSession;
-import com.datastax.driver.mapping.Mapper;
-import com.datastax.driver.mapping.MappingManager;
-import com.datastax.driver.mapping.Result;
-import com.datastax.driver.mapping.annotations.Table;
-import com.google.common.util.concurrent.FutureCallback;
-import com.google.common.util.concurrent.Futures;
-import com.google.common.util.concurrent.ListenableFuture;
-
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
-
-/**
- * Provides a way to convert objects returns by Java Driver to Mono Flux
- * expected by Reactor and Spring WebFlux.
- *
- * @author Cedrick Lunven <= for questions
- */
-public abstract class DseReactiveDaoSupport implements DseConstants {
-
-  /** Hold Connectivity to DSE. */
-  @Autowired
-  protected DseSession dseSession;
-
-  /** Hold Driver Mapper to implement ORM with Cassandra. 
*/ - @Autowired - protected MappingManager mappingManager; - - /** - * Work with simple CQL and ResultSet, will need some mapper afterward or return Flux - */ - protected Mono executeASync(ListenableFuture future) { - return Mono.create(sink -> { - try { - ListenableFuture resultSetFuture = Futures.transform(future, DseReactiveResultSet::new); - resultSetFuture.addListener(() -> { - if (resultSetFuture.isDone()) { - try { - sink.success(resultSetFuture.get()); - } - catch (ExecutionException cause) { - sink.error(cause.getCause()); - } - catch (Exception cause) { - sink.error(cause); - } - } - }, Runnable::run); - } - catch (Exception cause) { - sink.error(cause); - } - }); - } - - /** - * Working with {@link Mapper} - */ - @SuppressWarnings("hiding") - protected Mono> executeAndMapASync(ListenableFuture> resultAsyncMapper) { - return Mono.>create(sink -> { - try { - - ListenableFuture> resultSetFuture = Futures.transform(resultAsyncMapper, - DseReactiveResult::new); - - resultSetFuture.addListener(() -> { - if (resultSetFuture.isDone()) { - try { - sink.success(resultSetFuture.get()); - } - catch (ExecutionException cause) { - sink.error(cause.getCause()); - } - catch (Exception cause) { - sink.error(cause); - } - } - }, Runnable::run); - } - catch (Exception cause) { - sink.error(cause); - } - }); - } - - @SuppressWarnings("hiding") - protected Mono asMono(final ListenableFuture listenableFuture) { - return fromFuture(asCompletableFuture(listenableFuture)); - } - - @SuppressWarnings("hiding") - protected Flux asFlux(ListenableFuture> futureDriver) { - return executeAndMapASync(futureDriver) - .flatMapMany(DseReactiveResult::records) - .onErrorMap(throwable -> new IllegalStateException("Error during Select ticker info", throwable)); - } - - @SuppressWarnings("hiding") - protected CompletableFuture asCompletableFuture(final ListenableFuture listenableFuture) { - - //create an instance of CompletableFuture - CompletableFuture completable = new CompletableFuture() { - @Override - public boolean cancel(boolean mayInterruptIfRunning) { - // propagate cancel to the listenable future - boolean result = listenableFuture.cancel(mayInterruptIfRunning); - super.cancel(mayInterruptIfRunning); - return result; - } - }; - - // add callback - Futures.addCallback(listenableFuture, new FutureCallback() { - @Override - public void onSuccess(T result) { - completable.complete(result); - } - - @Override - public void onFailure(Throwable t) { - completable.completeExceptionally(t); - } - }); - return completable; - } - - /** - * A little bit 'to much' probably. - */ - @SuppressWarnings("hiding") - public Flux findAll(Class pojo) { - String tableName = StockTick.class.getAnnotation(Table.class).name(); - Select selectQuery = QueryBuilder.select().from(tableName); - return asFlux(mappingManager.mapper(pojo).mapAsync(dseSession.executeAsync(selectQuery))); - } - - /** - * Syntaxic sugar. - */ - @SuppressWarnings("hiding") - public Mono findOne(Class pojo, Object... 
args) { - return asMono(mappingManager.mapper(pojo).getAsync(args)); - } - - -} diff --git a/kafka-dse-webui/src/main/java/com/datastax/demo/dsedriver/DseReactiveResult.java b/kafka-dse-webui/src/main/java/com/datastax/demo/dsedriver/DseReactiveResult.java deleted file mode 100644 index 4d863bb..0000000 --- a/kafka-dse-webui/src/main/java/com/datastax/demo/dsedriver/DseReactiveResult.java +++ /dev/null @@ -1,83 +0,0 @@ -package com.datastax.demo.dsedriver; - -import java.util.concurrent.ExecutionException; - -import com.datastax.driver.mapping.Result; -import com.google.common.util.concurrent.ListenableFuture; - -import reactor.core.publisher.Flux; -import reactor.core.publisher.Mono; -import reactor.core.publisher.MonoProcessor; - -/** - * Work with Flux and Mono. - */ -public class DseReactiveResult < M > { - - /** Embedded result. */ - private final Result < M > results; - - /** - * Default constructor. - * @param results - */ - public DseReactiveResult(Result results) { - this.results = results; - } - - /** - * Create the flux with associated processor on next. - * - * @return - * la liste des records - */ - public Flux records() { - return getRecords(Mono.just(this.results)); - } - - /** - * Convert from Result to FLOW. - * - * @param nextResults - * list of elements - * @return - */ - private Flux getRecords(Mono> nextResults) { - return nextResults.flatMapMany(it -> { - Flux rows = DseReactiveResult.toRecords(it); - if (it.isFullyFetched()) { - return rows; - } - MonoProcessor> processor = MonoProcessor.create(); - return rows.doOnComplete(() -> DseReactiveResult.fetchMoreRecords(it.fetchMoreResults(), processor)) - .concatWith(getRecords(processor)); - }); - } - - static Flux toRecords(Result result) { - int prefetch = Math.max(1, result.getAvailableWithoutFetching()); - return Flux.fromIterable(result).take(prefetch); - } - - static public void fetchMoreRecords(ListenableFuture> future, MonoProcessor> sink) { - try { - future.addListener(() -> { - try { - System.out.println("FETCH"); - sink.onNext(future.get()); - sink.onComplete(); - } - catch (ExecutionException cause) { - sink.onError(cause.getCause()); - } - catch (Exception cause) { - sink.onError(cause); - } - }, Runnable::run); - } - catch (Exception cause) { - sink.onError(cause); - } - } - -} diff --git a/kafka-dse-webui/src/main/java/com/datastax/demo/dsedriver/DseReactiveResultSet.java b/kafka-dse-webui/src/main/java/com/datastax/demo/dsedriver/DseReactiveResultSet.java deleted file mode 100644 index cc55435..0000000 --- a/kafka-dse-webui/src/main/java/com/datastax/demo/dsedriver/DseReactiveResultSet.java +++ /dev/null @@ -1,85 +0,0 @@ -package com.datastax.demo.dsedriver; - -import java.util.concurrent.ExecutionException; - -import com.datastax.driver.core.ResultSet; -import com.datastax.driver.core.Row; -import com.google.common.util.concurrent.ListenableFuture; - -import reactor.core.publisher.Flux; -import reactor.core.publisher.Mono; -import reactor.core.publisher.MonoProcessor; - -/** - * ResultSet. - * - * @author DataStax Evangelist Team - */ -public class DseReactiveResultSet { - - /** simple resultset we are used to. */ - private final ResultSet resultSet; - - /** - * Reactive RS. - * - * @param resultSet - */ - public DseReactiveResultSet(ResultSet resultSet) { - this.resultSet = resultSet; - } - - /** - * Get rows. 
- * - * @return - * return flux of Rows with no mapping - */ - public Flux rows() { - return getRows(Mono.just(this.resultSet)); - } - - /** - * - * @param nextResults - * @return - */ - private Flux getRows(Mono nextResults) { - return nextResults.flatMapMany(it -> { - Flux rows = DseReactiveResultSet.toRows(it); - if (it.isFullyFetched()) { - return rows; - } - MonoProcessor processor = MonoProcessor.create(); - return rows.doOnComplete(() -> DseReactiveResultSet.fetchMoreRows(it.fetchMoreResults(), processor)) - .concatWith(getRows(processor)); - }); - } - - static Flux toRows(ResultSet resultSet) { - int prefetch = Math.max(1, resultSet.getAvailableWithoutFetching()); - return Flux.fromIterable(resultSet).take(prefetch); - } - - static public void fetchMoreRows(ListenableFuture future, MonoProcessor sink) { - try { - future.addListener(() -> { - try { - sink.onNext(future.get()); - sink.onComplete(); - } - catch (ExecutionException cause) { - sink.onError(cause.getCause()); - } - catch (Exception cause) { - sink.onError(cause); - } - }, Runnable::run); - } - catch (Exception cause) { - sink.onError(cause); - } - } - - -} diff --git a/kafka-dse-webui/src/main/java/com/datastax/demo/dsedriver/RepositoryDseReactiveTicks.java b/kafka-dse-webui/src/main/java/com/datastax/demo/dsedriver/RepositoryDseReactiveTicks.java deleted file mode 100644 index a0176bc..0000000 --- a/kafka-dse-webui/src/main/java/com/datastax/demo/dsedriver/RepositoryDseReactiveTicks.java +++ /dev/null @@ -1,46 +0,0 @@ -package com.datastax.demo.dsedriver; - -import org.springframework.stereotype.Repository; - -import com.datastax.demo.conf.DseConstants; -import com.datastax.demo.domain.StockTick; -import com.datastax.demo.domain.StockInfo; -import com.datastax.driver.core.querybuilder.QueryBuilder; -import com.datastax.driver.mapping.Result; -import com.google.common.util.concurrent.ListenableFuture; - -import reactor.core.publisher.Flux; -import reactor.core.publisher.Mono; - -@Repository -public class RepositoryDseReactiveTicks extends DseReactiveDaoSupport { - - /** - * Select * FROM ticker_info - * - * Full scan possible as tiny table with the metadata of the ticks. 
- */ - public Flux findAllTickerInfos() { - // Use Driver normally with Async operations and everything not blocking - ListenableFuture> futureDriver = - mappingManager.mapper(StockInfo.class) - .mapAsync(dseSession.executeAsync( - QueryBuilder.select() - .from(DseConstants.STOCKS_INFOS) - .where(QueryBuilder.eq("exchange", "NYSE")))); - return asFlux(futureDriver); - - // return findAll(TickerInfo.class); - } - - public Mono < StockInfo > findTickerInfo(String symbol) { - return findOne(StockInfo.class, symbol); - } - - public Flux findAllTickers() { - return findAll(StockTick.class); - } - - - -} diff --git a/kafka-dse-webui/src/main/java/com/datastax/demo/springdata/TickSpringDataRepository.java b/kafka-dse-webui/src/main/java/com/datastax/demo/springdata/TickSpringDataRepository.java deleted file mode 100644 index a72543b..0000000 --- a/kafka-dse-webui/src/main/java/com/datastax/demo/springdata/TickSpringDataRepository.java +++ /dev/null @@ -1,23 +0,0 @@ -package com.datastax.demo.springdata; - -import org.springframework.data.cassandra.repository.Query; -import org.springframework.data.repository.reactive.ReactiveCrudRepository; - -import com.datastax.demo.conf.DseConstants; -import com.datastax.demo.springdata.dto.TickData; -import com.datastax.demo.springdata.dto.TickDataPrimaryKey; - -import reactor.core.publisher.Flux; - -/** - * Retrieve all Tick from Table. - */ -public interface TickSpringDataRepository extends ReactiveCrudRepository { - - @Query("SELECT * FROM " + DseConstants.STOCKS_TICKS + " WHERE symbol = ?0 LIMIT 100") - Flux< TickData > findBySymbol(String symbol); - - @Query("SELECT * FROM " + DseConstants.STOCKS_TICKS + " WHERE symbol in ('BAC', 'DVMT','DIS','IBM','WMT') LIMIT 500") - Flux< TickData > findAllLastSymbols(); - -} \ No newline at end of file diff --git a/kafka-dse-webui/src/main/java/com/datastax/demo/springdata/dto/TickData.java b/kafka-dse-webui/src/main/java/com/datastax/demo/springdata/dto/TickData.java deleted file mode 100644 index ac3eaac..0000000 --- a/kafka-dse-webui/src/main/java/com/datastax/demo/springdata/dto/TickData.java +++ /dev/null @@ -1,87 +0,0 @@ -package com.datastax.demo.springdata.dto; - -import java.io.Serializable; -import java.util.Date; - -import org.springframework.data.cassandra.core.mapping.Column; -import org.springframework.data.cassandra.core.mapping.PrimaryKey; -import org.springframework.data.cassandra.core.mapping.Table; - -import com.datastax.demo.conf.DseConstants; - -/** - * Data into Cassandra. - * - * @author DataStax Evangelist Team - */ -@Table(value = DseConstants.STOCKS_TICKS) -public class TickData implements Serializable { - - /** Serial. */ - private static final long serialVersionUID = 6761984069893402714L; - - @PrimaryKey - private TickDataPrimaryKey tickDataKey; - - /** value. */ - @Column - private double value; - - /** Keep default. */ - public TickData() {} - - /** - * Common constructor - */ - public TickData(Date datetime, TickDataPrimaryKey tickDataKey, double value) { - this.tickDataKey = tickDataKey; - this.value = value; - } - - /** {@inheritDoc} */ - @Override - public String toString() { - return "TickData [" + - "symbol=" + tickDataKey + ", " + - "value=" + value + "]"; - } - - /** - * Getter accessor for attribute 'value'. - * - * @return - * current value of 'value' - */ - public double getValue() { - return value; - } - - /** - * Setter accessor for attribute 'value'. 
- * @param value - * new value for 'value ' - */ - public void setValue(double value) { - this.value = value; - } - - /** - * Getter accessor for attribute 'tickDataKey'. - * - * @return - * current value of 'tickDataKey' - */ - public TickDataPrimaryKey getTickDataKey() { - return tickDataKey; - } - - /** - * Setter accessor for attribute 'tickDataKey'. - * @param tickDataKey - * new value for 'tickDataKey ' - */ - public void setTickDataKey(TickDataPrimaryKey tickDataKey) { - this.tickDataKey = tickDataKey; - } - -} diff --git a/kafka-dse-webui/src/main/java/com/datastax/demo/springdata/dto/TickDataPrimaryKey.java b/kafka-dse-webui/src/main/java/com/datastax/demo/springdata/dto/TickDataPrimaryKey.java deleted file mode 100644 index 4ce5e5b..0000000 --- a/kafka-dse-webui/src/main/java/com/datastax/demo/springdata/dto/TickDataPrimaryKey.java +++ /dev/null @@ -1,71 +0,0 @@ -package com.datastax.demo.springdata.dto; - -import java.io.Serializable; -import java.util.Date; - -import org.springframework.data.cassandra.core.cql.PrimaryKeyType; -import org.springframework.data.cassandra.core.mapping.CassandraType; -import org.springframework.data.cassandra.core.mapping.PrimaryKeyClass; -import org.springframework.data.cassandra.core.mapping.PrimaryKeyColumn; - -import com.datastax.driver.core.DataType.Name; - -@PrimaryKeyClass -public class TickDataPrimaryKey implements Serializable { - - /** Serial. */ - private static final long serialVersionUID = 1142109498800363080L; - - /** - * Tick Data Partition Key - */ - @PrimaryKeyColumn(name = "symbol", ordinal = 0, type = PrimaryKeyType.PARTITIONED) - @CassandraType(type = Name.TEXT) - private String symbol; - - /** - * Tick Data Clustering Column - */ - @PrimaryKeyColumn(name = "valueDate", ordinal = 1, type = PrimaryKeyType.CLUSTERED) - @CassandraType(type = Name.TIMESTAMP) - private Date myDate; - - /** - * Getter accessor for attribute 'symbol'. - * - * @return - * current value of 'symbol' - */ - public String getSymbol() { - return symbol; - } - - /** - * Setter accessor for attribute 'symbol'. - * @param symbol - * new value for 'symbol ' - */ - public void setSymbol(String symbol) { - this.symbol = symbol; - } - - /** - * Getter accessor for attribute 'myDate'. - * - * @return - * current value of 'myDate' - */ - public Date getMyDate() { - return myDate; - } - - /** - * Setter accessor for attribute 'myDate'. - * @param myDate - * new value for 'myDate ' - */ - public void setMyDate(Date myDate) { - this.myDate = myDate; - } - -} diff --git a/kafka-dse-webui/src/main/java/com/datastax/demo/springdata/dto/TickerInfoSD.java b/kafka-dse-webui/src/main/java/com/datastax/demo/springdata/dto/TickerInfoSD.java deleted file mode 100644 index b03969f..0000000 --- a/kafka-dse-webui/src/main/java/com/datastax/demo/springdata/dto/TickerInfoSD.java +++ /dev/null @@ -1,95 +0,0 @@ -package com.datastax.demo.springdata.dto; - -import java.io.Serializable; - -import org.springframework.data.cassandra.core.mapping.Column; -import org.springframework.data.cassandra.core.mapping.PrimaryKey; -import org.springframework.data.cassandra.core.mapping.Table; - -import com.datastax.demo.conf.DseConstants; - -/** - * Data into Cassandra. - * - * @author DataStax Evangelist Team - */ -@Table(value = DseConstants.STOCKS_INFOS) -public class TickerInfoSD implements Serializable { - - /** Serial. */ - private static final long serialVersionUID = 6761984069893402714L; - - @PrimaryKey - private TickerInfoSDPrimaryKey tickerInfoDataKey; - - /** value. 
-  @Column
-  private String industry;
-
-  /** Exchange of the ticker. */
-  @Column
-  private String exchange;
-
-  /** Default constructor. */
-  public TickerInfoSD() {}
-
-  /**
-   * Getter accessor for attribute 'industry'.
-   *
-   * @return current value of 'industry'
-   */
-  public String getIndustry() {
-    return industry;
-  }
-
-  /**
-   * Setter accessor for attribute 'industry'.
-   *
-   * @param industry new value for 'industry'
-   */
-  public void setIndustry(String industry) {
-    this.industry = industry;
-  }
-
-  /**
-   * Getter accessor for attribute 'exchange'.
-   *
-   * @return current value of 'exchange'
-   */
-  public String getExchange() {
-    return exchange;
-  }
-
-  /**
-   * Setter accessor for attribute 'exchange'.
-   *
-   * @param exchange new value for 'exchange'
-   */
-  public void setExchange(String exchange) {
-    this.exchange = exchange;
-  }
-
-  /**
-   * Getter accessor for attribute 'tickerInfoDataKey'.
-   *
-   * @return current value of 'tickerInfoDataKey'
-   */
-  public TickerInfoSDPrimaryKey getTickerInfoDataKey() {
-    return tickerInfoDataKey;
-  }
-
-  /**
-   * Setter accessor for attribute 'tickerInfoDataKey'.
-   *
-   * @param tickerInfoDataKey new value for 'tickerInfoDataKey'
-   */
-  public void setTickerInfoDataKey(TickerInfoSDPrimaryKey tickerInfoDataKey) {
-    this.tickerInfoDataKey = tickerInfoDataKey;
-  }
-}
diff --git a/kafka-dse-webui/src/main/java/com/datastax/demo/springdata/dto/TickerInfoSDPrimaryKey.java b/kafka-dse-webui/src/main/java/com/datastax/demo/springdata/dto/TickerInfoSDPrimaryKey.java
deleted file mode 100644
index 105bf25..0000000
--- a/kafka-dse-webui/src/main/java/com/datastax/demo/springdata/dto/TickerInfoSDPrimaryKey.java
+++ /dev/null
@@ -1,70 +0,0 @@
-package com.datastax.demo.springdata.dto;
-
-import java.io.Serializable;
-
-import org.springframework.data.cassandra.core.cql.PrimaryKeyType;
-import org.springframework.data.cassandra.core.mapping.CassandraType;
-import org.springframework.data.cassandra.core.mapping.PrimaryKeyClass;
-import org.springframework.data.cassandra.core.mapping.PrimaryKeyColumn;
-
-import com.datastax.driver.core.DataType.Name;
-
-@PrimaryKeyClass
-public class TickerInfoSDPrimaryKey implements Serializable {
-
-  /** Serial. */
-  private static final long serialVersionUID = 1142109498800363080L;
-
-  /** Ticker info partition key. */
-  @PrimaryKeyColumn(name = "symbol", ordinal = 0, type = PrimaryKeyType.PARTITIONED)
-  @CassandraType(type = Name.TEXT)
-  private String symbol;
-
-  /** Ticker info clustering column. */
-  @PrimaryKeyColumn(name = "name", ordinal = 1, type = PrimaryKeyType.CLUSTERED)
-  @CassandraType(type = Name.TEXT)
-  private String name;
-
-  /**
-   * Getter accessor for attribute 'symbol'.
-   *
-   * @return current value of 'symbol'
-   */
-  public String getSymbol() {
-    return symbol;
-  }
-
-  /**
-   * Setter accessor for attribute 'symbol'.
-   *
-   * @param symbol new value for 'symbol'
-   */
-  public void setSymbol(String symbol) {
-    this.symbol = symbol;
-  }
-
-  /**
-   * Getter accessor for attribute 'name'.
-   *
-   * @return current value of 'name'
-   */
-  public String getName() {
-    return name;
-  }
-
-  /**
-   * Setter accessor for attribute 'name'.
-   *
-   * @param name new value for 'name'
-   */
-  public void setName(String name) {
-    this.name = name;
-  }
-}
diff --git a/kafka-dse-webui/src/main/resources/logback.xml b/kafka-dse-webui/src/main/resources/logback.xml
index 66d2d59..8626dec 100644
--- a/kafka-dse-webui/src/main/resources/logback.xml
+++ b/kafka-dse-webui/src/main/resources/logback.xml
@@ -1,16 +1,14 @@
-<configuration>
-  <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
-    <encoder>
-      <pattern>%d{HH:mm:ss.SSS} %magenta(%-5level) %cyan(%-45logger) : %msg%n</pattern>
-    </encoder>
-  </appender>
-  <root level="INFO">
-    <appender-ref ref="STDOUT" />
-  </root>
-</configuration>
\ No newline at end of file
+<?xml version="1.0" encoding="UTF-8"?>
+<configuration>
+  <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+    <encoder>
+      <pattern>%d{HH:mm:ss.SSS} %magenta(%-5level) %cyan(%-45logger) : %msg%n</pattern>
+    </encoder>
+  </appender>
+  <root level="INFO">
+    <appender-ref ref="STDOUT" />
+  </root>
+</configuration>
diff --git a/kafka-dse-webui/src/test/java/com/datastax/demo/test/Driver4ConnectivityTest.java b/kafka-dse-webui/src/test/java/com/datastax/demo/test/Driver4ConnectivityTest.java
index 6990621..ded5854 100644
--- a/kafka-dse-webui/src/test/java/com/datastax/demo/test/Driver4ConnectivityTest.java
+++ b/kafka-dse-webui/src/test/java/com/datastax/demo/test/Driver4ConnectivityTest.java
@@ -5,30 +5,30 @@
 @ExtendWith(SpringExtension.class)
 @TestPropertySource(locations="/config-test.properties")
 public class Driver4ConnectivityTest {
-  
+
   @Test
   @DisplayName("Read default application.conf file")
   public void testConnectDse() {
     //CqlSession.builder().withConfigLoader(configLoader)
     //DriverExecutionProfile defaultProfile = config.getDefaultProfile();
     //DriverExecutionProfile olapProfile = config.getProfile("olap");
-    
+
     try (CqlSession session = CqlSession.builder().build()) {
-      
+
       DriverConfig config = session.getContext().config();
       DriverExecutionProfile defaultProfile = config.getDefaultProfile();
       System.out.println(defaultProfile.getStringList(DefaultDriverOption.CONTACT_POINTS));
-      
-      ResultSet rs = session.execute("select release_version from system.local"); 
+
+      ResultSet rs = session.execute("select release_version from system.local");
       Row row = rs.one();
       System.out.println(row.getString("release_version"));
-      
+
       //DriverExecutionProfile olapProfile = config.getProfile("olap");
       //System.out.println(olapProfile.getString(DefaultDriverOption.REQUEST_TIMEOUT));
     }
-    
+
   }
-  
+
   @Test
   public void testAsyncDse() {
     CompletionStage<CqlSession> sessionStage = CqlSession.builder().buildAsync();
@@ -37,7 +37,7 @@ public void testAsyncDse() {
         sessionStage.executeAsync("SELECT release_version FROM system.local");
     resultStage.thenAccept(resultSet -> System.out.println(Thread.currentThread().getName()));
   }
-  
+
   import com.datastax.driver.core.Cluster;
   import com.datastax.driver.core.ResultSet;
   import com.datastax.driver.core.ResultSetFuture;
@@ -151,5 +151,5 @@ public String toString() {
     }
   }
 }
-  
+
 }*/
diff --git a/kafka-dse-webui/webui.log b/kafka-dse-webui/webui.log
deleted file mode 100644
index e69de29..0000000
diff --git a/pom.xml b/pom.xml
index a9e2a95..d2c7d1e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1,267 +1,263 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>com.datastax</groupId>
-  <artifactId>kafka-dse-example</artifactId>
-  <name>Kafka DataStax Sink Sample</name>
-  <packaging>pom</packaging>
-  <version>6.7-SNAPSHOT</version>
-
-  <organization>
-    <name>DataStax</name>
-    <url>https://datastax.com/</url>
-  </organization>
-  <inceptionYear>2018</inceptionYear>
-  <url>https://datastax.com</url>
-  <description>
-    Working portable demo to produce large amount of messages, sent into Kafka
-    Read those messages, put into DSE and display everything in Spring WebFlux UI
-  </description>
-
-  <modules>
-    <module>kafka-dse-core</module>
-    <module>kafka-dse-producer</module>
-    <module>kafka-dse-webui</module>
-  </modules>
-
-  <properties>
-    <dse-java-driver.version>1.6.8</dse-java-driver.version>
-
-    <spring.version>5.0.8.RELEASE</spring.version>
-    <spring-boot.version>2.0.3.RELEASE</spring-boot.version>
-    <logback.version>1.2.3</logback.version>
-    <jackson-dataformat.version>2.9.6</jackson-dataformat.version>
-    <camel.version>2.22.0</camel.version>
-    <kafka-connect.version>0.11.0.0</kafka-connect.version>
-    <kafka-avro.version>3.3.1</kafka-avro.version>
-
-    <junit-platform.version>1.2.0</junit-platform.version>
-    <junit-jupiter.version>5.2.0</junit-jupiter.version>
-
-    <maven-compiler-plugin.source>1.8</maven-compiler-plugin.source>
-    <maven-compiler-plugin.target>1.8</maven-compiler-plugin.target>
-    <maven-compiler-plugin.version>3.7.0</maven-compiler-plugin.version>
-    <maven-surefire-plugin.version>2.21.0</maven-surefire-plugin.version>
-  </properties>
-
-  <dependencyManagement>
-    <dependencies>
-
-      <dependency>
-        <groupId>com.datastax.dse</groupId>
-        <artifactId>dse-java-driver-core</artifactId>
-        <version>${dse-java-driver.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>com.datastax.dse</groupId>
-        <artifactId>dse-java-driver-mapping</artifactId>
-        <version>${dse-java-driver.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>com.datastax.dse</groupId>
-        <artifactId>dse-java-driver-extras</artifactId>
-        <version>${dse-java-driver.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>com.datastax.dse</groupId>
-        <artifactId>dse-java-driver-graph</artifactId>
-        <version>${dse-java-driver.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>org.springframework</groupId>
-        <artifactId>spring-core</artifactId>
-        <version>${spring.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.springframework</groupId>
-        <artifactId>spring-aop</artifactId>
-        <version>${spring.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.springframework</groupId>
-        <artifactId>spring-beans</artifactId>
-        <version>${spring.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.springframework</groupId>
-        <artifactId>spring-context</artifactId>
-        <version>${spring.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.springframework</groupId>
-        <artifactId>spring-expression</artifactId>
-        <version>${spring.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>org.springframework.boot</groupId>
-        <artifactId>spring-boot-dependencies</artifactId>
-        <version>${spring-boot.version}</version>
-        <type>pom</type>
-        <scope>import</scope>
-      </dependency>
-
-      <dependency>
-        <groupId>ch.qos.logback</groupId>
-        <artifactId>logback-classic</artifactId>
-        <version>${logback.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>ch.qos.logback</groupId>
-        <artifactId>logback-core</artifactId>
-        <version>${logback.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>org.junit.platform</groupId>
-        <artifactId>junit-platform-launcher</artifactId>
-        <version>${junit-platform.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.junit.platform</groupId>
-        <artifactId>junit-platform-runner</artifactId>
-        <version>${junit-platform.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.junit.platform</groupId>
-        <artifactId>junit-platform-console-standalone</artifactId>
-        <version>${junit-platform.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.junit.jupiter</groupId>
-        <artifactId>junit-jupiter-engine</artifactId>
-        <version>${junit-jupiter.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.junit.jupiter</groupId>
-        <artifactId>junit-jupiter-params</artifactId>
-        <version>${junit-jupiter.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>com.fasterxml.jackson.dataformat</groupId>
-        <artifactId>jackson-dataformat-csv</artifactId>
-        <version>${jackson-dataformat.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>org.apache.camel</groupId>
-        <artifactId>camel-spring-boot-starter</artifactId>
-        <version>${camel.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.camel</groupId>
-        <artifactId>camel-stream-starter</artifactId>
-        <version>${camel.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.camel</groupId>
-        <artifactId>camel-kafka</artifactId>
-        <version>${camel.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>org.apache.kafka</groupId>
-        <artifactId>connect-api</artifactId>
-        <version>${kafka-connect.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.kafka</groupId>
-        <artifactId>connect-json</artifactId>
-        <version>${kafka-connect.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>io.confluent</groupId>
-        <artifactId>kafka-avro-serializer</artifactId>
-        <version>${kafka-avro.version}</version>
-      </dependency>
-
-    </dependencies>
-  </dependencyManagement>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>${maven-compiler-plugin.version}</version>
-        <configuration>
-          <source>${maven-compiler-plugin.source}</source>
-          <target>${maven-compiler-plugin.target}</target>
-          <showWarnings>true</showWarnings>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-surefire-plugin</artifactId>
-        <version>${maven-surefire-plugin.version}</version>
-        <dependencies>
-          <dependency>
-            <groupId>org.junit.platform</groupId>
-            <artifactId>junit-platform-surefire-provider</artifactId>
-            <version>${junit-platform.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.junit.jupiter</groupId>
-            <artifactId>junit-jupiter-engine</artifactId>
-            <version>${junit-jupiter.version}</version>
-          </dependency>
-        </dependencies>
-      </plugin>
-    </plugins>
-  </build>
-
-  <repositories>
-    <repository>
-      <id>confluent</id>
-      <url>http://packages.confluent.io/maven/</url>
-    </repository>
-    <repository>
-      <id>central</id>
-      <url>https://repo1.maven.org/maven2/</url>
-    </repository>
-  </repositories>
-
-</project>
\ No newline at end of file
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>com.datastax</groupId>
+  <artifactId>kafka-dse-example</artifactId>
+  <name>Kafka DataStax Sink Sample</name>
+  <packaging>pom</packaging>
+  <version>6.7-SNAPSHOT</version>
+
+  <organization>
+    <name>DataStax</name>
+    <url>https://datastax.com/</url>
+  </organization>
+  <inceptionYear>2018</inceptionYear>
+  <url>https://datastax.com</url>
+  <description>Working portable demo to produce large amount of messages, sent into Kafka Read those messages, put into DSE and display everything in Spring WebFlux UI</description>
+
+  <modules>
+    <module>kafka-dse-core</module>
+    <module>kafka-dse-producer</module>
+    <module>kafka-dse-webui</module>
+  </modules>
+
+  <properties>
+    <dse-java-driver.version>2.0.0-rc1-a76f6cf8b-kafka-demo</dse-java-driver.version>
+    <oss-java-driver.version>4.0.0-rc1-d17ac777b-kafka-demo</oss-java-driver.version>
+
+    <spring.version>5.0.8.RELEASE</spring.version>
+    <spring-boot.version>2.0.3.RELEASE</spring-boot.version>
+    <logback.version>1.2.3</logback.version>
+    <jackson-dataformat.version>2.9.6</jackson-dataformat.version>
+    <camel.version>2.22.0</camel.version>
+    <kafka-connect.version>0.11.0.0</kafka-connect.version>
+    <kafka-avro.version>3.3.1</kafka-avro.version>
+
+    <junit-jupiter.version>5.4.0</junit-jupiter.version>
+
+    <maven-compiler-plugin.source>1.8</maven-compiler-plugin.source>
+    <maven-compiler-plugin.target>1.8</maven-compiler-plugin.target>
+    <maven-compiler-plugin.version>3.7.0</maven-compiler-plugin.version>
+    <maven-surefire-plugin.version>2.22.1</maven-surefire-plugin.version>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+    <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+  </properties>
+
+  <dependencyManagement>
+    <dependencies>
+
+      <dependency>
+        <groupId>com.datastax.dse</groupId>
+        <artifactId>dse-java-driver-core</artifactId>
+        <version>${dse-java-driver.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>com.datastax.dse</groupId>
+        <artifactId>dse-java-driver-reactor</artifactId>
+        <version>${dse-java-driver.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>com.datastax.oss</groupId>
+        <artifactId>java-driver-core</artifactId>
+        <version>${oss-java-driver.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>com.datastax.oss</groupId>
+        <artifactId>java-driver-query-builder</artifactId>
+        <version>${oss-java-driver.version}</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.springframework</groupId>
+        <artifactId>spring-core</artifactId>
+        <version>${spring.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.springframework</groupId>
+        <artifactId>spring-aop</artifactId>
+        <version>${spring.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.springframework</groupId>
+        <artifactId>spring-beans</artifactId>
+        <version>${spring.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.springframework</groupId>
+        <artifactId>spring-context</artifactId>
+        <version>${spring.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.springframework</groupId>
+        <artifactId>spring-expression</artifactId>
+        <version>${spring.version}</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.springframework.boot</groupId>
+        <artifactId>spring-boot-dependencies</artifactId>
+        <version>${spring-boot.version}</version>
+        <type>pom</type>
+        <scope>import</scope>
+      </dependency>
+
+      <dependency>
+        <groupId>ch.qos.logback</groupId>
+        <artifactId>logback-classic</artifactId>
+        <version>${logback.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>ch.qos.logback</groupId>
+        <artifactId>logback-core</artifactId>
+        <version>${logback.version}</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.junit.jupiter</groupId>
+        <artifactId>junit-jupiter-api</artifactId>
+        <version>${junit-jupiter.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.junit.jupiter</groupId>
+        <artifactId>junit-jupiter-engine</artifactId>
+        <version>${junit-jupiter.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.junit.jupiter</groupId>
+        <artifactId>junit-jupiter-params</artifactId>
+        <version>${junit-jupiter.version}</version>
+      </dependency>
+
+      <dependency>
+        <groupId>com.fasterxml.jackson.dataformat</groupId>
+        <artifactId>jackson-dataformat-csv</artifactId>
+        <version>${jackson-dataformat.version}</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.apache.camel</groupId>
+        <artifactId>camel-spring-boot-starter</artifactId>
+        <version>${camel.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.camel</groupId>
+        <artifactId>camel-stream-starter</artifactId>
+        <version>${camel.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.camel</groupId>
+        <artifactId>camel-kafka</artifactId>
+        <version>${camel.version}</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.apache.kafka</groupId>
+        <artifactId>connect-api</artifactId>
+        <version>${kafka-connect.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.kafka</groupId>
+        <artifactId>connect-json</artifactId>
+        <version>${kafka-connect.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>io.confluent</groupId>
+        <artifactId>kafka-avro-serializer</artifactId>
+        <version>${kafka-avro.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-log4j12</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
+
+    </dependencies>
+  </dependencyManagement>
+
+  <build>
+    <pluginManagement>
+      <plugins>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-compiler-plugin</artifactId>
+          <version>${maven-compiler-plugin.version}</version>
+          <configuration>
+            <source>${maven-compiler-plugin.source}</source>
+            <target>${maven-compiler-plugin.target}</target>
+            <showWarnings>true</showWarnings>
+          </configuration>
+        </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-surefire-plugin</artifactId>
+          <version>${maven-surefire-plugin.version}</version>
+        </plugin>
+        <plugin>
+          <groupId>com.coveo</groupId>
+          <artifactId>fmt-maven-plugin</artifactId>
+          <version>2.2.0</version>
+        </plugin>
+        <plugin>
+          <groupId>au.com.acegi</groupId>
+          <artifactId>xml-format-maven-plugin</artifactId>
+          <version>3.0.7</version>
+        </plugin>
+      </plugins>
+    </pluginManagement>
+
+    <plugins>
+      <plugin>
+        <groupId>com.coveo</groupId>
+        <artifactId>fmt-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <goals>
+              <goal>format</goal>
+            </goals>
+            <phase>process-sources</phase>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>au.com.acegi</groupId>
+        <artifactId>xml-format-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>xml-format</id>
+            <goals>
+              <goal>xml-format</goal>
+            </goals>
+            <phase>process-sources</phase>
+            <configuration>
+              <indentSize>2</indentSize>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+  <repositories>
+    <repository>
+      <id>confluent</id>
+      <url>http://packages.confluent.io/maven/</url>
+    </repository>
+    <repository>
+      <id>central</id>
+      <url>https://repo1.maven.org/maven2/</url>
+    </repository>
+  </repositories>
+
+</project>
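Notes on the migration patterns in this patch follow; none of the code below is part of the diff itself.

With the Spring Data entities deleted above (TickData, TickDataPrimaryKey, TickerInfoSD), row mapping moves to the unified driver 4 API. A minimal sketch of a replacement value object, assuming the column names from the deleted mapping ("symbol", "valueDate", "value"); the class name Tick and the fromRow helper are illustrative, not part of the patch:

import com.datastax.oss.driver.api.core.cql.Row;
import java.time.Instant;

/** Hypothetical stand-in for the deleted Spring Data TickData/TickDataPrimaryKey pair. */
public final class Tick {

  private final String symbol;
  private final Instant valueDate;
  private final double value;

  public Tick(String symbol, Instant valueDate, double value) {
    this.symbol = symbol;
    this.valueDate = valueDate;
    this.value = value;
  }

  /** Maps a driver 4 Row into the value object (column names assumed from the old mapping). */
  public static Tick fromRow(Row row) {
    return new Tick(
        row.getString("symbol"),
        row.getInstant("valueDate"), // driver 4 exposes CQL timestamp as java.time.Instant
        row.getDouble("value"));
  }

  @Override
  public String toString() {
    return "Tick[symbol=" + symbol + ", valueDate=" + valueDate + ", value=" + value + "]";
  }
}

Note the type change this forces: the deleted entities used java.util.Date for the timestamp clustering column, while driver 4's Row.getInstant returns java.time.Instant.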
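The deleted TickSpringDataRepository.findBySymbol @Query can be rebuilt with the java-driver-query-builder artifact this patch adds. A sketch against the 4.x query-builder API, assuming the table literal "stocks_ticks" behind DseConstants.STOCKS_TICKS and a keyspace named "demo"; both names are placeholders:

import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.cql.ResultSet;
import com.datastax.oss.driver.api.core.cql.SimpleStatement;
import com.datastax.oss.driver.api.querybuilder.QueryBuilder;
import com.datastax.oss.driver.api.querybuilder.relation.Relation;

/** Hypothetical query-builder equivalent of the deleted @Query("... WHERE symbol = ?0 LIMIT 100"). */
public final class TickQueries {

  public static SimpleStatement lastTicksOf(String symbol) {
    return QueryBuilder.selectFrom("stocks_ticks") // placeholder for DseConstants.STOCKS_TICKS
        .all()
        .where(Relation.column("symbol").isEqualTo(QueryBuilder.literal(symbol)))
        .limit(100)
        .build();
  }

  public static void main(String[] args) {
    // Contact points come from application.conf, as in Driver4ConnectivityTest.
    try (CqlSession session = CqlSession.builder().withKeyspace("demo").build()) {
      ResultSet rs = session.execute(lastTicksOf("IBM"));
      rs.forEach(row -> System.out.println(row.getDouble("value")));
    }
  }
}

Building a SimpleStatement rather than a raw CQL string keeps the query type-checked at the DSL level and makes it easy to attach an execution profile or page size later.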
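testAsyncDse in Driver4ConnectivityTest sketches the fully asynchronous flow but leaves resultStage dangling and never closes the session. A compilable version of the same pattern, with the stages chained end to end; the class name is hypothetical:

import com.datastax.oss.driver.api.core.CqlSession;
import java.util.concurrent.CompletionStage;

/** Async connectivity check: every step returns a CompletionStage, nothing blocks until the edge. */
public final class AsyncConnectivity {

  public static void main(String[] args) {
    CompletionStage<CqlSession> sessionStage = CqlSession.builder().buildAsync();
    sessionStage
        .thenCompose(session ->
            session.executeAsync("SELECT release_version FROM system.local")
                .thenAccept(rs -> System.out.println(rs.one().getString("release_version")))
                .thenCompose(v -> session.closeAsync())) // release resources asynchronously too
        .toCompletableFuture()
        .join(); // block only here, at the edge of the demo
  }
}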
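The webui consumes Flux/Mono while driver 4 returns CompletionStage-based async paging; the patch pulls in dse-java-driver-reactor (and ReactorDseSession in DseConfiguration) for exactly that bridge, but its API is not visible in this diff. A hand-rolled sketch of the same idea using only java-driver-core plus Reactor; it ignores backpressure, which the real reactor module presumably handles via demand:

import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.cql.AsyncResultSet;
import com.datastax.oss.driver.api.core.cql.Row;
import reactor.core.publisher.Flux;
import reactor.core.publisher.FluxSink;
import reactor.core.publisher.Mono;

/** Hypothetical bridge from driver 4 async paging to a Reactor Flux of rows. */
public final class Driver4Reactor {

  public static Flux<Row> query(CqlSession session, String cql) {
    return Mono.defer(() -> Mono.fromFuture(session.executeAsync(cql).toCompletableFuture()))
        .flatMapMany(first -> Flux.<Row>create(sink -> emitPages(first, sink)));
  }

  private static void emitPages(AsyncResultSet page, FluxSink<Row> sink) {
    page.currentPage().forEach(sink::next); // emit every row of the current page
    if (page.hasMorePages()) {
      page.fetchNextPage() // fetch and recurse until the driver reports no more pages
          .thenAccept(next -> emitPages(next, sink))
          .exceptionally(error -> {
            sink.error(error);
            return null;
          });
    } else {
      sink.complete();
    }
  }
}

A WebFlux handler could then return query(session, cql).map(Tick::fromRow) directly, keeping the whole path from Cassandra to the browser non-blocking.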