Merge pull request #62 from Nasdaq/release-0.8.x
Release 0.8.x
ruchirvaninasdaq authored Jan 4, 2024
2 parents eaba38f + f87fffa commit 9085a68
Showing 18 changed files with 544 additions and 224 deletions.
2 changes: 1 addition & 1 deletion docker/Dockerfile
@@ -10,7 +10,7 @@ RUN mvn -B \

### Build Images ###
## SDK app ##
-FROM strimzi/kafka:0.20.0-kafka-2.6.0 as sdk-app
+FROM quay.io/strimzi/kafka:0.32.0-kafka-3.3.1 as sdk-app

COPY . /home/kafka

78 changes: 35 additions & 43 deletions ncds-sdk/pom.xml
@@ -7,119 +7,111 @@
<parent>
<groupId>com.nasdaq.ncds</groupId>
<artifactId>ncds</artifactId>
-<version>0.7.0</version>
+<version>0.8.4</version>
</parent>

<artifactId>ncds-sdk</artifactId>
<packaging>jar</packaging>
<properties>
<kafkaScalaVersion>kafka_2.12</kafkaScalaVersion>
<junit5.version>5.7.2</junit5.version>
<junit5PlatformProvider.version>1.3.2</junit5PlatformProvider.version>
<curatorTestVersion>2.12.0</curatorTestVersion>
<slf4jVersion>1.7.30</slf4jVersion>
<surefire.version>2.22.2</surefire.version>
<strimzi.oauth.version>0.8.1</strimzi.oauth.version>
</properties>


<name>SDK</name>
<description>Provide Development Kit to connect with Kafka</description>

<dependencies>

<dependency>
<groupId>org.apache.avro</groupId>
<artifactId>avro</artifactId>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
</dependency>

<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>

<!-- https://mvnrepository.com/artifact/org.json/json -->
<dependency>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
<version>20190722</version>
</dependency>

<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
<scope>test</scope>
</dependency>
<!-- Kafka -auth -->
<dependency>
<groupId>io.strimzi</groupId>
<artifactId>kafka-oauth-common</artifactId>
<version>${strimzi.oauth.version}</version>
</dependency>

<!-- Testing -->

<!-- Kafka -->
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>${kafkaScalaVersion}</artifactId>
<version>${kafka.version}</version>
<exclusions>
<!-- Don't bring in kafka's logging framework -->
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>javax.mail</groupId>
<artifactId>mail</artifactId>
</exclusion>
</exclusions>
<scope>test</scope>
</dependency>
<!-- Kafka -->
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.12</artifactId>
</dependency>

<dependency>
<groupId>com.salesforce.kafka.test</groupId>
<artifactId>kafka-junit-core</artifactId>
<version>3.2.3</version>
<scope>test</scope>
</dependency>

<!-- JUnit5 tests -->
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>${junit5.version}</version>
<scope>test</scope>
</dependency>

<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-params</artifactId>
<version>${junit5.version}</version>
<scope>test</scope>
</dependency>

<!-- Mockito for mocks in tests -->
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<version>2.28.2</version>
<scope>test</scope>
</dependency>

<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-test</artifactId>
<version>${curatorTestVersion}</version>
<scope>test</scope>
</dependency>

<!-- Logging in tests -->
<dependency>
<groupId>org.slf4j</groupId>
-<artifactId>slf4j-simple</artifactId>
-<version>${slf4jVersion}</version>
-<scope>test</scope>
+<artifactId>slf4j-api</artifactId>
</dependency>

<!-- Testing support class -->
<dependency>
<groupId>com.github.stephenc.high-scale-lib</groupId>
<artifactId>high-scale-lib</artifactId>
<version>1.1.4</version>
<scope>test</scope>
</dependency>

<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.3.2</version>
<scope>test</scope>
</dependency>

</dependencies>
NCDSClient.java
@@ -30,6 +30,7 @@ public class NCDSClient {
/**
*
* @param securityCfg - Authentication Security Properties passed from the Client
+* @param kafkaCfg
* @throws Exception - Java Exception
*/
public NCDSClient(Properties securityCfg,Properties kafkaCfg) throws Exception {
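For context on the two-argument constructor documented above, here is a minimal, hypothetical sketch of how a client might build the two property sets and instantiate NCDSClient. The endpoint, credentials, and broker address are placeholders, and the property keys are the standard Strimzi OAuth / Kafka client names this SDK builds on, not values taken from this commit.

    import java.util.Properties;
    import com.nasdaq.ncdsclient.NCDSClient;

    public class NCDSClientExample {
        public static void main(String[] args) throws Exception {
            // Security properties for the Strimzi OAuth layer (all values are placeholders).
            Properties securityCfg = new Properties();
            securityCfg.put("oauth.token.endpoint.uri", "https://auth.example.invalid/connect/token");
            securityCfg.put("oauth.client.id", "example-client-id");
            securityCfg.put("oauth.client.secret", "example-client-secret");

            // Kafka properties; the SDK validates these and adds its own specific settings.
            Properties kafkaCfg = new Properties();
            kafkaCfg.put("bootstrap.servers", "broker.example.invalid:9094");
            kafkaCfg.put("auto.offset.reset", "earliest");

            NCDSClient ncdsClient = new NCDSClient(securityCfg, kafkaCfg);
        }
    }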
NasdaqKafkaAvroConsumer.java
@@ -32,41 +32,37 @@ public class NasdaqKafkaAvroConsumer {
private KafkaConsumer kafkaConsumer;
private String clientID;

-private Properties securityProps;
-private Properties kafkaProps;
+private Properties properties = new Properties();
private ReadSchemaTopic readSchemaTopic = new ReadSchemaTopic();

public NasdaqKafkaAvroConsumer(Properties securityCfg,Properties kafkaCfg ) throws Exception {
try {
-if (kafkaCfg == null)
+if (securityCfg == null) {
+properties.setProperty(AuthenticationConfigLoader.OAUTH_CLIENT_ID, "unit-test"); // Just for the unit tests.
+}
+else {
+properties.putAll(securityCfg);
+}
+if (kafkaCfg == null) {
if (IsItJunit.isJUnitTest()) {
Properties junitKafkaCfg = KafkaConfigLoader.loadConfig();
-kafkaProps = junitKafkaCfg;
+properties.putAll(junitKafkaCfg);
}
else {
throw new Exception("Kafka Configuration not Defined ");
}

else {
-kafkaProps = kafkaCfg;
-KafkaConfigLoader.validateAndAddSpecificProperties(kafkaProps);
}

-if (securityCfg == null) {
-securityProps = new Properties();
-securityProps.setProperty(AuthenticationConfigLoader.OAUTH_CLIENT_ID, "unit-test"); // Just for the unit tests.
-}
-else {
-securityProps = securityCfg;

+properties.putAll(kafkaCfg);
+KafkaConfigLoader.validateAndAddSpecificProperties(properties);
}
}
catch (Exception e) {
throw (e);
}
-readSchemaTopic.setSecurityProps(securityProps);
-readSchemaTopic.setKafkaProps(kafkaProps);
-this.clientID = getClientID(securityProps);
+readSchemaTopic.setSecurityProps(properties);
+readSchemaTopic.setKafkaProps(properties);
+this.clientID = getClientID(properties);

}

@@ -86,7 +82,7 @@ public KafkaConsumer getKafkaConsumer(String streamName) throws Exception {
kafkaConsumer = getConsumer(kafkaSchema, streamName);
TopicPartition topicPartition = new TopicPartition(streamName + ".stream",0);
kafkaConsumer.assign(Collections.singletonList(topicPartition));
-if(kafkaProps.get(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG).equals(OffsetResetStrategy.EARLIEST.toString().toLowerCase())) {
+if(properties.get(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG).equals(OffsetResetStrategy.EARLIEST.toString().toLowerCase())) {
return seekToMidNight(topicPartition);
}
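The branch above rewinds the consumer when auto.offset.reset is "earliest". The seekToMidNight(...) helper it calls is not shown in this diff; the following is a plausible sketch of such a helper inside NasdaqKafkaAvroConsumer, built on the standard Kafka offsetsForTimes API, under the assumption that it seeks the partition to the first offset at or after last midnight:

    import java.util.Collections;
    import java.util.Map;
    import org.apache.kafka.clients.consumer.KafkaConsumer;
    import org.apache.kafka.clients.consumer.OffsetAndTimestamp;
    import org.apache.kafka.common.TopicPartition;

    // Hypothetical sketch; the real seekToMidNight(...) body is not part of this diff.
    private KafkaConsumer seekToMidNight(TopicPartition topicPartition) {
        long midnight = getTodayMidNightTimeStamp(); // epoch millis of last midnight (see final hunk below)
        Map<TopicPartition, OffsetAndTimestamp> offsets =
                kafkaConsumer.offsetsForTimes(Collections.singletonMap(topicPartition, midnight));
        OffsetAndTimestamp target = offsets.get(topicPartition);
        if (target != null) {
            kafkaConsumer.seek(topicPartition, target.offset()); // first record at or after midnight
        } else {
            // No records timestamped after midnight: assumed fallback to the end of the partition.
            kafkaConsumer.seekToEnd(Collections.singletonList(topicPartition));
        }
        return kafkaConsumer;
    }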
}
Expand Down Expand Up @@ -144,21 +140,20 @@ public KafkaConsumer getKafkaConsumer(String streamName, long timestamp) throws

public KafkaAvroConsumer getConsumer(Schema avroSchema, String streamName) throws Exception {
try {
-if(!IsItJunit.isJUnitTest()) {
-ConfigProperties.resolveAndExportToSystemProperties(securityProps);
-}
+// if(!IsItJunit.isJUnitTest()) {
+// ConfigProperties.resolveAndExportToSystemProperties(securityProps);
+// }
//Properties kafkaProps = KafkaConfigLoader.loadConfig();

kafkaProps.put("key.deserializer", StringDeserializer.class.getName());
kafkaProps.put("value.deserializer", AvroDeserializer.class.getName());
if(!kafkaProps.containsKey(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG)) {
kafkaProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, OffsetResetStrategy.EARLIEST.toString().toLowerCase());
properties.put("key.deserializer", StringDeserializer.class.getName());
properties.put("value.deserializer", AvroDeserializer.class.getName());
if(!properties.containsKey(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG)) {
properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, OffsetResetStrategy.EARLIEST.toString().toLowerCase());
}
-if(!kafkaProps.containsKey(ConsumerConfig.GROUP_ID_CONFIG)) {
-kafkaProps.put(ConsumerConfig.GROUP_ID_CONFIG, this.clientID + "_" + streamName + "_" + getDate());
+if(!properties.containsKey(ConsumerConfig.GROUP_ID_CONFIG)) {
+properties.put(ConsumerConfig.GROUP_ID_CONFIG, this.clientID);// + "_" + streamName + "_" + getDate());
}
-ConfigProperties.resolve(kafkaProps);
-return new KafkaAvroConsumer(kafkaProps, avroSchema);
+return new KafkaAvroConsumer(properties, avroSchema);
}
catch (Exception e) {
throw e;
@@ -211,7 +206,7 @@ public KafkaConsumer getNewsConsumer(String topic) throws Exception {
kafkaConsumer = getConsumer(newsSchema, topic);
TopicPartition topicPartition = new TopicPartition(topic + ".stream",0);
kafkaConsumer.assign(Collections.singletonList(topicPartition));
-if(kafkaProps.get(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG).equals(OffsetResetStrategy.EARLIEST.toString().toLowerCase())) {
+if(properties.get(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG).equals(OffsetResetStrategy.EARLIEST.toString().toLowerCase())) {
return seekToMidNight(topicPartition);
}
return kafkaConsumer;
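Both getKafkaConsumer(...) and getNewsConsumer(...) hand back a KafkaConsumer already assigned to partition 0 of the "<name>.stream" topic. A minimal, hypothetical sketch of draining one, assuming avroConsumer is a NasdaqKafkaAvroConsumer built as in the constructor above; "EXAMPLESTREAM" is a placeholder stream name, not part of this commit:

    import java.time.Duration;
    import org.apache.avro.generic.GenericRecord;
    import org.apache.kafka.clients.consumer.ConsumerRecord;
    import org.apache.kafka.clients.consumer.ConsumerRecords;
    import org.apache.kafka.clients.consumer.KafkaConsumer;

    // Hypothetical usage; getKafkaConsumer returns a raw KafkaConsumer, so this is an unchecked assignment.
    static void drain(NasdaqKafkaAvroConsumer avroConsumer) throws Exception {
        KafkaConsumer<String, GenericRecord> consumer = avroConsumer.getKafkaConsumer("EXAMPLESTREAM");
        while (true) {
            ConsumerRecords<String, GenericRecord> records = consumer.poll(Duration.ofSeconds(1));
            for (ConsumerRecord<String, GenericRecord> record : records) {
                System.out.println(record.value()); // each value is an Avro GenericRecord
            }
        }
    }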
ReadSchemaTopic.java
@@ -31,7 +31,7 @@ public ReadSchemaTopic(){
}

public Schema readSchema(String topic) throws Exception {
KafkaConsumer schemaConsumer= getConsumer("Control-"+getClientID(securityProps));
KafkaConsumer schemaConsumer= getConsumer(getClientID(securityProps));
Duration sec = Duration.ofSeconds(10);
Schema messageSchema = null;
ConsumerRecord<String,GenericRecord> lastRecord=null;
@@ -88,7 +88,7 @@ public Set<String> getTopics() throws Exception{

Set<String> topics = new HashSet<String>();

KafkaConsumer schemaConsumer= getConsumer("Control-"+getClientID(securityProps));
KafkaConsumer schemaConsumer= getConsumer(getClientID(securityProps));
Duration sec = Duration.ofSeconds(10);
while (true) {
ConsumerRecords<String, GenericRecord> schemaRecords = schemaConsumer.poll(sec);
@@ -188,4 +188,4 @@ private long getTodayMidNightTimeStamp(){
return timestampFromMidnight;
}

-}
+}
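The last hunk only touches the file's trailing brace and newline, but its header names getTodayMidNightTimeStamp(), the helper that feeds seekToMidNight(...). Its body is not shown in this diff; a plausible sketch that ends in the same "return timestampFromMidnight;" line visible above, assuming the exchange's America/New_York time zone:

    import java.time.LocalDate;
    import java.time.ZoneId;

    // Hypothetical sketch; the real method body is not part of this diff.
    private long getTodayMidNightTimeStamp() {
        ZoneId zone = ZoneId.of("America/New_York"); // assumed market time zone
        long timestampFromMidnight = LocalDate.now(zone)
                .atStartOfDay(zone)
                .toInstant()
                .toEpochMilli();
        return timestampFromMidnight;
    }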
