Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Added Confluent Connect parameters to kafka template #64

Merged
merged 2 commits into from
Jan 31, 2024
Merged
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -4,17 +4,29 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ExecutionException;

import org.apache.kafka.clients.admin.Admin;
import org.apache.kafka.clients.admin.ListTopicsOptions;
import org.apache.kafka.clients.admin.ListTopicsResult;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.common.KafkaFuture;
import org.apache.kafka.common.serialization.StringSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.config.TopicBuilder;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaAdmin;
import org.springframework.kafka.core.KafkaAdmin.NewTopics;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.stereotype.Component;
import org.apache.kafka.clients.producer.ProducerConfig;

@Component
@ConfigurationProperties(prefix = "kafka.topics")
@@ -48,7 +60,7 @@ public KafkaAdmin.NewTopics createKafkaTopics() {

// Get the name and config settings for the topic
String topicName = (String)topic.getOrDefault("name", null);

System.out.println(topicName);
if (topicName == null) {
logger.error("CreateTopic {} has no topic name", topic);
break;
@@ -94,12 +106,53 @@ public KafkaAdmin.NewTopics createKafkaTopics() {
}
}


// List out existing topics
Admin adminClient = Admin.create(properties.createStreamProperties("ConflictMonitorAdminClient"));
ListTopicsOptions listTopicsOptions = new ListTopicsOptions().listInternal(true);
ListTopicsResult topicsResult = adminClient.listTopics(listTopicsOptions);
KafkaFuture<Set<String>> topicsFuture = topicsResult.names();
try {
List<String> topicNames = new ArrayList<>();
for(String topicName: topicsFuture.get()){
logger.info("Found Topic: " + topicName);
topicNames.add(topicName);
}

} catch (InterruptedException e) {
e.printStackTrace();
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'd recommend using logger.error() instead of printing to stdout so that logging can be configured in a consistent way.

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

As of Jan 28, 2024, all scans will fail regardless of the flag; this will be corrected when we update the CM to the latest version.

} catch (ExecutionException e) {
e.printStackTrace();
}


return new NewTopics(newTopics.toArray(NewTopic[]::new));


return new NewTopics(newTopics.toArray(NewTopic[]::new));
}

@Bean
@Bean
public ProducerFactory<String, String> producerFactory() {
    // Build the producer configuration from the shared stream settings,
    // converting the Properties object into the Map shape that
    // DefaultKafkaProducerFactory expects.
    Properties streamProps = properties.createStreamProperties("conflictmonitor-producer-factory");

    Map<String, Object> producerConfig = new HashMap<>();
    streamProps.forEach((propKey, propValue) -> producerConfig.put((String) propKey, propValue));

    // This template publishes plain-string keys and values.
    producerConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    producerConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);

    return new DefaultKafkaProducerFactory<>(producerConfig);
}

@Bean
public KafkaTemplate<String, String> kafkaTemplate() {
    // Template for sending String key/value records, backed by the
    // producer factory defined in this configuration.
    ProducerFactory<String, String> factory = producerFactory();
    return new KafkaTemplate<>(factory);
}

private static final Logger logger = LoggerFactory.getLogger(KafkaConfiguration.class);