Browse Source

add streams processor with basic risk logic

Grega Bremec 7 months ago
parent
commit
380eb82ddc

+ 5 - 0
payments-streams-processor/.dockerignore

@@ -0,0 +1,5 @@
+*
+!target/*-runner
+!target/*-runner.jar
+!target/lib/*
+!target/quarkus-app/*

+ 39 - 0
payments-streams-processor/.gitignore

@@ -0,0 +1,39 @@
+# Maven
+target/
+pom.xml.tag
+pom.xml.releaseBackup
+pom.xml.versionsBackup
+release.properties
+
+# Eclipse
+.project
+.classpath
+.settings/
+bin/
+
+# IntelliJ
+.idea
+*.ipr
+*.iml
+*.iws
+
+# NetBeans
+nb-configuration.xml
+
+# Visual Studio Code
+.vscode
+.factorypath
+
+# OSX
+.DS_Store
+
+# Vim
+*.swp
+*.swo
+
+# patch
+*.orig
+*.rej
+
+# Local environment
+.env

+ 117 - 0
payments-streams-processor/.mvn/wrapper/MavenWrapperDownloader.java

@@ -0,0 +1,117 @@
+/*
+ * Copyright 2007-present the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import java.net.*;
+import java.io.*;
+import java.nio.channels.*;
+import java.util.Properties;
+
+public class MavenWrapperDownloader {
+
+    private static final String WRAPPER_VERSION = "0.5.6";
+    /**
+     * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided.
+     */
+    private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/"
+        + WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar";
+
+    /**
+     * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to
+     * use instead of the default one.
+     */
+    private static final String MAVEN_WRAPPER_PROPERTIES_PATH =
+            ".mvn/wrapper/maven-wrapper.properties";
+
+    /**
+     * Path where the maven-wrapper.jar will be saved to.
+     */
+    private static final String MAVEN_WRAPPER_JAR_PATH =
+            ".mvn/wrapper/maven-wrapper.jar";
+
+    /**
+     * Name of the property which should be used to override the default download url for the wrapper.
+     */
+    private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl";
+
+    public static void main(String args[]) {
+        System.out.println("- Downloader started");
+        File baseDirectory = new File(args[0]);
+        System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath());
+
+        // If the maven-wrapper.properties exists, read it and check if it contains a custom
+        // wrapperUrl parameter.
+        File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH);
+        String url = DEFAULT_DOWNLOAD_URL;
+        if(mavenWrapperPropertyFile.exists()) {
+            FileInputStream mavenWrapperPropertyFileInputStream = null;
+            try {
+                mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile);
+                Properties mavenWrapperProperties = new Properties();
+                mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream);
+                url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url);
+            } catch (IOException e) {
+                System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'");
+            } finally {
+                try {
+                    if(mavenWrapperPropertyFileInputStream != null) {
+                        mavenWrapperPropertyFileInputStream.close();
+                    }
+                } catch (IOException e) {
+                    // Ignore ...
+                }
+            }
+        }
+        System.out.println("- Downloading from: " + url);
+
+        File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH);
+        if(!outputFile.getParentFile().exists()) {
+            if(!outputFile.getParentFile().mkdirs()) {
+                System.out.println(
+                        "- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'");
+            }
+        }
+        System.out.println("- Downloading to: " + outputFile.getAbsolutePath());
+        try {
+            downloadFileFromURL(url, outputFile);
+            System.out.println("Done");
+            System.exit(0);
+        } catch (Throwable e) {
+            System.out.println("- Error downloading");
+            e.printStackTrace();
+            System.exit(1);
+        }
+    }
+
+    private static void downloadFileFromURL(String urlString, File destination) throws Exception {
+        if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) {
+            String username = System.getenv("MVNW_USERNAME");
+            char[] password = System.getenv("MVNW_PASSWORD").toCharArray();
+            Authenticator.setDefault(new Authenticator() {
+                @Override
+                protected PasswordAuthentication getPasswordAuthentication() {
+                    return new PasswordAuthentication(username, password);
+                }
+            });
+        }
+        URL website = new URL(urlString);
+        ReadableByteChannel rbc;
+        rbc = Channels.newChannel(website.openStream());
+        FileOutputStream fos = new FileOutputStream(destination);
+        fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
+        fos.close();
+        rbc.close();
+    }
+
+}

BIN
payments-streams-processor/.mvn/wrapper/maven-wrapper.jar


+ 2 - 0
payments-streams-processor/.mvn/wrapper/maven-wrapper.properties

@@ -0,0 +1,2 @@
+distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.6.3/apache-maven-3.6.3-bin.zip
+wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar

+ 150 - 0
payments-streams-processor/pom.xml

@@ -0,0 +1,150 @@
+<?xml version="1.0"?>
+<project
+    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"
+    xmlns="http://maven.apache.org/POM/4.0.0"
+    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+    <modelVersion>4.0.0</modelVersion>
+    <groupId>com.redhat.training.kafka.streams</groupId>
+    <artifactId>payments-streams-processor</artifactId>
+    <version>1.0.0-SNAPSHOT</version>
+
+    <properties>
+        <compiler-plugin.version>3.12.1</compiler-plugin.version>
+        <maven.compiler.release>17</maven.compiler.release>
+        <surefire-plugin.version>3.2.5</surefire-plugin.version>
+        <skipITs>true</skipITs>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+        <!-- Community BOM -->
+        <!-- <quarkus.platform.artifact-id>quarkus-universe-bom</quarkus.platform.artifact-id>
+    <quarkus.platform.group-id>io.quarkus</quarkus.platform.group-id> -->
+        <!-- RHBoK BOM -->
+        <quarkus.platform.artifact-id>quarkus-bom</quarkus.platform.artifact-id>
+        <quarkus.platform.group-id>com.redhat.quarkus.platform</quarkus.platform.group-id>
+        <quarkus.platform.version>3.8.5.SP1-redhat-00001</quarkus.platform.version>
+    </properties>
+    <dependencyManagement>
+        <dependencies>
+            <dependency>
+                <groupId>${quarkus.platform.group-id}</groupId>
+                <artifactId>${quarkus.platform.artifact-id}</artifactId>
+                <version>${quarkus.platform.version}</version>
+                <type>pom</type>
+                <scope>import</scope>
+            </dependency>
+        </dependencies>
+    </dependencyManagement>
+    <dependencies>
+        <dependency>
+            <groupId>io.quarkus</groupId>
+            <artifactId>quarkus-kafka-streams</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>io.quarkus</groupId>
+            <artifactId>quarkus-resteasy-jackson</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>io.quarkus</groupId>
+            <artifactId>quarkus-resteasy</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>io.quarkus</groupId>
+            <artifactId>quarkus-arc</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>io.quarkus</groupId>
+            <artifactId>quarkus-junit5</artifactId>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>${quarkus.platform.group-id}</groupId>
+                <artifactId>quarkus-maven-plugin</artifactId>
+                <version>${quarkus.platform.version}</version>
+                <extensions>true</extensions>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>build</goal>
+                            <goal>generate-code</goal>
+                            <goal>generate-code-tests</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <version>${compiler-plugin.version}</version>
+                <configuration>
+                    <compilerArgs>
+                        <arg>-parameters</arg>
+                    </compilerArgs>
+                </configuration>
+            </plugin>
+            <plugin>
+                <artifactId>maven-surefire-plugin</artifactId>
+                <version>${surefire-plugin.version}</version>
+                <configuration>
+                    <systemPropertyVariables>
+                        <java.util.logging.manager>org.jboss.logmanager.LogManager</java.util.logging.manager>
+                        <maven.home>${maven.home}</maven.home>
+                    </systemPropertyVariables>
+                </configuration>
+            </plugin>
+            <plugin>
+                <artifactId>maven-failsafe-plugin</artifactId>
+                <version>${surefire-plugin.version}</version>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>integration-test</goal>
+                            <goal>verify</goal>
+                        </goals>
+                        <configuration>
+                            <systemPropertyVariables>
+                                <native.image.path>
+                                    ${project.build.directory}/${project.build.finalName}-runner</native.image.path>
+                                <java.util.logging.manager>org.jboss.logmanager.LogManager</java.util.logging.manager>
+                                <maven.home>${maven.home}</maven.home>
+                            </systemPropertyVariables>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+    <profiles>
+        <!-- Workaround for RocksDB on macOS. -->
+        <profile>
+            <id>jni</id>
+            <activation>
+                <os>
+                    <family>!linux</family>
+                </os>
+            </activation>
+            <dependencyManagement>
+                <dependencies>
+                    <dependency>
+                        <groupId>org.rocksdb</groupId>
+                        <artifactId>rocksdbjni</artifactId>
+                        <version>9.6.1</version>
+                    </dependency>
+                </dependencies>
+            </dependencyManagement>
+        </profile>
+        <profile>
+            <id>native</id>
+            <activation>
+                <property>
+                    <name>native</name>
+                </property>
+            </activation>
+            <properties>
+                <skipITs>false</skipITs>
+                <quarkus.native.enabled>true</quarkus.native.enabled>
+            </properties>
+        </profile>
+    </profiles>
+</project>

+ 22 - 0
payments-streams-processor/src/main/java/com/redhat/training/kafka/model/AggregatePaymentData.java

@@ -0,0 +1,22 @@
+package com.redhat.training.kafka.model;
+
/**
 * Aggregated payment volume for one bank account over a single aggregation
 * window (produced by the streams topology onto the "aggregate-data" topic).
 * Serialized to JSON by Jackson, so bean accessors and the implicit no-arg
 * constructor are required.
 */
public class AggregatePaymentData {
    // Account the aggregate belongs to (message key on the source topic).
    String accountId;
    // Sum of all payment amounts observed for the account in the window.
    long aggregateSum;

    public String getAccountId() {
        return accountId;
    }

    public void setAccountId(String accountId) {
        this.accountId = accountId;
    }

    public long getAggregateSum() {
        return aggregateSum;
    }

    public void setAggregateSum(long aggregateSum) {
        this.aggregateSum = aggregateSum;
    }

    /** JSON-shaped rendering used by the topology's log statements. */
    @Override
    public String toString() {
        return String.format("{\"accountId\": \"%s\", \"aggregateSum\": %d}",
                this.accountId, this.aggregateSum);
    }
}

+ 42 - 0
payments-streams-processor/src/main/java/com/redhat/training/kafka/model/BankAccount.java

@@ -0,0 +1,42 @@
+package com.redhat.training.kafka.model;
+
+public class BankAccount {
+    String accountNumber;
+    String customerId;
+    String customerName;
+    long balance;
+    public BankAccount(String acct, String cid, String cname, long balance) {
+        this.setAccountNumber(acct);
+        this.setCustomerId(cid);
+        this.setCustomerName(cname);
+        this.setBalance(balance);
+    }
+    public String getCustomerName() {
+        return customerName;
+    }
+    public void setCustomerName(String customerName) {
+        this.customerName = customerName;
+    }
+    public String getAccountNumber() {
+        return accountNumber;
+    }
+    public void setAccountNumber(String accountNumber) {
+        this.accountNumber = accountNumber;
+    }
+    public long getBalance() {
+        return balance;
+    }
+    public void setBalance(long balance) {
+        this.balance = balance;
+    }
+    public String getCustomerId() {
+        return customerId;
+    }
+    public void setCustomerId(String customerId) {
+        this.customerId = customerId;
+    }
+    public String toString() {
+        return String.format("{\"accountId\": \"%s\", \"customerId\": \"%s\", \"customerName\": \"%s\", \"balance\": %d}",
+                                this.accountNumber, this.customerId, this.customerName, this.balance);
+    }
+}

+ 36 - 0
payments-streams-processor/src/main/java/com/redhat/training/kafka/model/EnrichedRiskAssessment.java

@@ -0,0 +1,36 @@
+package com.redhat.training.kafka.model;
+
/**
 * A large payment enriched with account and customer-risk data as it flows
 * through the streams topology: the transaction amount is set first, then the
 * account/customer ids (account-data join), then the risk score
 * (customer-risk-status join). Serialized to JSON by Jackson.
 */
public class EnrichedRiskAssessment {
    String accountId;
    String customerId;
    int transactionAmount;
    // Risk score copied from the customer's RiskAssessment; higher = riskier.
    int riskScore;

    public String getAccountId() {
        return accountId;
    }

    public void setAccountId(String accountId) {
        this.accountId = accountId;
    }

    public String getCustomerId() {
        return customerId;
    }

    public void setCustomerId(String customerId) {
        this.customerId = customerId;
    }

    public int getTransactionAmount() {
        return transactionAmount;
    }

    public void setTransactionAmount(int transactionAmount) {
        this.transactionAmount = transactionAmount;
    }

    public int getRiskScore() {
        return riskScore;
    }

    public void setRiskScore(int riskScore) {
        this.riskScore = riskScore;
    }

    /** JSON-shaped rendering; "amount" maps to transactionAmount. */
    @Override
    public String toString() {
        return String.format("{\"accountId\": \"%s\", \"customerId\": \"%s\", \"amount\": %d, \"riskScore\": %d}",
                this.accountId, this.customerId, this.transactionAmount, this.riskScore);
    }
}

+ 18 - 0
payments-streams-processor/src/main/java/com/redhat/training/kafka/model/PaymentTransaction.java

@@ -0,0 +1,18 @@
+package com.redhat.training.kafka.model;
+
/**
 * JSON bean pairing an account id with a single payment amount; emitted on the
 * "transaction-data" topic by the streams topology.
 */
public class PaymentTransaction {
    // Account the payment was made against.
    String account;
    // Payment amount.
    int amount;

    public String getAccount() {
        return account;
    }

    public void setAccount(String account) {
        this.account = account;
    }

    public int getAmount() {
        return amount;
    }

    public void setAmount(int amount) {
        this.amount = amount;
    }
}

+ 18 - 0
payments-streams-processor/src/main/java/com/redhat/training/kafka/model/RiskAssessment.java

@@ -0,0 +1,18 @@
+package com.redhat.training.kafka.model;
+
/**
 * JSON bean carrying a customer's current risk score, consumed from the
 * "customer-risk-status" topic by the streams topology.
 */
public class RiskAssessment {
    // Customer the assessment applies to.
    String customerId;
    // Assessed risk score; higher means riskier.
    int assessmentScore;

    public String getCustomerId() {
        return customerId;
    }

    public void setCustomerId(String customerId) {
        this.customerId = customerId;
    }

    public int getAssessmentScore() {
        return assessmentScore;
    }

    public void setAssessmentScore(int assessmentScore) {
        this.assessmentScore = assessmentScore;
    }
}

+ 164 - 0
payments-streams-processor/src/main/java/com/redhat/training/kafka/streams/PaymentsStream.java

@@ -0,0 +1,164 @@
+package com.redhat.training.kafka.streams;
+
+import java.time.Duration;
+import java.util.Map;
+import java.util.logging.Logger;
+
+import jakarta.enterprise.context.ApplicationScoped;
+import jakarta.enterprise.inject.Produces;
+
+import org.apache.kafka.common.serialization.Serde;
+import org.apache.kafka.common.serialization.Serdes;
+import org.apache.kafka.streams.KeyValue;
+import org.apache.kafka.streams.StreamsBuilder;
+import org.apache.kafka.streams.Topology;
+import org.apache.kafka.streams.kstream.Branched;
+import org.apache.kafka.streams.kstream.Consumed;
+import org.apache.kafka.streams.kstream.GlobalKTable;
+import org.apache.kafka.streams.kstream.Joined;
+import org.apache.kafka.streams.kstream.KStream;
+import org.apache.kafka.streams.kstream.KTable;
+import org.apache.kafka.streams.kstream.KeyValueMapper;
+import org.apache.kafka.streams.kstream.Materialized;
+import org.apache.kafka.streams.kstream.Named;
+import org.apache.kafka.streams.kstream.Produced;
+import org.apache.kafka.streams.kstream.Suppressed;
+import org.apache.kafka.streams.kstream.TimeWindows;
+import org.apache.kafka.streams.kstream.Windowed;
+import org.apache.kafka.streams.kstream.WindowedSerdes;
+
+import com.redhat.training.kafka.model.AggregatePaymentData;
+import com.redhat.training.kafka.model.BankAccount;
+import com.redhat.training.kafka.model.EnrichedRiskAssessment;
+import com.redhat.training.kafka.model.PaymentTransaction;
+import com.redhat.training.kafka.model.RiskAssessment;
+
+import io.quarkus.kafka.client.serialization.ObjectMapperSerde;
+
+import static org.apache.kafka.streams.kstream.Suppressed.BufferConfig.unbounded;
+
@ApplicationScoped
public class PaymentsStream {
    private final Logger LOG = Logger.getLogger(PaymentsStream.class.getName());

    // Serde (serializer AND deserializer) for message keys: account ids.
    private final Serde<String> stringSerde = Serdes.String();

    // Serde for the raw payment amounts consumed from the "payments" topic.
    private final Serde<Integer> integerSerde = Serdes.Integer();

    // Jackson-backed JSON serdes for the model classes.
    private final ObjectMapperSerde<PaymentTransaction> ptSerde = new ObjectMapperSerde<>(PaymentTransaction.class);
    private final ObjectMapperSerde<BankAccount> baSerde = new ObjectMapperSerde<>(BankAccount.class);
    private final ObjectMapperSerde<RiskAssessment> raSerde = new ObjectMapperSerde<>(RiskAssessment.class);
    private final ObjectMapperSerde<EnrichedRiskAssessment> eraSerde = new ObjectMapperSerde<>(EnrichedRiskAssessment.class);
    private final ObjectMapperSerde<AggregatePaymentData> apdSerde = new ObjectMapperSerde<>(AggregatePaymentData.class);
    // Serde for the windowed aggregate values (running sums).
    private final Serde<Long> longSerde = Serdes.Long();

    /**
     * CDI producer: Quarkus detects the produced Topology and runs it as the
     * application's Kafka Streams pipeline.
     *
     * Inputs: "payments" (key = account id, value = amount), "account-data"
     * (BankAccount table), "customer-risk-status" (RiskAssessment global table,
     * keyed by customer id).
     *
     * Outputs: "transaction-data" (each payment as a PaymentTransaction),
     * "large-payments" (payments > 10000 whose risk score is too high for their
     * size), "aggregate-data" (30-second per-account amount sums), and
     * "account-data" (accounts with balances updated by each payment).
     */
    @Produces
    public Topology buildTopology() {
        StreamsBuilder builder = new StreamsBuilder();

        // Input data: payments topic (use producer's default profile exec:java),
        //              bank account data (use producer -Pbank-account-data exec:java to initialize), and
        //              risk status updates (use producer -Prisk-assessment-updates exec:java to start updates)
        KStream<String,Integer> payments = builder.stream("payments", Consumed.with(stringSerde, integerSerde));
        KTable<String,BankAccount> acctTable = builder.table("account-data",
                                                            Consumed.with(stringSerde, baSerde));
        GlobalKTable<String,RiskAssessment> riskStatusTable = builder.globalTable("customer-risk-status",
                                                             Consumed.with(stringSerde,raSerde));

        // Basically logging: emit the input of String,Integer in the form of String,PaymentTransaction on transaction-data topic.
        payments
            .map(new KeyValueMapper<String, Integer, KeyValue<String, PaymentTransaction>>() {
                public KeyValue<String, PaymentTransaction> apply(String key, Integer value) {
                    PaymentTransaction pt = new PaymentTransaction();
                    pt.setAccount(key);
                    pt.setAmount(value);
                    return new KeyValue<>(key, pt);
            }},
            Named.as("transform-int-to-payment-transaction"))
            .to("transaction-data", Produced.with(stringSerde, ptSerde));

        // The "main" part:
        //  - filter for large payments only (amount > 10000),
        //  - create data container object;
        //  - join on account table and enrich data;
        //  - join on risk status table, enrich again;
        // Finally, apply risk assessment logic in a filter and send to "large-payments" if needed.
        // The filter SELECTS payments whose risk score is too high for their
        // size (those records go to "large-payments"); returning false means
        // the payment is accepted and dropped from this branch.
        // NOTE(review): no Joined serdes are passed to either join, so they
        // rely on the application's configured default serdes — confirm the
        // defaults are compatible, or add Joined.with(...) explicitly.
        // NOTE(review): the joiners mutate and return the same
        // EnrichedRiskAssessment instance rather than copying — fine while the
        // pipeline is linear, but worth confirming if branches are added.
        payments
            .filter((acctId, amt) -> amt > 10000)
            .mapValues((amt) -> {
                EnrichedRiskAssessment era = new EnrichedRiskAssessment();
                era.setTransactionAmount(amt.intValue());
                return era;
            })
            .join(acctTable,
                        (era, acct) -> {
                            era.setAccountId(acct.getAccountNumber());
                            era.setCustomerId(acct.getCustomerId());
                            return era;
                        })
            .join(riskStatusTable,
                // Global-table join: map the stream record to the risk table's
                // key (customer id) before looking up the assessment.
                (acctId, era) -> era.getCustomerId(),
                (era, risk) -> {
                    era.setRiskScore(risk.getAssessmentScore());
                    return era;
                }
            )
            .peek((k, v) -> LOG.info("Considering for further evaluation: " + v))
            .filter((acctId, era) -> {
                // for payments above 75k we need a low risk score (<= 2)
                if (era.getTransactionAmount() > 75000 && era.getRiskScore() > 2) {
                    return true;
                // above 50k we need medium-low score (<= 4)
                } else if (era.getTransactionAmount() > 50000 && era.getRiskScore() > 4) {
                    return true;
                // above 25k we allow up to medium-high score (<= 6)
                } else if (era.getTransactionAmount() > 25000 && era.getRiskScore() > 6) {
                    return true;
                // anything else allows a risk score up to 8
                } else if (era.getRiskScore() > 8) {
                    return true;
                }
                // any other payments are accepted (not forwarded)
                return false;
            })
            .peek((k, v) -> LOG.info("Sending record " + v + " to large-payments topic."))
            .to("large-payments", Produced.with(stringSerde, eraSerde));

        // Aggregated payment data: per-account SUM of payment amounts over
        // 30-second windows (5-second grace for late records). Suppression
        // emits one final result per window once it closes.
        payments
            .groupByKey()
            .windowedBy(TimeWindows.ofSizeAndGrace(Duration.ofSeconds(30), Duration.ofSeconds(5)))
            .aggregate(() -> 0L,
                         (acctId, amt, agg) -> agg + amt,
                         Materialized.with(stringSerde, longSerde))
            .suppress(Suppressed.untilWindowCloses(unbounded()))
            .toStream()
            .peek((acctId, seenAmt) -> LOG.info("Account " + acctId + " aggregated payments: " + seenAmt))
            .map((acctId, newAggregate) -> {
                // Unwrap the Windowed<String> key back to the plain account id.
                AggregatePaymentData apd = new AggregatePaymentData();
                apd.setAccountId(acctId.key());
                apd.setAggregateSum(newAggregate);
                return new KeyValue<String, AggregatePaymentData>(acctId.key(), apd);
            })
            .to("aggregate-data",
                Produced.with(stringSerde, apdSerde));

        // Update each account with the latest balance available.
        // NOTE(review): this writes back to "account-data", the same topic
        // acctTable reads — a deliberate-looking feedback loop that keeps the
        // table's balances current; confirm this is intended and that no other
        // producer owns that topic.
        payments.join(acctTable,
                        (amt, acct) -> {
                            acct.setBalance(acct.getBalance() + amt.longValue());
                            return acct;
                        })
            .peek((acctId, acct) -> LOG.info("Account " + acctId + " updated with new balance: " + acct.getBalance()))
            .to("account-data",
                        Produced.with(stringSerde, baSerde));

        Topology t = builder.build();

        // Log the full topology description for debugging/inspection.
        LOG.info("Topology: " + t.describe().toString());

        return t;
    }
}

+ 5 - 0
payments-streams-processor/src/main/resources/application.properties

@@ -0,0 +1,5 @@
+quarkus.kafka.devservices.enabled = false
+
+quarkus.kafka-streams.bootstrap-servers = localhost:9092,localhost:9192,localhost:9292
+quarkus.kafka-streams.application-id = large-payment-processor
+quarkus.kafka-streams.topics = payments,account-data,customer-risk-status,transaction-data,large-payments,aggregate-data