Skip to content

Commit

Permalink
GCP PubSub Source (#209)
Browse files Browse the repository at this point in the history
The code was not wiring in the header mapping: the mapped headers were computed but never attached to the record. This change ensures the created SourceRecord contains the headers returned by the header mapper.

Co-authored-by: stheppi <[email protected]>
  • Loading branch information
stheppi and stheppi authored Feb 3, 2025
1 parent 59856da commit 8651362
Showing 1 changed file with 12 additions and 1 deletion.
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,9 @@

import org.apache.kafka.common.config.ConfigException;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.header.ConnectHeaders;
import org.apache.kafka.connect.header.Header;
import org.apache.kafka.connect.header.Headers;
import org.apache.kafka.connect.source.SourceRecord;

import io.lenses.streamreactor.common.config.base.intf.Converter;
Expand All @@ -34,14 +37,22 @@ public class SourceRecordConverter extends Converter<PubSubMessageData, SourceRe

@Override
protected SourceRecord convert(final PubSubMessageData source) throws ConfigException {

final Headers headers = new ConnectHeaders();
mappingConfig.getHeaderMapper().mapHeaders(source).forEach((k, v) -> {
headers.add(k, v, Schema.STRING_SCHEMA);
});
return new SourceRecord(
source.getSourcePartition().toMap(),
source.getSourceOffset().toMap(),
source.getTargetTopicName(),
null,
getKeySchema(),
getKey(source),
getValueSchema(),
getValue(source)
getValue(source),
System.currentTimeMillis(),
headers
);
}

Expand Down

0 comments on commit 8651362

Please sign in to comment.