Getting "tls: bad certificate" error when trying to write an Avro-type message
Hi @mostafa,
I have written a k6 script using the xk6-kafka extension to write an Avro-type message to a topic with 'insecureSkipTlsVerify: true', but I'm getting the error below. Please let me know what went wrong. Are JKS files needed when writing Avro messages?
ERRO[0007] GoError: Failed to get schema from schema registry, OriginalError: Get "https://topic.internal.any.v1-value/versions/latest": remote error: tls: bad certificate
default at github.com/mostafa/xk6-kafka.(*Kafka).schemaRegistryClientClass.func1 (native)
at file:///Users/kafka/replicatorShared.js:95:53(53) executor=shared-iterations scenario=default source=stacktrace
My script:
import {
    Writer,
    Reader,
    Connection,
    SchemaRegistry,
    SCHEMA_TYPE_AVRO,
    SASL_SCRAM_SHA512,
    TLS_1_2,
    KEY,
    VALUE,
    TOPIC_NAME_STRATEGY,
    RECORD_NAME_STRATEGY
} from "k6/x/kafka";
import exec from 'k6/execution';
import encoding from 'k6/encoding';

// Transport document event & Producer - below - Consumer & Producer Service
const brokers = ["xxx:443"]; // Update as per service - bootstrap server
const topic = "topic.internal.any.v1"; // Update as per service
const schemaRegistryUrl = "xxx";

export function setup() {
    const saslConfig = {
        username: "xxx",
        password: "xxx",
        algorithm: SASL_SCRAM_SHA512,
    };
    const tlsConfig = {
        enableTls: true,
        insecureSkipTlsVerify: true,
        minVersion: TLS_1_2,
    };
    return {
        saslConfig: saslConfig,
        tlsConfig: tlsConfig
    };
}

export default function (data) {
    const writer = new Writer({
        brokers: brokers,
        topic: topic,
        partition: 0,
        sasl: data.saslConfig,
        tls: data.tlsConfig,
    });
    const connection = new Connection({
        address: brokers[0],
        sasl: data.saslConfig,
        tls: data.tlsConfig,
    });
    const schemaRegistry = new SchemaRegistry({
        url: schemaRegistryUrl,
        tls: data.tlsConfig
    });
    const valueSchemaObject = schemaRegistry.getSchema({
        subject: "topic.internal.any.v1-value",
        version: 1,
        schemaType: SCHEMA_TYPE_AVRO,
    });
    let messages = [{
        value: schemaRegistry.serialize({
            data: {
                "pubSet": [
                    {
                        "event": {
                            "eventId": 166130471,
                            "eventName": "EMP.Confirm_Shipment_Closed",
                            "gemstsutc": "2023-02-06 06:34:29.602712",
                            "sourceSystem": "GCSS"
                        },
                        "shipmentTPDocs": {
                            "shipmentTPDoc": [
                                {
                                    "fkShipmentVersionPricing": "PEVDQCLXZ1ALB",
                                    "tpdocExtReferenceId": "",
                                    "tpdocInstanceId": "SXMPGGLXZ1DLB",
                                    "tpdocVersionExtReferenceId": "",
                                    "tpdocVersionInstanceId": "VV7PGGLXZ1DLB"
                                }
                            ]
                        },
                    },
                ]
            },
            schema: valueSchemaObject,
            schemaType: SCHEMA_TYPE_AVRO,
        }),
        headers: {
            "__replicator_id": "topic.internal.any.v1"
        }
    }];
    writer.produce({ messages: messages });
    /* let readMessages = reader.consume({
        // msk.orderrequest.customsservices.consumergroup.v1,
        limit: 2,
    }); */
    console.log("consumerKafka.js - Producer/writer - Event - ", schemaRegistry.deserialize({
        data: messages[0].value,
        schema: valueSchemaObject,
        schemaType: SCHEMA_TYPE_AVRO
    }));
    /* console.log("consumerKafka.js - consumer/reader - message key - ", schemaRegistry.deserialize({
        // data: readMessages[0].key,
        // data: readMessages[0].value,
        data: readMessages,
        schemaType: SCHEMA_TYPE_JSON
    })); */
    writer.close();
    // reader.close();
    connection.close();
}
Writing any type of message, whether string, JSON, or Avro, doesn't require JKS files per se; rather, it's the Writer and Reader that need the keys and certificates inside the JKS files to connect to Kafka over TLS. So, JKS files are required if you're using TLS with custom certificates.
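Since xk6-kafka's TLS config takes PEM files rather than JKS keystores, here is a minimal sketch of what the config could look like once the keys and certificates have been extracted from the JKS keystore/truststore into PEM format (for example with keytool and openssl). The file paths are placeholders, and the field names should be checked against the xk6-kafka version you're running:

import { TLS_1_2 } from "k6/x/kafka";

// Placeholder paths: PEM files extracted from your JKS keystore/truststore.
const tlsConfig = {
    enableTls: true,
    insecureSkipTlsVerify: false, // actually verify the server certificate
    minVersion: TLS_1_2,
    clientCertPem: "client.cer", // client certificate (from the keystore)
    clientKeyPem: "client.pem",  // client private key (from the keystore)
    serverCaPem: "caroot.cer",   // CA certificate (from the truststore)
};

You would then pass this tlsConfig to the Writer, Connection, and SchemaRegistry constructors exactly as in your script.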