Commit 4bac745b authored by Your Name

add kafka test

parent 99233b73
// Either import the module object
import * as kafka from "k6/x/kafka";
// Or individual classes and constants
import {
  Writer,
  Reader,
  Connection,
  SchemaRegistry,
  SCHEMA_TYPE_STRING,
} from "k6/x/kafka";

// Creates a new Writer object to produce messages to Kafka
const writer = new Writer({
  // WriterConfig object
  brokers: ["192.168.1.10:9092"],
  topic: "my-topic",
});
const reader = new Reader({
  // ReaderConfig object
  brokers: ["192.168.1.10:9092"],
  topic: "my-topic",
});
const connection = new Connection({
  // ConnectionConfig object
  address: "192.168.1.10:9092",
});
// Can accept a SchemaRegistryConfig object
const schemaRegistry = new SchemaRegistry();
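// For example, a SchemaRegistryConfig sketch (not part of the original
// script): the `url` field points the client at a running Confluent
// Schema Registry; the address below is a hypothetical example.
// const schemaRegistry = new SchemaRegistry({
//   url: "http://192.168.1.10:8081",
// });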
if (__VU == 0) {
  // Create a topic on initialization (before producing messages)
  connection.createTopic({
    // TopicConfig object
    topic: "my-topic",
  });
}
export default function () {
  // Fetch the list of all topics
  // const topics = connection.listTopics();
  // console.log(topics); // list of topics

  // Produce a message to Kafka
  writer.produce({
    // ProduceConfig object
    messages: [
      // Message object(s)
      {
        key: schemaRegistry.serialize({
          data: "my-key",
          schemaType: SCHEMA_TYPE_STRING,
        }),
        // A long value built by repeating "my-value"
        value: schemaRegistry.serialize({
          data: "my-valuemy-valuemy-valuemy-valuemy-valuemy-valuemy-valuemy-valuemy-valuemy-valuemy-valuemy-valuemy-valuemy-valuemy-valuemy-value",
          schemaType: SCHEMA_TYPE_STRING,
        }),
      },
    ],
  });
  // Consume messages from Kafka
  // let messages = reader.consume({
  //   // ConsumeConfig object
  //   limit: 10,
  // });

  // your messages
  // console.log(messages);

  // You can use checks to verify the contents,
  // length and other properties of the message(s).

  // To deserialize the data back into a string, use the
  // deserialize method of the SchemaRegistry client. You
  // can use it inside a check, as shown in the example scripts.
  // let deserializedValue = schemaRegistry.deserialize({
  //   data: messages[0].value,
  //   schemaType: SCHEMA_TYPE_STRING,
  // });
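  // For example, a check along these lines (a sketch, commented out like
  // the consume call above; it assumes the consume call is enabled and
  // that `check` is imported from "k6"):
  // check(messages, {
  //   "value round-trips through the registry": (msgs) =>
  //     schemaRegistry
  //       .deserialize({ data: msgs[0].value, schemaType: SCHEMA_TYPE_STRING })
  //       .startsWith("my-value"),
  // });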
}

export function teardown(data) {
  // Delete the topic
  // connection.deleteTopic("my-topic");

  // Close all connections
  writer.close();
  reader.close();
  connection.close();
}

/*
This is a k6 test script that imports the xk6-kafka extension and
tests Kafka by producing 200 byte-array messages per iteration.
*/
import { check } from "k6";
import {
  Writer,
  Reader,
  Connection,
  SchemaRegistry,
  SCHEMA_TYPE_BYTES,
} from "k6/x/kafka"; // import kafka extension

const brokers = ["192.168.1.10:9092"];
const topic = "xk6_kafka_byte_array_topic";

const writer = new Writer({
  brokers: brokers,
  topic: topic,
  autoCreateTopic: true,
});
const reader = new Reader({
  brokers: brokers,
  topic: topic,
});
const connection = new Connection({
  address: brokers[0],
});
const schemaRegistry = new SchemaRegistry();

if (__VU == 0) {
  connection.createTopic({ topic: topic });
}
const payload = "byte array payload";
export default function () {
  // 100 iterations x 2 messages = 200 messages per VU iteration
  for (let index = 0; index < 100; index++) {
    let messages = [
      {
        // The string key is converted to a byte array before serialization
        key: schemaRegistry.serialize({
          data: Array.from("test-id-abc-" + index, (x) => x.charCodeAt(0)),
          schemaType: SCHEMA_TYPE_BYTES,
        }),
        // The data type of the value is a byte array
        value: schemaRegistry.serialize({
          data: Array.from(payload, (x) => x.charCodeAt(0)),
          schemaType: SCHEMA_TYPE_BYTES,
        }),
      },
      {
        key: schemaRegistry.serialize({
          data: Array.from("test-id-def-" + index, (x) => x.charCodeAt(0)),
          schemaType: SCHEMA_TYPE_BYTES,
        }),
        value: schemaRegistry.serialize({
          data: Array.from(payload, (x) => x.charCodeAt(0)),
          schemaType: SCHEMA_TYPE_BYTES,
        }),
      },
    ];
    writer.produce({
      messages: messages,
    });
  }
  // Read 10 messages only
  let messages = reader.consume({ limit: 10 });
  check(messages, {
    "10 messages returned": (msgs) => msgs.length == 10,
    "key starts with 'test-id-' string": (msgs) =>
      String.fromCharCode(
        ...schemaRegistry.deserialize({
          data: msgs[0].key,
          schemaType: SCHEMA_TYPE_BYTES,
        }),
      ).startsWith("test-id-"),
    "value is correct": (msgs) =>
      String.fromCharCode(
        ...schemaRegistry.deserialize({
          data: msgs[0].value,
          schemaType: SCHEMA_TYPE_BYTES,
        }),
      ) == payload,
  });
}
export function teardown(data) {
  // if (__VU == 0) {
  //   // Delete the topic
  //   connection.deleteTopic(topic);
  // }

  // Close all connections
  writer.close();
  reader.close();
  connection.close();
}
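
// A thresholds sketch (not in the original script; it mirrors the options
// block of the next script, which uses the extension's
// kafka_writer_error_count / kafka_reader_error_count metrics):
// export const options = {
//   thresholds: {
//     kafka_writer_error_count: ["count == 0"],
//     kafka_reader_error_count: ["count == 0"],
//   },
// };
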
/*
This is a k6 test script that imports the xk6-kafka extension and
tests Kafka by producing 200 string messages per iteration.
*/
import { check } from "k6";
// import * as kafka from "k6/x/kafka";
import {
  Writer,
  Reader,
  Connection,
  SchemaRegistry,
  SCHEMA_TYPE_STRING,
} from "k6/x/kafka"; // import kafka extension

// Prints module-level constants
// console.log(kafka);

const brokers = ["192.168.1.10:9092"];
const topic = "xk6_kafka_json_topic";

const writer = new Writer({
  brokers: brokers,
  topic: topic,
  autoCreateTopic: true,
});
const reader = new Reader({
  brokers: brokers,
  topic: topic,
});
const connection = new Connection({
  address: brokers[0],
});
const schemaRegistry = new SchemaRegistry();

if (__VU == 0) {
  connection.createTopic({ topic: topic });
}
export const options = {
  thresholds: {
    // Base thresholds to see if the writer or reader is working
    kafka_writer_error_count: ["count == 0"],
    kafka_reader_error_count: ["count == 0"],
  },
};
export default function () {
  // 100 iterations x 2 messages = 200 messages per VU iteration
  for (let index = 0; index < 100; index++) {
    let messages = [
      {
        key: schemaRegistry.serialize({
          data: "test-key-string",
          schemaType: SCHEMA_TYPE_STRING,
        }),
        value: schemaRegistry.serialize({
          data: "test-value-string",
          schemaType: SCHEMA_TYPE_STRING,
        }),
        headers: {
          mykey: "myvalue",
        },
        offset: index,
        partition: 0,
        time: new Date(), // Will be converted to timestamp automatically
      },
      {
        key: schemaRegistry.serialize({
          data: "test-key-string",
          schemaType: SCHEMA_TYPE_STRING,
        }),
        value: schemaRegistry.serialize({
          data: "test-value-string",
          schemaType: SCHEMA_TYPE_STRING,
        }),
        headers: {
          mykey: "myvalue",
        },
      },
    ];
    writer.produce({ messages: messages });
  }
  // Read 10 messages only
  let messages = reader.consume({ limit: 10 });
  check(messages, {
    "10 messages are received": (msgs) => msgs.length == 10,
  });
  check(messages[0], {
    "Topic equals xk6_kafka_json_topic": (msg) => msg["topic"] == topic,
    "Key is a string and is correct": (msg) =>
      schemaRegistry.deserialize({
        data: msg.key,
        schemaType: SCHEMA_TYPE_STRING,
      }) == "test-key-string",
    "Value is a string and is correct": (msg) =>
      typeof schemaRegistry.deserialize({
        data: msg.value,
        schemaType: SCHEMA_TYPE_STRING,
      }) == "string" &&
      schemaRegistry.deserialize({
        data: msg.value,
        schemaType: SCHEMA_TYPE_STRING,
      }) == "test-value-string",
    "Header equals {'mykey': 'myvalue'}": (msg) =>
      "mykey" in msg.headers &&
      String.fromCharCode(...msg.headers["mykey"]) == "myvalue",
    "Time is past": (msg) => new Date(msg["time"]) < new Date(),
    "Partition is zero": (msg) => msg["partition"] == 0,
    "Offset is gte zero": (msg) => msg["offset"] >= 0,
    "High watermark is gte zero": (msg) => msg["highWaterMark"] >= 0,
  });
}
export function teardown(data) {
  if (__VU == 0) {
    // Delete the topic
    connection.deleteTopic(topic);
  }
  // Close all connections
  writer.close();
  reader.close();
  connection.close();
}
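
// To run these scripts, k6 must be built with the xk6-kafka extension
// (the import path k6/x/kafka corresponds to github.com/mostafa/xk6-kafka;
// the script filename below is a placeholder, since the diff does not name
// the files):
//   xk6 build --with github.com/mostafa/xk6-kafka@latest
//   ./k6 run script.js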