mirror of https://github.com/kremalicious/umami.git
synced 2024-06-29 01:07:55 +02:00

remove test files

This commit is contained in:
parent 41d4b4c402
commit 57d37381d9
eventProducer.js
@@ -1,76 +0,0 @@
// import the `Kafka` client and the partitioners from the kafkajs library
const { Kafka } = require('kafkajs');
const { Partitioners } = require('kafkajs');

// the client ID lets Kafka know who's producing the messages
const clientId = 'my-app';
// the list of brokers in the cluster
const brokers = ['localhost:9092', 'localhost:9093', 'localhost:9094'];
// the topic to which we want to write messages
const topic = 'event';

// initialize a new Kafka client and create a producer from it
const kafka = new Kafka({ clientId, brokers });
const producer = kafka.producer({ createPartitioner: Partitioners.DefaultPartitioner });

// an async function that writes a batch of messages every few milliseconds
async function produce_event() {
  await producer.connect();
  let i = 0;

  // after the producer has connected, start an interval timer
  setInterval(async () => {
    try {
      // build a random event payload; `i` is used as the website id
      let y = Math.random()
        .toString(36)
        .replace(/[^a-z]+/g, '')
        .substr(0, 5);
      let z = Math.random()
        .toString(36)
        .replace(/[^a-z]+/g, '')
        .substr(0, 5);
      let x = {
        event_uuid: '00fea66e-a433-536d-a13d-2d873fab0a08',
        website_id: i,
        session_uuid: '00fea66e-a433-536d-a13d-2d873fab0a08',
        created_at: '2020-07-18 11:53:33',
        url: y,
        event_name: z,
      };

      // send five copies of the payload to the configured topic
      await producer.send({
        topic,
        messages: [
          { key: 'my-key', value: JSON.stringify(x) },
          { key: 'my-key', value: JSON.stringify(x) },
          { key: 'my-key', value: JSON.stringify(x) },
          { key: 'my-key', value: JSON.stringify(x) },
          { key: 'my-key', value: JSON.stringify(x) },
        ],
      });
      i++;
    } catch (err) {
      console.error('could not write message ' + err);
    }
  }, 4);
}

module.exports = produce_event;
pageviewProducer.js
@@ -1,75 +0,0 @@
// import the `Kafka` client and the partitioners from the kafkajs library
const { Kafka } = require('kafkajs');
const { Partitioners } = require('kafkajs');

// the client ID lets Kafka know who's producing the messages
const clientId = 'my-app';
// the list of brokers in the cluster
const brokers = ['localhost:9092', 'localhost:9093', 'localhost:9094'];
// the topic to which we want to write messages
const topic = 'pageview';

// initialize a new Kafka client and create a producer from it
const kafka = new Kafka({ clientId, brokers });
const producer = kafka.producer({ createPartitioner: Partitioners.DefaultPartitioner });

// an async function that writes a batch of messages every few milliseconds
async function produce_pageview() {
  await producer.connect();
  let i = 0;

  // after the producer has connected, start an interval timer
  setInterval(async () => {
    try {
      // build a random pageview payload; `i` is used as the website id
      let y = Math.random()
        .toString(36)
        .replace(/[^a-z]+/g, '')
        .substr(0, 5);
      let z = Math.random()
        .toString(36)
        .replace(/[^a-z]+/g, '')
        .substr(0, 5);
      let x = {
        website_id: i,
        session_uuid: '00fea66e-a433-536d-a13d-2d873fab0a08',
        created_at: '2020-07-18 11:53:33',
        url: y,
        referrer: z,
      };

      // send five copies of the payload to the configured topic
      await producer.send({
        topic,
        messages: [
          { key: 'my-key', value: JSON.stringify(x) },
          { key: 'my-key', value: JSON.stringify(x) },
          { key: 'my-key', value: JSON.stringify(x) },
          { key: 'my-key', value: JSON.stringify(x) },
          { key: 'my-key', value: JSON.stringify(x) },
        ],
      });
      i++;
    } catch (err) {
      console.error('could not write message ' + err);
    }
  }, 4);
}

module.exports = produce_pageview;
sessionProducer.js
@@ -1,79 +0,0 @@
// import the `Kafka` client and the partitioners from the kafkajs library
const { Kafka } = require('kafkajs');
const { Partitioners } = require('kafkajs');

// the client ID lets Kafka know who's producing the messages
const clientId = 'my-app';
// the list of brokers in the cluster
const brokers = ['localhost:9092', 'localhost:9093', 'localhost:9094'];
// the topic to which we want to write messages
const topic = 'session';

// initialize a new Kafka client and create a producer from it
const kafka = new Kafka({ clientId, brokers });
const producer = kafka.producer({ createPartitioner: Partitioners.DefaultPartitioner });

// an async function that writes a batch of messages every few milliseconds
async function produce_session() {
  await producer.connect();
  let i = 0;

  // after the producer has connected, start an interval timer
  setInterval(async () => {
    try {
      // build a random session payload; `i` is used as the website id
      let y = Math.random()
        .toString(36)
        .replace(/[^a-z]+/g, '')
        .substr(0, 5);
      let z = Math.random()
        .toString(36)
        .replace(/[^a-z]+/g, '')
        .substr(0, 5);
      const x = {
        session_uuid: '00fea66e-a433-536d-a13d-2d873fab0a08',
        website_id: i,
        hostname: z,
        browser: y,
        os: z,
        device: y,
        screen: z,
        language: y,
        country: z,
      };

      // send five copies of the payload to the configured topic
      await producer.send({
        topic,
        messages: [
          { key: 'my-key', value: JSON.stringify(x) },
          { key: 'my-key', value: JSON.stringify(x) },
          { key: 'my-key', value: JSON.stringify(x) },
          { key: 'my-key', value: JSON.stringify(x) },
          { key: 'my-key', value: JSON.stringify(x) },
        ],
      });
      i++;
    } catch (err) {
      console.error('could not write message ' + err);
    }
  }, 4);
}

module.exports = produce_session;
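None of the three producer files above ever call `producer.disconnect()`; each interval timer keeps sending batches until the process is killed. Below is a minimal sketch of a stoppable variant, assuming the same kafkajs producer API; the `produce_with_shutdown` helper and the returned `stop` function are illustrative and not part of the removed files.

// illustrative helper, not part of the original test files:
// wraps a producer and a send callback so the interval can be stopped
// and the broker connection closed cleanly.
async function produce_with_shutdown(producer, sendBatch) {
  await producer.connect();
  const handle = setInterval(sendBatch, 4);
  // the caller invokes the returned function to stop producing
  return async function stop() {
    clearInterval(handle);        // stop scheduling new batches
    await producer.disconnect();  // close the connection to the brokers
  };
}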
@@ -1,29 +0,0 @@
const produce_session = require('./sessionProducer');
const produce_pageview = require('./pageviewProducer');
const produce_event = require('./eventProducer');

// call each `produce` function and log an error if it occurs
produce_pageview().catch(err => {
  console.error('error in producer_pageview: ', err);
});

produce_session().catch(err => {
  console.error('error in producer_session: ', err);
});

produce_event().catch(err => {
  console.error('error in producer_event: ', err);
});

// const { Kafka } = require('kafkajs')

// const KAFKA_URL="kafka://localhost:9092/";
// const KAFKA_BROKER="localhost:9092,localhost:9093,localhost:9094"

// const url = new URL(KAFKA_URL);
// const database = url.pathname.replace('/', '');
// var brokers = KAFKA_BROKER.split(',');

// console.log(url);
// console.log(database);
// console.log(brokers);
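The removed files only cover the producing side. To check that the test messages actually land on the three topics, a minimal kafkajs consumer sketch could look like the following; it assumes a kafkajs version whose `consumer.subscribe` accepts a `topics` array (v2+), and the `my-app-consumer` client id and `my-group` group id are placeholders made up for this sketch.

// minimal consumer sketch; client id and group id are placeholders
const { Kafka } = require('kafkajs');

const kafka = new Kafka({
  clientId: 'my-app-consumer',
  brokers: ['localhost:9092', 'localhost:9093', 'localhost:9094'],
});
const consumer = kafka.consumer({ groupId: 'my-group' });

async function consume() {
  await consumer.connect();
  // read the three topics the producers write to, from the beginning
  await consumer.subscribe({ topics: ['event', 'pageview', 'session'], fromBeginning: true });
  // log every message as it arrives
  await consumer.run({
    eachMessage: async ({ topic, partition, message }) => {
      console.log(topic, partition, message.key.toString(), message.value.toString());
    },
  });
}

consume().catch(err => {
  console.error('error in consumer: ', err);
});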