diff --git a/kafka/eventProducer.js b/kafka/eventProducer.js
deleted file mode 100644
index dfe95182..00000000
--- a/kafka/eventProducer.js
+++ /dev/null
@@ -1,76 +0,0 @@
-// import the `Kafka` instance from the kafkajs library
-const { Kafka } = require('kafkajs');
-
-// the client ID lets kafka know who's producing the messages
-const clientId = 'my-app';
-// we can define the list of brokers in the cluster
-const brokers = ['localhost:9092', 'localhost:9093', 'localhost:9094'];
-// this is the topic to which we want to write messages
-const topic = 'event';
-
-// initialize a new kafka client and initialize a producer from it
-const kafka = new Kafka({ clientId, brokers });
-const { Partitioners } = require('kafkajs');
-
-const producer = kafka.producer({ createPartitioner: Partitioners.DefaultPartitioner });
-
-// we define an async function that writes a new message each second
-async function produce_event() {
-  await producer.connect();
-  let i = 0;
-
-  // after the produce has connected, we start an interval timer
-  setInterval(async () => {
-    try {
-      // send a message to the configured topic with
-      // the key and value formed from the current value of `i`
-      let y = Math.random()
-        .toString(36)
-        .replace(/[^a-z]+/g, '')
-        .substr(0, 5);
-      let z = Math.random()
-        .toString(36)
-        .replace(/[^a-z]+/g, '')
-        .substr(0, 5);
-      let x = {
-        event_uuid: '00fea66e-a433-536d-a13d-2d873fab0a08',
-        website_id: i,
-        session_uuid: '00fea66e-a433-536d-a13d-2d873fab0a08',
-        created_at: '2020-07-18 11:53:33',
-        url: y,
-        event_name: z,
-      };
-
-      await producer.send({
-        topic,
-        messages: [
-          {
-            key: 'my-key',
-            value: JSON.stringify(x),
-          },
-          {
-            key: 'my-key',
-            value: JSON.stringify(x),
-          },
-          {
-            key: 'my-key',
-            value: JSON.stringify(x),
-          },
-          {
-            key: 'my-key',
-            value: JSON.stringify(x),
-          },
-          {
-            key: 'my-key',
-            value: JSON.stringify(x),
-          },
-        ],
-      });
-      i++;
-    } catch (err) {
-      console.error('could not write message ' + err);
-    }
-  }, 4);
-}
-
-module.exports = produce_event;
diff --git a/kafka/pageviewProducer.js b/kafka/pageviewProducer.js
deleted file mode 100644
index 16551b4c..00000000
--- a/kafka/pageviewProducer.js
+++ /dev/null
@@ -1,75 +0,0 @@
-// import the `Kafka` instance from the kafkajs library
-const { Kafka } = require('kafkajs');
-
-// the client ID lets kafka know who's producing the messages
-const clientId = 'my-app';
-// we can define the list of brokers in the cluster
-const brokers = ['localhost:9092', 'localhost:9093', 'localhost:9094'];
-// this is the topic to which we want to write messages
-const topic = 'pageview';
-
-// initialize a new kafka client and initialize a producer from it
-const kafka = new Kafka({ clientId, brokers });
-const { Partitioners } = require('kafkajs');
-
-const producer = kafka.producer({ createPartitioner: Partitioners.DefaultPartitioner });
-
-// we define an async function that writes a new message each second
-async function produce_pageview() {
-  await producer.connect();
-  let i = 0;
-
-  // after the produce has connected, we start an interval timer
-  setInterval(async () => {
-    try {
-      // send a message to the configured topic with
-      // the key and value formed from the current value of `i`
-      let y = Math.random()
-        .toString(36)
-        .replace(/[^a-z]+/g, '')
-        .substr(0, 5);
-      let z = Math.random()
-        .toString(36)
-        .replace(/[^a-z]+/g, '')
-        .substr(0, 5);
-      let x = {
-        website_id: i,
-        session_uuid: '00fea66e-a433-536d-a13d-2d873fab0a08',
-        created_at: '2020-07-18 11:53:33',
-        url: y,
-        referrer: z,
-      };
-
-      await producer.send({
-        topic,
-        messages: [
-          {
-            key: 'my-key',
-            value: JSON.stringify(x),
-          },
-          {
-            key: 'my-key',
-            value: JSON.stringify(x),
-          },
-          {
-            key: 'my-key',
-            value: JSON.stringify(x),
-          },
-          {
-            key: 'my-key',
-            value: JSON.stringify(x),
-          },
-          {
-            key: 'my-key',
-            value: JSON.stringify(x),
-          },
-        ],
-      });
-      i++;
-    } catch (err) {
-      console.error('could not write message ' + err);
-    }
-  }, 4);
-}
-
-module.exports = produce_pageview;
diff --git a/kafka/sessionProducer.js b/kafka/sessionProducer.js
deleted file mode 100644
index ebc15b93..00000000
--- a/kafka/sessionProducer.js
+++ /dev/null
@@ -1,79 +0,0 @@
-// import the `Kafka` instance from the kafkajs library
-const { Kafka } = require('kafkajs');
-
-// the client ID lets kafka know who's producing the messages
-const clientId = 'my-app';
-// we can define the list of brokers in the cluster
-const brokers = ['localhost:9092', 'localhost:9093', 'localhost:9094'];
-// this is the topic to which we want to write messages
-const topic = 'session';
-
-// initialize a new kafka client and initialize a producer from it
-const kafka = new Kafka({ clientId, brokers });
-const { Partitioners } = require('kafkajs');
-
-const producer = kafka.producer({ createPartitioner: Partitioners.DefaultPartitioner });
-
-// we define an async function that writes a new message each second
-async function produce_session() {
-  await producer.connect();
-  let i = 0;
-
-  // after the produce has connected, we start an interval timer
-  setInterval(async () => {
-    try {
-      // send a message to the configured topic with
-      // the key and value formed from the current value of `i`
-      let y = Math.random()
-        .toString(36)
-        .replace(/[^a-z]+/g, '')
-        .substr(0, 5);
-      let z = Math.random()
-        .toString(36)
-        .replace(/[^a-z]+/g, '')
-        .substr(0, 5);
-      const x = {
-        session_uuid: '00fea66e-a433-536d-a13d-2d873fab0a08',
-        website_id: i,
-        hostname: z,
-        browser: y,
-        os: z,
-        device: y,
-        screen: z,
-        language: y,
-        country: z,
-      };
-
-      await producer.send({
-        topic,
-        messages: [
-          {
-            key: 'my-key',
-            value: JSON.stringify(x),
-          },
-          {
-            key: 'my-key',
-            value: JSON.stringify(x),
-          },
-          {
-            key: 'my-key',
-            value: JSON.stringify(x),
-          },
-          {
-            key: 'my-key',
-            value: JSON.stringify(x),
-          },
-          {
-            key: 'my-key',
-            value: JSON.stringify(x),
-          },
-        ],
-      });
-      i++;
-    } catch (err) {
-      console.error('could not write message ' + err);
-    }
-  }, 4);
-}
-
-module.exports = produce_session;
diff --git a/kafka/testrun.js b/kafka/testrun.js
deleted file mode 100644
index 3e4f540f..00000000
--- a/kafka/testrun.js
+++ /dev/null
@@ -1,29 +0,0 @@
-const produce_session = require('./sessionProducer');
-const produce_pageview = require('./pageviewProducer');
-const produce_event = require('./eventProducer');
-
-// call the `produce` function and log an error if it occurs
-produce_pageview().catch(err => {
-  console.error('error in producer_pageview: ', err);
-});
-
-produce_session().catch(err => {
-  console.error('error in producer_session: ', err);
-});
-
-produce_event().catch(err => {
-  console.error('error in producer_event: ', err);
-});
-
-// const { Kafka } = require('kafkajs')
-
-// const KAFKA_URL="kafka://localhost:9092/";
-// const KAFKA_BROKER="localhost:9092,localhost:9093,localhost:9094"
-
-// const url = new URL(KAFKA_URL);
-// const database = url.pathname.replace('/', '');
-// var brokers = KAFKA_BROKER.split(',');
-
-// console.log(url);
-// console.log(database);
-// console.log(brokers);