Removed custom date format. Use ISO string everywhere.

This commit is contained in:
Mike Cao 2024-08-13 22:16:21 -07:00
parent 0bd57bb158
commit 04de691893
7 changed files with 7 additions and 25 deletions

View File

@@ -84,7 +84,6 @@
     "cross-spawn": "^7.0.3",
     "date-fns": "^2.23.0",
     "date-fns-tz": "^1.1.4",
-    "dateformat": "^5.0.3",
    "debug": "^4.3.4",
    "del": "^6.0.0",
    "detect-browser": "^5.2.0",

View File

@@ -1,5 +1,4 @@
 import { ClickHouseClient, createClient } from '@clickhouse/client';
-import dateFormat from 'dateformat';
 import debug from 'debug';
 import { CLICKHOUSE } from 'lib/db';
 import { DEFAULT_PAGE_SIZE, OPERATORS } from './constants';
@@ -63,10 +62,6 @@ function getDateSQL(field: string, unit: string, timezone?: string) {
   return `date_trunc('${unit}', ${field})`;
 }
-function getDateFormat(date: Date) {
-  return `'${dateFormat(date, 'UTC:yyyy-mm-dd HH:MM:ss')}'`;
-}
 function mapFilter(column: string, operator: string, name: string, type: string = 'String') {
   const value = `{${name}:${type}}`;
@@ -224,7 +219,6 @@ export default {
   connect,
   getDateStringSQL,
   getDateSQL,
-  getDateFormat,
   getFilterQuery,
   parseFilters,
   pagedQuery,

View File

@@ -1,4 +1,3 @@
-import dateFormat from 'dateformat';
 import debug from 'debug';
 import { Kafka, Mechanism, Producer, RecordMetadata, SASLOptions, logLevel } from 'kafkajs';
 import { KAFKA, KAFKA_PRODUCER } from 'lib/db';
@@ -56,10 +55,6 @@ async function getProducer(): Promise<Producer> {
   return producer;
 }
-function getDateFormat(date: Date, format?: string): string {
-  return dateFormat(date, format ? format : 'UTC:yyyy-mm-dd HH:MM:ss');
-}
 async function sendMessage(
   topic: string,
   message: { [key: string]: string | number },
@@ -107,7 +102,6 @@ export default {
   producer,
   log,
   connect,
-  getDateFormat,
   sendMessage,
   sendMessages,
 };

View File

@@ -136,9 +136,9 @@ async function clickhouseQuery(data: {
     ...args
   } = data;
   const { insert } = clickhouse;
-  const { getDateFormat, sendMessage } = kafka;
+  const { sendMessage } = kafka;
   const eventId = uuid();
-  const createdAt = getDateFormat(new Date());
+  const createdAt = new Date().toISOString();
   const message = {
     ...args,
View File

@@ -61,7 +61,7 @@ async function clickhouseQuery(data: {
   const { websiteId, sessionId, eventId, urlPath, eventName, eventData, createdAt } = data;
   const { insert } = clickhouse;
-  const { getDateFormat, sendMessages } = kafka;
+  const { sendMessages } = kafka;
   const jsonKeys = flattenJSON(eventData);
@@ -76,7 +76,7 @@ async function clickhouseQuery(data: {
       data_type: dataType,
       string_value: getStringValue(value, dataType),
       number_value: dataType === DATA_TYPE.number ? value : null,
-      date_value: dataType === DATA_TYPE.date ? getDateFormat(value) : null,
+      date_value: dataType === DATA_TYPE.date ? value?.toISOString() : null,
       created_at: createdAt,
     };
   });

View File

@@ -81,8 +81,8 @@ async function clickhouseQuery(data: {
   const { websiteId, sessionId, sessionData } = data;
   const { insert } = clickhouse;
-  const { getDateFormat, sendMessages } = kafka;
+  const { sendMessages } = kafka;
-  const createdAt = getDateFormat(new Date());
+  const createdAt = new Date().toISOString();
   const jsonKeys = flattenJSON(sessionData);
@@ -94,7 +94,7 @@ async function clickhouseQuery(data: {
       data_type: dataType,
       string_value: getStringValue(value, dataType),
       number_value: dataType === DATA_TYPE.number ? value : null,
-      date_value: dataType === DATA_TYPE.date ? getDateFormat(value) : null,
+      date_value: dataType === DATA_TYPE.date ? value?.toISOString() : null,
       created_at: createdAt,
     };
   });

View File

@@ -4625,11 +4625,6 @@ date-fns@^2.23.0, date-fns@^2.29.3:
   dependencies:
     "@babel/runtime" "^7.21.0"
-dateformat@^5.0.3:
-  version "5.0.3"
-  resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-5.0.3.tgz#fe2223eff3cc70ce716931cb3038b59a9280696e"
-  integrity sha512-Kvr6HmPXUMerlLcLF+Pwq3K7apHpYmGDVqrxcDasBg86UcKeTSNWbEzU8bwdXnxnR44FtMhJAxI4Bov6Y/KUfA==
 dayjs@^1.10.4:
   version "1.11.12"
   resolved "https://registry.yarnpkg.com/dayjs/-/dayjs-1.11.12.tgz#5245226cc7f40a15bf52e0b99fd2a04669ccac1d"