fix ClickHouse date timezone issues

This commit is contained in:
Francis Cao 2022-08-08 22:09:18 -07:00
parent fc3c39be3c
commit 55ba7d95ab
7 changed files with 7647 additions and 4840 deletions

View File

@@ -3,7 +3,7 @@ CREATE TABLE pageview
(
website_id UInt32,
session_uuid UUID,
created_at DateTime,
created_at DateTime('UTC'),
url String,
referrer String
)
@@ -14,7 +14,7 @@ CREATE TABLE pageview
CREATE TABLE pageview_queue (
website_id UInt32,
session_uuid UUID,
created_at DateTime,
created_at DateTime('UTC'),
url String,
referrer String
)
@@ -40,7 +40,7 @@ CREATE TABLE session
(
session_uuid UUID,
website_id UInt32,
created_at DateTime,
created_at DateTime('UTC'),
hostname LowCardinality(String),
browser LowCardinality(String),
os LowCardinality(String),
@@ -56,7 +56,7 @@ CREATE TABLE session
CREATE TABLE session_queue (
session_uuid UUID,
website_id UInt32,
created_at DateTime,
created_at DateTime('UTC'),
hostname LowCardinality(String),
browser LowCardinality(String),
os LowCardinality(String),
@@ -92,7 +92,7 @@ CREATE TABLE event
event_uuid UUID,
website_id UInt32,
session_uuid UUID,
created_at DateTime,
created_at DateTime('UTC'),
url String,
event_name String
)
@@ -104,7 +104,7 @@ CREATE TABLE event_queue (
event_uuid UUID,
website_id UInt32,
session_uuid UUID,
created_at DateTime,
created_at DateTime('UTC'),
url String,
event_name String
)

View File

@@ -1,6 +1,7 @@
import { PrismaClient } from '@prisma/client';
import { ClickHouse } from 'clickhouse';
import { Kafka } from 'kafkajs';
import dateFormat from 'dateformat';
import chalk from 'chalk';
import {
MYSQL,
@@ -164,13 +165,13 @@ export function getDateQuery(field, unit, timezone) {
export function getDateQueryClickhouse(field, unit, timezone) {
if (timezone) {
return `date_trunc('${unit}', ${field},'${timezone}')`;
return `date_trunc('${unit}', ${field}, '${timezone}')`;
}
return `date_trunc('${unit}', ${field})`;
}
export function getDateFormatClickhouse(date) {
return `parseDateTimeBestEffort('${date.toUTCString()}')`;
return `'${dateFormat(date, 'UTC:yyyy-mm-dd HH:MM:ss')}'`;
}
export function getBetweenDatesClickhouse(field, start_at, end_at) {
@@ -357,3 +358,7 @@ export async function kafkaProducer(params, topic) {
],
});
}
/**
 * Format a Date as `'yyyy-mm-dd HH:MM:ss'` in UTC, the string shape used for
 * `created_at` values in the Kafka payloads built by the event/pageview/session
 * writers in this commit (matching the `DateTime('UTC')` schema columns).
 *
 * Implemented with the built-in `toISOString()` — which is always UTC, e.g.
 * '2022-08-08T22:09:18.000Z' — trimming the milliseconds and 'Z' and replacing
 * the 'T' separator with a space. This avoids needing the third-party
 * `dateformat` helper for this function while producing identical output.
 *
 * @param {Date} date - timestamp to serialize.
 * @returns {string} UTC timestamp, e.g. '2022-08-08 22:09:18'.
 */
export function getDateFormatKafka(date) {
  return date.toISOString().slice(0, 19).replace('T', ' ');
}

View File

@@ -66,6 +66,7 @@
"cross-spawn": "^7.0.3",
"date-fns": "^2.23.0",
"date-fns-tz": "^1.1.4",
"dateformat": "^5.0.3",
"del": "^6.0.0",
"detect-browser": "^5.2.0",
"dotenv": "^10.0.0",

View File

@@ -1,6 +1,7 @@
import { CLICKHOUSE, RELATIONAL, KAFKA, URL_LENGTH } from 'lib/constants';
import {
getDateFormatClickhouse,
getDateFormatKafka,
prisma,
rawQueryClickhouse,
runAnalyticsQuery,
@@ -50,7 +51,7 @@ async function clickhouseQuery(website_id, { event_uuid, session_uuid, url, even
return rawQueryClickhouse(
`
insert into umami_dev.event (created_at, website_id, session_uuid, url, event_name)
insert into umami.event (created_at, website_id, session_uuid, url, event_name)
values (${getDateFormatClickhouse(new Date())}, $1, $2, $3, $4);`,
params,
);
@@ -61,6 +62,7 @@ async function kafkaQuery(website_id, { event_uuid, session_uuid, url, event_nam
event_uuid: event_uuid,
website_id: website_id,
session_uuid: session_uuid,
created_at: getDateFormatKafka(new Date()),
url: url?.substr(0, URL_LENGTH),
event_name: event_name?.substr(0, 50),
};

View File

@@ -1,6 +1,7 @@
import { CLICKHOUSE, RELATIONAL, KAFKA, URL_LENGTH } from 'lib/constants';
import {
getDateFormatClickhouse,
getDateFormatKafka,
prisma,
rawQueryClickhouse,
runAnalyticsQuery,
@@ -39,7 +40,7 @@ async function clickhouseQuery(website_id, { session_uuid, url, referrer }) {
return rawQueryClickhouse(
`
insert into umami_dev.pageview (created_at, website_id, session_uuid, url, referrer)
insert into umami.pageview (created_at, website_id, session_uuid, url, referrer)
values (${getDateFormatClickhouse(new Date())}, $1, $2, $3, $4);`,
params,
);
@@ -49,6 +50,7 @@ async function kafkaQuery(website_id, { session_uuid, url, referrer }) {
const params = {
website_id: website_id,
session_uuid: session_uuid,
created_at: getDateFormatKafka(new Date()),
url: url?.substr(0, URL_LENGTH),
referrer: referrer?.substr(0, URL_LENGTH),
};

View File

@@ -1,6 +1,7 @@
import { CLICKHOUSE, RELATIONAL, KAFKA } from 'lib/constants';
import {
getDateFormatClickhouse,
getDateFormatKafka,
prisma,
rawQueryClickhouse,
runAnalyticsQuery,
@@ -48,7 +49,7 @@ async function clickhouseQuery(
];
await rawQueryClickhouse(
`insert into umami_dev.session (created_at, session_uuid, website_id, hostname, browser, os, device, screen, language, country)
`insert into umami.session (created_at, session_uuid, website_id, hostname, browser, os, device, screen, language, country)
values (${getDateFormatClickhouse(new Date())}, $1, $2, $3, $4, $5, $6, $7, $8, $9);`,
params,
);
@@ -63,6 +64,7 @@ async function kafkaQuery(
const params = {
session_uuid: session_uuid,
website_id: website_id,
created_at: getDateFormatKafka(new Date()),
hostname: hostname,
browser: browser,
os: os,

12453
yarn.lock

File diff suppressed because it is too large Load Diff