import prisma from 'lib/prisma';
import clickhouse from 'lib/clickhouse';
import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
import cache from 'lib/cache';
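
// Entry point: runQuery (from lib/db) dispatches to whichever backend is
// configured, calling the PRISMA handler for relational databases or the
// CLICKHOUSE handler for ClickHouse. Both receive the same arguments.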
export async function getWebsiteStats(
  ...args: [websiteId: string, data: { startDate: Date; endDate: Date; filters: object }]
) {
  return runQuery({
    [PRISMA]: () => relationalQuery(...args),
    [CLICKHOUSE]: () => clickhouseQuery(...args),
  });
}
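
// Relational (Prisma) implementation. parseFilters builds the extra WHERE
// clauses and session join for the given filters, appending any filter values
// to `params` so they follow the $1/$2 date parameters positionally.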
async function relationalQuery(
  websiteId: string,
  data: { startDate: Date; endDate: Date; filters: object },
) {
  const { startDate, endDate, filters = {} } = data;
  const { getDateQuery, getTimestampInterval, parseFilters, rawQuery } = prisma;
  const params = [startDate, endDate];
  const { filterQuery, joinSession } = parseFilters(filters, params);
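
  // The subquery counts pageviews per session per hour bucket; the outer query
  // derives bounces (buckets with a single pageview) and total time on site
  // (via getTimestampInterval). Note that websiteId is interpolated directly
  // into the SQL string, so it is assumed to be validated upstream.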
  return rawQuery(
    `select sum(t.c) as "pageviews",
       count(distinct t.session_id) as "uniques",
       sum(case when t.c = 1 then 1 else 0 end) as "bounces",
       sum(t.time) as "totaltime"
     from (
       select pageview.session_id,
         ${getDateQuery('pageview.created_at', 'hour')},
         count(*) c,
         ${getTimestampInterval('pageview.created_at')} as "time"
       from pageview
         join website
           on pageview.website_id = website.website_id
         ${joinSession}
       where website.website_id='${websiteId}'
         and pageview.created_at between $1 and $2
         ${filterQuery}
       group by 1, 2
     ) t`,
    params,
  );
}
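
// ClickHouse implementation. The website record is read through the cache to
// resolve its revId, which scopes the query to the current revision of the
// site's event data; websiteId and revId become positional params $1 and $2.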
async function clickhouseQuery(
  websiteId: string,
  data: { startDate: Date; endDate: Date; filters: object },
) {
  const { startDate, endDate, filters = {} } = data;
  const { rawQuery, getDateQuery, getBetweenDates, parseFilters } = clickhouse;
  const website = await cache.fetchWebsite(websiteId);
  const params = [websiteId, website?.revId || 0];
  const { filterQuery } = parseFilters(filters, params);
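
  // Sessions are bucketed by day (time_series) over pageview events
  // (event_type = 1). A session's time is max_time - min_time, zeroed when the
  // span exceeds an hour so idle sessions don't inflate "totaltime".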
  return rawQuery(
    `select
       sum(t.c) as "pageviews",
       count(distinct t.session_id) as "uniques",
       sum(if(t.c = 1, 1, 0)) as "bounces",
       sum(if(max_time < min_time + interval 1 hour, max_time - min_time, 0)) as "totaltime"
     from (
       select session_id,
         ${getDateQuery('created_at', 'day')} time_series,
         count(*) c,
         min(created_at) min_time,
         max(created_at) max_time
       from event
       where event_type = 1
         and website_id = $1
         and rev_id = $2
         and ${getBetweenDates('created_at', startDate, endDate)}
         ${filterQuery}
       group by session_id, time_series
     ) t;`,
    params,
  );
}
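
// Example call (hypothetical caller such as an API route; the filter key and
// result shape below are assumptions following the column aliases above):
//
//   const [stats] = await getWebsiteStats(websiteId, {
//     startDate: new Date('2023-01-01'),
//     endDate: new Date('2023-01-31'),
//     filters: { url: '/pricing' },
//   });
//   // stats -> { pageviews, uniques, bounces, totaltime }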