umami/queries/analytics/stats/getWebsiteStats.ts

import prisma from 'lib/prisma';
import clickhouse from 'lib/clickhouse';
import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
import { DEFAULT_CREATED_AT, EVENT_TYPE } from 'lib/constants';
import { loadWebsite } from 'lib/query';
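
// Dispatches to the relational (Prisma) or ClickHouse implementation,
// depending on which database backend runQuery selects.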
export async function getWebsiteStats(
  ...args: [
    websiteId: string,
    data: { startDate: Date; endDate: Date; type?: string; filters: object },
  ]
) {
  return runQuery({
    [PRISMA]: () => relationalQuery(...args),
    [CLICKHOUSE]: () => clickhouseQuery(...args),
  });
}
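
// Relational (Prisma) variant: groups pageviews per session and hour bucket,
// then derives uniques, bounces (single-pageview sessions) and total time.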
async function relationalQuery(
  websiteId: string,
  criteria: { startDate: Date; endDate: Date; filters: object },
) {
  const { startDate, endDate, filters = {} } = criteria;
  const { toUuid, getDateQuery, getTimestampInterval, parseFilters, rawQuery } = prisma;

  const website = await loadWebsite(websiteId);
  const resetDate = new Date(website?.resetAt || DEFAULT_CREATED_AT);
  const params: any = [websiteId, resetDate, startDate, endDate];
  const { filterQuery, joinSession } = parseFilters(filters, params);
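
  // Positional parameters: $1 = websiteId, $2 = reset date, $3/$4 = requested date range.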
  return rawQuery(
    `select sum(t.c) as "pageviews",
        count(distinct t.session_id) as "uniques",
        sum(case when t.c = 1 then 1 else 0 end) as "bounces",
        sum(t.time) as "totaltime"
      from (
        select website_event.session_id,
          ${getDateQuery('website_event.created_at', 'hour')},
          count(*) c,
          ${getTimestampInterval('website_event.created_at')} as "time"
        from website_event
          join website
            on website_event.website_id = website.website_id
          ${joinSession}
        where event_type = ${EVENT_TYPE.pageView}
          and website.website_id = $1${toUuid()}
          and website_event.created_at >= $2
          and website_event.created_at between $3 and $4
          ${filterQuery}
        group by 1, 2
      ) t`,
    params,
  );
}
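
// ClickHouse variant: same metrics, but grouped per session and day bucket; a bucket
// only contributes time when its first and last pageview are less than an hour apart.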
async function clickhouseQuery(
  websiteId: string,
  criteria: { startDate: Date; endDate: Date; filters: object },
) {
  const { startDate, endDate, filters = {} } = criteria;
  const { rawQuery, getDateFormat, getDateQuery, getBetweenDates, parseFilters } = clickhouse;

  const website = await loadWebsite(websiteId);
  const resetDate = new Date(website?.resetAt || DEFAULT_CREATED_AT);
  const params = { websiteId };
  const { filterQuery } = parseFilters(filters, params);
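
  // websiteId is bound as a named parameter ({websiteId:UUID}); the dates are
  // inlined into the query text via getDateFormat/getBetweenDates.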
  return rawQuery(
    `select
        sum(t.c) as "pageviews",
        count(distinct t.session_id) as "uniques",
        sum(if(t.c = 1, 1, 0)) as "bounces",
        sum(if(max_time < min_time + interval 1 hour, max_time-min_time, 0)) as "totaltime"
      from (
        select session_id,
          ${getDateQuery('created_at', 'day')} time_series,
          count(*) c,
          min(created_at) min_time,
          max(created_at) max_time
        from website_event
        where event_type = ${EVENT_TYPE.pageView}
          and website_id = {websiteId:UUID}
          and created_at >= ${getDateFormat(resetDate)}
          and ${getBetweenDates('created_at', startDate, endDate)}
          ${filterQuery}
        group by session_id, time_series
      ) t;`,
    params,
  );
}
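
// Example call (illustrative sketch only; the calling context and literal dates are
// assumptions, the signature itself is taken from getWebsiteStats above):
//
//   const stats = await getWebsiteStats(websiteId, {
//     startDate: new Date('2023-01-01'),
//     endDate: new Date('2023-01-31'),
//     filters: {},
//   });
//
// The underlying query produces a single aggregate row with the "pageviews",
// "uniques", "bounces" and "totaltime" columns.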