umami/queries/analytics/pageview/getPageviewFunnel.ts

import clickhouse from 'lib/clickhouse';
import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
import prisma from 'lib/prisma';
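
/**
 * Returns funnel data for an ordered list of URLs: one { x, y } point per step,
 * where x is the step's URL and y is the number of sessions counted at that step.
 * Dispatches to the relational (Prisma) or ClickHouse implementation via runQuery.
 */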
export async function getPageviewFunnel(
  ...args: [
    websiteId: string,
    criteria: {
      windowMinutes: number;
      startDate: Date;
      endDate: Date;
      urls: string[];
    },
  ]
) {
  return runQuery({
    [PRISMA]: () => relationalQuery(...args),
    [CLICKHOUSE]: () => clickhouseQuery(...args),
  });
}
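
// Relational (Prisma) implementation. The levelQuery / sumQuery / urlFilterQuery
// fragments come from prisma.getFunnelQuery (not shown here): level0 and level1
// below anchor the funnel at the first URL, and levelQuery is expected to append
// one further CTE per remaining URL within the configured window.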
async function relationalQuery(
  websiteId: string,
  criteria: {
    windowMinutes: number;
    startDate: Date;
    endDate: Date;
    urls: string[];
  },
): Promise<
  {
    x: string;
    y: number;
  }[]
> {
  const { windowMinutes, startDate, endDate, urls } = criteria;
  const { rawQuery, getFunnelQuery, toUuid } = prisma;
  const { levelQuery, sumQuery, urlFilterQuery } = getFunnelQuery(urls, windowMinutes);
  const params: any = [websiteId, startDate, endDate, ...urls];
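
  // Positional parameters: $1 = website_id, $2/$3 = date range, $4.. = funnel URLs in order.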
  return rawQuery(
    `WITH level0 AS (
      select distinct session_id, url_path, referrer_path, created_at
      from website_event
      where url_path in (${urlFilterQuery})
        and website_id = $1${toUuid()}
        and created_at between $2 and $3
    ),
    level1 AS (
      select distinct session_id, url_path as level_1_url, created_at as level_1_created_at
      from level0
      where url_path = $4
    )${levelQuery}
    SELECT ${sumQuery}
    from level${urls.length};
    `,
    params,
  ).then((a: { [key: string]: number }[]) => {
    return urls.map((b, i) => ({ x: b, y: a[0][`level${i + 1}`] || 0 }));
  });
}
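
// ClickHouse implementation. windowFunnel() assigns each session the deepest
// funnel step it reached within the window (in seconds); the outer query counts
// sessions per level, and the result is mapped back onto the URL list.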
async function clickhouseQuery(
  websiteId: string,
  criteria: {
    windowMinutes: number;
    startDate: Date;
    endDate: Date;
    urls: string[];
  },
): Promise<
  {
    x: string;
    y: number;
  }[]
> {
  const { windowMinutes, startDate, endDate, urls } = criteria;
  const { rawQuery, getBetweenDates, getFunnelQuery } = clickhouse;
  const { columnsQuery, conditionQuery, urlParams } = getFunnelQuery(urls);
  const params = {
    websiteId,
    window: windowMinutes * 60,
    ...urlParams,
  };
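
  // Named parameters: websiteId, window (the funnel window converted to seconds),
  // plus one URL parameter per funnel step spread in from urlParams.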
  return rawQuery<{ level: number; count: number }[]>(
    `
    SELECT level,
      count(*) AS count
    FROM (
      SELECT session_id,
        windowFunnel({window:UInt32})
        (
          created_at
          ${columnsQuery}
        ) AS level
      FROM website_event
      WHERE website_id = {websiteId:UUID}
        and ${getBetweenDates('created_at', startDate, endDate)}
      GROUP BY 1
    )
    GROUP BY level
    ORDER BY level ASC;
    `,
    params,
  ).then(results => {
    return urls.map((a, i) => ({
      x: a,
      y: results[i + 1]?.count || 0,
    }));
  });
}
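
// Example usage (illustrative sketch; the criteria values below are made up and
// not taken from this repository):
//
//   const funnel = await getPageviewFunnel(websiteId, {
//     windowMinutes: 60,
//     startDate: new Date('2023-05-01'),
//     endDate: new Date('2023-06-01'),
//     urls: ['/', '/pricing', '/signup'],
//   });
//   // funnel => [{ x: '/', y: <count> }, { x: '/pricing', y: <count> }, { x: '/signup', y: <count> }]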