umami/queries/analytics/reports/getFunnel.ts

import clickhouse from 'lib/clickhouse';
import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
import prisma from 'lib/prisma';

// Dispatches to the relational (Prisma) or ClickHouse implementation,
// depending on which database driver is configured.
export async function getFunnel(
  ...args: [
    websiteId: string,
    criteria: {
      windowMinutes: number;
      startDate: Date;
      endDate: Date;
      urls: string[];
    },
  ]
) {
  return runQuery({
    [PRISMA]: () => relationalQuery(...args),
    [CLICKHOUSE]: () => clickhouseQuery(...args),
  });
}

// Builds the funnel against the relational database via Prisma's raw query helpers.
async function relationalQuery(
  websiteId: string,
  criteria: {
    windowMinutes: number;
    startDate: Date;
    endDate: Date;
    urls: string[];
  },
): Promise<
  {
    x: string;
    y: number;
    z: number;
  }[]
> {
  const { windowMinutes, startDate, endDate, urls } = criteria;
  const { rawQuery, getFunnelQuery, toUuid } = prisma;
  const { levelQuery, sumQuery, urlParams } = getFunnelQuery(
    urls,
    endDate,
    websiteId,
    windowMinutes,
  );

  const params: any = [websiteId, startDate, endDate, ...urlParams];

  return rawQuery(
    `WITH level1 AS (
      select distinct session_id, created_at
      from website_event
      where website_id = {{websiteId}}${toUuid()}
        and created_at between {{startDate}} and {{endDate}}
        and url_path = $4)
    ${levelQuery}
    ${sumQuery}
    ORDER BY level;`,
    params,
  ).then(results => {
    return urls.map((a, i) => ({
      x: a,
      y: results[i]?.count || 0,
      z: (1 - (Number(results[i]?.count) * 1.0) / Number(results[i - 1]?.count)) * 100 || 0, // drop off
    }));
  });
}

// Builds the funnel against ClickHouse.
async function clickhouseQuery(
  websiteId: string,
  criteria: {
    windowMinutes: number;
    startDate: Date;
    endDate: Date;
    urls: string[];
  },
): Promise<
  {
    x: string;
    y: number;
    z: number;
  }[]
> {
  const { windowMinutes, startDate, endDate, urls } = criteria;
  const { rawQuery, getFunnelQuery } = clickhouse;
  const { levelQuery, sumQuery, urlFilterQuery, urlParams } = getFunnelQuery(urls, windowMinutes);

  const params = {
    websiteId,
    startDate,
    endDate,
    ...urlParams,
  };

  return rawQuery<{ level: number; count: number }[]>(
    `
    WITH level0 AS (
      select distinct session_id, url_path, referrer_path, created_at
      from umami.website_event
      where url_path in (${urlFilterQuery})
        and website_id = {websiteId:UUID}
        and created_at between {startDate:DateTime64} and {endDate:DateTime64}
    ), level1 AS (
      select *
      from level0
      where url_path = {url0:String})
    ${levelQuery}
    select *
    from (
      ${sumQuery}
    ) ORDER BY level;`,
    params,
  ).then(results => {
    return urls.map((a, i) => ({
      x: a,
      y: results[i]?.count || 0,
      z: (1 - (Number(results[i]?.count) * 1.0) / Number(results[i - 1]?.count)) * 100 || 0, // drop off
    }));
  });
}
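
// Hedged usage sketch (not part of the original module): illustrates the expected
// call shape of getFunnel. The website id, date range, and step URLs below are
// hypothetical placeholder values, not taken from the codebase.
async function exampleFunnelReport() {
  const steps = await getFunnel('00000000-0000-0000-0000-000000000000', {
    windowMinutes: 60,
    startDate: new Date('2023-07-01T00:00:00Z'),
    endDate: new Date('2023-07-31T23:59:59Z'),
    urls: ['/pricing', '/signup', '/welcome'],
  });
  // Each element describes one funnel step: x is the step URL, y is the count of
  // sessions that reached it, and z is the drop-off percentage from the previous step.
  return steps;
}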