Updated queries to use cache.

Mike Cao 2023-04-01 17:38:35 -07:00
parent 74192cd695
commit 728e4cff5b
9 changed files with 59 additions and 42 deletions
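
The common thread across the nine files: analytics queries stop choosing between cache.fetchWebsite and getWebsite themselves and route every website lookup through the cache-aware loadWebsite helper in lib/query. Condensed from the hunks below, with the surrounding query code abridged, the caller-side change is roughly:

// Before: each backend picked its own lookup.
//   const website = await getWebsite({ id: websiteId });    // relational (Prisma) path
//   const website = await cache.fetchWebsite(websiteId);    // ClickHouse path

// After: both paths call the shared cache-aware helper.
import { loadWebsite } from 'lib/query';

const website = await loadWebsite(websiteId);
const resetDate = website?.resetAt || website?.createdAt;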

View File

@@ -7,6 +7,7 @@ import { getTeamUser, getTeamUserById } from 'queries';
 import { getTeamWebsite, getTeamWebsiteByTeamMemberId } from 'queries/admin/teamWebsite';
 import { validate } from 'uuid';
 import { Auth } from './types';
+import { loadWebsite } from './query';
 
 const log = debug('umami:auth');
 
@@ -66,7 +67,7 @@ export async function canViewWebsite({ user, shareToken }: Auth, websiteId: stri
     return true;
   }
 
-  const website = await cache.fetchWebsite(websiteId);
+  const website = await loadWebsite(websiteId);
 
   if (website.userId) {
     return user.id === website.userId;
@@ -98,7 +99,7 @@ export async function canUpdateWebsite({ user }: Auth, websiteId: string) {
     return false;
   }
 
-  const website = await cache.fetchWebsite(websiteId);
+  const website = await loadWebsite(websiteId);
 
   if (website.userId) {
     return user.id === website.userId;
@@ -112,7 +113,7 @@ export async function canDeleteWebsite({ user }: Auth, websiteId: string) {
     return true;
   }
 
-  const website = await cache.fetchWebsite(websiteId);
+  const website = await loadWebsite(websiteId);
 
   if (website.userId) {
     return user.id === website.userId;

View File

@@ -1,5 +1,5 @@
 import cache from 'lib/cache';
-import { getWebsite } from 'queries';
+import { getWebsite, getSession, getUser } from 'queries';
 import { Website } from './types';
 
 export async function loadWebsite(websiteId: string): Promise<Website> {
@@ -17,3 +17,35 @@ export async function loadWebsite(websiteId: string): Promise<Website> {
 
   return website;
 }
+
+export async function loadSession(sessionId: string): Promise<Website> {
+  let session;
+
+  if (cache.enabled) {
+    session = await cache.fetchSession(sessionId);
+  } else {
+    session = await getSession({ id: sessionId });
+  }
+
+  if (!session) {
+    return null;
+  }
+
+  return session;
+}
+
+export async function loadUser(userId: string): Promise<Website> {
+  let user;
+
+  if (cache.enabled) {
+    user = await cache.fetchUser(userId);
+  } else {
+    user = await getUser({ id: userId });
+  }
+
+  if (!user || user.deletedAt) {
+    return null;
+  }
+
+  return user;
+}
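
Only the tail of loadWebsite is visible in this hunk; its body lies above the diff context. Judging from the loadSession and loadUser helpers added here and from the inline lookup removed from the session code below, it presumably follows the same cache-or-database shape. A sketch of that assumed shape (an inference, not part of this commit):

// Assumed shape of the existing loadWebsite helper; its body is not shown in this diff.
export async function loadWebsite(websiteId: string): Promise<Website> {
  let website;

  if (cache.enabled) {
    website = await cache.fetchWebsite(websiteId);
  } else {
    website = await getWebsite({ id: websiteId });
  }

  if (!website || website.deletedAt) {
    return null;
  }

  return website;
}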

View File

@@ -1,11 +1,11 @@
-import cache from 'lib/cache';
 import clickhouse from 'lib/clickhouse';
 import { secret, uuid } from 'lib/crypto';
 import { getClientInfo, getJsonBody } from 'lib/detect';
 import { parseToken } from 'next-basics';
 import { CollectRequestBody, NextApiRequestCollect } from 'pages/api/send';
-import { createSession, getSession, getWebsite } from 'queries';
+import { createSession } from 'queries';
 import { validate } from 'uuid';
+import { loadSession, loadWebsite } from './query';
 
 export async function findSession(req: NextApiRequestCollect) {
   const { payload } = getJsonBody<CollectRequestBody>(req);
@@ -33,15 +33,9 @@ export async function findSession(req: NextApiRequestCollect) {
   }
 
   // Find website
-  let website;
-
-  if (cache.enabled) {
-    website = await cache.fetchWebsite(websiteId);
-  } else {
-    website = await getWebsite({ id: websiteId });
-  }
-
-  if (!website || website.deletedAt) {
+  const website = await loadWebsite(websiteId);
+
+  if (!website) {
     throw new Error(`Website not found: ${websiteId}`);
   }
 
@@ -68,13 +62,7 @@ export async function findSession(req: NextApiRequestCollect) {
   }
 
   // Find session
-  let session;
-
-  if (cache.enabled) {
-    session = await cache.fetchSession(sessionId);
-  } else {
-    session = await getSession({ id: sessionId });
-  }
+  let session = await loadSession(websiteId);
 
   // Create a session if not found
   if (!session) {

View File

@@ -14,6 +14,7 @@ export type KafkaTopics = ObjectValues<typeof KAFKA_TOPIC>;
 export interface EventData {
   [key: string]: number | string | EventData | number[] | string[] | EventData[];
 }
+
 export interface Auth {
   user?: {
     id: string;

View File

@@ -1,10 +1,9 @@
 import prisma from 'lib/prisma';
 import clickhouse from 'lib/clickhouse';
 import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
-import cache from 'lib/cache';
 import { WebsiteEventMetric } from 'lib/types';
 import { EVENT_TYPE } from 'lib/constants';
-import { getWebsite } from 'queries';
+import { loadWebsite } from 'lib/query';
 
 export async function getEventMetrics(
   ...args: [
@@ -49,7 +48,7 @@ async function relationalQuery(
   },
 ) {
   const { toUuid, rawQuery, getDateQuery, getFilterQuery } = prisma;
-  const website = await getWebsite({ id: websiteId });
+  const website = await loadWebsite(websiteId);
   const resetDate = website?.resetAt || website?.createdAt;
   const params: any = [websiteId, resetDate, startDate, endDate];
   const filterQuery = getFilterQuery(filters, params);
@@ -91,7 +90,7 @@ async function clickhouseQuery(
   },
 ) {
   const { rawQuery, getDateQuery, getDateFormat, getBetweenDates, getFilterQuery } = clickhouse;
-  const website = await cache.fetchWebsite(websiteId);
+  const website = await loadWebsite(websiteId);
   const resetDate = website?.resetAt || website?.createdAt;
   const params = { websiteId };
 

View File

@@ -1,9 +1,8 @@
-import cache from 'lib/cache';
 import clickhouse from 'lib/clickhouse';
 import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
 import prisma from 'lib/prisma';
 import { WebsiteEventDataMetric } from 'lib/types';
-import { getWebsite } from 'queries';
+import { loadWebsite } from 'lib/query';
 
 export async function getEventData(
   ...args: [
@@ -49,7 +48,7 @@ async function relationalQuery(
 ) {
   const { startDate, endDate, timeSeries, eventName, urlPath, filters } = data;
   const { toUuid, rawQuery, getEventDataFilterQuery, getDateQuery } = prisma;
-  const website = await getWebsite({ id: websiteId });
+  const website = await loadWebsite(websiteId);
   const resetDate = website?.resetAt || website?.createdAt;
   const params: any = [websiteId, resetDate, startDate, endDate, eventName || ''];
 
@@ -99,7 +98,7 @@ async function clickhouseQuery(
   const { startDate, endDate, timeSeries, eventName, urlPath, filters } = data;
   const { rawQuery, getDateFormat, getBetweenDates, getDateQuery, getEventDataFilterQuery } =
     clickhouse;
-  const website = await cache.fetchWebsite(websiteId);
+  const website = await loadWebsite(websiteId);
   const resetDate = website?.resetAt || website?.createdAt;
   const params = { websiteId };
 

View File

@@ -1,9 +1,8 @@
-import cache from 'lib/cache';
 import clickhouse from 'lib/clickhouse';
 import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
 import prisma from 'lib/prisma';
 import { EVENT_TYPE } from 'lib/constants';
-import { getWebsite } from 'queries';
+import { loadWebsite } from 'lib/query';
 
 export async function getPageviewStats(
   ...args: [
@@ -47,7 +46,7 @@ async function relationalQuery(
     sessionKey = 'session_id',
   } = criteria;
   const { toUuid, getDateQuery, parseFilters, rawQuery } = prisma;
-  const website = await getWebsite({ id: websiteId });
+  const website = await loadWebsite(websiteId);
   const resetDate = website?.resetAt || website?.createdAt;
   const params: any = [websiteId, resetDate, startDate, endDate];
   const { filterQuery, joinSession } = parseFilters(filters, params);
@@ -95,7 +94,7 @@ async function clickhouseQuery(
     getDateQuery,
     getBetweenDates,
   } = clickhouse;
-  const website = await cache.fetchWebsite(websiteId);
+  const website = await loadWebsite(websiteId);
   const resetDate = website?.resetAt || website?.createdAt;
   const params = { websiteId };
   const { filterQuery } = parseFilters(filters, params);

View File

@@ -1,9 +1,8 @@
 import prisma from 'lib/prisma';
 import clickhouse from 'lib/clickhouse';
 import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
-import cache from 'lib/cache';
-import { EVENT_TYPE, FILTER_COLUMNS } from 'lib/constants';
-import { getWebsite } from 'queries';
+import { EVENT_TYPE } from 'lib/constants';
+import { loadWebsite } from 'lib/query';
 
 export async function getSessionMetrics(
   ...args: [
@@ -21,7 +20,7 @@ async function relationalQuery(
   websiteId: string,
   criteria: { startDate: Date; endDate: Date; column: string; filters: object },
 ) {
-  const website = await getWebsite({ id: websiteId });
+  const website = await loadWebsite(websiteId);
   const resetDate = website?.resetAt || website?.createdAt;
   const { startDate, endDate, column, filters = {} } = criteria;
   const { toUuid, parseFilters, rawQuery } = prisma;
@@ -55,7 +54,7 @@ async function clickhouseQuery(
 ) {
   const { startDate, endDate, column, filters = {} } = data;
   const { getDateFormat, parseFilters, getBetweenDates, rawQuery } = clickhouse;
-  const website = await cache.fetchWebsite(websiteId);
+  const website = await loadWebsite(websiteId);
   const resetDate = website?.resetAt || website?.createdAt;
   const params = { websiteId };
   const { filterQuery } = parseFilters(filters, params);

View File

@@ -1,9 +1,8 @@
 import prisma from 'lib/prisma';
 import clickhouse from 'lib/clickhouse';
 import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
-import cache from 'lib/cache';
 import { EVENT_TYPE } from 'lib/constants';
-import { getWebsite } from 'queries';
+import { loadWebsite } from 'lib/query';
 
 export async function getWebsiteStats(
   ...args: [
@@ -23,7 +22,7 @@ async function relationalQuery(
 ) {
   const { startDate, endDate, filters = {} } = criteria;
   const { toUuid, getDateQuery, getTimestampInterval, parseFilters, rawQuery } = prisma;
-  const website = await getWebsite({ id: websiteId });
+  const website = await loadWebsite(websiteId);
   const resetDate = website?.resetAt || website?.createdAt;
   const params: any = [websiteId, resetDate, startDate, endDate];
   const { filterQuery, joinSession } = parseFilters(filters, params);
@@ -58,7 +57,7 @@ async function clickhouseQuery(
 ) {
   const { startDate, endDate, filters = {} } = criteria;
   const { rawQuery, getDateFormat, getDateQuery, getBetweenDates, parseFilters } = clickhouse;
-  const website = await cache.fetchWebsite(websiteId);
+  const website = await loadWebsite(websiteId);
   const resetDate = website?.resetAt || website?.createdAt;
   const params = { websiteId };
   const { filterQuery } = parseFilters(filters, params);