Updated queries to use cache.

Mike Cao 2023-04-01 17:38:35 -07:00
parent 74192cd695
commit 728e4cff5b
9 changed files with 59 additions and 42 deletions
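The hunks below replace direct cache.fetchWebsite and getWebsite({ id }) lookups with a shared loadWebsite helper from lib/query, and add matching loadSession and loadUser helpers. The body of loadWebsite itself appears unchanged by this commit and is not shown; judging from the helpers added below and the inline code being removed, it presumably follows the same cache-or-database pattern, roughly:

// Rough sketch only: loadWebsite's body is not part of this diff. It is inferred
// from the loadSession/loadUser helpers added below and the inline code they replace.
import cache from 'lib/cache';
import { getWebsite } from 'queries';
import { Website } from './types';

export async function loadWebsite(websiteId: string): Promise<Website> {
  let website;

  // Use the cache when it is enabled, otherwise query the database directly.
  if (cache.enabled) {
    website = await cache.fetchWebsite(websiteId);
  } else {
    website = await getWebsite({ id: websiteId });
  }

  // Missing or soft-deleted websites are treated as not found.
  if (!website || website.deletedAt) {
    return null;
  }

  return website;
}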

View File

@ -7,6 +7,7 @@ import { getTeamUser, getTeamUserById } from 'queries';
import { getTeamWebsite, getTeamWebsiteByTeamMemberId } from 'queries/admin/teamWebsite';
import { validate } from 'uuid';
import { Auth } from './types';
+import { loadWebsite } from './query';
const log = debug('umami:auth');
@ -66,7 +67,7 @@ export async function canViewWebsite({ user, shareToken }: Auth, websiteId: stri
return true;
}
-const website = await cache.fetchWebsite(websiteId);
+const website = await loadWebsite(websiteId);
if (website.userId) {
return user.id === website.userId;
@ -98,7 +99,7 @@ export async function canUpdateWebsite({ user }: Auth, websiteId: string) {
return false;
}
-const website = await cache.fetchWebsite(websiteId);
+const website = await loadWebsite(websiteId);
if (website.userId) {
return user.id === website.userId;
@ -112,7 +113,7 @@ export async function canDeleteWebsite({ user }: Auth, websiteId: string) {
return true;
}
-const website = await cache.fetchWebsite(websiteId);
+const website = await loadWebsite(websiteId);
if (website.userId) {
return user.id === website.userId;

View File

@ -1,5 +1,5 @@
import cache from 'lib/cache';
-import { getWebsite } from 'queries';
+import { getWebsite, getSession, getUser } from 'queries';
import { Website } from './types';
export async function loadWebsite(websiteId: string): Promise<Website> {
@ -17,3 +17,35 @@ export async function loadWebsite(websiteId: string): Promise<Website> {
return website;
}
+
+export async function loadSession(sessionId: string) {
+  let session;
+
+  if (cache.enabled) {
+    session = await cache.fetchSession(sessionId);
+  } else {
+    session = await getSession({ id: sessionId });
+  }
+
+  if (!session) {
+    return null;
+  }
+
+  return session;
+}
+
+export async function loadUser(userId: string) {
+  let user;
+
+  if (cache.enabled) {
+    user = await cache.fetchUser(userId);
+  } else {
+    user = await getUser({ id: userId });
+  }
+
+  if (!user || user.deletedAt) {
+    return null;
+  }
+
+  return user;
+}
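The new loadSession and loadUser helpers (like loadWebsite) only touch lib/cache through a handful of calls. The cache module itself is not part of this commit; from the calls used above, its assumed surface is roughly the following (the interface name is invented for illustration):

// Assumed shape of lib/cache as used by the helpers above. Only the members
// actually called in this commit are listed; the interface name is made up.
export interface AssumedQueryCache {
  enabled: boolean;
  fetchWebsite(websiteId: string): Promise<unknown>;
  fetchSession(sessionId: string): Promise<unknown>;
  fetchUser(userId: string): Promise<unknown>;
}

Keeping the enabled check inside these helpers means callers such as lib/auth and the query files in this commit just call loadWebsite, loadSession, or loadUser and never have to branch on whether a cache is configured.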

View File

@ -1,11 +1,11 @@
-import cache from 'lib/cache';
import clickhouse from 'lib/clickhouse';
import { secret, uuid } from 'lib/crypto';
import { getClientInfo, getJsonBody } from 'lib/detect';
import { parseToken } from 'next-basics';
import { CollectRequestBody, NextApiRequestCollect } from 'pages/api/send';
-import { createSession, getSession, getWebsite } from 'queries';
+import { createSession } from 'queries';
import { validate } from 'uuid';
+import { loadSession, loadWebsite } from './query';
export async function findSession(req: NextApiRequestCollect) {
const { payload } = getJsonBody<CollectRequestBody>(req);
@ -33,15 +33,9 @@ export async function findSession(req: NextApiRequestCollect) {
}
// Find website
-let website;
+const website = await loadWebsite(websiteId);
-if (cache.enabled) {
-website = await cache.fetchWebsite(websiteId);
-} else {
-website = await getWebsite({ id: websiteId });
-}
-if (!website || website.deletedAt) {
+if (!website) {
throw new Error(`Website not found: ${websiteId}`);
}
@ -68,13 +62,7 @@ export async function findSession(req: NextApiRequestCollect) {
}
// Find session
-let session;
-if (cache.enabled) {
-session = await cache.fetchSession(sessionId);
-} else {
-session = await getSession({ id: sessionId });
-}
+let session = await loadSession(sessionId);
// Create a session if not found
if (!session) {

View File

@ -14,6 +14,7 @@ export type KafkaTopics = ObjectValues<typeof KAFKA_TOPIC>;
export interface EventData {
[key: string]: number | string | EventData | number[] | string[] | EventData[];
}
export interface Auth {
user?: {
id: string;

View File

@ -1,10 +1,9 @@
import prisma from 'lib/prisma';
import clickhouse from 'lib/clickhouse';
import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
-import cache from 'lib/cache';
import { WebsiteEventMetric } from 'lib/types';
import { EVENT_TYPE } from 'lib/constants';
-import { getWebsite } from 'queries';
+import { loadWebsite } from 'lib/query';
export async function getEventMetrics(
...args: [
@ -49,7 +48,7 @@ async function relationalQuery(
},
) {
const { toUuid, rawQuery, getDateQuery, getFilterQuery } = prisma;
-const website = await getWebsite({ id: websiteId });
+const website = await loadWebsite(websiteId);
const resetDate = website?.resetAt || website?.createdAt;
const params: any = [websiteId, resetDate, startDate, endDate];
const filterQuery = getFilterQuery(filters, params);
@ -91,7 +90,7 @@ async function clickhouseQuery(
},
) {
const { rawQuery, getDateQuery, getDateFormat, getBetweenDates, getFilterQuery } = clickhouse;
-const website = await cache.fetchWebsite(websiteId);
+const website = await loadWebsite(websiteId);
const resetDate = website?.resetAt || website?.createdAt;
const params = { websiteId };

View File

@ -1,9 +1,8 @@
-import cache from 'lib/cache';
import clickhouse from 'lib/clickhouse';
import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
import prisma from 'lib/prisma';
import { WebsiteEventDataMetric } from 'lib/types';
-import { getWebsite } from 'queries';
+import { loadWebsite } from 'lib/query';
export async function getEventData(
...args: [
@ -49,7 +48,7 @@ async function relationalQuery(
) {
const { startDate, endDate, timeSeries, eventName, urlPath, filters } = data;
const { toUuid, rawQuery, getEventDataFilterQuery, getDateQuery } = prisma;
-const website = await getWebsite({ id: websiteId });
+const website = await loadWebsite(websiteId);
const resetDate = website?.resetAt || website?.createdAt;
const params: any = [websiteId, resetDate, startDate, endDate, eventName || ''];
@ -99,7 +98,7 @@ async function clickhouseQuery(
const { startDate, endDate, timeSeries, eventName, urlPath, filters } = data;
const { rawQuery, getDateFormat, getBetweenDates, getDateQuery, getEventDataFilterQuery } =
clickhouse;
-const website = await cache.fetchWebsite(websiteId);
+const website = await loadWebsite(websiteId);
const resetDate = website?.resetAt || website?.createdAt;
const params = { websiteId };

View File

@ -1,9 +1,8 @@
-import cache from 'lib/cache';
import clickhouse from 'lib/clickhouse';
import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
import prisma from 'lib/prisma';
import { EVENT_TYPE } from 'lib/constants';
-import { getWebsite } from 'queries';
+import { loadWebsite } from 'lib/query';
export async function getPageviewStats(
...args: [
@ -47,7 +46,7 @@ async function relationalQuery(
sessionKey = 'session_id',
} = criteria;
const { toUuid, getDateQuery, parseFilters, rawQuery } = prisma;
-const website = await getWebsite({ id: websiteId });
+const website = await loadWebsite(websiteId);
const resetDate = website?.resetAt || website?.createdAt;
const params: any = [websiteId, resetDate, startDate, endDate];
const { filterQuery, joinSession } = parseFilters(filters, params);
@ -95,7 +94,7 @@ async function clickhouseQuery(
getDateQuery,
getBetweenDates,
} = clickhouse;
-const website = await cache.fetchWebsite(websiteId);
+const website = await loadWebsite(websiteId);
const resetDate = website?.resetAt || website?.createdAt;
const params = { websiteId };
const { filterQuery } = parseFilters(filters, params);

View File

@ -1,9 +1,8 @@
import prisma from 'lib/prisma';
import clickhouse from 'lib/clickhouse';
import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
-import cache from 'lib/cache';
-import { EVENT_TYPE, FILTER_COLUMNS } from 'lib/constants';
-import { getWebsite } from 'queries';
+import { EVENT_TYPE } from 'lib/constants';
+import { loadWebsite } from 'lib/query';
export async function getSessionMetrics(
...args: [
@ -21,7 +20,7 @@ async function relationalQuery(
websiteId: string,
criteria: { startDate: Date; endDate: Date; column: string; filters: object },
) {
-const website = await getWebsite({ id: websiteId });
+const website = await loadWebsite(websiteId);
const resetDate = website?.resetAt || website?.createdAt;
const { startDate, endDate, column, filters = {} } = criteria;
const { toUuid, parseFilters, rawQuery } = prisma;
@ -55,7 +54,7 @@ async function clickhouseQuery(
) {
const { startDate, endDate, column, filters = {} } = data;
const { getDateFormat, parseFilters, getBetweenDates, rawQuery } = clickhouse;
-const website = await cache.fetchWebsite(websiteId);
+const website = await loadWebsite(websiteId);
const resetDate = website?.resetAt || website?.createdAt;
const params = { websiteId };
const { filterQuery } = parseFilters(filters, params);

View File

@ -1,9 +1,8 @@
import prisma from 'lib/prisma';
import clickhouse from 'lib/clickhouse';
import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
-import cache from 'lib/cache';
import { EVENT_TYPE } from 'lib/constants';
-import { getWebsite } from 'queries';
+import { loadWebsite } from 'lib/query';
export async function getWebsiteStats(
...args: [
@ -23,7 +22,7 @@ async function relationalQuery(
) {
const { startDate, endDate, filters = {} } = criteria;
const { toUuid, getDateQuery, getTimestampInterval, parseFilters, rawQuery } = prisma;
-const website = await getWebsite({ id: websiteId });
+const website = await loadWebsite(websiteId);
const resetDate = website?.resetAt || website?.createdAt;
const params: any = [websiteId, resetDate, startDate, endDate];
const { filterQuery, joinSession } = parseFilters(filters, params);
@ -58,7 +57,7 @@ async function clickhouseQuery(
) {
const { startDate, endDate, filters = {} } = criteria;
const { rawQuery, getDateFormat, getDateQuery, getBetweenDates, parseFilters } = clickhouse;
-const website = await cache.fetchWebsite(websiteId);
+const website = await loadWebsite(websiteId);
const resetDate = website?.resetAt || website?.createdAt;
const params = { websiteId };
const { filterQuery } = parseFilters(filters, params);