diff --git a/apps/wapi-ai-chatbot/src/utils/cache.ts b/apps/wapi-ai-chatbot/src/utils/cache.ts
index 9e72e6c..3985e97 100644
--- a/apps/wapi-ai-chatbot/src/utils/cache.ts
+++ b/apps/wapi-ai-chatbot/src/utils/cache.ts
@@ -1,28 +1,27 @@
-import { AiConversationRoleEnum, ConversationMessageType } from '~/types'
 import { caching } from 'cache-manager'
 
 const cacheStore = caching({
-  store: 'memory'
+	store: 'memory'
 })
 
 export async function cacheData(params: { key: string; data: any; ttl?: number }) {
-  const { key, ttl, data } = params
-  await cacheStore.set(key, data, { ...(ttl ? { ttl: ttl } : {}) })
+	const { key, ttl, data } = params
+	await cacheStore.set(key, data, { ...(ttl ? { ttl: ttl } : {}) })
 }
 
 export async function getCachedData<T>(key: string): Promise<T> {
-  const response = await cacheStore.get(key)
-  console.log(response)
-  return response as T
+	const response = await cacheStore.get(key)
+	console.log(response)
+	return response as T
 }
 
 export function computeCacheKey(params: { id: string; context: string }) {
-  return `${params.id}-${params.context}`
+	return `${params.id}-${params.context}`
 }
 
 export function getConversationContextCacheKey(phoneNumber: string) {
-  return computeCacheKey({
-    id: phoneNumber,
-    context: 'conversation'
-  })
+	return computeCacheKey({
+		id: phoneNumber,
+		context: 'conversation'
+	})
 }
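
For reference, a minimal usage sketch of these helpers (not part of the diff): it assumes cache-manager v4 semantics, where the memory store's `ttl` is in seconds, an import path using the same `~` alias, and a hypothetical `CachedMessage` shape, since the `ConversationMessageType` import was dropped above.

```ts
// Hypothetical caller; names, import path, and the 15-minute TTL are illustrative only.
import { cacheData, getCachedData, getConversationContextCacheKey } from '~/utils/cache'

// Assumed message shape for illustration (the real type lived in '~/types').
type CachedMessage = { role: string; content: string }

export async function appendToConversation(phoneNumber: string, message: CachedMessage) {
	// Key looks like "<phoneNumber>-conversation" per computeCacheKey above.
	const key = getConversationContextCacheKey(phoneNumber)

	// Read the existing history (undefined on a cache miss), append, and write back.
	const history = (await getCachedData<CachedMessage[]>(key)) ?? []
	history.push(message)

	// TTL of 15 minutes, in seconds, assuming cache-manager v4's memory store.
	await cacheData({ key, data: history, ttl: 60 * 15 })
}
```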