Merge pull request #11 from oslabs-beta/dev
Obsidian 4.0.0
sarkamedo committed Dec 16, 2021
2 parents c981150 + cb26f34 commit 5c801d3
Showing 28 changed files with 16,795 additions and 266 deletions.
5 changes: 5 additions & 0 deletions README.md
@@ -27,6 +27,7 @@
- Configurable caching options, giving you complete control over your cache
- Fullstack integration, leveraging client-side and server-side caching to streamline your caching strategy
- Support for the full GraphQL specification
- Support for server-side cache invalidation
- Optional GraphQL DoS attack mitigation security module
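
A minimal setup sketch of how these options fit together (not from this commit; the import URLs, schema, and resolvers are placeholders, and `maxQueryDepth: 10` is an example value rather than the default):

```typescript
import { Application, Router } from 'https://deno.land/x/oak/mod.ts';
import { ObsidianRouter } from 'https://deno.land/x/obsidian/mod.ts'; // placeholder path

const typeDefs = `
  type Movie { id: ID title: String }
  type Query { movies: [Movie] }
`;
const resolvers = { Query: { movies: () => [{ id: '1', title: 'Alien' }] } };

// Options mirror ObsidianRouterOptions in src/Obsidian.ts below.
const GraphQLRouter = await ObsidianRouter<Router>({
  Router,
  typeDefs,
  resolvers,
  usePlayground: true,
  useCache: true,                          // server-side Redis caching
  redisPort: 6379,
  policy: 'allkeys-lru',                   // Redis eviction policy
  maxmemory: '2000mb',
  maxQueryDepth: 10,                       // reject queries nested deeper than 10 levels
  useQueryCache: true,                     // cache whole-query responses
  useRebuildCache: true,
  customIdentifier: ['id', '__typename'],  // fields used to key normalized objects
});

const app = new Application();
app.use(GraphQLRouter.routes(), GraphQLRouter.allowedMethods());
await app.listen({ port: 8000 });
```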

## Overview
@@ -164,6 +165,10 @@ github for a demo with some example code to play with:

## Authors

[Yogi Paturu](https://github.com/YogiPaturu)
[Michael Chin](https://github.com/mikechin37)
[Dana Flury](https://github.com/dmflury)
[Sardor Akhmedov](https://github.com/sarkamedo)
[Christopher Berry](https://github.com/cjamesb)
[Olivia Yeghiazarian](https://github.com/Olivia-code)
[Michael Melville](https://github.com/meekle)
238 changes: 74 additions & 164 deletions src/Obsidian.ts
@@ -1,18 +1,16 @@
import { graphql } from 'https://cdn.pika.dev/[email protected]';
import { renderPlaygroundPage } from 'https://deno.land/x/[email protected]/graphql-playground-html/render-playground-html.ts';
import { makeExecutableSchema } from 'https://deno.land/x/[email protected]/graphql-tools/schema/makeExecutableSchema.ts';
import LFUCache from './Browser/lfuBrowserCache.js';
import { Cache } from './quickCache.js';
import queryDepthLimiter from './DoSSecurity.ts';
import {restructure} from './restructure.ts';
import {invalidateCacheCheck} from './invalidateCacheCheck.js';
import {normalizeResult, cachePrimaryFields} from './astNormalize.js'
import {rebuildFromQuery} from './rebuild.js'
import {mapSelectionSet} from './mapSelections.js'

import { restructure } from './restructure.ts';
import { rebuildFromQuery } from './rebuild.js'
import { normalizeObject } from './normalize.ts'
import { transformResponse, detransformResponse } from './transformResponse.ts'
import { isMutation, invalidateCache } from './invalidateCacheCheck.ts'

interface Constructable<T> {
new (...args: any): T & OakRouter;
new(...args: any): T & OakRouter;
}

interface OakRouter {
@@ -28,12 +26,12 @@ export interface ObsidianRouterOptions<T> {
resolvers: ResolversProps;
context?: (ctx: any) => any;
usePlayground?: boolean;
useCache?: boolean;
useCache?: boolean; // trivial parameter
redisPort?: number;
policy?: string;
maxmemory?: string;
maxQueryDepth?: number;
useQueryCache?: boolean;
useQueryCache?: boolean; // trivial parameter
useRebuildCache?: boolean;
customIdentifier?: Array<string>;
}
@@ -61,200 +59,112 @@ export async function ObsidianRouter<T>({
usePlayground = false,
useCache = true,
redisPort = 6379,
policy,
maxmemory,
policy = 'allkeys-lru',
maxmemory = '2000mb',
maxQueryDepth = 0,
useQueryCache = true,
useRebuildCache = true,
customIdentifier = ["id", "__typename"],
}: ObsidianRouterOptions<T>): Promise<T> {
redisPortExport = redisPort;
const router = new Router();

const schema = makeExecutableSchema({ typeDefs, resolvers });

// If using LFU Browser Caching, the following cache line needs to be uncommented.
//const cache = new LFUCache(50);

// If using Redis caching, the following lines need to be uncommented.

const cache = new Cache();

// clear redis cache when restarting the server

// const cache = new LFUCache(50); // If using LFU browser caching, uncomment this line
const cache = new Cache(); // If using Redis caching, leave this line uncommented
cache.cacheClear();

// set redis configurations

if (policy || maxmemory) {
if (policy || maxmemory) { // set redis configurations
cache.configSet('maxmemory-policy', policy);
cache.configSet('maxmemory', maxmemory);
}
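// Illustrative note (not from this commit): with the defaults above, these calls presumably
// map to Redis's CONFIG SET maxmemory-policy allkeys-lru and CONFIG SET maxmemory 2000mb,
// i.e. once the cache approaches ~2 GB, Redis evicts the least-recently-used keys across
// the whole keyspace.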

await router.post(path, async (ctx: any) => {
var t0 = performance.now();

const t0 = performance.now();
const { response, request } = ctx;

if (request.hasBody) {
try {
const contextResult = context ? await context(ctx) : undefined;
let body = await request.body().value;


// If a security limit is set for maxQueryDepth, invoke queryDepthLimiter,
// which throws an error if the query depth exceeds the maximum
if (maxQueryDepth) queryDepthLimiter(body.query, maxQueryDepth);

// we run restructure to get rid of variables and fragments


body = {query : restructure(body)};


const isSubscription = await invalidateCacheCheck(body);
if (isSubscription===true){
console.log("it's a subscription");
// We still have unaltered access to the original request and the original
// context at ctx, so you can forward any part of them on to a GraphQL
// subscription server as needed. Here we simply bypass the cache and exit
// the rest of the handler early when the operation is a subscription.
// If you need to upgrade the context, you can still do so without interference.
const result = await (graphql as any)(
schema,
body.query,
resolvers,
contextResult,
body.variables || undefined,
body.operationName || undefined
);

// Send database response to client //
response.status = 200;
response.body = result;
return;
}

// Variable to block the normalization of mutations //
let toNormalize = true;

if (useCache) {

// Send query off to be destructured and found in Redis if possible //

let obsidianReturn
if (useQueryCache) {
obsidianReturn = await cache.read(body.query);
}
if (!obsidianReturn && useRebuildCache) {

const rebuildReturn = await rebuildFromQuery(body.query);


obsidianReturn = rebuildReturn
}

if (obsidianReturn) {

response.status = 200;
response.body = obsidianReturn;
var t1 = performance.now();
console.log(
'%c Obsidian retrieved data from cache and took ' +
(t1 - t0) +
' milliseconds.', "background: #222; color: #00FF00"
);

if (useQueryCache) {
await cache.write(body.query, obsidianReturn, false);
}
return;
}
if (!request.hasBody) return;
try {
const contextResult = context ? await context(ctx) : undefined;
let body = await request.body().value;
if (maxQueryDepth) queryDepthLimiter(body.query, maxQueryDepth); // If a security limit is set for maxQueryDepth, invoke queryDepthLimiter, which throws an error if the query depth exceeds the maximum
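// Illustrative note (not from this commit): the depth check rejects deeply nested
// queries before any resolver runs, so with a low limit such as maxQueryDepth = 2,
// a query shaped like `{ movies { actors { movies { id } } } }` would be answered
// with an error instead of being executed.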
body = { query: restructure(body) }; // Restructure gets rid of variables and fragments from the query
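// Illustrative example (not from this commit) of what restructuring means: a request like
//   query GetMovie($id: ID!) { movie(id: $id) { ...movieFields } }
//   fragment movieFields on Movie { id title }
// sent with variables { "id": "1" } is flattened into a single self-contained query
// string along the lines of `{ movie(id: "1") { id title } }`, so the cache sees one
// canonical key per logical query.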
let cacheQueryValue = await cache.read(body.query)
// Is query in cache?
if (useCache && useQueryCache && cacheQueryValue) {
let detransformedCacheQueryValue = await detransformResponse(body.query, cacheQueryValue)
if (!detransformedCacheQueryValue) {
// cache was evicted if any partial cache is missing, which causes detransformResponse to return undefined
cacheQueryValue = undefined;
} else {
response.status = 200;
response.body = detransformedCacheQueryValue;
const t1 = performance.now();
console.log(
'%c Obsidian retrieved data from cache and took ' +
(t1 - t0) +
' milliseconds.', "background: #222; color: #00FF00"
);
}

// if not in cache, fetch a new response from the database
const result = await (graphql as any)(
}; // If not in cache:
if (useCache && useQueryCache && !cacheQueryValue) {
const gqlResponse = await (graphql as any)(
schema,
body.query,
resolvers,
contextResult,
body.variables || undefined,
body.operationName || undefined
);

// Send database response to client //
response.status = 200;
response.body = result;

// cache the whole query, completely non-normalized
// the useQueryCache flag gates this full-query cache
if (useQueryCache && useCache) {
await cache.write(body.query, result, false);
}

// Normalize response and store in cache //
if (useCache && toNormalize && !result.errors && useRebuildCache) {

// run to map aliases
let map = mapSelectionSet(body.query)

// this normalizes the result and saves it to Redis
let normalized
// uses the base id and __typename if the given customIdentifier array is not populated
if (customIdentifier.length === 0) {
normalized = await normalizeResult(result, map)
}
// this uses the custom identifier if given
else{

normalized = await normalizeResult(result, map, customIdentifier)
const normalizedGQLResponse = normalizeObject(gqlResponse, customIdentifier);
if (isMutation(body)) {
const queryString = await request.body().value;
invalidateCache(normalizedGQLResponse, queryString.query);
}
// If read query: run query, normalize GQL response, transform GQL response, write to cache, and write pieces of normalized GQL response objects
else {
const transformedGQLResponse = transformResponse(gqlResponse, customIdentifier);
await cache.write(body.query, transformedGQLResponse, false);
for (const key in normalizedGQLResponse) {
await cache.cacheWriteObject(key, normalizedGQLResponse[key]);
}
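// Illustrative sketch (not from this commit) of what lands in Redis for a read query,
// assuming customIdentifier = ['id', '__typename'] and a response that contains a
// Movie with id "1":
//   - one entry keyed by the full query string, holding the transformed response in
//     which each object is replaced by a short reference built from its
//     customIdentifier fields (its id and __typename)
//   - one entry per unique object, keyed by an identifier built the same way and
//     holding the object's fields, written via cache.cacheWriteObject above
// detransformResponse reassembles the full response from these pieces on a cache hit,
// and invalidateCache deletes or updates the per-object entries when a mutation
// touches them.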

await cachePrimaryFields(normalized, body.query, map)



}
var t1 = performance.now();
response.status = 200;
response.body = gqlResponse;
const t1 = performance.now();
console.log(
'%c Obsidian received new data and took ' + (t1 - t0) + ' milliseconds', 'background: #222; color: #FFFF00'
'%c Obsidian received new data and took ' +
(t1 - t0) +
' milliseconds', 'background: #222; color: #FFFF00'
);

return;
} catch (error) {
response.status = 200;
response.body = {
data: null,
errors: [
{
message: error.message ? error.message : error,
},
],
};
console.error('Error: ', error.message);
return;
}
} catch (error) {
response.status = 400;
response.body = {
data: null,
errors: [
{
message: error.message ? error.message : error,
},
],
};
console.error('Error: ', error.message);
}
});

// serve graphql playground
await router.get(path, async (ctx: any) => {
const { request, response } = ctx;
if (usePlayground) {
const prefersHTML = request.accepts('text/html');
const optionsObj:any = {
'schema.polling.enable': false, // disables automatic schema polling
}
const optionsObj: any = {
'schema.polling.enable': false, // disables automatic schema polling
}

if (prefersHTML) {

const playground = renderPlaygroundPage({
endpoint: request.url.origin + path,
subscriptionEndpoint: request.url.origin,
settings : optionsObj
settings: optionsObj
});
response.status = 200;
response.body = playground;