Merge pull request #77 from oslabs-beta/main
Obsidian Version 8.0
Showing 41 changed files with 1,362 additions and 3,472 deletions.
@@ -1,35 +1,61 @@
import * as React from "https://esm.sh/react@18";
import LFUCache from '../src/Browser/lfuBrowserCache.js';
import LRUCache from '../src/Browser/lruBrowserCache.js';
import WTinyLFUCache from "../src/Browser/wTinyLFUBrowserCache.js";
import { insertTypenames } from '../src/Browser/insertTypenames.js';
import { sha256 } from 'https://denopkg.com/chiefbiiko/[email protected]/mod.ts';

const cacheContext = React.createContext();

function ObsidianWrapper(props) {
  const { algo, capacity } = props
  const [cache, setCache] = React.useState(new LFUCache(Number(capacity || 2000)));
  if(algo === 'LRU') setCache(new LRUCache(Number(capacity || 2000))); // You have to put your Google Chrome Obsidian developer tool extension id to connect Obsidian Wrapper with dev tool
  const chromeExtensionId = 'apcpdmmbhhephobnmnllbklplpaoiemo';
  // initialize cache in local storage
  //window.localStorage.setItem('cache', JSON.stringify(cache));
  // props to be inputted by user when using the Obsidian Wrapper
  const { algo, capacity, searchTerms, useCache, persistQueries } = props;
  // if useCache hasn't been set, default caching to true
  let caching = true;
  // if it has been set to false, turn client-side caching off
  if (useCache === false) caching = false;

  // algo defaults to LFU, capacity defaults to 2000
  const setAlgoCap = (algo, capacity) => {
    let cache;
    if(caching && algo === 'LRU'){
      cache = new LRUCache(Number(capacity || 2000))
    } else if (caching && algo === 'W-TinyLFU'){
      cache = new WTinyLFUCache(Number(capacity || 2000))
    } else if (caching) {
      cache = new LFUCache(Number(capacity || 2000))
    }
    return cache;
  }

  // once cache is initialized, cannot setCache
  // state for cache is initialized based on developer settings in wrapper
  // to successfully change between algo types for testing, kill the server, change the algo type in wrapper, then restart server
  const [cache, setCache] = React.useState(setAlgoCap(algo, capacity));

  // FOR DEVTOOL - listening for message from content.js to be able to send algo type and capacity to devtool
  window.addEventListener('message', msg => {
    if(msg.data.type === 'algocap'){
      window.postMessage({
        algo: algo ? algo : 'LFU',
        capacity: capacity ? capacity : 2000
      })
    }
  });
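
For reference, a minimal sketch of how these new wrapper props are meant to be supplied. The prop names come from this diff; the import paths, the App component, the capacity value, and the searchTerms array are invented for illustration, and it assumes the wrapper renders its children inside the cache context provider (which is outside this hunk).

import * as React from "https://esm.sh/react@18";
import ObsidianWrapper from './ObsidianWrapper.jsx'; // hypothetical local import path and export shape
import App from './App.jsx';                         // invented app component

// algo: 'LFU' (default), 'LRU', or 'W-TinyLFU'; capacity defaults to 2000.
// useCache={false} turns client-side caching off entirely; persistQueries enables
// the sha256 hash-first request flow; searchTerms is forwarded to cache.write.
function Root() {
  return (
    <ObsidianWrapper
      algo="W-TinyLFU"
      capacity={5000}
      useCache={true}
      persistQueries={true}
      searchTerms={['title']}
    >
      <App />
    </ObsidianWrapper>
  );
}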

  async function query(query, options = {}) {
    // dev tool messages
    // FOR DEVTOOL - startTime is used to calculate the performance of the cache
    // startDate is to find out when query was made, this data is passed to devtools
    const startTime = Date.now();
    /*
    chrome.runtime.sendMessage(chromeExtensionId, { query: query });
    chrome.runtime.sendMessage(chromeExtensionId, {
      cache: window.localStorage.getItem('cache'),
    });
    */
    const startDate = new Date(Date.now());

    // set the options object default properties if not provided
    const {
      endpoint = '/graphql',
      cacheRead = true,
      cacheWrite = true,
      cacheRead = !caching ? false : true,
      cacheWrite = !caching ? false : true,
      pollInterval = null,
      wholeQuery = true,
      wholeQuery = false, //Note: logic for true is currently nonfunctional
    } = options;

    // when pollInterval is not null the query will be sent to the server every inputted number of milliseconds

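A hedged example of how a consumer might call query with these option defaults, written as if it lived in this file next to cacheContext. It assumes the wrapper exposes query through the cacheContext provider value (which is outside this hunk); the schema and component are invented.

import * as React from "https://esm.sh/react@18";

function MovieList() {
  // Assumes the wrapper provides { query } through cacheContext.
  const { query } = React.useContext(cacheContext);

  React.useEffect(() => {
    // endpoint defaults to '/graphql'; cacheRead/cacheWrite default to true while caching is enabled
    query(
      `query { movies { id title releaseYear } }`, // invented schema, purely for illustration
      { cacheRead: true, cacheWrite: true, wholeQuery: false }
    ).then((resObj) => console.log('response time (ms):', resObj.time)); // time is attached on hits and misses
  }, []);

  return null;
}
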
@@ -45,70 +71,101 @@ function ObsidianWrapper(props) {
      return interval;
    }

    // when cacheRead set to true
    if (cacheRead) {
    // when cacheRead set to true & we are utilizing client side caching
    if (cacheRead && caching) {
      let resObj;
      // when the developer decides to only utilize whole query for cache
      if (!wholeQuery) resObj = await cache.readWholeQuery(query);
      if (wholeQuery) resObj = await cache.readWholeQuery(query);
      // attempt to read from the cache
      else resObj = await cache.read(query);
      // check if query is stored in cache
      if (resObj) {
        // returning cached response as a promise
        const cacheHitResponseTime = Date.now() - startTime;

        // Allow for access of the response time
        // const cacheCopy = {...cache};
        // cacheCopy.callTime = cacheHitResponseTime;
        // setCache(cacheCopy);
        resObj['time'] = cacheHitResponseTime
        // FOR DEVTOOL - sends message to content.js with query metrics when query is a hit
        window.postMessage({
          type: 'query',
          time: cacheHitResponseTime,
          date: startDate.toDateString().slice(0, 24),
          query: query,
          hit: true
        });

        console.log(
          "From cacheRead: Here's the response time on the front end: ",
          cacheHitResponseTime
        );
        /*chrome.runtime.sendMessage(chromeExtensionId, {
          cacheHitResponseTime: cacheHitResponseTime,
        });*/
        return new Promise((resolve, reject) => resolve(resObj));
      }
      // execute graphql fetch request if cache miss
      return new Promise((resolve, reject) => resolve(hunt(query)));
      // when cacheRead set to false
    }
    if (!cacheRead) {
    // when cacheRead set to false & not using client-side cache
    if (!cacheRead || !caching) {
      return new Promise((resolve, reject) => resolve(hunt(query)));
    }

    // when cache miss or on intervals
    // function to be called on cache miss or on intervals or not looking in the cache
    async function hunt(query) {
      if (wholeQuery) query = insertTypenames(query);
      if (!wholeQuery) query = insertTypenames(query);
      try {
        // send fetch request with query
        const resJSON = await fetch(endpoint, {
          method: 'POST',
          headers: {
            'Content-Type': 'application/json',
            Accept: 'application/json',
          },
          body: JSON.stringify({ query }),
        });
        let resJSON;
        // IF WE ARE USING PERSIST QUERIES
        if (persistQueries) {
          // SEND THE HASH
          const hash = sha256(query, 'utf8', 'hex');
          resJSON = await fetch(endpoint, {
            method: 'POST',
            headers: {
              'Content-Type': 'application/json',
              Accept: 'application/json',
            },
            body: JSON.stringify({ hash }),
          });

          // IF HASH WAS NOT FOUND IN HASH TABLE
          if (resJSON.status === 204) {
            // SEND NEW REQUEST WITH HASH AND QUERY
            resJSON = await fetch(endpoint, {
              method: 'POST',
              headers: {
                'Content-Type': 'application/json',
                Accept: 'application/json',
              },
              body: JSON.stringify({ hash, query }),
            });

          }

        // IF WE ARE NOT USING PERSIST QUERIES
        } else {
          // JUST SEND THE QUERY ONLY
          resJSON = await fetch(endpoint, {
            method: 'POST',
            headers: {
              'Content-Type': 'application/json',
              Accept: 'application/json',
            },
            body: JSON.stringify({ query }),
          });
        }

        const resObj = await resJSON.json();
        const deepResObj = { ...resObj };
        // update result in cache if cacheWrite is set to true
        if (cacheWrite && resObj.data[Object.keys(resObj.data)[0]] !== null) {
        if (!wholeQuery) cache.writeWholeQuery(query, deepResObj);
        if (cacheWrite && caching && resObj.data[Object.keys(resObj.data)[0]] !== null) {
          if (wholeQuery) cache.writeWholeQuery(query, deepResObj);
          else if(resObj.data[Object.keys(resObj.data)[0]].length > cache.capacity) console.log('Please increase cache capacity');
          else cache.write(query, deepResObj);
          else cache.write(query, deepResObj, searchTerms);
        }
        const cacheMissResponseTime = Date.now() - startTime;
        /*chrome.runtime.sendMessage(chromeExtensionId, {
          cacheMissResponseTime: cacheMissResponseTime,
        });*/
        resObj['time'] = cacheMissResponseTime
        console.log(
          "After the hunt: Here's the response time on the front end: ",
          cacheMissResponseTime
        );

        // FOR DEVTOOL - sends message to content.js when query is a miss
        window.postMessage({
          type: 'query',
          time: cacheMissResponseTime,
          date: startDate.toDateString().slice(0, 24),
          query: query,
          hit: false
        });

        return resObj;
      } catch (e) {
        console.log(e);

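The persisted-query branch above implies a specific server contract: the client first POSTs only the sha256 hash, a 204 status means the hash is unknown, and the client then retries with both hash and query. The server-side changes live in other files of this commit; the framework-agnostic sketch below only illustrates that contract, and the hashStore and runQuery names are invented.

const hashStore = new Map(); // hash -> query string (invented persistence layer)

async function handlePersistedQuery(body, runQuery) {
  const { hash, query } = body;
  if (query) {
    // client retried with the full query attached: remember the hash for next time
    hashStore.set(hash, query);
    return { status: 200, data: await runQuery(query) };
  }
  const stored = hashStore.get(hash);
  // unknown hash: reply 204 so the client resends with the query included
  if (!stored) return { status: 204 };
  return { status: 200, data: await runQuery(stored) };
}
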
@@ -121,20 +178,19 @@ function ObsidianWrapper(props) {
    cache.cacheClear();
  }

  // NOTE - FOR DEVTOOL - no messages are currently being passed for mutations
  // so some logic in content.js and background.js may be missing to handle mutations

  // breaking out writethrough logic vs. non-writethrough logic
  async function mutate(mutation, options = {}) {
    // dev tool messages
    // chrome.runtime.sendMessage(chromeExtensionId, {
    //   mutation: mutation,
    // });
    const startTime = Date.now();
    mutation = insertTypenames(mutation);
    const {
      endpoint = '/graphql',
      cacheWrite = true,
      cacheWrite = !caching ? false : true,
      toDelete = false,
      update = null,
      writeThrough = true, // not true
      writeThrough = true, // unsure if boolean is semantically backwards or not
    } = options;
    try {
      if (!writeThrough) {

@@ -147,9 +203,6 @@ function ObsidianWrapper(props) {
            endpoint
          );
          const deleteMutationResponseTime = Date.now() - startTime;
          chrome.runtime.sendMessage(chromeExtensionId, {
            deleteMutationResponseTime: deleteMutationResponseTime,
          });
          return responseObj;
        } else {
          // for add mutation

@@ -168,15 +221,9 @@ function ObsidianWrapper(props) {
          // GQL call to make changes and synchronize database
          console.log('WriteThrough - false ', responseObj);
          const addOrUpdateMutationResponseTime = Date.now() - startTime;
          chrome.runtime.sendMessage(chromeExtensionId, {
            addOrUpdateMutationResponseTime: addOrUpdateMutationResponseTime,
          });
          return responseObj;
        }
      } else {
        // copy-paste mutate logic from 4.

        // use cache.write instead of cache.writeThrough
        const responseObj = await fetch(endpoint, {
          method: 'POST',
          headers: {

@@ -185,18 +232,18 @@ function ObsidianWrapper(props) {
          },
          body: JSON.stringify({ query: mutation }),
        }).then((resp) => resp.json());
        if (!cacheWrite) return responseObj;
        if (!cacheWrite || !caching) return responseObj;
        // first behaviour when delete cache is set to true
        if (toDelete) {
          cache.write(mutation, responseObj, true);
          cache.write(mutation, responseObj, searchTerms, true);
          return responseObj;
        }
        // second behaviour if update function provided
        if (update) {
          update(cache, responseObj);
        }

        if(!responseObj.errors) cache.write(mutation, responseObj);
        if(!responseObj.errors) cache.write(mutation, responseObj, searchTerms);
        // third behaviour just for normal update (no-delete, no update function)
        console.log('WriteThrough - true ', responseObj);
        return responseObj;

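A hedged usage sketch for the mutate options above. It assumes mutate is exposed through cacheContext alongside query; the mutation strings and the update callback body are invented for illustration.

import * as React from "https://esm.sh/react@18";

function MovieControls() {
  // Assumes { mutate } is exposed through cacheContext alongside query.
  const { mutate } = React.useContext(cacheContext);

  const onAdd = () =>
    mutate(`mutation { addMovie(title: "Dune") { id title } }`, { // invented schema
      update: (cache, resp) => {
        // optional callback; per the diff it runs before the default cache.write
        console.log('mutation response', resp);
      },
    });

  const onDelete = () =>
    // toDelete: true makes the wrapper pass its delete flag to cache.write
    mutate(`mutation { deleteMovie(id: "1") { id } }`, { toDelete: true });

  return null; // wire onAdd/onDelete to buttons in a real app
}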