Skip to content

Commit

Permalink
Merge pull request #52 from oslabs-beta/master
Browse files Browse the repository at this point in the history
Feat: Obsidian 3.0 - Alias, Fragments, Garbage Collection, LFU Caching
  • Loading branch information
travismfrank authored Apr 15, 2021
2 parents dc8d038 + f9ca654 commit 1a9c787
Show file tree
Hide file tree
Showing 27 changed files with 1,408 additions and 197 deletions.
2 changes: 0 additions & 2 deletions .env

This file was deleted.

17 changes: 17 additions & 0 deletions .github/pull_request_template.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# Checklist

- [ ] Bugfix
- [ ] New feature
- [ ] Refactor

# Related Issue

- the problem you are solving goes here.

# Solution

- Solution to the problem goes here. Why did you solve this problem the way you did?

# Additional Info

- Any additional information or context
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
dump.rdb
.DS_Store
.env
server.tsx
server2.tsx
sampleServer.tsx
15 changes: 0 additions & 15 deletions .vscode/settings.json

This file was deleted.

5 changes: 3 additions & 2 deletions documentation/garbage-collection-doc.js
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ class Cache {
* - remove any hash reference that is a member of the deleted hash Set
* - for any hash reference that has not been deleted
* - add that hash to a Set of accessible hashes
* - recrusively trace that hash and continue removing any deleted hash references and updating the Set of accesible hashes
 * - recursively trace that hash and continue removing any deleted hash references and updating the Set of accessible hashes
* 4. remove any hashes that are not a member of the accessible hash Set
*/

Expand Down Expand Up @@ -85,7 +85,8 @@ const cacheAfterGC = {
'favoriteMovie(id:2)': 'Movie~2',
"addMovie(input: {title: 'The Fugitive', releaseYear: 1993, genre: ACTION })":
'Movie~5',
'deleteMovie(id:4)': 'Movie~4',
// 'deleteMovie(id:4)': 'Movie~4', // mistake?
'deleteMovie(id:3)': 'Movie~3',
},

'Movie~1': {
Expand Down
168 changes: 116 additions & 52 deletions src/CacheClassBrowser.js
Original file line number Diff line number Diff line change
Expand Up @@ -30,12 +30,14 @@ export default class Cache {
if (rootQuery[queryHash]) {
// get the hashes to populate from the existing query in the cache
const arrayHashes = rootQuery[queryHash];
// Determines responseObject property labels - use alias if applicable, otherwise use name
const respObjProp = queries[query].alias ?? queries[query].name;
// invoke populateAllHashes and add data objects to the response object for each input query
responseObject[queries[query].name] = await this.populateAllHashes(
responseObject[respObjProp] = await this.populateAllHashes(
arrayHashes,
queries[query].fields
);
if (!responseObject[queries[query].name]) return undefined;
if (!responseObject[respObjProp]) return undefined;

// no match with ROOT_QUERY return null or ...
} else {
Expand All @@ -60,25 +62,110 @@ export default class Cache {
await this.cacheWrite(hash, resFromNormalize[hash]);
}
}
return;
}

gc() {
// garbageCollection; garbage collection: removes any inaccessible hashes from the cache
const badHashes = getBadHashes();
const goodHashes = rootQueryCleaner(badHashes);
const goodHashes2 = getGoodHashes(badHashes, goodHashes);
removeInaccessibleHashes(badHashes, goodHashes2);
}

// remove hashes that are flagged for deletion and store records of them in a set badHashes for removal inside root queries
getBadHashes() {
const badHashes = new Set();
for (let key in this.storage) {
if (key === 'ROOT_QUERY' || key === 'ROOT_MUTATION') continue;
if (this.storage[key] === 'DELETED') {
badHashes.add(key);
delete this.storage[key];
}
}
return badHashes;
}

// go through root queries, remove all instances of bad hashes, add remaining hashes into goodHashes set
rootQueryCleaner(badHashes) {
const goodHashes = new Set();
const rootQuery = this.storage['ROOT_QUERY'];
for (let key in rootQuery) {
if (Array.isArray(rootQuery[key])) {
rootQuery[key] = rootQuery[key].filter((x) => !badHashes.has(x));
if (rootQuery[key].length === 0) delete rootQuery[key];
for (let el of rootQuery[key]) goodHashes.add(el);
} else
badHashes.has(rootQuery[key])
? delete rootQuery[key]
: goodHashes.add(rootQuery[key]);
}
return goodHashes;
}

// Go through the cache, check good hashes for any nested hashes and add them to goodHashes set
getGoodHashes(badHashes, goodHashes) {
for (let key in this.storage) {
if (key === 'ROOT_QUERY' || key === 'ROOT_MUTATION') continue;
for (let i in this.storage[key]) {
if (Array.isArray(this.storage[key][i])) {
for (let el of this.storage[key][i]) {
if (el.includes('~') && !badHashes.has(el)) {
goodHashes.add(el);
}
}
} else if (typeof this.storage[key][i] === 'string') {
if (
this.storage[key][i].includes('~') &&
!badHashes.has(this.storage[key][i])
) {
goodHashes.add(this.storage[key][i]);
}
}
}
}
return goodHashes;
}

// Remove inaccessible hashes by checking if they are in goodhashes set or not
removeInaccessibleHashes(badHashes, goodHashes) {
for (let key in this.storage) {
if (key === 'ROOT_QUERY' || key === 'ROOT_MUTATION') continue;
if (!goodHashes.has(key)) delete this.storage[key];
for (let i in this.storage[key]) {
if (Array.isArray(this.storage[key][i])) {
this.storage[key][i] = this.storage[key][i].filter(
(x) => !badHashes.has(x)
);
} else if (typeof this.storage[key][i] === 'string') {
if (
this.storage[key][i].includes('~') &&
badHashes.has(this.storage[key][i])
) {
delete this.storage[key][i];
}
}
}
}
}

// cache read/write helper methods
async cacheRead(hash) {
return this.storage[hash];
}

async cacheWrite(hash, value) {
this.storage[hash] = value;
}

async cacheDelete(hash) {
delete this.storage[hash];
}

async cacheClear() {
this.storage = { ROOT_QUERY: {}, ROOT_MUTATION: {} };
this.storage = {
ROOT_QUERY: {},
ROOT_MUTATION: {},
};
}

// functionality to stop polling
Expand All @@ -87,81 +174,58 @@ export default class Cache {
}

writeWholeQuery(queryStr, respObj) {
let hash = queryStr.replace(/\s/g, '');
const hash = queryStr.replace(/\s/g, '');
this.cacheWrite(ROOT_QUERY[hash], respObj);
return respObj;
}

readWholeQuery(queryStr) {
let hash = queryStr.replace(/\s/g, '');
const hash = queryStr.replace(/\s/g, '');
const root = this.cacheRead('ROOT_QUERY');
if (root[hash]) return { data: root[hash] };
else return undefined;
return undefined;
}

// specialized helper methods
async populateAllHashes(allHashesFromQuery, fields) {
if (Array.isArray(allHashesFromQuery)) {
// include the hashname for each hash
if (!allHashesFromQuery.length) return [];
const hyphenIdx = allHashesFromQuery[0].indexOf('~');
const typeName = allHashesFromQuery[0].slice(0, hyphenIdx);
return allHashesFromQuery.reduce(async (acc, hash) => {
// for each hash from the input query, build the response object
const readVal = await this.cacheRead(hash);
if (readVal === 'DELETED') return acc;
const dataObj = {};
for (const field in fields) {
if (readVal[field] === 'DELETED') continue;
// for each field in the fields input query, add the corresponding value from the cache if the field is not another array of hashs
if (readVal[field] === undefined && field !== '__typename') {
return undefined;
} else if (typeof fields[field] !== 'object') {
// add the typename for the type
if (field === '__typename') {
dataObj[field] = typeName;
} else dataObj[field] = readVal[field];
} else {
// case where the field from the input query is an array of hashes, recursively invoke populateAllHashes
dataObj[field] = await this.populateAllHashes(
readVal[field],
fields[field]
);
if (dataObj[field] === undefined) return undefined;
}
}
// acc is an array of response object for each hash
const resolvedProm = await Promise.resolve(acc);
resolvedProm.push(dataObj);
return resolvedProm;
}, []);
}
// Case where allHashesFromQuery has only one hash and is not an array but a single string
const hash = allHashesFromQuery;
const readVal = await this.cacheRead(hash);
if (readVal !== 'DELETED') {
// include the typename for each hash
const hyphenIdx = hash.indexOf('~');
const typeName = hash.slice(0, hyphenIdx);
// include the hashname for each hash
if (!allHashesFromQuery.length) return [];
const hyphenIdx = allHashesFromQuery[0].indexOf('~');
const typeName = allHashesFromQuery[0].slice(0, hyphenIdx);
return allHashesFromQuery.reduce(async (acc, hash) => {
// for each hash from the input query, build the response object
const readVal = await this.cacheRead(hash);
// return undefine if hash has been garbage collected
if (readVal === undefined) return undefined;
if (readVal === 'DELETED') return acc;
const dataObj = {};
for (const field in fields) {
if (readVal[field] === 'DELETED') continue;
if (!readVal[field] && field !== '__typename') {
// for each field in the fields input query, add the corresponding value from the cache if the field is not another array of hashs
if (readVal[field] === undefined && field !== '__typename') {
return undefined;
} else if (typeof fields[field] !== 'object') {
// add the typename for the type
if (field === '__typename') {
dataObj[field] = typeName;
} else dataObj[field] = readVal[field];
} else {
// case where the field from the input query is an array of hashes, recursively invoke populateAllHashes
dataObj[field] = await this.populateAllHashes(
readVal[field],
fields[field]
);
if (dataObj[field] === undefined) return undefined;
}
}
return dataObj;
}
// acc is an array within a Response object for each hash
try {
const resolvedProm = await Promise.resolve(acc);
resolvedProm.push(dataObj);
return resolvedProm;
} catch (error) {
return undefined;
}
}, []);
}
}
Loading

0 comments on commit 1a9c787

Please sign in to comment.