diff --git a/.babelrc b/.babelrc new file mode 100644 index 0000000..d11319e --- /dev/null +++ b/.babelrc @@ -0,0 +1,3 @@ +{ + "presets": ["es2015", "stage-0"] +} diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..5d831db --- /dev/null +++ b/.gitignore @@ -0,0 +1,4 @@ +lib +*.log +node_modules +.idea diff --git a/.npmignore b/.npmignore new file mode 100644 index 0000000..9342e09 --- /dev/null +++ b/.npmignore @@ -0,0 +1,4 @@ +src/ +__tests__/ +node_modules +.idea diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..ec9cf53 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Matt Krick + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 0000000..d182a38 --- /dev/null +++ b/README.md @@ -0,0 +1,136 @@ +[![npm version](https://badge.fury.io/js/cashay.svg)](https://badge.fury.io/js/cashay) +#cashay +relay for the rest of us + +WIP. Be a pal. Make a PR. + +##Installation +`npm i -S cashay` + +##Setup + +Just like Relay, the goal is to get the benefits of GraphQL while minimizing the client payload. +To do so, we'll make a script that writes the GraphQL schema to a JSON file. +Since this is specific to your project, you'll need to write this yourself. +For example, I put the clientSchema in my `build` folder. +I also need to drain my database connection pool, which fills up when the `rootSchema` is accessed. +So, my file looks like this: + +```javascript +// updateSchema.js +require('babel-register'); +require('babel-polyfill'); + +const fs = require('fs'); +const path = require('path'); +const rootSchema = require('../src/server/graphql/rootSchema'); +const graphql = require('graphql').graphql; +const introspectionQuery = require('graphql/utilities').introspectionQuery; +const r = require('../src/server/database/rethinkdriver'); + +(async () => { + const result = await graphql(rootSchema, introspectionQuery); + if (result.errors) { + console.log(result.errors); + } else { + fs.writeFileSync(path.join(__dirname, '../build/clientSchema.json'), JSON.stringify(result, null, 2)); + } + r.getPool().drain(); +})(); +``` +I recommend writing an npm script and executing it whenever your GraphQL schema changes. +If you want to get fancy, you can put a watcher on your GraphQL folder to run it on file change. + +Next, we'll need to make a babel plugin. Don't worry, cashay already has a babel plugin factory. 
All you need to do is inject the schema you just made: + +```javascript +// cashayPlugin.js +const createPlugin = require('cashay/lib/babel-plugin'); +const schema = require('../build/clientSchema.json'); +module.exports = createPlugin(schema); +``` + +Now, we need to include that plugin in our `.babelrc`: + +```javascript +{ + "plugins": [ + ["./cashayPlugin.js"] + ] +} +``` +Success! Now Babel will statically analyze all of our queries and give each one a bespoke schema. +This means our client bundle stays tiny. + +##Usage + +Cashay provides 3 useful items: + +```javascript +import {CashayQL, Cashay, cashayReducer} from 'cashay'; +``` + +Tagging all your query strings with `CashayQL` tells Babel to do its magic. For example: + +```javascript +const queryString = CashayQL` +query { + getComments { + id + body + } +}`; +``` + +`Cashay` is a class that takes a Redux store and a transport (AKA fetcher function). + +```javascript +const cashay = new Cashay({store: myReduxStore, transport: graphQLFetcher}); +``` + +Your transport should call your GraphQL endpoint and return an object with `data` and `error` props. +If you call multiple GraphQL servers, you'll need multiple transports. + +```javascript +export const fetchGraphQL = async graphParams => { + const authToken = localStorage.getItem('myToken'); + const res = await fetch('http://localhost:3000/graphql', { + method: 'post', + headers: { + 'Content-Type': 'application/json', + 'Authorization': `Bearer ${authToken}` + }, + body: JSON.stringify(graphParams) + }); + const {data, errors} = await res.json(); + // getPrettyErrors is whatever error-formatting helper you prefer + return {data, error: getPrettyErrors(errors)}; +}; +``` + +`cashayReducer` is just as easy; simply add it to your `combineReducers`. + + +##API + +```javascript +cashay.query(queryString, options) +``` + +Calling `query` will fetch your queryString from the GraphQL server and put the result in your Redux store. +Currently, it's very naive. +It doesn't reduce the query. +It doesn't know if there are pending fetches. +It doesn't run the reducer in a webworker. + +But, you can use it for predictive fetches. +For example, call it when someone hovers their mouse over the "load data" button. +No need to wait for the `Relay.container` to load. +A minimal end-to-end sketch is included at the bottom of this README. + +Ha! Take that, relay! :smile: + +##Contributing + +There is a LOT of work to be done. Join the fun, check out the issues, and make a PR. 
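+ +Putting it all together, here is a minimal, hypothetical sketch of a predictive fetch. It assumes the `fetchGraphQL` transport from above and the option names currently read by `src/Cashay.js` (`variables`, `forceFetch`); both may change while this is a WIP: + +```javascript +import {createStore, combineReducers} from 'redux'; +import {CashayQL, Cashay, cashayReducer} from 'cashay'; +import {fetchGraphQL} from './fetchGraphQL'; // the transport shown above + +// cashay reads its state from the `cashay` key (see src/Cashay.js) +const store = createStore(combineReducers({cashay: cashayReducer})); +const cashay = new Cashay({store, transport: fetchGraphQL}); + +const queryString = CashayQL` +query { + getComments { + id + body + } +}`; + +// e.g. fire this when someone hovers the "load data" button +cashay.query(queryString, {variables: {}, forceFetch: false}); +``` +Because results are cached by query and variables, calling `query` again with the same arguments is a no-op unless you pass `forceFetch`. 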
+ +##License + +MIT diff --git a/package.json b/package.json new file mode 100644 index 0000000..a8a6b92 --- /dev/null +++ b/package.json @@ -0,0 +1,44 @@ +{ + "name": "cashay", + "version": "0.1.0", + "description": "relay for the rest of us", + "main": "lib/index.js", + "scripts": { + "clean": "rimraf lib", + "lint": "xo src/index.js --esnext --space --fix", + "build": "babel --presets es2015,stage-0 -d lib/ src/", + "watch": "babel -w --presets es2015,stage-0 -d lib/ src/", + "prepublish": "npm run clean && npm run build", + "test": "ava ./src/**/__tests__/**/*-test.js" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/mattkrick/cashay.git" + }, + "keywords": [ + "relay", + "client", + "cache", + "redux" + ], + "author": "Matt Krick ", + "license": "MIT", + "bugs": { + "url": "https://github.com/mattkrick/cashay/issues" + }, + "homepage": "https://github.com/mattkrick/cashay#readme", + "devDependencies": { + "ava": "^0.11.0", + "babel-cli": "^6.4.5", + "babel-preset-es2015": "^6.3.13", + "babel-preset-stage-0": "^6.3.13", + "babel-register": "^6.4.3", + "graphql": "^0.4.17", + "normalizr": "^2.0.0", + "rimraf": "^2.5.1", + "xo": "^0.12.1" + }, + "dependencies": { + "immutable": "^3.7.6" + } +} diff --git a/src/Cashay.js b/src/Cashay.js new file mode 100644 index 0000000..03c4ee1 --- /dev/null +++ b/src/Cashay.js @@ -0,0 +1,98 @@ +import {FETCH_DATA_REQUEST, FETCH_DATA_SUCCESS, FETCH_DATA_ERROR} from './duck'; +import {normalize} from 'normalizr'; +import {parse} from 'graphql/language/parser'; +import {print} from 'graphql/language/printer'; +import {buildExecutionContext} from './buildExecutionContext'; + +export default class Cashay { + constructor({store, transport, schema}) { + this._store = store; + this._transport = transport; + this._schema = schema; + } + + async query(queryString, options = {}) { + //const {string, schema} = queryString; + const {variables, clientSchema, forceFetch, paginationWords, idFieldName} = options; + const {dispatch} = this._store; + const cashayDataStore = this._store.getState().getIn(['cashay', 'data']); + const queryAST = parse(queryString, {noLocation: true, noSource: true}); + // based on query name + args, does it exist in cashay.denormalizedResults + const denormLocationInCashayState = getDenormLocationFromQueryAST(queryAST, clientSchema, variables); + const existsInStore = isDenormLocationInStore(this._store, denormLocationInCashayState); + // if yes && !forceFetch, return + if (existsInStore && !forceFetch) return; + // denormalize queryAST from store data and create dependencies, return minimizedAST + const context = buildExecutionContext(clientSchema, queryAST, {variables, paginationWords, idFieldName, store: this._store}); + const {dependencies, denormalizedResult, minimizedAST} = denormalizeAndCreateDependencies(queryAST, context); + // insert denormalized JSON object in state.cashay.denormalizedResults + dispatch({ + type: '@@cashay/INSERT_DENORMALIZED', + payload: { + dependencies, + location: denormLocationInCashayState, + result: denormalizedResult + } + }); + // if not complete, + if (minimizedAST) { + // print(minimizedAST) + const minimizedQueryString = print(minimizedAST); + // send minimizedQueryString to server and await minimizedQueryResponse + const minimizedQueryResponse = await this._transport(minimizedQueryString, variables); + // normalize response + const context = buildExecutionContext(clientSchema, queryAST, {variables, paginationWords, idFieldName}); + const normalizedMinimizedQueryResponse = normalize(minimizedQueryResponse, context); + // stick normalized data in the store + dispatch({ + type: 
'@@cashay/INSERT_NORMALIZED', + payload: { + response: normalizedMinimizedQueryResponse + } + }) + // denormalize queryAST from store data and create dependencies + const {dependencies, denormalizedResult, minimizedAST} = denormalizeAndCreateDependencies(queryAST, this._store); + dispatch({ + type: '@@cashay/INSERT_DENORMALIZED', + payload: { + dependencies, + location: denormLocationInCashayState, + result: denormalizedResult + } + }) + + } + + + //const partial = denormalize(cahsayDataStore.toJS(), varSchema, queryAST) + // see what data we have in the store + //const schemaKeys = Object.keys(schema); + //schemaKeys.forEach(key => { + // if (schema[key].constructor.name === 'EntitySchema') { + // console.log('checking key', key) + // const entityId = cahsayDataState.getIn(['result', key]); + // console.log('entId', entityId, cahsayDataState) + // if (entityId) { + // const subStateName = schema[key].getKey(); + // const obj = cahsayDataState.getIn(['entities', subStateName, entityId]); + // console.log('CACHED RES', obj); + // } + // } + //}) + + //dispatch({type: FETCH_DATA_REQUEST}); + //const {error, data} = await this._transport({query: string}); + //if (error) { + // return dispatch({ + // type: FETCH_DATA_ERROR, + // error + // }) + //} + //console.log('RESP', data) + //const payload = normalize(data, schema); + ////const ans = denormalize(payload, schema); + //dispatch({ + // type: FETCH_DATA_SUCCESS, + // payload + //}); + } +} + diff --git a/src/CashayQL.js b/src/CashayQL.js new file mode 100644 index 0000000..9d1d66d --- /dev/null +++ b/src/CashayQL.js @@ -0,0 +1,21 @@ +///* Code from hueypetersen.com */ +// +//import {Schema, arrayOf, unionOf} from 'normalizr'; +// +//const CashayQL = (string, ...args) => { +// throw new Error('Cashay: Did you install your Babel plugin?'); +//} +// +//Object.assign(CashayQL, { +// schema(key, definition) { +// const schema = new Schema(key); +// if (definition) { +// schema.define(definition); +// } +// return schema; +// }, +// arrayOf, +// unionOf +//}); +// +//export default CashayQL; diff --git a/src/__tests__/clientSchema.json b/src/__tests__/clientSchema.json new file mode 100644 index 0000000..813f6da --- /dev/null +++ b/src/__tests__/clientSchema.json @@ -0,0 +1,1296 @@ +{ + "queryType": { + "name": "BlogSchema" + }, + "mutationType": { + "name": "BlogMutations" + }, + "subscriptionType": null, + "types": [ + { + "kind": "OBJECT", + "name": "BlogSchema", + "fields": [ + { + "name": "posts", + "args": [ + { + "name": "category", + "type": { + "kind": "ENUM", + "name": "Category", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Post", + "ofType": null + } + } + }, + { + "name": "latestPost", + "args": [], + "type": { + "kind": "OBJECT", + "name": "Post", + "ofType": null + } + }, + { + "name": "recentPosts", + "args": [ + { + "name": "count", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "after", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "before", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Post", + "ofType": null + } + } + }, + { + "name": "post", + "args": [ + { + "name": "_id", + "type": { + 
"kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "Post", + "ofType": null + } + }, + { + "name": "authors", + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Author", + "ofType": null + } + } + }, + { + "name": "author", + "args": [ + { + "name": "_id", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "Author", + "ofType": null + } + }, + { + "name": "getGroup", + "args": [ + { + "name": "_id", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "Group", + "ofType": null + } + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "Category", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "METEOR" + }, + { + "name": "PRODUCT" + }, + { + "name": "USER_STORY" + }, + { + "name": "OTHER" + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "Post", + "fields": [ + { + "name": "_id", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "name": "title", + "args": [ + { + "name": "language", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "name": "category", + "args": [], + "type": { + "kind": "ENUM", + "name": "Category", + "ofType": null + } + }, + { + "name": "summary", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "name": "content", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "name": "timestamp", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Float", + "ofType": null + } + }, + { + "name": "comments", + "args": [ + { + "name": "limit", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Comment", + "ofType": null + } + } + }, + { + "name": "author", + "args": [], + "type": { + "kind": "OBJECT", + "name": "Author", + "ofType": null + } + }, + { + "name": "cursor", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "HasAuthor", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "INTERFACE", + "name": "HasAuthor", + "fields": [ + { + "name": "author", + "args": [], + "type": { + "kind": "OBJECT", + "name": "Author", + "ofType": null + } + } + ], + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": [ + { + "kind": "OBJECT", + "name": "Comment", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "Post", + "ofType": null + } + ] + }, + { + "kind": "OBJECT", + "name": "Comment", + "fields": [ + { + "name": "_id", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "name": "content", + 
"args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "name": "author", + "args": [], + "type": { + "kind": "OBJECT", + "name": "Author", + "ofType": null + } + }, + { + "name": "timestamp", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Float", + "ofType": null + } + }, + { + "name": "replies", + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Comment", + "ofType": null + } + } + }, + { + "name": "cursor", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "HasAuthor", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "SCALAR", + "name": "String", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "Author", + "fields": [ + { + "name": "_id", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "name": "name", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "name": "twitterHandle", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "SCALAR", + "name": "Float", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "SCALAR", + "name": "Int", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "Group", + "fields": [ + { + "name": "_id", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "name": "owner", + "args": [], + "type": { + "kind": "UNION", + "name": "Member", + "ofType": null + } + }, + { + "name": "members", + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "UNION", + "name": "Member", + "ofType": null + } + } + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "UNION", + "name": "Member", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": [ + { + "kind": "OBJECT", + "name": "Group", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "Author", + "ofType": null + } + ] + }, + { + "kind": "OBJECT", + "name": "BlogMutations", + "fields": [ + { + "name": "createPost", + "args": [ + { + "name": "_id", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "title", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "content", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "summary", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "category", + "type": { + "kind": "ENUM", + "name": "Category", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "author", + "type": { + "kind": 
"NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "Post", + "ofType": null + } + }, + { + "name": "createAuthor", + "args": [ + { + "name": "_id", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "name", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + }, + { + "name": "twitterHandle", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "Author", + "ofType": null + } + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "__Schema", + "fields": [ + { + "name": "types", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type" + } + } + } + } + }, + { + "name": "queryType", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + } + }, + { + "name": "mutationType", + "args": [], + "type": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + }, + { + "name": "subscriptionType", + "args": [], + "type": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + }, + { + "name": "directives", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Directive" + } + } + } + } + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "__Type", + "fields": [ + { + "name": "kind", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "__TypeKind", + "ofType": null + } + } + }, + { + "name": "name", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "name": "description", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "name": "fields", + "args": [ + { + "name": "includeDeprecated", + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "defaultValue": "false" + } + ], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Field", + "ofType": null + } + } + } + }, + { + "name": "interfaces", + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + } + } + }, + { + "name": "possibleTypes", + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + } + } + }, + { + "name": "enumValues", + "args": [ + { + "name": "includeDeprecated", + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "defaultValue": "false" + } + ], + "type": { + "kind": "LIST", + 
"name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__EnumValue", + "ofType": null + } + } + } + }, + { + "name": "inputFields", + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__InputValue", + "ofType": null + } + } + } + }, + { + "name": "ofType", + "args": [], + "type": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "__TypeKind", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "SCALAR" + }, + { + "name": "OBJECT" + }, + { + "name": "INTERFACE" + }, + { + "name": "UNION" + }, + { + "name": "ENUM" + }, + { + "name": "INPUT_OBJECT" + }, + { + "name": "LIST" + }, + { + "name": "NON_NULL" + } + ], + "possibleTypes": null + }, + { + "kind": "SCALAR", + "name": "Boolean", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "__Field", + "fields": [ + { + "name": "name", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + }, + { + "name": "description", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "name": "args", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__InputValue" + } + } + } + } + }, + { + "name": "type", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + } + }, + { + "name": "isDeprecated", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + } + }, + { + "name": "deprecationReason", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "__InputValue", + "fields": [ + { + "name": "name", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + }, + { + "name": "description", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "name": "type", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + } + }, + { + "name": "defaultValue", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "__EnumValue", + "fields": [ + { + "name": "name", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + }, + { + "name": "description", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "name": "isDeprecated", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { 
+ "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + } + }, + { + "name": "deprecationReason", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "__Directive", + "fields": [ + { + "name": "name", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + }, + { + "name": "description", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "name": "args", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__InputValue" + } + } + } + } + }, + { + "name": "onOperation", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + } + }, + { + "name": "onFragment", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + } + }, + { + "name": "onField", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + } + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + } + ], + "directives": [ + { + "name": "include", + "args": [ + { + "name": "if", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "defaultValue": null + } + ], + "onOperation": false, + "onFragment": true, + "onField": true + }, + { + "name": "skip", + "args": [ + { + "name": "if", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "defaultValue": null + } + ], + "onOperation": false, + "onFragment": true, + "onField": true + } + ] +} \ No newline at end of file diff --git a/src/__tests__/denormalizeStore-tests.js b/src/__tests__/denormalizeStore-tests.js new file mode 100644 index 0000000..154f3bb --- /dev/null +++ b/src/__tests__/denormalizeStore-tests.js @@ -0,0 +1,38 @@ +import test from 'ava'; +import 'babel-register'; +import 'babel-polyfill'; +import {parse} from 'graphql/language/parser'; +import {buildExecutionContext} from '../buildExecutionContext'; +import {unionQueryString, unionResponse, unionStore} from './unionExample'; +import clientSchema from './clientSchema.json'; +import {denormalizeStore} from '../denormalizeStore'; +import {nestedQueryString, nestedResponse, nestedStore, nestedPaginationWords, nestedVariableValues} from './nestedExample'; + +test('denormalize store from union request', t => { + t.plan(1); + const queryAST = parse(unionQueryString, {noLocation: true, noSource: true}); + const unionOptions = { + variableValues: nestedVariableValues, + paginationWords: nestedPaginationWords, + idFieldName: '_id', + store: unionStore + }; + const context = buildExecutionContext(clientSchema, queryAST, unionOptions); + const denormalizedResponse = denormalizeStore(context); + t.same(denormalizedResponse, unionResponse.data); +}); + +test('denormalize store from nested request', t => { + const queryAST = parse(nestedQueryString, {noLocation: true, noSource: true}); + const nestedOptions = { + 
variableValues: nestedVariableValues, + paginationWords: nestedPaginationWords, + idFieldName: '_id', + store: nestedStore + }; + const context = buildExecutionContext(clientSchema, queryAST, nestedOptions); + const denormalizedResponse = denormalizeStore(context); + t.same(denormalizedResponse, nestedResponse); +}); + +//TOOD test adding __typename and idField to query auto diff --git a/src/__tests__/frontAndBacks.js b/src/__tests__/frontAndBacks.js new file mode 100644 index 0000000..19cfd4f --- /dev/null +++ b/src/__tests__/frontAndBacks.js @@ -0,0 +1,186 @@ +export const back5Response = { + "data": { + "recentPosts": [ + { + "_id": "0176413761b289e6d64c2c14a758c1c7", + "cursor": "2015-07-07T00:00:00.000Z", + "title": "Sharing the Meteor Login State Between Subdomains" + }, + { + "_id": "1bd16dfab1de982317d2ba4382ec8c86", + "cursor": "2015-07-01T00:00:00.000Z", + "title": "Meteor Server Side Rendering Support with FlowRouter and React" + }, + { + "_id": "19085291c89f0d04943093c4ff16b664", + "cursor": "2014-09-08T00:00:00.000Z", + "title": "Awesome Error Tracking Solution for Meteor Apps with Kadira" + }, + { + "_id": "0be4bea0330ccb5ecf781a9f69a64bc8", + "cursor": "2014-06-30T00:00:00.000Z", + "title": "What Should Kadira Build Next?" + }, + { + "_id": "1afff9dfb0b97b5882c72cb60844e034", + "cursor": "2014-06-12T00:00:00.000Z", + "title": "Tracking Meteor CPU Usage with Kadira" + } + ] + } +} + +export const back5Query = ` +query { + recentPosts(count:5 before: "2014-05-27T00:00:00.000Z") { + _id, + cursor, + title + } +}` + +export const front5Response = { + "data": { + "recentPosts": [ + { + "_id": "03390abb5570ce03ae524397d215713b", + "cursor": "2015-09-01T00:00:00.000Z", + "title": "New Feature: Tracking Error Status with Kadira" + }, + { + "_id": "2f6b59fd0b182dc6e2f0051696c70d70", + "cursor": "2015-08-24T00:00:00.000Z", + "title": "Understanding Mean, Histogram and Percentiles" + }, + { + "_id": "3d7a3853bf435c0f00e46e15257a94d9", + "cursor": "2015-07-20T00:00:00.000Z", + "title": "Introducing Kadira Debug, Version 2" + }, + { + "_id": "0176413761b289e6d64c2c14a758c1c7", + "cursor": "2015-07-07T00:00:00.000Z", + "title": "Sharing the Meteor Login State Between Subdomains" + }, + { + "_id": "1bd16dfab1de982317d2ba4382ec8c86", + "cursor": "2015-07-01T00:00:00.000Z", + "title": "Meteor Server Side Rendering Support with FlowRouter and React" + } + ] + } +}; + +export const front5Query = `query { + recentPosts(count:5) { + _id, + cursor, + title + } +}`; + +export const front5Normalized = { + "entities": { + "Post": { + "03390abb5570ce03ae524397d215713b": { + "_id": "03390abb5570ce03ae524397d215713b", + "cursor": "2015-09-01T00:00:00.000Z", + "title": { + "": "New Feature: Tracking Error Status with Kadira" + } + }, + "2f6b59fd0b182dc6e2f0051696c70d70": { + "_id": "2f6b59fd0b182dc6e2f0051696c70d70", + "cursor": "2015-08-24T00:00:00.000Z", + "title": { + "": "Understanding Mean, Histogram and Percentiles" + } + }, + "3d7a3853bf435c0f00e46e15257a94d9": { + "_id": "3d7a3853bf435c0f00e46e15257a94d9", + "cursor": "2015-07-20T00:00:00.000Z", + "title": { + "": "Introducing Kadira Debug, Version 2" + } + }, + "0176413761b289e6d64c2c14a758c1c7": { + "_id": "0176413761b289e6d64c2c14a758c1c7", + "cursor": "2015-07-07T00:00:00.000Z", + "title": { + "": "Sharing the Meteor Login State Between Subdomains" + } + }, + "1bd16dfab1de982317d2ba4382ec8c86": { + "_id": "1bd16dfab1de982317d2ba4382ec8c86", + "cursor": "2015-07-01T00:00:00.000Z", + "title": { + "": "Meteor Server Side Rendering Support with 
FlowRouter and React" + } + } + } + }, + "result": { + "recentPosts": { + "front": [ + "Post:03390abb5570ce03ae524397d215713b", + "Post:2f6b59fd0b182dc6e2f0051696c70d70", + "Post:3d7a3853bf435c0f00e46e15257a94d9", + "Post:0176413761b289e6d64c2c14a758c1c7", + "Post:1bd16dfab1de982317d2ba4382ec8c86" + ] + } + } +} + +export const back5Normalized = { + "entities": { + "Post": { + "0176413761b289e6d64c2c14a758c1c7": { + "_id": "0176413761b289e6d64c2c14a758c1c7", + "cursor": "2015-07-07T00:00:00.000Z", + "title": { + "": "Sharing the Meteor Login State Between Subdomains" + } + }, + "1bd16dfab1de982317d2ba4382ec8c86": { + "_id": "1bd16dfab1de982317d2ba4382ec8c86", + "cursor": "2015-07-01T00:00:00.000Z", + "title": { + "": "Meteor Server Side Rendering Support with FlowRouter and React" + } + }, + "19085291c89f0d04943093c4ff16b664": { + "_id": "19085291c89f0d04943093c4ff16b664", + "cursor": "2014-09-08T00:00:00.000Z", + "title": { + "": "Awesome Error Tracking Solution for Meteor Apps with Kadira" + } + }, + "0be4bea0330ccb5ecf781a9f69a64bc8": { + "_id": "0be4bea0330ccb5ecf781a9f69a64bc8", + "cursor": "2014-06-30T00:00:00.000Z", + "title": { + "": "What Should Kadira Build Next?" + } + }, + "1afff9dfb0b97b5882c72cb60844e034": { + "_id": "1afff9dfb0b97b5882c72cb60844e034", + "cursor": "2014-06-12T00:00:00.000Z", + "title": { + "": "Tracking Meteor CPU Usage with Kadira" + } + } + } + }, + "result": { + "recentPosts": { + "back": [ + "Post:0176413761b289e6d64c2c14a758c1c7", + "Post:1bd16dfab1de982317d2ba4382ec8c86", + "Post:19085291c89f0d04943093c4ff16b664", + "Post:0be4bea0330ccb5ecf781a9f69a64bc8", + "Post:1afff9dfb0b97b5882c72cb60844e034" + ] + } + } +} diff --git a/src/__tests__/mergeResponseToStore-tests.js b/src/__tests__/mergeResponseToStore-tests.js new file mode 100644 index 0000000..41002ba --- /dev/null +++ b/src/__tests__/mergeResponseToStore-tests.js @@ -0,0 +1,69 @@ +import test from 'ava'; +import 'babel-register'; +import 'babel-polyfill'; +import {parse} from 'graphql/language/parser'; +import {buildExecutionContext} from '../buildExecutionContext'; +import {mergeDeepWithArrs, mergeArrays, isObject} from '../mergeDeep'; +import {front5Response, front5Query, front5Normalized, back5Response, back5Query, back5Normalized} from './frontAndBacks'; +import clientSchema from './clientSchema.json'; +import {normalizeResponse} from '../normalizeResponse'; + +const target = { + foo: [{id: 1}, {id: 2, a: {a1: 2}}, {id: 3}], + bar: { + d: 4, + e: 5 + } +}; + +const source = { + foo: [{id: 2, a: {a1: 2}}, {id: 3}, {id: 4}], + bar: { + g: 7 + }, + baz: 1 +}; + +const expected = { + foo: [{id: 1}, {id: 2, a: {a1: 2}}, {id: 3}, {id: 4}], + bar: { + d: 4, + e: 5, + g: 7 + }, + baz: 1 +}; + +// scratch notes on merging paginated results: +// foo: [Guest:1, Guest:2, Guest:3] +// newResponse: [Guest:2, Guest:3, Guest:4] +// foo:{ +// 1:guest1 +// 2:guest2 + +// } + + +// guests:{ +// id1:{guest1}, +// id2:{guest2} +// } + +// top5:{ +// 1:{} +// } +test('merges plain objects & arrays', t => { + t.plan(1); + const actual = mergeDeepWithArrs(target, source, {mergeArrays}); + t.same(actual, expected); +}); + +test('merge front and back to full', t => { + const queryASTfront = parse(front5Query, {noLocation: true, noSource: true}); + const contextFront = buildExecutionContext(clientSchema, queryASTfront, {idFieldName: '_id'}); + const normalizedResponseFront = normalizeResponse(front5Response.data, contextFront); + const queryASTBack = parse(back5Query, {noLocation: true, noSource: true}); + const contextBack = buildExecutionContext(clientSchema, queryASTBack, {idFieldName: '_id'}); + const normalizedResponseBack = normalizeResponse(back5Response.data, contextBack); + const newState = 
mergeDeepWithArrs(normalizedResponseFront, normalizedResponseBack, {mergeArrays}); +}); +//TODO merge into full if front & back overlap + +//Maybe back shouldn't be reversed. Instead, we should prepend new things onto it instead of appending. diff --git a/src/__tests__/nestedExample.js b/src/__tests__/nestedExample.js new file mode 100644 index 0000000..0f5b00d --- /dev/null +++ b/src/__tests__/nestedExample.js @@ -0,0 +1,300 @@ +export const nestedResponse = { + "recentPosts": [ + { + "_id": "03390abb5570ce03ae524397d215713b", + "title": "New Feature: Tracking Error Status with Kadira", + "author": { + "_id": "pahan", + "name": "Pahan Sarathchandra", + "twitterHandle": "@pahans" + }, + "comments": [ + { + "_id": "cid-19710666", + "replies": [ + { + "_id": "cid-37250492", + "content": "Thank You!" + }, + { + "_id": "cid-2617133", + "content": "If you need more information, just contact me." + } + ] + }, + { + "_id": "cid-8221034", + "replies": [ + { + "_id": "cid-37250492", + "content": "Thank You!" + }, + { + "_id": "cid-2617133", + "content": "If you need more information, just contact me." + } + ] + } + ] + }, + { + "_id": "2f6b59fd0b182dc6e2f0051696c70d70", + "title": "Understanding Mean, Histogram and Percentiles", + "author": { + "_id": "arunoda", + "name": "Arunoda Susiripala", + "twitterHandle": "@arunoda" + }, + "comments": [ + { + "_id": "cid-19710666", + "replies": [ + { + "_id": "cid-37250492", + "content": "Thank You!" + }, + { + "_id": "cid-2617133", + "content": "If you need more information, just contact me." + } + ] + }, + { + "_id": "cid-8221034", + "replies": [ + { + "_id": "cid-37250492", + "content": "Thank You!" + }, + { + "_id": "cid-2617133", + "content": "If you need more information, just contact me." + } + ] + } + ] + } + ], + "again": [ + { + "_id": "03390abb5570ce03ae524397d215713b", + "title": "New Feature: Tracking Error Status with Kadira", + "author": { + "_id": "pahan", + "name": "Pahan Sarathchandra", + "twitterHandle": "@pahans" + }, + "comments": [ + { + "_id": "cid-19710666", + "content": "This is a very good blog post", + "replies": [ + { + "_id": "cid-37250492", + "content": "Thank You!" + }, + { + "_id": "cid-2617133", + "content": "If you need more information, just contact me." + } + ] + }, + { + "_id": "cid-8221034", + "content": "Keep up the good work", + "replies": [ + { + "_id": "cid-37250492", + "content": "Thank You!" + }, + { + "_id": "cid-2617133", + "content": "If you need more information, just contact me." + } + ] + } + ] + }, + { + "_id": "2f6b59fd0b182dc6e2f0051696c70d70", + "title": "Understanding Mean, Histogram and Percentiles", + "author": { + "_id": "arunoda", + "name": "Arunoda Susiripala", + "twitterHandle": "@arunoda" + }, + "comments": [ + { + "_id": "cid-19710666", + "content": "This is a very good blog post", + "replies": [ + { + "_id": "cid-37250492", + "content": "Thank You!" + }, + { + "_id": "cid-2617133", + "content": "If you need more information, just contact me." + } + ] + }, + { + "_id": "cid-8221034", + "content": "Keep up the good work", + "replies": [ + { + "_id": "cid-37250492", + "content": "Thank You!" + }, + { + "_id": "cid-2617133", + "content": "If you need more information, just contact me." 
+ } + ] + } + ] + } + ] +}; + +export const nestedQueryString = ` +query getPosts($language: String) { + recentPosts(count: 2) { + _id, + title, + author { + ...getAuthor + }, + comments { + _id, + replies { + _id, + content + } + } + }, + again: recentPosts(count: 2) { + _id, + title (language: $language), + author { + ...getAuthor + }, + comments { + _id, + content, + replies { + _id, + content + } + } + } +} + +fragment getAuthor on Author { + ... on Author { + _id + name + twitterHandle + } +}`; + +export const nestedStore = { + "entities": { + "Post": { + "03390abb5570ce03ae524397d215713b": { + "_id": "03390abb5570ce03ae524397d215713b", + "title": { + "": "New Feature: Tracking Error Status with Kadira", + "{\"language\":\"english\"}": "New Feature: Tracking Error Status with Kadira" + }, + "author": "Author:pahan", + "comments": { + "": [ + "Comment:cid-19710666", + "Comment:cid-8221034" + ] + } + }, + "2f6b59fd0b182dc6e2f0051696c70d70": { + "_id": "2f6b59fd0b182dc6e2f0051696c70d70", + "title": { + "": "Understanding Mean, Histogram and Percentiles", + "{\"language\":\"english\"}": "Understanding Mean, Histogram and Percentiles" + }, + "author": "Author:arunoda", + "comments": { + "": [ + "Comment:cid-19710666", + "Comment:cid-8221034" + ] + } + } + }, + "Author": { + "pahan": { + "_id": "pahan", + "name": "Pahan Sarathchandra", + "twitterHandle": "@pahans" + }, + "arunoda": { + "_id": "arunoda", + "name": "Arunoda Susiripala", + "twitterHandle": "@arunoda" + } + }, + "Comment": { + "cid-19710666": { + "_id": "cid-19710666", + "replies": [ + "Comment:cid-37250492", + "Comment:cid-2617133" + ], + "content": "This is a very good blog post" + }, + "cid-37250492": { + "_id": "cid-37250492", + "content": "Thank You!" + }, + "cid-2617133": { + "_id": "cid-2617133", + "content": "If you need more information, just contact me." 
+ }, + "cid-8221034": { + "_id": "cid-8221034", + "replies": [ + "Comment:cid-37250492", + "Comment:cid-2617133" + ], + "content": "Keep up the good work" + } + } + }, + "result": { + "recentPosts": { + "front": [ + "Post:03390abb5570ce03ae524397d215713b", + "Post:2f6b59fd0b182dc6e2f0051696c70d70" + ] + } + }, + // WIP sketches of the planned denormalizedResults and dependencies caches: + // denormalizedResults: { + // "recentPosts": { + // JSON + // } + // }, + // dependencies: { + //recent posts depends on all posts and specific comments + //you depend on normalized end data + // "recentPosts":{"comments":[o1,o7,o19], "posts":[postsObject, o1,o2,o3,o4,o5]} + // } +}; + + +export const nestedPaginationWords = { + first: 'count' +}; + +export const nestedVariableValues = { + language: 'english' +}; + diff --git a/src/__tests__/normalizeResponse-test.js b/src/__tests__/normalizeResponse-test.js new file mode 100644 index 0000000..21c7e92 --- /dev/null +++ b/src/__tests__/normalizeResponse-test.js @@ -0,0 +1,52 @@ +import test from 'ava'; +import 'babel-register'; +import 'babel-polyfill'; +import '../normalizeResponse'; +import {unionQueryString, unionResponse, unionStore} from './unionExample'; +import clientSchema from './clientSchema.json'; +import {normalizeResponse} from '../normalizeResponse'; +import {parse} from 'graphql/language/parser'; +import {buildExecutionContext} from '../buildExecutionContext'; +import {nestedQueryString, nestedResponse, nestedStore, nestedPaginationWords, nestedVariableValues} from './nestedExample'; +import {front5Response, front5Query, front5Normalized, back5Response, back5Query, back5Normalized} from './frontAndBacks'; + +test('normalizes unions', t => { + t.plan(1); + const queryAST = parse(unionQueryString, {noLocation: true, noSource: true}); + const context = buildExecutionContext(clientSchema, queryAST, {idFieldName: '_id'}); + const normalizedResponse = normalizeResponse(unionResponse.data, context); + t.same(normalizedResponse, unionStore); +}); + +test('normalizes nests with pagination words and variables', t => { + t.plan(1); + const queryAST = parse(nestedQueryString, {noLocation: true, noSource: true}); + const nestedOptions = { + variableValues: nestedVariableValues, + paginationWords: nestedPaginationWords, + idFieldName: '_id' + }; + const context = buildExecutionContext(clientSchema, queryAST, nestedOptions); + const normalizedResponse = normalizeResponse(nestedResponse, context); + t.same(normalizedResponse, nestedStore); +}); + +test('normalize front 5', t => { + t.plan(1); + const queryAST = parse(front5Query, {noLocation: true, noSource: true}); + const context = buildExecutionContext(clientSchema, queryAST, {idFieldName: '_id'}); + const normalizedResponse = normalizeResponse(front5Response.data, context); + t.same(normalizedResponse, front5Normalized); +}); + +test('normalize back 5', t => { + t.plan(1); + const queryAST = parse(back5Query, {noLocation: true, noSource: true}); + const context = buildExecutionContext(clientSchema, queryAST, {idFieldName: '_id'}); + const normalizedResponse = normalizeResponse(back5Response.data, context); + console.log(JSON.stringify(normalizedResponse, null, 2)); + t.same(normalizedResponse, back5Normalized); +}); + +//TODO test adding to pagination request +//TODO merge front and back when possible diff --git a/src/__tests__/unionExample.js b/src/__tests__/unionExample.js new file mode 100644 index 0000000..cc71db4 --- /dev/null +++ b/src/__tests__/unionExample.js @@ -0,0 +1,125 @@ +export const unionQueryString = ` +query { + getGroup(_id: "allEmployees") { + _id + owner { + __typename + ... 
on Author { + _id + name + twitterHandle + } + } + members { + __typename + ... on Author { + _id + name + } + ... on Group { + _id + members { + __typename + ... on Author { + _id + name + } + } + } + } + } +}` + +export const unionResponse = { + "data": { + "getGroup": { + "_id": "allEmployees", + "owner": { + "__typename": "Author", + "_id": "arunoda", + "name": "Arunoda Susiripala", + "twitterHandle": "@arunoda" + }, + "members": [ + { + "__typename": "Author", + "_id": "indi", + "name": "Kasun Indi" + }, + { + "__typename": "Group", + "_id": "executiveTeam", + "members": [ + { + "__typename": "Author", + "_id": "arunoda", + "name": "Arunoda Susiripala" + }, + { + "__typename": "Author", + "_id": "pahan", + "name": "Pahan Sarathchandra" + } + ] + } + ] + } + } +} + + +export const unionStore ={ + entities: { + Group: { + allEmployees: { + _id: 'allEmployees', + owner: 'Author:arunoda', + members: ['Author:indi', 'Group:executiveTeam'] + }, + executiveTeam: { + "_id": "executiveTeam", + "members": ['Author:arunoda', 'Author:pahan'] + } + }, + Author: { + arunoda: { + "_id": "arunoda", + "name": "Arunoda Susiripala", + "twitterHandle": "@arunoda" + }, + indi: { + "_id": "indi", + "name": "Kasun Indi" + }, + pahan: { + "_id": "pahan", + "name": "Pahan Sarathchandra" + } + }, + }, + result: { + getGroup: { + '{"_id":"allEmployees"}': 'Group:allEmployees' + } + } +} + +//desiredResult = { +// beds: { +// guest: 'martin' +// } +//} +// +// +//beds: { +// guest: null +//} +//} +// +//beds && beds.guest +//beds : null +// +//beds:[ +// {id:1, guest:null}, +// {id:2, guest: {'Guest:123'} +//] diff --git a/src/__tests__/updateSchema.js b/src/__tests__/updateSchema.js new file mode 100644 index 0000000..4a919a1 --- /dev/null +++ b/src/__tests__/updateSchema.js @@ -0,0 +1,84 @@ +require('babel-register'); + +const path = require('path'); +const rootSchema = require('./schema'); +const fs = require('fs'); +const graphql = require('graphql').graphql; +const introspectionQuery = ` +query IntrospectionQuery { + __schema { + queryType { name } + mutationType { name } + subscriptionType { name } + types { + ...FullType + } + directives { + name + args { + ...InputValue + } + onOperation + onFragment + onField + } + } +} +fragment FullType on __Type { + kind + name + fields(includeDeprecated: false) { + name + args { + ...InputValue + } + type { + ...TypeRef + } + } + inputFields { + ...InputValue + } + interfaces { + ...TypeRef + } + enumValues(includeDeprecated: false) { + name + } + possibleTypes { + ...TypeRef + } +} +fragment InputValue on __InputValue { + name + type { ...TypeRef } + defaultValue +} +fragment TypeRef on __Type { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + } + } + } +}`; + +graphql(rootSchema, introspectionQuery).then(result => { + if (result.errors) { + console.log(result.errors) + } else { + try { + fs.writeFileSync(path.join(__dirname, 'src/clientSchema.json'), JSON.stringify(result.data.__schema, null, 2)); + }catch(e) { + console.log(e) + } + } +}); diff --git a/src/babel-plugin/index.js b/src/babel-plugin/index.js new file mode 100644 index 0000000..e379d67 --- /dev/null +++ b/src/babel-plugin/index.js @@ -0,0 +1,37 @@ +import {transform} from './transform'; +import {parse} from 'graphql/language/parser'; +import {print} from 'graphql/language/printer'; +import {makeNormalSchema} from './makeNormalSchema'; + +const processQueryString = node => { + if (node.quasis.length > 1) { + throw new Error('string interpolation is not 
currently supported'); + } else { + return node.quasis[0].value.cooked; + } +} + +const querySchema = (schema, t, path) => { + const queryString = processQueryString(path.node.quasi); + const doc = parse(queryString, {noLocation: true, noSource: true}); + const cashaySchema = makeNormalSchema(doc, schema); + const prettyQuery = print(doc); + return t.objectExpression([ + t.objectProperty(t.stringLiteral('schema'), transform(cashaySchema, {t, path})), + t.objectProperty(t.stringLiteral('string'), t.stringLiteral(prettyQuery)) + ]); +} + +const createPlugin = schema => { + return ({types: t}) => ({ + visitor: { + TaggedTemplateExpression(path) { + if (t.isIdentifier(path.node.tag, {name: 'CashayQL'})) { + path.replaceWith(querySchema(schema, t, path)); + } + } + } + }); +} + +module.exports = createPlugin; diff --git a/src/babel-plugin/makeNormalSchema.js b/src/babel-plugin/makeNormalSchema.js new file mode 100644 index 0000000..17816c6 --- /dev/null +++ b/src/babel-plugin/makeNormalSchema.js @@ -0,0 +1,150 @@ +import {visit, QueryDocumentKeys} from 'graphql/language/visitor'; +import {TypeKind} from 'graphql/type/introspection'; +import {Schema, arrayOf, unionOf} from 'normalizr'; +import {getNestedSchema} from '../utils'; + +const {OperationDefinition, Document} = QueryDocumentKeys; +const {UNION, LIST, OBJECT} = TypeKind; + +class CashaySchema { + constructor() { + this.__argDict = {}; + } + + define(obj) { + Object.assign(this, obj); + } +} + +const paginationWords = ['before', 'after', 'first', 'last']; +const getRootTypeName = type => { + while (type.ofType) type = type.ofType; + return type.name; +}; + +const separateArgs = (schemaArgs, suppliedArgs) => { + const regularArgs = {}; + const paginationArgs = {}; + schemaArgs + .sort((a, b) => a.name < b.name) + .forEach(arg => { + const argObject = paginationWords.some(word => arg.name !== word) ? regularArgs : paginationArgs; + const suppliedArg = suppliedArgs.find(suppliedArg => suppliedArg.name.value === arg.name); + if (suppliedArg) { + const hardcodedVal = suppliedArg.value.value; + const argValue = hardcodedVal || `$${suppliedArg.value.name.value}`; + argObject[arg.name] = argValue; + } + }); + const {before, after, first, last} = paginationArgs; + if (before && after) { + console.warn(`You cannot include a before and after cursor. The before cursor will be ignored`); + delete paginationArgs.before; + } + if (first && last) { + let toDelete = paginationArgs.after ? 'last' : 'first'; + console.warn(`You cannot include a first and last limit. The ${toDelete} limit will be ignored`); + delete paginationArgs[toDelete]; + } + + return {regularArgs, paginationArgs}; +}; + +const getNormalizrValue = (schema, {args, type}, suppliedArgs) => { + const {kind, ofType, name} = type; + const {regularArgs, paginationArgs} = separateArgs(args, suppliedArgs); + if (kind === OBJECT) { + const childType = schema.types.find(field => field.name === name); + const isEntity = childType.fields.some(field => field.name === 'id'); + if (isEntity) { + const newEntity = new Schema(name); + return Object.assign(newEntity, { + __args: Object.keys(regularArgs).length ? 
{...regularArgs} : undefined + }) + } else { + //TODO handle objects w/o ids + } + } else { + let normalizingFn; + if (kind === LIST) { + normalizingFn = arrayOf; + } else if (kind === UNION) { + normalizingFn = unionOf; + } + if (normalizingFn) { + //TODO handle getting the right ofType + const arrOrUnion = normalizingFn(new Schema(ofType.name)); + return Object.assign(arrOrUnion, { + __args: Object.keys(regularArgs).length ? {...regularArgs} : undefined, + __paginationArgs: Object.keys(paginationArgs).length ? {...paginationArgs} : undefined + }); + } + } +}; + +const addArgsToDict = (fieldValue, cashaySchema, stack) => { + const argFields = ['__args', '__paginationArgs']; + argFields.forEach(argField => { + const args = fieldValue[argField]; + if (!args) return; + Object.keys(args).forEach(arg => { + const argVal = args[arg]; + if (argVal[0] === '$') { + cashaySchema.__argDict[argVal.substring(1)] = `${stack.join('.')}.${argField}.${arg}`; + } + }) + }) +}; + +export const makeNormalSchema = (doc, schema) => { + schema = schema.data.__schema; + const cashaySchema = new CashaySchema(); + const stack = []; + let operationSchema; + + visit(doc, { + Document(node) { + if (node.definitions.length > 1) { + console.error('Multiple operations not supported (yet?)'); + } + }, + OperationDefinition(node){ + const operationKey = `${node.operation}Type`; + const operationName = schema[operationKey].name; + if (!operationName) { + console.error(`${operationKey} does not exist in your schema! Try queryType, mutationType, or subscriptionType`) + } + operationSchema = schema.types.find(type => type.name === operationName); + }, + Field: { + enter(node) { + const parentEntity = getNestedSchema(cashaySchema, stack); + if (node.selectionSet) { + const fieldKey = node.name.value; + stack.push(fieldKey); + let childField = operationSchema.fields.find(field => field.name === fieldKey); //check inside rootQuery + if (!childField) { /* Is it not a query? 
*/ + const parentTypeName = parentEntity.getKey(); + const parentType = schema.types.find(field => field.name === parentTypeName); + childField = parentType.fields.find(field => field.name === fieldKey); + } + const fieldValue = getNormalizrValue(schema, childField, node.arguments); + if (fieldValue) { + parentEntity.define({[fieldKey]: fieldValue}); + addArgsToDict(fieldValue, cashaySchema, stack); + } + } else { + parentEntity[node.name.value] = true; + } + }, + leave(node) { + if (node.selectionSet) { + stack.pop(); + } + } + } + }); + console.log('cashaySchema', cashaySchema); + return cashaySchema; +}; + diff --git a/src/babel-plugin/transform.js b/src/babel-plugin/transform.js new file mode 100644 index 0000000..a7f8dab --- /dev/null +++ b/src/babel-plugin/transform.js @@ -0,0 +1,53 @@ +const transformObjectSchema = (schema, {t, path}) => { + const queries = Object.keys(schema).filter(key => key[0] !== '_'); + return t.objectExpression(queries.map(query => { + return t.objectProperty( + t.identifier(query), + transform(schema[query], {t, path}) + ); + })) +}; + +const transformEntitySchema = (schema, {t,path}) => { + return t.callExpression( + t.memberExpression(path.node.tag, t.identifier('schema')), + [t.stringLiteral(schema.getKey()), transformObjectSchema(schema, {t, path})] + ); +}; + +const transformArraySchema = (schema, {t,path}) => { + return t.callExpression( + t.memberExpression(path.node.tag, t.identifier('arrayOf')), + [transform(schema.getItemSchema(), {t, path})] + ); +}; + +const transformUnionSchema = (schema, {t,path}) => { + return t.callExpression( + t.memberExpression(path.node.tag, t.identifier('unionOf')), + [ + transform(schema.getItemSchema(), {t, path}), + t.objectExpression([ + t.objectProperty( + t.identifier('schemaAttribute'), + t.stringLiteral('__typename') + ) + ]) + ] + ); +}; + +export const transform = (schema, context) => { + switch (schema.constructor.name) { + case 'NormalizrSchema': + return transformObjectSchema(schema, context); + case 'EntitySchema': + return transformEntitySchema(schema, context); + case 'ArraySchema': + return transformArraySchema(schema, context); + case 'UnionSchema': + return transformUnionSchema(schema, context); + default: + throw new Error(`How the heck did you make a ${schema.constructor.name}?`); + } +}; diff --git a/src/buildExecutionContext.js b/src/buildExecutionContext.js new file mode 100644 index 0000000..a178b21 --- /dev/null +++ b/src/buildExecutionContext.js @@ -0,0 +1,37 @@ +import {OPERATION_DEFINITION, FRAGMENT_DEFINITION} from 'graphql/language/kinds'; + +export const defaultPaginationWords = { + before: 'before', + after: 'after', + first: 'first', + last: 'last' +}; + +export const buildExecutionContext = (schema, documentAST, options) => { + let operation; + const fragments = documentAST.definitions.reduce((reduction, definition) => { + if (definition.kind === OPERATION_DEFINITION) { + if (operation) { + console.error('Multiple operations not supported'); + } + operation = definition; + } else if (definition.kind === FRAGMENT_DEFINITION) { + reduction[definition.name.value] = definition; + } + return reduction; + }, {}); + if (!operation) { + console.error('Must provide an operation.'); + } + // TODO: Open to PR for defaultValue. 
Useful if someone called the same query with & without it delcaring it + return { + schema, + fragments, + operation, + paginationWords: Object.assign(defaultPaginationWords, options.paginationWords), + variableValues: options.variableValues, + idFieldName: options.idFieldName || 'id', + store: options.store + }; +}; + diff --git a/src/denormalizeStore.js b/src/denormalizeStore.js new file mode 100644 index 0000000..eb8f0a2 --- /dev/null +++ b/src/denormalizeStore.js @@ -0,0 +1,124 @@ +import {TypeKind} from 'graphql/type/introspection'; +import {buildExecutionContext} from './buildExecutionContext'; +import {mergeDeepWithArrs, mergeArrays, isObject} from './mergeDeep'; +import {separateArgs} from './separateArgs'; +import {FRAGMENT_SPREAD, INLINE_FRAGMENT} from 'graphql/language/kinds'; +import {ensureRootType, getRegularArgsKey} from './utils'; +const {UNION, INTERFACE, LIST, OBJECT, NON_NULL, SCALAR} = TypeKind; + +const getFieldState = (fieldState, regularArgs, paginationArgs) => { + if (regularArgs) { + const regularArgsString = getRegularArgsKey(regularArgs); + fieldState = fieldState[regularArgsString]; + } + if (paginationArgs) { + const {before, after, first, last} = paginationArgs; + let usefulArray = fieldState.full; + let isReverse = false; + if (usefulArray) { // if we have all the docs + isReverse = !!last; //if we're getting stuff in reverse + } else { // if we only have some of the docs + usefulArray = last ? fieldState.back : fieldState.front; + } + if (!usefulArray) { + console.log('no local data') + } + const cursor = before || after; + let cursorIdx = -1; + if (cursor) { + cursorIdx = usefulArray.find(doc => { + const [typeName, docId] = doc.split(':'); + const storedDoc = store.entities[typeName][docId]; + return storedDoc.cursor === cursor + }); + if (!cursorIdx) { + console.error('invalid cursor'); + } + } + if (isReverse) { + const minIdx = Math.max(0, cursorIdx + 1 - last); + fieldState = usefulArray.slice(minIdx, minIdx + last); + } else { + const limit = first || last; //separateArgs ensures at least 1 exists + const maxIdx = cursorIdx + 1 + limit; + if (usefulArray.length < maxIdx) { + console.log('not enough data, need to fetch more'); + } + fieldState = usefulArray.slice(cursorIdx + 1, cursorIdx + 1 + limit); + } + } + return fieldState; +}; + +const visitObject = (subState, reqAST, subSchema, context, baseReduction = {}) => { + return reqAST.selectionSet.selections.reduce((reduction, field) => { + if (field.kind === INLINE_FRAGMENT) { + if (field.typeCondition.name.value === subSchema.name) { + visitObject(subState, field, subSchema, context, reduction); + } + } else if (field.kind === FRAGMENT_SPREAD) { + const fragment = context.fragments[field.name.value]; + visitObject(subState, fragment, subSchema, context, reduction); + } else if (field.name.value === '__typename') { + reduction.__typename = subSchema.name; + } else { + const fieldName = field.name.value; + const aliasOrFieldName = field.alias && field.alias.value || fieldName; + const fieldSchema = subSchema.fields.find(field => field.name === fieldName); + let fieldState = subState[fieldName]; + if (fieldSchema.args && fieldSchema.args.length) { + const {regularArgs, paginationArgs} = separateArgs(fieldSchema, field.arguments, context); + fieldState = getFieldState(fieldState, regularArgs, paginationArgs); + } + reduction[aliasOrFieldName] = visit(fieldState, field, fieldSchema, context); + } + return reduction + }, baseReduction); +}; + +const visitNormalizedString = (subState, reqAST, subSchema, 
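+  // subState is a normalized reference string like 'Type:id'; resolve the doc from the entity store, then denormalize it recursively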
context) => { + const [typeName, docId] = subState.split(':'); + const doc = context.store.entities[typeName][docId]; + const fieldSchema = context.schema.types.find(type => type.name === typeName); + return visit(doc, reqAST, fieldSchema, context); +}; + +const visitIterable = (subState, reqAST, subSchema, context) => { + const fieldType = ensureRootType(subSchema.type); + const fieldSchema = context.schema.types.find(type => type.name === fieldType.name); + return subState.map(res => visit(res, reqAST, fieldSchema, context)); +}; + +const visit = (subState, reqAST, subSchema, context) => { + const objectType = subSchema.kind ? subSchema.kind : subSchema.type.kind; + + switch (objectType) { + case OBJECT: + if (typeof subState === 'string') { + return visitNormalizedString(subState, reqAST, subSchema, context); + } + return visitObject(subState, reqAST, subSchema, context); + case UNION: + return visitNormalizedString(subState, reqAST, subSchema, context); + case LIST: + return visitIterable(subState, reqAST, subSchema, context); + default: + return subState + } +}; + +export const denormalizeStore = context => { + const operationType = `${context.operation.operation}Type`; + const operationSchema = context.schema.types.find(type => type.name === context.schema[operationType].name); + const queryReduction = context.operation.selectionSet.selections.reduce((reduction, selection) => { + const queryName = selection.name.value; + const aliasOrName = selection.alias && selection.alias.value || queryName; + const subSchema = operationSchema.fields.find(field => field.name === queryName); + const {regularArgs, paginationArgs} = separateArgs(subSchema, selection.arguments, context); + const fieldState = getFieldState(context.store.result[queryName], regularArgs, paginationArgs); + reduction[aliasOrName] = visit(fieldState, selection, subSchema, context); + return reduction + }, {}); + console.log('FINAL', queryReduction); + return queryReduction; +}; diff --git a/src/duck.js b/src/duck.js new file mode 100644 index 0000000..4d4f87a --- /dev/null +++ b/src/duck.js @@ -0,0 +1,37 @@ +import {Map, List, fromJS} from 'immutable'; + +export const FETCH_DATA_REQUEST = '@@cashay/FETCH_DATA_REQUEST'; +export const FETCH_DATA_SUCCESS = '@@cashay/FETCH_DATA_SUCCESS'; +export const FETCH_DATA_ERROR = '@@cashay/FETCH_DATA_ERROR'; + +const initialState = Map({ + error: Map(), + isFetching: false, + data: Map({ + entities: Map(), + result: Map() + }) +}); + +export const reducer = (state = initialState, action) => { + switch (action.type) { + case FETCH_DATA_REQUEST: + return state.merge({ + isFetching: true + }); + case FETCH_DATA_SUCCESS: + return state.merge({ + isFetching: false, + data: action.payload + }); + case FETCH_DATA_ERROR: + return state.merge({ + isFetching: false, + error: action.error + }); + default: + return state; + } +}; + + diff --git a/src/getSubReqAST.js b/src/getSubReqAST.js new file mode 100644 index 0000000..4d3a0b2 --- /dev/null +++ b/src/getSubReqAST.js @@ -0,0 +1,19 @@ +import {FRAGMENT_SPREAD, INLINE_FRAGMENT} from 'graphql/language/kinds'; + +export const getSubReqAST = (key, reqAST, fragments) => { + let subReqAST; + for (let i = 0; i < reqAST.selectionSet.selections.length; i++) { + const selection = reqAST.selectionSet.selections[i]; + if (selection.kind === FRAGMENT_SPREAD) { + subReqAST = getSubReqAST(key, fragments[selection.name.value], fragments); + } else if (selection.kind === INLINE_FRAGMENT) { + subReqAST = getSubReqAST(key, selection, fragments); + if (subReqAST) 
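+      // the requested key was found inside the inline fragment, so return it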
return subReqAST; + } else if (selection.alias && selection.alias.value === key || selection.name.value === key) { + subReqAST = selection; + } + if (subReqAST) { + return subReqAST; + } + } +}; diff --git a/src/index.js b/src/index.js new file mode 100644 index 0000000..9816c26 --- /dev/null +++ b/src/index.js @@ -0,0 +1,11 @@ +import {reducer} from './duck'; +import _CashayQL from './CashayQL'; +import _Cashay from './Cashay'; + +export const Cashay = _Cashay; +export const CashayQL = _CashayQL; +export const cashayReducer = reducer; + + + + diff --git a/src/mergeDeep.js b/src/mergeDeep.js new file mode 100644 index 0000000..d67bd03 --- /dev/null +++ b/src/mergeDeep.js @@ -0,0 +1,67 @@ +export const isObject = (val) => val && typeof val === 'object'; + +//TODO this should take a front or back to know whether to append or prepend +export const mergeArrays = (target, src, shouldPrepend) => { + //check for overlap in docs, intelligently append keys + //const primaryKey = src[0].cursor ? 'cursor' : 'id'; + if (shouldPrepend) { + const maxTraversals = Math.min(target.length, src.length); + let arrayFront = src; + for (let i = 0; i < maxTraversals; i++) { + if (target[i] === src[src.length - 1]) { + arrayFront = src.slice(0, i); + break; + } + } + } else { + const maxTraversals = Math.max(0, target.length - src.length); + let arrayFront = target; + for (let i = target.length - 1; i >= maxTraversals; i--) { + if (target[i] === src[0]) { + //if (target[i][primaryKey] === src[0][primaryKey]) { + arrayFront = target.slice(0, i); + break; + //TODO verify afterFields are equal? only if we do janky mutations + } + } + } + return arrayFront.concat(src); +}; + +export const mergeDeepWithArrs = (target, src, {mergeArrays}, shouldPrepend) => { + const srcIsArr = Array.isArray(src); + if (srcIsArr) { + const targetIsArr = Array.isArray(target); + if (targetIsArr) { + target = mergeArrays(target, src, shouldPrepend); + } else { + //target is obj or scalar, src replaces it + target = src; + } + } else { + Object.keys(src).forEach(key => { + if (isObject(target[key]) && isObject(src[key])) { + target[key] = mergeDeepWithArrs(target[key], src[key], {mergeArrays}, key === 'back'); + } else { + target[key] = src[key]; + } + }); + if (Array.isArray(target.front) && Array.isArray(target.back)) { + debugger + // make sure the arrays changed (performance only) + if (Array.isArray(src.front) || Array.isArray(src.back)) { + const minFromBack = target.back[target.back.length - 1]; + const maxTraversals = Math.max(0, target.back.length - target.front.length); + for (let i = target.front.length - 1; i >= maxTraversals; i--) { + if (minFromBack === target.front[i]) { + target.full = target.front.slice(0, i).concat(target.back.reverse()); + delete target.front; + delete target.back; + break; + } + } + } + } + } + return target; +}; diff --git a/src/normalizeResponse.js b/src/normalizeResponse.js new file mode 100644 index 0000000..64e27f1 --- /dev/null +++ b/src/normalizeResponse.js @@ -0,0 +1,124 @@ +import {mergeDeepWithArrs, mergeArrays, isObject} from './mergeDeep'; +import {separateArgs} from './separateArgs'; +import {getSubReqAST} from './getSubReqAST'; +import {ensureRootType, getRegularArgsKey} from './utils'; +import {TypeKind} from 'graphql/type/introspection'; +const {UNION, INTERFACE, LIST, OBJECT, NON_NULL, SCALAR} = TypeKind; + +const mapResponseToResult = (nestedResult, response, regularArgs, paginationArgs) => { + const regularArgsString = getRegularArgsKey(regularArgs); + if (paginationArgs) { + const 
paginationObj = {}; + const {before, after, first, last} = paginationArgs; + //if (before) { + // paginationObj.before = before; + //} else if (after) { + // paginationObj.after = after; + //} + const arrName = first ? 'front' : last ? 'back' : 'full'; + paginationObj[arrName] = response; + response = paginationObj; + } + if (regularArgs === false) { + return response; + } else { + const resultObj = {[regularArgsString]: response}; + if (isObject(nestedResult) && !Array.isArray(nestedResult)) { + // Not sure if I need recursive merging, but playing it safe + const mutatedNestedResult = mergeDeepWithArrs(nestedResult, resultObj, {mergeArrays}); + return mutatedNestedResult; + } else { + return resultObj + } + } +}; + +const visitObject = (bag, subResponse, reqAST, subSchema, context) => { + return Object.keys(subResponse).reduce((reduction, key) => { + if (key.startsWith('__')) return reduction; + let subReqAST = getSubReqAST(key, reqAST, context.fragments); + const name = subReqAST.name.value; + const field = subSchema.fields.find(field => field.name === name); + let fieldType = ensureRootType(field.type); + let fieldSchema = context.schema.types.find(type => type.name === fieldType.name); + //debugger + // handle first recursion where things are stored in the query + fieldSchema = fieldSchema || subSchema.types.find(type => type.name === fieldType.name); + const normalizedResponse = visit(bag, subResponse[key], subReqAST, fieldSchema, context); + if (field.args && field.args.length) { + const {regularArgs, paginationArgs} = separateArgs(field, subReqAST.arguments, context); + reduction[name] = mapResponseToResult(reduction[name], normalizedResponse, regularArgs, paginationArgs); + } else { + reduction[name] = normalizedResponse; + } + return reduction; + }, {}) +}; +const visitEntity = (bag, subResponse, reqAST, subSchema, context, id) => { + const entityKey = subSchema.name; + bag[entityKey] = bag[entityKey] || {}; + bag[entityKey][id] = bag[entityKey][id] || {}; + let stored = bag[entityKey][id]; + let normalized = visitObject(bag, subResponse, reqAST, subSchema, context); + mergeDeepWithArrs(stored, normalized, {mergeArrays}); + return `${entityKey}:${id}`; +}; + +const visitIterable = (bag, subResponse, reqAST, subSchema, context) => { + return subResponse.map(res => visit(bag, res, reqAST, subSchema, context)); +}; + +const visitUnion = (bag, subResponse, reqAST, subSchema, context) => { + const concreteSubScema = context.schema.types.find(type => type.name === subResponse.__typename); + return visit(bag, subResponse, reqAST, concreteSubScema, context); +}; + +const visit = (bag, subResponse, reqAST, subSchema, context) => { + if (!isObject(subResponse)) { + return subResponse; + } else if (Array.isArray(subResponse)) { + return visitIterable(bag, subResponse, reqAST, subSchema, context); + } else if (subSchema.kind === UNION) { + return visitUnion(bag, subResponse, reqAST, subSchema, context); + } else { + const isEntity = !!subSchema.fields.find(field => field.name === context.idFieldName); + if (isEntity) { + const id = subResponse[context.idFieldName]; + if (id) { + return visitEntity(bag, subResponse, reqAST, subSchema, context, id); + } + console.warn(`Cashay: Cannot normalize ${subSchema.name}. 
Did not receive '${context.idFieldName}' field.`) + } + return visitObject(bag, subResponse, reqAST, subSchema, context); + } +}; + +export const normalizeResponse = (response, context) => { + let bag = {}; + const operationType = `${context.operation.operation}Type`; + const operationSchema = context.schema.types.find(type => type.name === context.schema[operationType].name); + const result = visit(bag, response, context.operation, operationSchema, context); + const data = { + entities: bag, + result + }; + return data +}; + +//window.imTarget = im.fromJS(window.target); +//debugger +//window.merged = window.imTarget.mergeDeepWith((prev, next, key) => { +// const aa = key; +// if (Array.isArray(next) && List.isList(prev)) { +// const primaryKey = next[0].cursor ? 'cursor' : 'id'; +// const maxTraversals = Math.max(0, prev.size - next.length); +// let arrayFront = prev; +// for (let i = prev.size -1; i >= maxTraversals; i--) { +// if (prev.getIn([i,primaryKey]) === src[0][primaryKey]) { +// arrayFront = prev.slice(0,i); +// break; +// } +// } +// return arrayFront.concat(src); +// } +//}, window.source) diff --git a/src/separateArgs.js b/src/separateArgs.js new file mode 100644 index 0000000..e0e9ab8 --- /dev/null +++ b/src/separateArgs.js @@ -0,0 +1,64 @@ +import {TypeKind} from 'graphql/type/introspection'; +const {LIST} = TypeKind; +import {ensureTypeFromNonNull} from './utils'; + +const getSuppliedArgs = (args, variableValues = {}, paginationWords) => { + const regularArgs = {}; + const paginationArgs = {}; + args + .sort((a, b) => a.name.value < b.name.value) + .forEach(arg => { + const argName = arg.name.value; + let argValue = arg.value.value || variableValues[argName]; + if (!argValue) return; + let paginationMeaning = Object.keys(paginationWords).find(pageWord => paginationWords[pageWord] === argName); + if (paginationMeaning) { + if (paginationMeaning === 'first' || paginationMeaning === 'last') { + argValue = parseInt(argValue); + if (paginationMeaning === 'first') { + if (paginationWords.first === paginationWords.last && args.find(arg => arg.name.value === 'before')) { + paginationMeaning = 'last'; + } + } + } + + paginationArgs[paginationMeaning] = argValue; + } else { + regularArgs[argName] = argValue; + } + }); + const {before, after, first, last} = paginationArgs; + if (before && !last || after && !first || before && first || after && last || before && after || first && last) { + console.error('Pagination options are: `before, last` `after, first`, `first`, and `last`'); + } + return {regularArgs, paginationArgs}; +}; + +const getPossibleArgs = (schema, paginationWords) => { + if (!schema.args) return {}; + let acceptsRegularArgs = false; + let acceptsPaginationArgs = false; + const paginationWordSet = Object.keys(paginationWords) + .reduce((reduction, key) => reduction.add(paginationWords[key]), new Set()); + schema.args.forEach(arg => { + if (paginationWordSet.has(arg.name)) { + acceptsPaginationArgs = true; + } else { + acceptsRegularArgs = true; + } + }); + return {acceptsRegularArgs, acceptsPaginationArgs}; +}; + +export const separateArgs = (fieldSchema, reqASTArgs, {paginationWords, variableValues}) => { + const responseType = ensureTypeFromNonNull(fieldSchema.type); + const {acceptsRegularArgs, acceptsPaginationArgs} = getPossibleArgs(fieldSchema, paginationWords); + let {regularArgs, paginationArgs} = getSuppliedArgs(reqASTArgs, variableValues, paginationWords); + regularArgs = acceptsRegularArgs && regularArgs; + paginationArgs = acceptsPaginationArgs && 
paginationArgs;
+  if (paginationArgs && responseType.kind !== LIST) {
+    console.warn(`${responseType.name} is not a List. Pagination args ignored`);
+    paginationArgs = false;
+  }
+  return {regularArgs, paginationArgs};
+};
diff --git a/src/utils.js b/src/utils.js
new file mode 100644
index 0000000..dd83843
--- /dev/null
+++ b/src/utils.js
@@ -0,0 +1,21 @@
+import {TypeKind} from 'graphql/type/introspection';
+const {NON_NULL} = TypeKind;
+
+export const getNestedSchema = (obj, stack) => {
+  return stack.reduce((reduction, level) => {
+    const nextLevel = reduction[level];
+    return nextLevel.getItemSchema ? nextLevel.getItemSchema() : nextLevel;
+  }, obj);
+};
+
+export const ensureTypeFromNonNull = type => type.kind === NON_NULL ? type.ofType : type;
+
+//const ensureTypeFromList = type => type.kind === LIST ? ensureTypeFromNonNull(type.ofType) : type;
+export const ensureRootType = type => {
+  while (type.ofType) type = type.ofType;
+  return type;
+};
+
+export const getRegularArgsKey = regularArgs => {
+  return regularArgs && (Object.keys(regularArgs).length ? JSON.stringify(regularArgs) : '');
+};
diff --git a/ssh_to_https.sh b/ssh_to_https.sh
new file mode 100755
index 0000000..8c6e437
--- /dev/null
+++ b/ssh_to_https.sh
@@ -0,0 +1,41 @@
+#!/bin/bash
+#-- Script to automate https://help.github.com/articles/why-is-git-always-asking-for-my-password
+# Forked from the original to do the opposite: switch SSH repo urls to HTTPS
+# Original here: https://gist.github.com/m14t/3056747
+# Thanks to @m14t
+
+#origin or upstream
+REMOTE=${1-origin}
+
+REPO_URL=`git remote -v | grep -m1 "^$REMOTE" | sed -Ene's#.*(git@github.com:[^[:space:]]*).*#\1#p'`
+if [ -z "$REPO_URL" ]; then
+  echo "-- ERROR: Could not identify Repo url."
+  echo "   It is possible this repo is already using HTTPS instead of SSH."
+  exit 1
+fi
+
+USER=`echo $REPO_URL | sed -Ene's#git@github.com:([^/]*)/(.*).git#\1#p'`
+if [ -z "$USER" ]; then
+  echo "-- ERROR: Could not identify User."
+  exit 1
+fi
+
+REPO=`echo $REPO_URL | sed -Ene's#git@github.com:([^/]*)/(.*).git#\2#p'`
+if [ -z "$REPO" ]; then
+  echo "-- ERROR: Could not identify Repo."
+  exit 1
+fi
+
+#NEW_URL="git@github.com:$USER/$REPO.git"
+NEW_URL="https://github.com/$USER/$REPO.git"
+echo "Changing repo url from "
+echo "  '$REPO_URL'"
+echo "      to "
+echo "  '$NEW_URL'"
+echo ""
+
+CHANGE_CMD="git remote set-url $REMOTE $NEW_URL"
+echo "$CHANGE_CMD"
+$CHANGE_CMD
+
+echo "Success"
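+
+# Usage reference (run from inside the repo whose remote you want to rewrite):
+#   ./ssh_to_https.sh            # rewrites the 'origin' remote to HTTPS
+#   ./ssh_to_https.sh upstream   # rewrites the 'upstream' remote instead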