Replaced tests with jest

2022-02-20 20:10:08 +00:00
parent f2994fc52e
commit ca11cc8963
23 changed files with 7595 additions and 1019 deletions

119
.gitignore vendored
View File

@@ -1,23 +1,106 @@
-#directories
-logs/**
-/**/coverage
-/**/node_modules
-/**/dl
-#passwords
-/**/*.pwd
-auth.json
+# Logs
+logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+lerna-debug.log*
+
+# Diagnostic reports (https://nodejs.org/api/report.html)
+report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
+
+# Runtime data
+pids
+*.pid
+*.seed
+*.pid.lock
+
+# Directory for instrumented libs generated by jscoverage/JSCover
+lib-cov
+
+# Coverage directory used by tools like istanbul
+coverage
+*.lcov
+
+# nyc test coverage
+.nyc_output
+
+# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
+.grunt
+
+# Bower dependency directory (https://bower.io/)
+bower_components
+
+# node-waf configuration
+.lock-wscript
+
+# Compiled binary addons (https://nodejs.org/api/addons.html)
+build/Release
+
+# Dependency directories
+node_modules/
+jspm_packages/
+
+# TypeScript v1 declaration files
+typings/
+
+# TypeScript cache
+*.tsbuildinfo
+
+# Optional npm cache directory
+.npm
+
+# Optional eslint cache
+.eslintcache
+
+# Microbundle cache
+.rpt2_cache/
+.rts2_cache_cjs/
+.rts2_cache_es/
+.rts2_cache_umd/
+
+# Optional REPL history
+.node_repl_history
+
+# Output of 'npm pack'
+*.tgz
+
+# Yarn Integrity file
+.yarn-integrity
+
+# dotenv environment variables file
 .env
+.env.test
-#log files
-/**/*.log
-#output files
-public/*.html
-public/*.js
-public/*.css
-public/*.map
-public/*.gz
-# Non npm lock files
-yarn.lock
+
+# parcel-bundler cache (https://parceljs.org/)
+.cache
+
+# Next.js build output
+.next
+
+# Nuxt.js build / generate output
+.nuxt
+dist
+
+# Gatsby files
+.cache/
+# Comment in the public line in if your project uses Gatsby and *not* Next.js
+# https://nextjs.org/blog/next-9-1#public-directory-support
+# public
+
+# vuepress build output
+.vuepress/dist
+
+# Serverless directories
+.serverless/
+
+# FuseBox cache
+.fusebox/
+
+# DynamoDB Local files
+.dynamodb/
+
+# TernJS port file
+.tern-port
+
+# Output files

7064
package-lock.json generated Normal file

File diff suppressed because it is too large

View File

@@ -1,9 +1,10 @@
 {
   "name": "sineql",
-  "version": "0.4.0",
+  "version": "0.4.1",
   "description": "A simple to use graphQL clone",
   "main": "source/index.js",
   "scripts": {
+    "test": "jest"
   },
   "repository": {
     "type": "git",
@@ -19,5 +20,8 @@
   "bugs": {
     "url": "https://github.com/Ratstail91/sineQL/issues"
   },
-  "homepage": "https://github.com/Ratstail91/sineQL#readme"
+  "homepage": "https://github.com/Ratstail91/sineQL#readme",
+  "devDependencies": {
+    "jest": "^27.5.1"
+  }
 }
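With the new "test" script and the jest devDependency in place, npm test now runs Jest, which by default picks up any file matching *.test.js — the new files under test/ shown further down. As a purely illustrative sketch (this file name and content are hypothetical, not part of the commit), such a test needs no imports because Jest injects test and expect as globals:

//hypothetical file: test/example.test.js (not part of this commit)
test('example - jest discovers any *.test.js file', () => {
	expect(1 + 1).toEqual(2);
});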

View File

@@ -4,7 +4,7 @@ const { eatBlock, checkAlphaNumeric } = require('./utils');
 const parseInput = require('./parse-input');
 //parse the schema into a type graph
-const buildTypeGraph = (schema, options) => {
+const buildTypeGraph = (schema, options = {}) => {
 	//the default graph
 	let graph = {
 		String: { typeName: 'String', scalar: true },
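The only functional change here is the options = {} default parameter. A minimal sketch follows, assuming a body that reads options.debug the way the real module does — without the default, calling buildTypeGraph(schema) with no options argument (as the new tests below do) would throw a TypeError when reading properties of undefined:

//minimal sketch, not the real implementation
const buildTypeGraphSketch = (schema, options = {}) => {
	if (options.debug) {
		console.log('parsing schema:', schema); //only logs when debug is requested
	}
	return { String: { typeName: 'String', scalar: true } }; //stand-in for the real graph
};
buildTypeGraphSketch('scalar Date'); //omitting options is now safe
buildTypeGraphSketch('scalar Date', { debug: true }); //opting in still works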

View File

@@ -40,7 +40,7 @@ const sineQL = (schema, { queryHandlers, createHandlers }, options = {}) => {
 		case 'update':
 		case 'delete':
-			return [501, 'Keyword not implemented: ' + tokens[0]];
+			return [501, 'Keyword not yet implemented: ' + tokens[0]];
 			//TODO: implement these keywords
 			break;

View File

@@ -1,5 +1,5 @@
 //build the tokens into a single object of types representing the initial query
-const parseCreateTree = (tokens, typeGraph, options) => {
+const parseCreateTree = (tokens, typeGraph, options = {}) => {
 	let current = 1; //primed
 	//check this is a create command
@@ -94,6 +94,9 @@ const readBlock = (tokens, current, superType, typeGraph, options) => {
 	//insert the typename into the block
 	block['typeName'] = typeGraph[superType][fieldName].typeName;
+	//insert the unique modifier if it's set
+	block['unique'] = typeGraph[superType][fieldName].unique;
 	//insert the block-level modifier signal
 	if (modifier) {
 		block[modifier] = true;
@@ -122,6 +125,9 @@ const readBlock = (tokens, current, superType, typeGraph, options) => {
 	//save the typeGraph type into result
 	result[fieldName] = JSON.parse(JSON.stringify( typeGraph[ typeGraph[superType][fieldName].typeName ] ));
+	//insert the unique modifier if it's set
+	result[fieldName]['unique'] = typeGraph[superType][fieldName].unique;
 	//insert the block-level modifier signal
 	if (modifier) {
 		result[fieldName][modifier] = tokens[current++];
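Both added hunks copy the unique flag from the type graph onto the parsed create tree, so the create handlers can enforce uniqueness without re-reading the schema. An illustrative node shape, inferred from the handler comments and the new tests below rather than produced by running the code:

//assumed shape of one create-tree entry for: create Author { create name "Kenneth Grahame" }
const exampleEntry = {
	typeName: 'Author',
	create: true,
	name: { typeName: 'String', scalar: true, unique: true, create: 'Kenneth Grahame' },
};
console.log(exampleEntry.name.unique); //true - the new parse-create-tree test asserts exactly this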

View File

@@ -1,5 +1,5 @@
 //break the body down into tokens
-const parseInput = (body, allowStrings, options) => {
+const parseInput = (body, allowStrings, options = {}) => {
 	let current = 0;
 	tokens = [];

View File

@@ -1,5 +1,5 @@
 //build the tokens into a single object of types representing the initial query
-const parseQueryTree = (tokens, typeGraph, options) => {
+const parseQueryTree = (tokens, typeGraph, options = {}) => {
 	let current = 1; //primed
 	//get a token that matches a type
@@ -60,6 +60,9 @@ const readBlock = (tokens, current, superType, typeGraph, options) => {
 	//insert the typename into the block
 	block['typeName'] = typeGraph[superType][fieldName].typeName;
+	//insert the unique modifier if it's set
+	block['unique'] = typeGraph[superType][fieldName].unique;
 	//insert into result
 	result[fieldName] = block;
@@ -83,6 +86,9 @@ const readBlock = (tokens, current, superType, typeGraph, options) => {
 	//save the typeGraph type into result
 	result[fieldName] = JSON.parse(JSON.stringify( typeGraph[ typeGraph[superType][fieldName].typeName ] ));
+	//insert the unique modifier if it's set
+	result[fieldName]['unique'] = typeGraph[superType][fieldName].unique;
 	//insert the block-level modifier signal
 	if (modifier) {
 		result[fieldName][modifier] = tokens[current++];

View File

@@ -1,6 +0,0 @@
DB_HOSTNAME=localhost
DB_DATABASE=sineQL
DB_USERNAME=sineQL
DB_PASSWORD=sineQL
DB_TIMEZONE=Australia/Sydney
DB_LOGGING=

View File

@@ -0,0 +1,78 @@
const buildTypeGraph = require('../source/build-type-graph');
const emptySchema = '';
const simpleSchema = `
scalar Date
type Book {
unique String title
Date published
Float rating
}
type Author {
unique String name
Book books
}
`;
const missingDateSchema = `
type Book {
unique String title
Date published
Float rating
}
type Author {
unique String name
Book books
}
`;
const outOfOrderSchema = `
type Book {
unique String title
Date published
Float rating
}
type Author {
unique String name
Book books
}
scalar Date
`;
//parse the input, validating the structure as you go
test('buildTypeGraph - build an empty type graph', () => {
const graph = buildTypeGraph(emptySchema, { debug: false });
expect(Object.keys(graph).length).toEqual(4); //4 for the 4 base types
});
test('buildTypeGraph - build a simple type graph', () => {
const graph = buildTypeGraph(simpleSchema, { debug: false });
expect(Object.keys(graph).length).toEqual(7); //4 for the 4 base types + 3 for Date, Book, Author
});
test('buildTypeGraph - build an invalid type graph (missing Date)', () => {
const f = () => buildTypeGraph(missingDateSchema, { debug: false });
expect(f).toThrow("Unexpected value found as type field ('Date' is undefined)");
});
test('buildTypeGraph - build an invalid type graph (out of order Date)', () => {
const f = () => buildTypeGraph(outOfOrderSchema, { debug: false });
expect(f).toThrow("Unexpected value found as type field ('Date' is undefined)");
});

View File

@@ -1,12 +0,0 @@
const Sequelize = require('sequelize');
const sequelize = new Sequelize(process.env.DB_DATABASE, process.env.DB_USERNAME, process.env.DB_PASSWORD, {
host: process.env.DB_HOSTNAME,
dialect: 'mariadb',
timezone: process.env.DB_TIMEZONE,
logging: process.env.DB_LOGGING ? console.log : false
});
sequelize.sync();
module.exports = sequelize;

View File

@@ -1,17 +0,0 @@
const Sequelize = require('sequelize');
const sequelize = require('..');
module.exports = sequelize.define('authors', {
id: {
type: Sequelize.INTEGER(11),
allowNull: false,
autoIncrement: true,
primaryKey: true,
unique: true
},
name: {
type: Sequelize.STRING,
unique: true
}
});

View File

@@ -1,25 +0,0 @@
const Sequelize = require('sequelize');
const sequelize = require('..');
module.exports = sequelize.define('books', {
id: {
type: Sequelize.INTEGER(11),
allowNull: false,
autoIncrement: true,
primaryKey: true,
unique: true
},
title: {
type: Sequelize.STRING,
unique: true
},
published: {
type: Sequelize.STRING
},
rating: {
type: Sequelize.FLOAT
}
});

View File

@@ -1,12 +0,0 @@
const sequelize = require('..');
const authors = require('./authors');
const books = require('./books');
books.belongsTo(authors, { as: 'author' }); //books now reference the authorId
sequelize.sync();
module.exports = {
authors,
books
};

View File

@@ -1,211 +0,0 @@
const { Op } = require('sequelize');
const { books, authors } = require('../database/models');
//The create handlers are supposed to handle inserting new data into a database
//You don't have to create all associated books at the same time as the authors - you can use update later to join them
//You can use the '[' and ']' symbols to create multiple elements of data at once
//'create' also counts as a modifier, indicating that a specific value is new to the database; an error is returned if it already exists
//'match' is used when a value must already exist in the database; an error is returned if it does not
/* possible create requests include:
create Author {
create name "Sydney Sheldon"
create books [
{
create title "The Naked Face"
published 1970
}
{
create title "A Stranger in the Mirror"
published 1976
}
]
}
create Author {
create name "Sydney Sheldon"
create books {
create title "Bloodline"
create published 1977
}
}
create Author {
create name "Sydney Sheldon"
match books {
match title "Rage of Angels"
}
}
*/
/* all create arguments look like this:
//Author array
[{
typeName: 'Author',
create: true,
name: { typeName: 'String', scalar: true, create: 'Sydney Sheldon' }
books: [{
typeName: 'Book',
create: true,
title: { typeName: 'String', scalar: true, create: 'Bloodline' }
published: { typeName: 'Date', scalar: true, set: 1977 }
}, ...]
},
...]
*/
//higher level elements need to pass their IDs to sub-elements
const createHandlers = {
//complex compound
Author: async (create, graph) => {
//apply the following to an array of authors
const promises = create.map(async author => {
//get the fields alone
const { typeName, create, match, ...fields } = author;
//if we are creating a new element (default with Author as a top-level only type)
if (create) {
//check every unique field is being created
Object.keys(fields).forEach(field => {
if (graph[typeName][field].unique && !fields[field].create) {
throw `Must create a new value for unique fields (${typeName} ${field})`;
}
})
//check the created scalar fields (value must not exist in the database yet)
const createdOrs = Object.keys(fields).filter(field => fields[field].scalar && fields[field].create).map(field => { return { [field]: fields[field].create }; });
const createdFound = await authors.findOne({
where: {
[Op.or]: createdOrs
},
});
if (createdFound) {
//enter error state
Object.keys(fields).forEach(field => {
if (fields[field].create == createdFound[field]) {
throw `Cannot create Author field '${field}' with value '${fields[field].create}' (value already exists)`;
}
});
//no error field found, why?
throw 'Unknown error (createHandlers.Author)';
}
//create the element (with created scalar fields)
const args = {};
Object.keys(fields).filter(field => fields[field].scalar).forEach(field => args[field] = fields[field].create || fields[field].set);
const createdAuthor = await authors.create(args);
//pass on to the sub-objects (books)
Object.keys(fields).filter(field => !fields[field].scalar).forEach(nonScalar => fields[nonScalar].forEach(element => element.authorId = createdAuthor.id)); //hack in the authorId
Object.keys(fields).filter(field => !fields[field].scalar).forEach(nonScalar => {
//delegation
createHandlers[graph[typeName][nonScalar].typeName](fields[nonScalar], graph);
});
}
//just to check
else {
throw `Fall through not implemented for Author (missed create & match)`;
}
});
//handle promises
await Promise.all(promises).catch(e => console.error(e));
return null;
},
//simple compound
Book: async (create, graph) => {
const promises = create.map(async book => {
//get the fields alone
const { typeName, authorId, create, match, ...fields } = book;
//if we are creating a new element(s)
if (create) {
//check every unique field is being created
Object.keys(fields).forEach(field => {
//authorId is hacked in from above
if (graph[typeName][field].unique && !fields[field].create) {
throw `Must create a new value for unique fields (${typeName} ${field})`;
}
})
//check the created scalar fields (value must not exist in the database yet)
const createdOrs = Object.keys(fields).filter(field => fields[field].scalar && fields[field].create).map(field => { return { [field]: fields[field].create }; });
const createdFound = await books.findOne({
where: {
[Op.or]: createdOrs
},
});
if (createdFound) {
//enter error state
Object.keys(fields).forEach(field => {
if (fields[field].create == createdFound[field]) {
throw `Cannot create Book field '${field}' with value '${fields[field].create}' (value already exists)`;
}
});
//no error field found, why?
throw 'Unknown error (createHandlers.Book)';
}
//create the element (with created scalar fields)
const args = {};
Object.keys(fields).filter(field => fields[field].scalar).forEach(field => args[field] = fields[field].create || fields[field].set);
args['authorId'] = authorId; //hacked in
await books.create(args);
}
//pulled from query (match existing books)
else if (match) {
//get the names of matched fields
const matchedNames = Object.keys(fields).filter(field => fields[field].match || fields[field].set);
//short-circuit if querying everything
const where = {};
if (matchedNames.length > 0) {
//build the "where" object
matchedNames.forEach(mn => {
if (fields[mn].match !== true) {
where[mn] = { [Op.eq]: fields[mn].match || fields[mn].set };
}
});
}
//don't steal books
where['authorId'] = { [Op.eq]: null };
//update the sub-elements
await books.update({
authorId: authorId
}, {
where: where
}); //sequelize ORM model
}
//just to check
else {
throw `Fall through not implemented for Book (missed create & match)`;
}
});
//handle promises
await Promise.all(promises).catch(e => console.error(e));
return null;
}
};
module.exports = createHandlers;

View File

@@ -1,126 +0,0 @@
const { Op } = require('sequelize');
const { books, authors } = require('../database/models');
//the handler functions return arrays for each type, containing every element that satisfies the queries
//the "query" argument contains the object built from the sineQL query
//the "graph" argument contains the typeGraph
//the task of the handler functions is to query the database, and return the correct results
/* possible values for "query" include:
{
id: 1,
typeName: 'Author',
name: { typeName: 'String', scalar: true, match: 'Kenneth Grahame' },
books: { typeName: 'Book', match: {
id: 2,
typeName: 'Book',
title: { typeName: 'String', scalar: true, match: 'The Wind in the Willows' }
published: { typeName: 'Date', scalar: true }
}
}
*/
//depth-first search seems to be the best option
//Each query shouldn't know if it's a sub-query
const queryHandlers = {
//complex compound
Author: async (query, graph) => {
//get the fields alone
const { typeName, match, ...fields } = query;
//hack the id into the fields list (if it's not there already)
fields['id'] = fields['id'] || { typeName: 'Integer', scalar: true }; //TODO: should this be default?
//get the names of matched fields (fields to find)
const matchedNames = Object.keys(fields).filter(field => fields[field].match);
//short-circuit if querying everything
const where = {};
if (matchedNames.length > 0) {
//build the "where" object
matchedNames.forEach(mn => {
if (query[mn].match !== true) { //true means it's a compound type
where[mn] = { [Op.eq]: query[mn].match };
}
});
}
//these are field names
const scalars = Object.keys(fields).filter(field => graph[fields[field].typeName].scalar);
const nonScalars = Object.keys(fields).filter(field => !graph[fields[field].typeName].scalar);
let authorResults = await authors.findAll({
attributes: scalars, //fields to find (keys)
where: where,
raw: true
}); //sequelize ORM model
const promiseArray = nonScalars.map(async nonScalar => {
//hack the author ID in, so it can be referenced below
fields[nonScalar]['authorId'] = fields[nonScalar]['authorId'] || { typeName: 'Integer', scalar: true };
//delegate to a deeper part of the tree
const nonScalarArray = await queryHandlers[fields[nonScalar].typeName](fields[nonScalar], graph);
//for each author, update this non-scalar field with the non-scalar's recursed value
authorResults = authorResults.map(author => {
author[nonScalar] = nonScalarArray.filter(ns => ns['authorId'] == author.id);
//BUGFIX: filter out extra authorId
author[nonScalar] = author[nonScalar].map(ns => {
const { authorId, ...res } = ns;
return res;
}); //extract authorId
return author;
});
//prune the authors when matching, but their results are empty
authorResults = authorResults.filter(author => {
return !(fields[nonScalar].match && author[nonScalar].length == 0);
});
});
await Promise.all(promiseArray);
//finally, return the results
return authorResults;
},
//simple compound
Book: async (query, graph) => {
//get the fields alone
const { typeName, match, ...fields } = query;
//hack the id into the fields list (if it's not there already)
fields['id'] = fields['id'] || { typeName: 'Integer', scalar: true }; //TODO: should this be automatic?
//get the names of matched fields
const matchedNames = Object.keys(fields).filter(field => fields[field].match);
//short-circuit if querying everything
const where = {};
if (matchedNames.length > 0) {
//build the "where" object
matchedNames.forEach(mn => {
if (query[mn].match !== true) {
where[mn] = { [Op.eq]: query[mn].match };
}
});
}
//return the result (N+1 bottleneck)
return await books.findAll({
attributes: Object.keys(fields), //fields to find (everything for simple compounds)
where: where,
raw: true
}); //sequelize ORM model
}
};
module.exports = queryHandlers;

View File

@@ -1,15 +0,0 @@
//the matching schema
module.exports = `
scalar Date
type Book {
unique String title
Date published
Float rating
}
type Author {
unique String name
Book books
}
`;

View File

@@ -1,88 +0,0 @@
require('dotenv').config();
//setup database
const sequelize = require('./database');
const { books, authors } = require('./database/models');
//create the dummy data
sequelize.sync().then(async () => {
//*
// return; //DEBUG: delete this for debugging
await sequelize.query('DELETE FROM authors;');
await sequelize.query('DELETE FROM books;');
await authors.bulkCreate([
{ id: 1, name: 'Diana Gabaldon' },
{ id: 2, name: 'Emily Rodda' },
{ id: 3, name: 'Kenneth Grahame' }
]);
await books.bulkCreate([
{ id: 1, authorId: 1, title: 'Outlander', published: '1991', rating: 9.5 },
{ id: 2, authorId: 1, title: 'Dragonfly in Amber', published: '1992', rating: 9.5 },
{ id: 3, authorId: 1, title: 'Voyager', published: '1993', rating: 9.5 },
{ id: 4, authorId: 1, title: 'Drums of Autumn', published: '1996', rating: 9.5 },
{ id: 5, authorId: 1, title: 'The Fiery Cross', published: '2000', rating: 9.5 }, //Incorrect, the correct publish date is 2001
{ id: 6, authorId: 1, title: 'The Breath of Snow and Ashes', published: '2005', rating: 9.5 },
{ id: 7, authorId: 1, title: 'An Echo in the Bone', published: '2009', rating: 9.5 },
{ id: 8, authorId: 1, title: 'Written in my Own Heart\'s Blood', published: '2014', rating: 9.5 },
{ id: 9, authorId: 1, title: 'Go Tell the Bees That I Am Gone', published: null, rating: 9.5 },
{ id: 10, authorId: 2, title: 'The Forest of Silence', published: '2000', rating: 9.5 },
{ id: 11, authorId: 2, title: 'The Lake of Tears', published: '2000', rating: 9.5 },
{ id: 12, authorId: 2, title: 'The City of Rats', published: '2000', rating: 9.5 },
{ id: 13, authorId: 2, title: 'The Shifting Sands', published: '2000', rating: 9.5 },
{ id: 14, authorId: 2, title: 'Dread Mountain', published: '2000', rating: 9.5 },
{ id: 15, authorId: 2, title: 'The Maze of the Beast', published: '2000', rating: 9.5 },
{ id: 16, authorId: 2, title: 'The Valley of the Lost', published: '2000', rating: 9.5 },
{ id: 17, authorId: 2, title: 'Return to Del', published: '2000', rating: 9.5 },
{ id: 18, authorId: 3, title: 'The Wind in the Willows', published: '1908', rating: 9.5 },
]);
//*/
});
//input tools
const readline = require('readline');
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
terminal: false
});
const question = (prompt, def = null) => {
return new Promise((resolve, reject) => {
rl.question(`${prompt}${def ? ` (${def})` : ''}> `, answer => {
//loop on required
if (def === null && !answer) {
return resolve(question(prompt, def));
}
return resolve(answer || def);
});
});
};
//the library to test
const sineQL = require('../source/index.js');
//the arguments to the library
const schema = require('./handlers/schema');
const queryHandlers = require('./handlers/query-handlers');
const createHandlers = require('./handlers/create-handlers');
//run the setup function to create the closure (creates the type graph)
const sine = sineQL(schema, { queryHandlers, createHandlers }, { debug: false });
//actually ask the question
(async () => {
while(true) {
const answer = await question('sineQL');
const [code, result] = await sine(answer);
//normal response
if (code == 200) {
console.dir(result, { depth: null });
}
}
})();

466
test/package-lock.json generated
View File

@@ -1,466 +0,0 @@
{
"name": "sineql-test",
"version": "1.0.0",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "sineql-test",
"version": "1.0.0",
"license": "ISC",
"dependencies": {
"dotenv": "^8.2.0",
"mariadb": "^2.5.3",
"sequelize": "^6.6.2"
}
},
"node_modules/@types/geojson": {
"version": "7946.0.7",
"resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.7.tgz",
"integrity": "sha512-wE2v81i4C4Ol09RtsWFAqg3BUitWbHSpSlIo+bNdsCJijO9sjme+zm+73ZMCa/qMC8UEERxzGbvmr1cffo2SiQ=="
},
"node_modules/@types/node": {
"version": "14.14.37",
"resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.37.tgz",
"integrity": "sha512-XYmBiy+ohOR4Lh5jE379fV2IU+6Jn4g5qASinhitfyO71b/sCo6MKsMLF5tc7Zf2CE8hViVQyYSobJNke8OvUw=="
},
"node_modules/any-promise": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz",
"integrity": "sha1-q8av7tzqUugJzcA3au0845Y10X8="
},
"node_modules/debug": {
"version": "4.3.1",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz",
"integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==",
"dependencies": {
"ms": "2.1.2"
},
"engines": {
"node": ">=6.0"
},
"peerDependenciesMeta": {
"supports-color": {
"optional": true
}
}
},
"node_modules/denque": {
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/denque/-/denque-1.5.0.tgz",
"integrity": "sha512-CYiCSgIF1p6EUByQPlGkKnP1M9g0ZV3qMIrqMqZqdwazygIA/YP2vrbcyl1h/WppKJTdl1F85cXIle+394iDAQ==",
"engines": {
"node": ">=0.10"
}
},
"node_modules/dotenv": {
"version": "8.2.0",
"resolved": "https://registry.npmjs.org/dotenv/-/dotenv-8.2.0.tgz",
"integrity": "sha512-8sJ78ElpbDJBHNeBzUbUVLsqKdccaa/BXF1uPTw3GrvQTBgrQrtObr2mUrE38vzYd8cEv+m/JBfDLioYcfXoaw==",
"engines": {
"node": ">=8"
}
},
"node_modules/dottie": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/dottie/-/dottie-2.0.2.tgz",
"integrity": "sha512-fmrwR04lsniq/uSr8yikThDTrM7epXHBAAjH9TbeH3rEA8tdCO7mRzB9hdmdGyJCxF8KERo9CITcm3kGuoyMhg=="
},
"node_modules/iconv-lite": {
"version": "0.6.2",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.2.tgz",
"integrity": "sha512-2y91h5OpQlolefMPmUlivelittSWy0rP+oYVpn6A7GwVHNE8AWzoYOBNmlwks3LobaJxgHCYZAnyNo2GgpNRNQ==",
"dependencies": {
"safer-buffer": ">= 2.1.2 < 3.0.0"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/inflection": {
"version": "1.12.0",
"resolved": "https://registry.npmjs.org/inflection/-/inflection-1.12.0.tgz",
"integrity": "sha1-ogCTVlbW9fa8TcdQLhrstwMihBY=",
"engines": [
"node >= 0.4.0"
]
},
"node_modules/lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
},
"node_modules/long": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz",
"integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA=="
},
"node_modules/lru-cache": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
"dependencies": {
"yallist": "^4.0.0"
},
"engines": {
"node": ">=10"
}
},
"node_modules/mariadb": {
"version": "2.5.3",
"resolved": "https://registry.npmjs.org/mariadb/-/mariadb-2.5.3.tgz",
"integrity": "sha512-9ZbQ1zLqasLCQy6KDcPHtX7EUIMBlQ8p64gNR61+yfpCIWjPDji3aR56LvwbOz1QnQbVgYBOJ4J/pHoFN5MR+w==",
"dependencies": {
"@types/geojson": "^7946.0.7",
"@types/node": "^14.14.28",
"denque": "^1.4.1",
"iconv-lite": "^0.6.2",
"long": "^4.0.0",
"moment-timezone": "^0.5.33",
"please-upgrade-node": "^3.2.0"
},
"engines": {
"node": ">= 10.13"
}
},
"node_modules/moment": {
"version": "2.29.1",
"resolved": "https://registry.npmjs.org/moment/-/moment-2.29.1.tgz",
"integrity": "sha512-kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ==",
"engines": {
"node": "*"
}
},
"node_modules/moment-timezone": {
"version": "0.5.33",
"resolved": "https://registry.npmjs.org/moment-timezone/-/moment-timezone-0.5.33.tgz",
"integrity": "sha512-PTc2vcT8K9J5/9rDEPe5czSIKgLoGsH8UNpA4qZTVw0Vd/Uz19geE9abbIOQKaAQFcnQ3v5YEXrbSc5BpshH+w==",
"dependencies": {
"moment": ">= 2.9.0"
},
"engines": {
"node": "*"
}
},
"node_modules/ms": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
},
"node_modules/please-upgrade-node": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/please-upgrade-node/-/please-upgrade-node-3.2.0.tgz",
"integrity": "sha512-gQR3WpIgNIKwBMVLkpMUeR3e1/E1y42bqDQZfql+kDeXd8COYfM8PQA4X6y7a8u9Ua9FHmsrrmirW2vHs45hWg==",
"dependencies": {
"semver-compare": "^1.0.0"
}
},
"node_modules/retry-as-promised": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/retry-as-promised/-/retry-as-promised-3.2.0.tgz",
"integrity": "sha512-CybGs60B7oYU/qSQ6kuaFmRd9sTZ6oXSc0toqePvV74Ac6/IFZSI1ReFQmtCN+uvW1Mtqdwpvt/LGOiCBAY2Mg==",
"dependencies": {
"any-promise": "^1.3.0"
}
},
"node_modules/safer-buffer": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
},
"node_modules/semver": {
"version": "7.3.5",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz",
"integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==",
"dependencies": {
"lru-cache": "^6.0.0"
},
"bin": {
"semver": "bin/semver.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/semver-compare": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/semver-compare/-/semver-compare-1.0.0.tgz",
"integrity": "sha1-De4hahyUGrN+nvsXiPavxf9VN/w="
},
"node_modules/sequelize": {
"version": "6.6.2",
"resolved": "https://registry.npmjs.org/sequelize/-/sequelize-6.6.2.tgz",
"integrity": "sha512-H/zrzmTK+tis9PJaSigkuXI57nKBvNCtPQol0yxCvau1iWLzSOuq8t3tMOVeQ+Ep8QH2HoD9/+FCCIAqzUr/BQ==",
"dependencies": {
"debug": "^4.1.1",
"dottie": "^2.0.0",
"inflection": "1.12.0",
"lodash": "^4.17.20",
"moment": "^2.26.0",
"moment-timezone": "^0.5.31",
"retry-as-promised": "^3.2.0",
"semver": "^7.3.2",
"sequelize-pool": "^6.0.0",
"toposort-class": "^1.0.1",
"uuid": "^8.1.0",
"validator": "^10.11.0",
"wkx": "^0.5.0"
},
"engines": {
"node": ">=10.0.0"
},
"peerDependenciesMeta": {
"mariadb": {
"optional": true
},
"mysql2": {
"optional": true
},
"pg": {
"optional": true
},
"pg-hstore": {
"optional": true
},
"sqlite3": {
"optional": true
},
"tedious": {
"optional": true
}
}
},
"node_modules/sequelize-pool": {
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/sequelize-pool/-/sequelize-pool-6.1.0.tgz",
"integrity": "sha512-4YwEw3ZgK/tY/so+GfnSgXkdwIJJ1I32uZJztIEgZeAO6HMgj64OzySbWLgxj+tXhZCJnzRfkY9gINw8Ft8ZMg==",
"engines": {
"node": ">= 10.0.0"
}
},
"node_modules/toposort-class": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/toposort-class/-/toposort-class-1.0.1.tgz",
"integrity": "sha1-f/0feMi+KMO6Rc1OGj9e4ZO9mYg="
},
"node_modules/uuid": {
"version": "8.3.2",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
"integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==",
"bin": {
"uuid": "dist/bin/uuid"
}
},
"node_modules/validator": {
"version": "10.11.0",
"resolved": "https://registry.npmjs.org/validator/-/validator-10.11.0.tgz",
"integrity": "sha512-X/p3UZerAIsbBfN/IwahhYaBbY68EN/UQBWHtsbXGT5bfrH/p4NQzUCG1kF/rtKaNpnJ7jAu6NGTdSNtyNIXMw==",
"engines": {
"node": ">= 0.10"
}
},
"node_modules/wkx": {
"version": "0.5.0",
"resolved": "https://registry.npmjs.org/wkx/-/wkx-0.5.0.tgz",
"integrity": "sha512-Xng/d4Ichh8uN4l0FToV/258EjMGU9MGcA0HV2d9B/ZpZB3lqQm7nkOdZdm5GhKtLLhAE7PiVQwN4eN+2YJJUg==",
"dependencies": {
"@types/node": "*"
}
},
"node_modules/yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
}
},
"dependencies": {
"@types/geojson": {
"version": "7946.0.7",
"resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.7.tgz",
"integrity": "sha512-wE2v81i4C4Ol09RtsWFAqg3BUitWbHSpSlIo+bNdsCJijO9sjme+zm+73ZMCa/qMC8UEERxzGbvmr1cffo2SiQ=="
},
"@types/node": {
"version": "14.14.37",
"resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.37.tgz",
"integrity": "sha512-XYmBiy+ohOR4Lh5jE379fV2IU+6Jn4g5qASinhitfyO71b/sCo6MKsMLF5tc7Zf2CE8hViVQyYSobJNke8OvUw=="
},
"any-promise": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz",
"integrity": "sha1-q8av7tzqUugJzcA3au0845Y10X8="
},
"debug": {
"version": "4.3.1",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz",
"integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==",
"requires": {
"ms": "2.1.2"
}
},
"denque": {
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/denque/-/denque-1.5.0.tgz",
"integrity": "sha512-CYiCSgIF1p6EUByQPlGkKnP1M9g0ZV3qMIrqMqZqdwazygIA/YP2vrbcyl1h/WppKJTdl1F85cXIle+394iDAQ=="
},
"dotenv": {
"version": "8.2.0",
"resolved": "https://registry.npmjs.org/dotenv/-/dotenv-8.2.0.tgz",
"integrity": "sha512-8sJ78ElpbDJBHNeBzUbUVLsqKdccaa/BXF1uPTw3GrvQTBgrQrtObr2mUrE38vzYd8cEv+m/JBfDLioYcfXoaw=="
},
"dottie": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/dottie/-/dottie-2.0.2.tgz",
"integrity": "sha512-fmrwR04lsniq/uSr8yikThDTrM7epXHBAAjH9TbeH3rEA8tdCO7mRzB9hdmdGyJCxF8KERo9CITcm3kGuoyMhg=="
},
"iconv-lite": {
"version": "0.6.2",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.2.tgz",
"integrity": "sha512-2y91h5OpQlolefMPmUlivelittSWy0rP+oYVpn6A7GwVHNE8AWzoYOBNmlwks3LobaJxgHCYZAnyNo2GgpNRNQ==",
"requires": {
"safer-buffer": ">= 2.1.2 < 3.0.0"
}
},
"inflection": {
"version": "1.12.0",
"resolved": "https://registry.npmjs.org/inflection/-/inflection-1.12.0.tgz",
"integrity": "sha1-ogCTVlbW9fa8TcdQLhrstwMihBY="
},
"lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
},
"long": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz",
"integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA=="
},
"lru-cache": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
"requires": {
"yallist": "^4.0.0"
}
},
"mariadb": {
"version": "2.5.3",
"resolved": "https://registry.npmjs.org/mariadb/-/mariadb-2.5.3.tgz",
"integrity": "sha512-9ZbQ1zLqasLCQy6KDcPHtX7EUIMBlQ8p64gNR61+yfpCIWjPDji3aR56LvwbOz1QnQbVgYBOJ4J/pHoFN5MR+w==",
"requires": {
"@types/geojson": "^7946.0.7",
"@types/node": "^14.14.28",
"denque": "^1.4.1",
"iconv-lite": "^0.6.2",
"long": "^4.0.0",
"moment-timezone": "^0.5.33",
"please-upgrade-node": "^3.2.0"
}
},
"moment": {
"version": "2.29.1",
"resolved": "https://registry.npmjs.org/moment/-/moment-2.29.1.tgz",
"integrity": "sha512-kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ=="
},
"moment-timezone": {
"version": "0.5.33",
"resolved": "https://registry.npmjs.org/moment-timezone/-/moment-timezone-0.5.33.tgz",
"integrity": "sha512-PTc2vcT8K9J5/9rDEPe5czSIKgLoGsH8UNpA4qZTVw0Vd/Uz19geE9abbIOQKaAQFcnQ3v5YEXrbSc5BpshH+w==",
"requires": {
"moment": ">= 2.9.0"
}
},
"ms": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
},
"please-upgrade-node": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/please-upgrade-node/-/please-upgrade-node-3.2.0.tgz",
"integrity": "sha512-gQR3WpIgNIKwBMVLkpMUeR3e1/E1y42bqDQZfql+kDeXd8COYfM8PQA4X6y7a8u9Ua9FHmsrrmirW2vHs45hWg==",
"requires": {
"semver-compare": "^1.0.0"
}
},
"retry-as-promised": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/retry-as-promised/-/retry-as-promised-3.2.0.tgz",
"integrity": "sha512-CybGs60B7oYU/qSQ6kuaFmRd9sTZ6oXSc0toqePvV74Ac6/IFZSI1ReFQmtCN+uvW1Mtqdwpvt/LGOiCBAY2Mg==",
"requires": {
"any-promise": "^1.3.0"
}
},
"safer-buffer": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
},
"semver": {
"version": "7.3.5",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz",
"integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==",
"requires": {
"lru-cache": "^6.0.0"
}
},
"semver-compare": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/semver-compare/-/semver-compare-1.0.0.tgz",
"integrity": "sha1-De4hahyUGrN+nvsXiPavxf9VN/w="
},
"sequelize": {
"version": "6.6.2",
"resolved": "https://registry.npmjs.org/sequelize/-/sequelize-6.6.2.tgz",
"integrity": "sha512-H/zrzmTK+tis9PJaSigkuXI57nKBvNCtPQol0yxCvau1iWLzSOuq8t3tMOVeQ+Ep8QH2HoD9/+FCCIAqzUr/BQ==",
"requires": {
"debug": "^4.1.1",
"dottie": "^2.0.0",
"inflection": "1.12.0",
"lodash": "^4.17.20",
"moment": "^2.26.0",
"moment-timezone": "^0.5.31",
"retry-as-promised": "^3.2.0",
"semver": "^7.3.2",
"sequelize-pool": "^6.0.0",
"toposort-class": "^1.0.1",
"uuid": "^8.1.0",
"validator": "^10.11.0",
"wkx": "^0.5.0"
}
},
"sequelize-pool": {
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/sequelize-pool/-/sequelize-pool-6.1.0.tgz",
"integrity": "sha512-4YwEw3ZgK/tY/so+GfnSgXkdwIJJ1I32uZJztIEgZeAO6HMgj64OzySbWLgxj+tXhZCJnzRfkY9gINw8Ft8ZMg=="
},
"toposort-class": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/toposort-class/-/toposort-class-1.0.1.tgz",
"integrity": "sha1-f/0feMi+KMO6Rc1OGj9e4ZO9mYg="
},
"uuid": {
"version": "8.3.2",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
"integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="
},
"validator": {
"version": "10.11.0",
"resolved": "https://registry.npmjs.org/validator/-/validator-10.11.0.tgz",
"integrity": "sha512-X/p3UZerAIsbBfN/IwahhYaBbY68EN/UQBWHtsbXGT5bfrH/p4NQzUCG1kF/rtKaNpnJ7jAu6NGTdSNtyNIXMw=="
},
"wkx": {
"version": "0.5.0",
"resolved": "https://registry.npmjs.org/wkx/-/wkx-0.5.0.tgz",
"integrity": "sha512-Xng/d4Ichh8uN4l0FToV/258EjMGU9MGcA0HV2d9B/ZpZB3lqQm7nkOdZdm5GhKtLLhAE7PiVQwN4eN+2YJJUg==",
"requires": {
"@types/node": "*"
}
},
"yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
}
}
}

View File

@@ -1,16 +0,0 @@
{
"name": "sineql-test",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"dev": "node index.js"
},
"author": "Kayne Ruse",
"license": "ISC",
"dependencies": {
"dotenv": "^8.2.0",
"mariadb": "^2.5.3",
"sequelize": "^6.6.2"
}
}

View File

@@ -0,0 +1,175 @@
const buildTypeGraph = require('../source/build-type-graph');
const parseInput = require('../source/parse-input');
const parseCreateTree = require('../source/parse-create-tree');
//schemas
const simpleSchema = `
scalar Date
type Book {
unique String title
Date published
Float rating
}
type Author {
unique String name
Book books
}
`;
const simpleBookQuery = `
create Book {
create title "The Wind in the Willows"
create published "1908"
create rating 9.5
}
`;
const compoundBookQuery = `
create Book [
{
create title "The Philosepher's Kidney Stone"
}
{
create title "The Chamber Pot of Secrets"
}
{
create title "The Prisoner of Aunt Kazban"
}
{
create title "The Goblet of the Fire Cocktail"
}
{
create title "The Order for Kleenex"
}
{
create title "The Half-Priced Pharmacy"
}
{
create title "Yeah, I Got Nothing"
}
]
`;
const simpleAuthorQuery = `
create Author {
create name "Kenneth Grahame"
}
`;
const compoundAuthorQuery = `
create Author {
create name "J. K. Rolling"
match books [
{ match title "The Philosepher's Kidney Stone" }
{ match title "The Chamber Pot of Secrets" }
{ match title "The Prisoner of Aunt Kazban" }
{ match title "The Goblet of the Fire Cocktail" }
{ match title "The Order for Kleenex" }
{ match title "The Half-Priced Pharmacy" }
{ match title "Yeah, I Got Nothing" }
]
}
`;
const joiningQuery = `
create Author {
match name "Kenneth Grahame"
match books {
match title "The Wind in the Willows"
}
}
`;
//do stuff
test('parseCreateTree - create a single book', () => {
//setup
const tokens = parseInput(simpleBookQuery, true);
const graph = buildTypeGraph(simpleSchema);
//process
const createTree = parseCreateTree(tokens, graph);
//inspect
expect(createTree.length).toEqual(1);
expect(createTree[0].create).toEqual(true);
expect(createTree[0].typeName).toEqual('Book');
expect(createTree[0].title.create).toEqual('The Wind in the Willows'); //new data is stored in the "create" field
});
test('parseCreateTree - create an array of books', () => {
//setup
const tokens = parseInput(compoundBookQuery, true);
const graph = buildTypeGraph(simpleSchema);
//process
const createTree = parseCreateTree(tokens, graph);
//inspect
expect(createTree.length).toEqual(7);
});
test('parseCreateTree - create a single author', () => {
//setup
const tokens = parseInput(simpleAuthorQuery, true);
const graph = buildTypeGraph(simpleSchema);
//process
const createTree = parseCreateTree(tokens, graph);
//inspect
expect(createTree.length).toEqual(1);
expect(createTree[0].name.create).toEqual('Kenneth Grahame');
});
test('parseCreateTree - create an author with pre-existing books', () => {
//setup
const tokens = parseInput(compoundAuthorQuery, true);
const graph = buildTypeGraph(simpleSchema);
//process
const createTree = parseCreateTree(tokens, graph);
//inspect
expect(createTree.length).toEqual(1);
expect(createTree[0].books.length).toEqual(7);
});
test('parseCreateTree - join an existing author to an existing book', () => {
//setup
const tokens = parseInput(joiningQuery, true);
const graph = buildTypeGraph(simpleSchema);
//process
const createTree = parseCreateTree(tokens, graph);
//inspect
expect(createTree.length).toEqual(1);
expect(createTree[0].books.length).toEqual(1);
});
test('parseCreateTree - check for unique field', () => {
//setup
const tokens = parseInput(simpleAuthorQuery, true);
const graph = buildTypeGraph(simpleSchema);
//process
const createTree = parseCreateTree(tokens, graph);
//inspect
expect(createTree[0].name.unique).toEqual(true); //a bit useless here, but good none the less
});

47
test/parse-input.test.js Normal file
View File

@@ -0,0 +1,47 @@
const parseInput = require('../source/parse-input');
const schema = `
scalar Date
type Book {
unique String title
Date published
Float rating
}
type Author {
unique String name
Book books
}
`;
const query = `
Book {
title "The wind in the willows"
}
`;
const mushedQuery = 'Book{title"published"}'; //this is strange lol
//parse the input with no concern for validity of the structure
test('parseInput - generate the lexemes', () => {
const tokens = parseInput(schema, false, { debug: false });
expect(tokens.length).toEqual(22); //each lexeme becomes a token
});
test('parseInput - generate the lexemes (with strings enabled)', () => {
const tokens = parseInput(query, true, { debug: false });
expect(tokens.length).toEqual(5); //each lexeme becomes a token
});
test('parseInput - generate the lexemes (mushed query, with strings enabled)', () => {
const tokens = parseInput(mushedQuery, true, { debug: false });
expect(tokens.length).toEqual(5); //each lexeme becomes a token
});

View File

@@ -0,0 +1,107 @@
const buildTypeGraph = require('../source/build-type-graph');
const parseInput = require('../source/parse-input');
const parseQueryTree = require('../source/parse-query-tree');
//schemas
const simpleSchema = `
scalar Date
type Book {
unique String title
Date published
Float rating
}
type Author {
unique String name
Book books
}
`;
const simpleQuery = `
Author {
name
books {
title
published
}
}
`;
const badTypeQuery = `
Company {
author {
name
}
}
`;
const badFieldQuery = `
Author {
name
address
}
`;
//prepare for querying the database
test('parseQueryTree - simple query', () => {
//setup
const tokens = parseInput(simpleQuery, true);
const graph = buildTypeGraph(simpleSchema);
//process
const queryTree = parseQueryTree(tokens, graph);
//inspect
expect(queryTree.typeName).toEqual('Author');
expect(queryTree.name.typeName).toEqual('String');
expect(queryTree.books.typeName).toEqual('Book');
expect(queryTree.books.title.typeName).toEqual('String');
expect(queryTree.books.published.typeName).toEqual('Date');
});
test('parseQueryTree - bad type query', () => {
//setup
const tokens = parseInput(badTypeQuery, true);
const graph = buildTypeGraph(simpleSchema);
//process
const f = () => parseQueryTree(tokens, graph);
//inspect
expect(f).toThrow("Expected a type in the type graph (found Company)");
});
test('parseQueryTree - bad field query', () => {
//setup
const tokens = parseInput(badFieldQuery, true);
const graph = buildTypeGraph(simpleSchema);
//process
const f = () => parseQueryTree(tokens, graph);
//inspect
expect(f).toThrow("Unexpected field name address in type Author");
});
test('parseQueryTree - check for unique field', () => {
//setup
const tokens = parseInput(simpleQuery, true);
const graph = buildTypeGraph(simpleSchema);
//process
const queryTree = parseQueryTree(tokens, graph);
//inspect
expect(queryTree.name.unique).toEqual(true); //a bit useless here, but good none the less
});