From c8c4aef8614df42a427f6081e4f8e618200366c3 Mon Sep 17 00:00:00 2001
From: Kim Brandwijk
Date: Sat, 6 Jan 2018 01:33:58 +0100
Subject: [PATCH] feat: object imports

Closes #4
---
 fixtures/import-all/a.graphql |   2 +-
 src/index.test.ts             | 160 ++++++++++++++++++++++++++++++++++
 src/index.ts                  |  31 ++++---
 3 files changed, 178 insertions(+), 15 deletions(-)

diff --git a/fixtures/import-all/a.graphql b/fixtures/import-all/a.graphql
index 02714cb..b0324ae 100644
--- a/fixtures/import-all/a.graphql
+++ b/fixtures/import-all/a.graphql
@@ -1,4 +1,4 @@
-# import B from "b.graphql"
+# import * from "b.graphql"
 
 type A {
   # test 1
diff --git a/src/index.test.ts b/src/index.test.ts
index dd9bb05..b44b629 100644
--- a/src/index.test.ts
+++ b/src/index.test.ts
@@ -116,6 +116,166 @@ type C2 {
   t.is(importSchema('fixtures/import-all/a.graphql'), expectedSDL)
 })
 
+test('importSchema: import all from objects', t => {
+  const schemaC = `
+    type C1 {
+      id: ID!
+    }
+
+    type C2 {
+      id: ID!
+    }
+
+    type C3 {
+      id: ID!
+    }`
+
+  const schemaB = `
+    # import * from 'schemaC'
+
+    type B {
+      hello: String!
+      c1: C1
+      c2: C2
+    }`
+
+  const schemaA = `
+    # import B from 'schemaB'
+
+    type A {
+      # test 1
+      first: String @first
+      second: Float
+      b: B
+    }`
+
+  const schemas = {
+    schemaA, schemaB, schemaC
+  }
+
+  const expectedSDL = `\
+type A {
+  first: String @first
+  second: Float
+  b: B
+}
+
+type B {
+  hello: String!
+  c1: C1
+  c2: C2
+}
+
+type C1 {
+  id: ID!
+}
+
+type C2 {
+  id: ID!
+}
+`
+  t.is(importSchema(schemaA, schemas), expectedSDL)
+})
+
+test(`importSchema: single object schema`, t => {
+  const schemaA = `
+    type A {
+      field: String
+    }`
+
+  const expectedSDL = `\
+type A {
+  field: String
+}
+`
+
+  t.is(importSchema(schemaA), expectedSDL)
+})
+
+test(`importSchema: import all mix 'n match`, t => {
+  const schemaB = `
+    # import C1, C2 from 'fixtures/import-all/c.graphql'
+
+    type B {
+      hello: String!
+      c1: C1
+      c2: C2
+    }`
+
+  const schemaA = `
+    # import * from "schemaB"
+
+    type A {
+      # test 1
+      first: String @first
+      second: Float
+      b: B
+    }`
+
+  const schemas = {
+    schemaB
+  }
+
+  const expectedSDL = `\
+type A {
+  first: String @first
+  second: Float
+  b: B
+}
+
+type B {
+  hello: String!
+  c1: C1
+  c2: C2
+}
+
+type C1 {
+  id: ID!
+}
+
+type C2 {
+  id: ID!
+}
+`
+  t.is(importSchema(schemaA, schemas), expectedSDL)
+})
+
+test(`importSchema: import all mix 'n match 2`, t => {
+
+  const schemaA = `
+    # import * from "fixtures/import-all/b.graphql"
+
+    type A {
+      # test 1
+      first: String @first
+      second: Float
+      b: B
+    }`
+
+  const expectedSDL = `\
+type A {
+  first: String @first
+  second: Float
+  b: B
+}
+
+type B {
+  hello: String!
+  c1: C1
+  c2: C2
+}
+
+type C1 {
+  id: ID!
+}
+
+type C2 {
+  id: ID!
+}
+`
+  t.is(importSchema(schemaA), expectedSDL)
+})
+
 test('importSchema: unions', t => {
   const expectedSDL = `\
 type A {
diff --git a/src/index.ts b/src/index.ts
index 8b9d689..cb87919 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -14,14 +14,14 @@ export interface RawModule {
   from: string
 }
 
-/**
- * Read a schema file from disk
- *
- * @param f Filename
- * @returns File contents
- */
-const read: (f: string) => string =
-  (f: string): string => fs.readFileSync(f, { encoding: 'utf8' })
+const read = (schema: string, schemas?: { [key: string]: string }) => {
+  if (isFile(schema)) {
+    return fs.readFileSync(schema, { encoding: 'utf8' })
+  }
+  return schemas ? schemas[schema] : schema
+}
+
+const isFile = f => f.endsWith('.graphql')
 
 /**
  * Parse a single import line and extract imported types and schema filename
@@ -68,15 +68,16 @@ export function parseSDL(sdl: string): RawModule[] {
  * @param filePath File path to the initial schema file
  * @returns Single bundled schema with all imported types
  */
-export function importSchema(filePath: string): string {
-  const sdl = read(filePath)
+export function importSchema(schema: string, schemas?: { [key: string]: string }): string {
+  const sdl = read(schema, schemas) || schema
   const document = parse(sdl, { noLocation: true })
 
   // Recursively process the imports, starting by importing all types from the initial schema
   let { allDefinitions, typeDefinitions } = collectDefinitions(
     ['*'],
     sdl,
-    path.resolve(filePath)
+    schema,
+    schemas
   )
 
   // Post processing of the final schema (missing types, unused types, etc.)
@@ -125,6 +126,7 @@ function collectDefinitions(
   imports: string[],
   sdl: string,
   filePath: string,
+  schemas?: { [key: string]: string },
   processedFiles: Set<string> = new Set(),
   typeDefinitions: TypeDefinitionNode[][] = [],
   allDefinitions: TypeDefinitionNode[][] = []
@@ -132,7 +134,7 @@
   allDefinitions: TypeDefinitionNode[][]
   typeDefinitions: TypeDefinitionNode[][]
 } {
-  const key = path.resolve(filePath)
+  const key = isFile(filePath) ? path.resolve(filePath) : filePath
   const dirname = path.dirname(filePath)
 
   // Get TypeDefinitionNodes from current schema
@@ -159,12 +161,13 @@
   // Process each file (recursively)
   rawModules.forEach(m => {
     // If it was not yet processed (in case of circular dependencies)
-    const moduleFilePath = path.resolve(path.join(dirname, m.from))
+    const moduleFilePath = isFile(filePath) ? path.resolve(path.join(dirname, m.from)) : m.from
     if (!processedFiles.has(moduleFilePath)) {
       collectDefinitions(
         m.imports,
-        read(moduleFilePath),
+        read(moduleFilePath, schemas),
         moduleFilePath,
+        schemas,
         processedFiles,
         typeDefinitions,
         allDefinitions
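
A minimal usage sketch of the importSchema(schema, schemas) signature introduced by this patch, assuming the package is consumed as graphql-import; the schema strings and variable names below are illustrative (they mirror the new tests) and are not part of the patch itself.

import { importSchema } from 'graphql-import'

// Schemas whose names do not end in '.graphql' are looked up by key in the
// optional `schemas` object instead of being read from disk.
const schemaC = `
  type C1 {
    id: ID!
  }`

const schemaB = `
  # import * from 'schemaC'

  type B {
    hello: String!
    c1: C1
  }`

const schemaA = `
  # import B from 'schemaB'

  type A {
    b: B
  }`

// Bundles A, B and the referenced C1 into a single SDL string.
const bundledSDL = importSchema(schemaA, { schemaB, schemaC })
console.log(bundledSDL)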