diff --git a/src/language/__tests__/lexer-test.js b/src/language/__tests__/lexer-test.js index 2d6e6b11fc..ec4c0baad3 100644 --- a/src/language/__tests__/lexer-test.js +++ b/src/language/__tests__/lexer-test.js @@ -128,8 +128,7 @@ describe('Lexer', () => { '{ kind: "Name", value: "foo", line: 1, column: 1 }', ); }); - - it('skips whitespace and comments', () => { + it('skips whitespace and commas', () => { expect( lexOne(` @@ -144,18 +143,6 @@ describe('Lexer', () => { value: 'foo', }); - expect( - lexOne(` - #comment - foo#comment -`), - ).to.contain({ - kind: TokenKind.NAME, - start: 18, - end: 21, - value: 'foo', - }); - expect(lexOne(',,,foo,,,')).to.contain({ kind: TokenKind.NAME, start: 3, @@ -181,7 +168,6 @@ describe('Lexer', () => { 4 | `); }); - it('updates line numbers in error for file context', () => { let caughtError; try { @@ -218,6 +204,19 @@ describe('Lexer', () => { | ^ `); }); + it('lexes comments', () => { + expect( + lexOne( + dedent`#this is a comment + a{}`, + ), + ).to.contain({ + kind: TokenKind.COMMENT, + start: 0, + end: 18, + value: 'this is a comment', + }); + }); it('lexes strings', () => { expect(lexOne('""')).to.contain({ @@ -877,9 +876,6 @@ describe('Lexer', () => { let endToken; do { endToken = lexer.advance(); - // Lexer advances over ignored comment tokens to make writing parsers - // easier, but will include them in the linked list result. - expect(endToken.kind).to.not.equal(TokenKind.COMMENT); } while (endToken.kind !== TokenKind.EOF); expect(startToken.prev).to.equal(null); diff --git a/src/language/__tests__/parser-test.js b/src/language/__tests__/parser-test.js index c3ff3a57de..e65332c5bf 100644 --- a/src/language/__tests__/parser-test.js +++ b/src/language/__tests__/parser-test.js @@ -147,6 +147,83 @@ describe('Parser', () => { ); }); + it('adds a single comment to the AST', () => { + const ast = parse(dedent` + #This comment has a \u0A0A multi-byte character. + type alpha{ field(arg: string):string } + `); + + expect(toJSONDeep(ast.comments)).to.deep.equal([ + { + kind: 'Comment', + loc: { start: 0, end: 43 }, + value: 'This comment has a ਊ multi-byte character.', + }, + ]); + }); + + it('collects comments skipped while peeking for a token', () => { + const ast = parse(dedent` + type #This is a comment that gets ignored + alpha{ field(arg: string):string } + `); + + expect(toJSONDeep(ast.comments)).to.deep.equal([ + { + kind: 'Comment', + loc: { + end: 41, + start: 5, + }, + value: 'This is a comment that gets ignored', + }, + ]); + }); + + it('adds empty comments to the AST', () => { + const ast = parse(dedent` + # + type alpha{ field(arg: string):string } + `); + + expect(toJSONDeep(ast.comments)).to.deep.equal([ + { + kind: 'Comment', + loc: { start: 0, end: 1 }, + value: '', + }, + ]); + }); + + it('adds multiple comments to the AST', () => { + const ast = parse(dedent` + #This is top comment + type alpha{ + #This comment is demo comment.
+ field(arg: string):string + #This is another demo comment having # inside + } + `); + + expect(toJSONDeep(ast.comments)).to.deep.equal([ + { + kind: 'Comment', + loc: { start: 0, end: 20 }, + value: 'This is top comment', + }, + { + kind: 'Comment', + loc: { start: 35, end: 65 }, + value: 'This comment is demo comment.', + }, + { + kind: 'Comment', + loc: { start: 97, end: 142 }, + value: 'This is another demo comment having # inside', + }, + ]); + }); + it('parses kitchen sink', () => { expect(() => parse(kitchenSinkQuery)).to.not.throw(); }); @@ -231,6 +308,7 @@ describe('Parser', () => { expect(toJSONDeep(result)).to.deep.equal({ kind: Kind.DOCUMENT, + comments: [], loc: { start: 0, end: 41 }, definitions: [ { @@ -321,6 +399,7 @@ describe('Parser', () => { expect(toJSONDeep(result)).to.deep.equal({ kind: Kind.DOCUMENT, + comments: [], loc: { start: 0, end: 30 }, definitions: [ { diff --git a/src/language/__tests__/schema-parser-test.js b/src/language/__tests__/schema-parser-test.js index a9577d12da..d46a7076ca 100644 --- a/src/language/__tests__/schema-parser-test.js +++ b/src/language/__tests__/schema-parser-test.js @@ -79,6 +79,7 @@ describe('Schema Parser', () => { expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'ObjectTypeDefinition', @@ -176,6 +177,7 @@ describe('Schema Parser', () => { expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'ObjectTypeExtension', @@ -201,6 +203,7 @@ describe('Schema Parser', () => { expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'ObjectTypeExtension', @@ -219,6 +222,7 @@ describe('Schema Parser', () => { const doc = parse('extend interface Hello implements Greeting'); expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'InterfaceTypeExtension', @@ -242,6 +246,7 @@ describe('Schema Parser', () => { expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'ObjectTypeExtension', @@ -304,6 +309,7 @@ describe('Schema Parser', () => { `); expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'InterfaceTypeExtension', @@ -376,6 +382,7 @@ describe('Schema Parser', () => { const doc = parse(body); expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'SchemaExtension', @@ -400,6 +407,7 @@ describe('Schema Parser', () => { const doc = parse(body); expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'SchemaExtension', @@ -442,6 +450,7 @@ describe('Schema Parser', () => { expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'ObjectTypeDefinition', @@ -471,6 +480,7 @@ describe('Schema Parser', () => { const doc = parse('interface Hello implements World { field: String }'); expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'InterfaceTypeDefinition', @@ -497,6 +507,7 @@ describe('Schema Parser', () => { expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'ObjectTypeDefinition', @@ -523,6 +534,7 @@ describe('Schema Parser', () => { expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'ObjectTypeDefinition', @@ -551,6 +563,7 @@ describe('Schema Parser', () => { const doc = parse('interface Hello implements Wo & rld { field: String }'); 
expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'InterfaceTypeDefinition', @@ -580,6 +593,7 @@ describe('Schema Parser', () => { expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'ObjectTypeDefinition', @@ -610,6 +624,7 @@ describe('Schema Parser', () => { ); expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'InterfaceTypeDefinition', @@ -639,6 +654,7 @@ describe('Schema Parser', () => { expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'EnumTypeDefinition', @@ -658,6 +674,7 @@ describe('Schema Parser', () => { expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'EnumTypeDefinition', @@ -684,6 +701,7 @@ describe('Schema Parser', () => { expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'InterfaceTypeDefinition', @@ -714,6 +732,7 @@ describe('Schema Parser', () => { expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'ObjectTypeDefinition', @@ -752,6 +771,7 @@ describe('Schema Parser', () => { expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'ObjectTypeDefinition', @@ -794,6 +814,7 @@ describe('Schema Parser', () => { expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'ObjectTypeDefinition', @@ -836,6 +857,7 @@ describe('Schema Parser', () => { expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'ObjectTypeDefinition', @@ -876,6 +898,7 @@ describe('Schema Parser', () => { expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'UnionTypeDefinition', @@ -895,6 +918,7 @@ describe('Schema Parser', () => { expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'UnionTypeDefinition', @@ -917,6 +941,7 @@ describe('Schema Parser', () => { expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'UnionTypeDefinition', @@ -967,6 +992,7 @@ describe('Schema Parser', () => { expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'ScalarTypeDefinition', @@ -988,6 +1014,7 @@ input Hello { expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'InputObjectTypeDefinition', @@ -1026,6 +1053,7 @@ input Hello { expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'DirectiveDefinition', @@ -1062,6 +1090,7 @@ input Hello { expect(toJSONDeep(doc)).to.deep.equal({ kind: 'Document', + comments: [], definitions: [ { kind: 'DirectiveDefinition', diff --git a/src/language/__tests__/visitor-test.js b/src/language/__tests__/visitor-test.js index c4bcf1d687..d51df4f518 100644 --- a/src/language/__tests__/visitor-test.js +++ b/src/language/__tests__/visitor-test.js @@ -4,6 +4,7 @@ import { expect } from 'chai'; import { describe, it } from 'mocha'; import invariant from '../../jsutils/invariant'; +import dedent from '../../jsutils/dedent'; import { Kind } from '../kinds'; import { parse } from '../parser'; @@ -115,6 +116,43 @@ describe('Visitor', () => { }); }); + it('allows visiting multiple comments including comments in definitions', () => { + const ast = parse( + dedent` + #This is a comment + { + #a is a field + 
a + } + `, + { noLocation: true }, + ); + const visited = []; + + visit(ast, { + enter: { + Comment(node) { + visited.push(['enter', node.kind]); + visited.push(['value', node.value]); + }, + }, + leave: { + Comment(node) { + visited.push(['leave', node.kind]); + }, + }, + }); + + expect(visited).to.deep.equal([ + ['enter', 'Comment'], + ['value', 'This is a comment'], + ['leave', 'Comment'], + ['enter', 'Comment'], + ['value', 'a is a field'], + ['leave', 'Comment'], + ]); + }); + it('allows visiting only specified nodes', () => { const ast = parse('{ a }', { noLocation: true }); const visited = []; diff --git a/src/language/ast.d.ts b/src/language/ast.d.ts index 576db47b6c..5090239519 100644 --- a/src/language/ast.d.ts +++ b/src/language/ast.d.ts @@ -99,6 +99,7 @@ export function isNode(maybeNode: any): maybeNode is ASTNode; export type ASTNode = | NameNode | DocumentNode + | CommentNode | OperationDefinitionNode | VariableDefinitionNode | VariableNode @@ -147,6 +148,7 @@ export type ASTNode = export interface ASTKindToNode { Name: NameNode; Document: DocumentNode; + Comment: CommentNode; OperationDefinition: OperationDefinitionNode; VariableDefinition: VariableDefinitionNode; Variable: VariableNode; @@ -204,6 +206,13 @@ export interface DocumentNode { readonly kind: 'Document'; readonly loc?: Location; readonly definitions: ReadonlyArray<DefinitionNode>; + readonly comments?: ReadonlyArray<CommentNode>; } + +export interface CommentNode { + readonly kind: 'Comment'; + readonly loc?: Location; + readonly value: string; } export type DefinitionNode = diff --git a/src/language/ast.js b/src/language/ast.js index 9d5df64d86..1be86ff003 100644 --- a/src/language/ast.js +++ b/src/language/ast.js @@ -135,6 +135,7 @@ export function isNode(maybeNode: mixed): boolean %checks { export type ASTNode = | NameNode | DocumentNode + | CommentNode | OperationDefinitionNode | VariableDefinitionNode | VariableNode @@ -183,6 +184,7 @@ export type ASTNode = export type ASTKindToNode = {| Name: NameNode, Document: DocumentNode, + Comment: CommentNode, OperationDefinition: OperationDefinitionNode, VariableDefinition: VariableDefinitionNode, Variable: VariableNode, @@ -240,6 +242,7 @@ export type DocumentNode = {| +kind: 'Document', +loc?: Location, +definitions: $ReadOnlyArray<DefinitionNode>, + +comments?: $ReadOnlyArray<CommentNode>, |}; export type DefinitionNode = @@ -247,6 +250,12 @@ export type DefinitionNode = | TypeSystemDefinitionNode | TypeSystemExtensionNode; +export type CommentNode = {| + +kind: 'Comment', + +loc?: Location, + +value: string, +|}; + export type ExecutableDefinitionNode = | OperationDefinitionNode | FragmentDefinitionNode; diff --git a/src/language/lexer.js b/src/language/lexer.js index b6ab501308..1e632369f4 100644 --- a/src/language/lexer.js +++ b/src/language/lexer.js @@ -49,11 +49,14 @@ export class Lexer { } /** - * Advances the token stream to the next non-ignored token. + * Advances the token stream to the next token, including comment tokens. + * Only non-comment tokens are recorded as the previous (lastToken) pointer. */ advance(): Token { - this.lastToken = this.token; - const token = (this.token = this.lookahead()); + if (this.token.kind !== TokenKind.COMMENT) { + this.lastToken = this.token; + } + const token = (this.token = this.lookahead(false)); return token; } /** * Looks ahead and returns the next non-ignored token, but does not change * the state of Lexer.
*/ - lookahead(): Token { + lookahead(ignoreComments: boolean = true): Token { let token = this.token; if (token.kind !== TokenKind.EOF) { + let endLoop = false; do { // Note: next is only mutable during parsing, so we cast to allow this. token = token.next ?? ((token: any).next = readToken(this, token)); - } while (token.kind === TokenKind.COMMENT); + + // If comments are not ignored, return whatever token was found, + // comment or not; otherwise keep looping until the next + // non-comment token is found. + if (!ignoreComments || token.kind !== TokenKind.COMMENT) { + endLoop = true; + } + } while (!endLoop); } return token; } diff --git a/src/language/parser.js b/src/language/parser.js index 2fdcdc6ab6..eb8fdfdc1b 100644 --- a/src/language/parser.js +++ b/src/language/parser.js @@ -17,6 +17,7 @@ import { type NameNode, type VariableNode, type DocumentNode, + type CommentNode, type DefinitionNode, type OperationDefinitionNode, type OperationTypeNode, @@ -166,7 +167,7 @@ export function parseType( class Parser { _options: ?ParseOptions; _lexer: Lexer; - + _comments: Array<CommentNode>; constructor(source: string | Source, options?: ParseOptions) { const sourceObj = typeof source === 'string' ? new Source(source) : source; devAssert( @@ -176,6 +177,23 @@ class Parser { this._lexer = new Lexer(sourceObj); this._options = options; + this._comments = []; + } + + advance(): Token { + let token = this._lexer.advance(); + while (token.kind === TokenKind.COMMENT) { + this._lexer.advance(); + this._comments.push({ + kind: TokenKind.COMMENT, + // Comment tokens are not stored in the token linked list, so build the + // Location directly instead of going through this.loc. + loc: new Location(token, token, this._lexer.source), + value: token.value || '', + }); + token = this._lexer.token; + } + return token; } /** @@ -197,7 +215,7 @@ */ parseDocument(): DocumentNode { const start = this._lexer.token; - return { + const { kind, definitions, loc } = { kind: Kind.DOCUMENT, definitions: this.many( TokenKind.SOF, @@ -206,6 +224,8 @@ ), loc: this.loc(start), }; + const comments = this._comments; + return { kind, definitions, loc, comments }; } /** @@ -537,14 +557,14 @@ class Parser { case TokenKind.BRACE_L: return this.parseObject(isConst); case TokenKind.INT: - this._lexer.advance(); + this.advance(); return { kind: Kind.INT, value: ((token.value: any): string), loc: this.loc(token), }; case TokenKind.FLOAT: - this._lexer.advance(); + this.advance(); return { kind: Kind.FLOAT, value: ((token.value: any): string), @@ -554,7 +574,7 @@ case TokenKind.BLOCK_STRING: return this.parseStringLiteral(); case TokenKind.NAME: - this._lexer.advance(); + this.advance(); switch (token.value) { case 'true': return { kind: Kind.BOOLEAN, value: true, loc: this.loc(token) }; @@ -580,7 +600,7 @@ parseStringLiteral(): StringValueNode { const token = this._lexer.token; - this._lexer.advance(); + this.advance(); return { kind: Kind.STRING, value: ((token.value: any): string), @@ -878,8 +898,8 @@ this.peek(TokenKind.BRACE_L) && this._lexer.lookahead().kind === TokenKind.BRACE_R ) { - this._lexer.advance(); - this._lexer.advance(); + this.advance(); + this.advance(); return []; } return this.optionalMany( @@ -1418,7 +1438,7 @@ expectToken(kind: TokenKindEnum): Token { const token = this._lexer.token; if (token.kind === kind) { - this._lexer.advance(); + this.advance(); return token; } @@ -1436,7 +1456,7 @@ expectOptionalToken(kind: TokenKindEnum): ?Token {
const token = this._lexer.token; if (token.kind === kind) { - this._lexer.advance(); + this.advance(); return token; } return undefined; @@ -1449,7 +1469,7 @@ class Parser { expectKeyword(value: string) { const token = this._lexer.token; if (token.kind === TokenKind.NAME && token.value === value) { - this._lexer.advance(); + this.advance(); } else { throw syntaxError( this._lexer.source, @@ -1466,7 +1486,7 @@ class Parser { expectOptionalKeyword(value: string): boolean { const token = this._lexer.token; if (token.kind === TokenKind.NAME && token.value === value) { - this._lexer.advance(); + this.advance(); return true; } return false; diff --git a/src/language/visitor.js b/src/language/visitor.js index e310ecdefc..e1056e642d 100644 --- a/src/language/visitor.js +++ b/src/language/visitor.js @@ -51,13 +51,14 @@ export type VisitorKeyMap = $ObjMap< export const QueryDocumentKeys: VisitorKeyMap = { Name: [], - Document: ['definitions'], + Document: ['definitions', 'comments'], OperationDefinition: [ 'name', 'variableDefinitions', 'directives', 'selectionSet', ], + Comment: [], VariableDefinition: ['variable', 'type', 'defaultValue', 'directives'], Variable: ['name'], SelectionSet: ['selections'], diff --git a/src/utilities/stripIgnoredCharacters.js b/src/utilities/stripIgnoredCharacters.js index 2b2036b109..0d5af2d311 100644 --- a/src/utilities/stripIgnoredCharacters.js +++ b/src/utilities/stripIgnoredCharacters.js @@ -76,6 +76,10 @@ export function stripIgnoredCharacters(source: string | Source): string { let wasLastAddedTokenNonPunctuator = false; while (lexer.advance().kind !== TokenKind.EOF) { + if (lexer.token.kind === TokenKind.COMMENT) { + continue; + } + const currentToken = lexer.token; const tokenKind = currentToken.kind;
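
Usage sketch (not part of the diff): the snippet below shows how the API proposed above would be exercised — reading the optional `comments` array that `parseDocument` now attaches to the `Document` node, and visiting `Comment` nodes through the new `Document -> 'comments'` visitor key. It assumes a build of graphql-js that includes these changes; the sample schema text and the import path are illustrative only.

```js
// Sketch only: exercises the `ast.comments` field and the `Comment` visitor
// key introduced by this diff. Assumes a build that includes these changes;
// the schema below is just an example document containing comments.
import { parse, visit } from 'graphql';

const ast = parse(`
#Top-level comment
type Query {
  #field comment
  hello: String
}
`);

// Every comment in the document is collected on the Document node in source
// order, each with its own location.
for (const comment of ast.comments) {
  console.log(comment.value, comment.loc.start, comment.loc.end);
}

// Comment nodes are also reachable through visit() via the new
// Document -> 'comments' visitor key.
visit(ast, {
  Comment(node) {
    console.log('visited comment:', node.value);
  },
});
```

Because `lexer.advance()` now returns comment tokens instead of skipping them, consumers of the raw token stream have to skip `TokenKind.COMMENT` themselves, which is exactly what the `stripIgnoredCharacters` hunk above does.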