var TYPE = require('../tokenizer').TYPE;
var WHITESPACE = TYPE.WhiteSpace;
var COMMENT = TYPE.Comment;

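// Reads a sequence of nodes from the token stream. The recognizer's getNode()
// is asked to parse a node for the current token; reading stops at EOF or when
// getNode() returns undefined. Comment tokens are dropped, and whitespace is
// kept only when it separates two accepted nodes, subject to the ignoreWS /
// ignoreWSAfter flags that getNode() may set on the context.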
module.exports = function readSequence(recognizer) {
    var children = this.createList();
    var child = null;
    var context = {
        recognizer: recognizer,
        space: null,
        ignoreWS: false,
        ignoreWSAfter: false
    };

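    // Skip any leading whitespace and comments before reading the sequence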
    this.scanner.skipSC();

    while (!this.scanner.eof) {
        switch (this.scanner.tokenType) {
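            // Comments never become part of the sequence; just consume the token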
            case COMMENT:
                this.scanner.next();
                continue;

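            // Whitespace is dropped while ignoreWS is set; otherwise it is
            // buffered in context.space until the next node is accepted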
            case WHITESPACE:
                if (context.ignoreWS) {
                    this.scanner.next();
                } else {
                    context.space = this.WhiteSpace();
                }
                continue;
        }

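        // Ask the recognizer for a node starting at the current token;
        // undefined means the token does not start anything this scope recognizes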
        child = recognizer.getNode.call(this, context);

        if (child === undefined) {
            break;
        }

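        // Flush buffered whitespace only now that another node follows it,
        // so trailing whitespace never ends up in the list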
        if (context.space !== null) {
            children.push(context.space);
            context.space = null;
        }

        children.push(child);

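        // ignoreWSAfter is a one-shot request from the node just read:
        // suppress the whitespace that immediately follows it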
        if (context.ignoreWSAfter) {
            context.ignoreWSAfter = false;
            context.ignoreWS = true;
        } else {
            context.ignoreWS = false;
        }
    }

    return children;
};
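
// Usage sketch (an assumption, not part of this module): callers bind `this`
// to the parser and pass a recognizer whose getNode(context) returns a node
// or undefined, roughly:
//
//     var children = this.readSequence(this.scope.Value);
//
// The scope name (scope.Value) is illustrative only.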