
Ticket #212: tokenizer-error-repeat.patch

File tokenizer-error-repeat.patch, 3.5 KB (added by hopscc, 14 years ago)
  • Source/Parser.cobra

     
     3     3    var _token as IToken?
     4     4    var _fileName as String
     5     5    var _lineNum as int
           6    var tokenizerError = false
     6     7
     7     8    cue init(token as IToken, message as String)
     8     9        base.init(message)
     
    23    24    get hasSourceSite as bool is override
    24    25        return true
    25    26
          27    def consoleString as String is override
          28        if .tokenizerError
          29            # Tokeniser errors already have source info embedded in msg so just use that
          30            return .message
          31        return base.consoleString
          32
    26    33    get fileName from var is override
    27    34
    28    35    get lineNum from var is override
     
   196   203        err = _makeError(token, msg)
   197   204        _errorRecorder.recordError(err)
   198   205        return err
         206
         207    def recordTokenError(token as IToken?, msg as String) as ParserException
         208        """
         209        Errors from the Tokeniser need to be flagged so they are treated slightly differently,
         210        since they already have the source info in their msg.
         211        """
         212        err = .recordError(token, msg)
         213        err.tokenizerError = true
         214        return err
   199   215
   200   216    def throwError(msg as String)
   201   217        .throwError(.last, msg)
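
For context, a minimal self-contained sketch of the pattern this hunk introduces. `FlaggedError`, `Demo`, and the hard-coded source site are hypothetical stand-ins for the real compiler classes; only the flag-then-return-verbatim idea comes from the patch. When the tokenizer already embedded `file(line,col): error:` in the message, `consoleString` returns the message as-is instead of prepending the prefix a second time.

    # FlaggedError is a hypothetical stand-in for the compiler's ParserException
    class FlaggedError inherits Exception

        var tokenizerError = false

        cue init(message as String)
            base.init(message)

        def consoleString as String
            if .tokenizerError
                # Tokenizer messages already embed the source site, so emit them verbatim
                return .message
            # Otherwise prepend the site as before (hard-coded here for the sketch)
            return 'example.cobra(8): error: ' + .message

    class Demo

        def main
            plain = FlaggedError('Something went wrong.')
            print plain.consoleString
            # -> example.cobra(8): error: Something went wrong.
            lexed = FlaggedError('example.cobra(8,1): error: Space-based indentation must be a multiple of 4.')
            lexed.tokenizerError = true
            print lexed.consoleString
            # -> example.cobra(8,1): error: Space-based indentation must be a multiple of 4.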
  • Source/CobraParser.cobra

     
   204   204        Sets up for parsing, but does not invoke `parseTokens`.
   205   205        Used by `parseSource` and various test sections.
   206   206        Returns true if successful, false if there were one or more errors.
   207               Upon success, you can use token methods lik .grab.
         207        Upon success, you can use token methods like .grab.
   208   208        Does not .throwError but may .recordError.
   209   209        """
   210   210        _fileName = fileName
     
   233   233                else
   234   234                    _tokens.add(tokens[i])
   235   235        catch te as TokenizerError
   236                   .recordError(te.token, te.message)
         236            .recordTokenError(te.token, te.message)
   237   237            return false
   238   238
   239   239        # TODO: fold all occurrences of this lookup to a helper
     
   245   245
   246   246        if tokenizer.errors.count
   247   247            for error in tokenizer.errors
   248                       .recordError(error.token, error.message)
         248                .recordTokenError(error.token, error.message)
   249   249            return false
   250   250
   251   251        _nextTokenIndex = 0
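
A similarly hedged sketch of the call-site change above: tokenizer failures are routed through `recordTokenError` so the flag is set before the error reaches the console. `MiniRecorder` and `RecordedError` are hypothetical stand-ins for the parser's error recorder and `ParserException`.

    # RecordedError is a hypothetical stand-in for ParserException
    class RecordedError inherits Exception

        var tokenizerError = false

        cue init(message as String)
            base.init(message)

    # MiniRecorder is a hypothetical stand-in for the parser's error recorder
    class MiniRecorder

        var _errors = List<of RecordedError>()

        def recordError(msg as String) as RecordedError
            err = RecordedError(msg)
            _errors.add(err)
            return err

        def recordTokenError(msg as String) as RecordedError
            # Same recording path, plus the flag that suppresses the
            # duplicate "file(line): error:" prefix in consoleString
            err = .recordError(msg)
            err.tokenizerError = true
            return err

    class Demo2

        def main
            rec = MiniRecorder()
            err = rec.recordTokenError('x.cobra(8,1): error: bad indent')
            assert err.tokenizerError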
  • Tests/820-errors/100-lexing-and-parsing/105-tokenizer-error-msg.cobra

     
     1    # Test for Tokenizer error giving double print of line no. info: ticket:212
     2    # Error line below used to emit
     3    #  105-tokenizer-error-msg.cobra(8): error: 105-tokenizer-error-msg.cobra(8,1): error: Space-based indentation must be a multiple of 4.
     4
     5    class TokenizerError
     6
     7        # below is wrongly indented only 3 spaces
     8       def t    #.error. 105-tokenizer-error-msg.cobra(8,1): error: Space-based indentation must be a multiple of 4
     9            pass
  • Developer/IntermediateReleaseNotes.text

     
   473   473    * Fixed: Only the last `test` of a series of tests is included in the compiler's output.
   474   474
   475   475    * Fixed: Dynamic binding cannot find `shared` methods.  ticket:208
         476
         477    * Fixed: File, line number and 'error' repeated in compiler error message.  ticket:212