Ticket #211: implicit-line-cont-preparser.patch
File implicit-line-cont-preparser.patch, 5.8 KB (added by hopscc, 14 years ago)
Source/CobraParser.cobra
         The fileName is recorded, but the file is not physically accessed.
         After parsing, .references will hold a list of any @ref directive references that were parsed.
         """
-        if not _preParseSource(fileName, source)
+        if not _tokeniseSource(fileName, source)
             # Throw an exception to stop subsequent compilation phases.
             # Note that this doesn't get recorded as an error message.
             # That only happens through .throwError and .recordError
…
                 throw
         if _tokenizer and _tokenizer.errors.count <> 0, _tokenizer = nil

-    def _preParseSource(fileName as String, source as String) as bool
+    def _tokeniseSource(fileName as String, source as String) as bool
         """
-        Sets up for parsing, but does not invoke `parseTokens`.
+        Sets up for parsing: turns the source text into a list of tokens in _tokens,
+        but does not invoke `parseTokens`.
         Used by `parseSource` and various test sections.
         Returns true if successful, false if there were one or more errors.
-        Upon success, you can use token methods lik .grab.
+        Upon success, you can use token methods like .grab.
         Does not .throwError but may .recordError.
         """
         _fileName = fileName
-        tokVerbosity = _verbosity - 4  # in order words, tokenizer does not spew unless our verbosity is 5 or greater
+        tokVerbosity = _verbosity - 4  # in other words, the tokenizer does not spew unless our verbosity is 5 or greater
         if tokVerbosity < 0, tokVerbosity = 0

         _isNamesStack = Stack<of String>()  # for `shared` for example
…
         try
             tokens = tokenizer.startSource(_fileName, source).allTokens
             _tokens = List<of IToken>(tokens.count)
-            for i = 0 .. tokens.count
+            choppedIndents = 0
+            for i in 0 : tokens.count
                 if tokens[i].text == '_' and tokens[i].which == 'ID'
                     if i < tokens.count-1 and tokens[i+1].which == 'EOL'
                         i += 1
                     else
                         .recordError(tokens[i], 'Unexpected line continuation character.')
-                else
-                    _tokens.add(tokens[i])
+                    continue
+                # handle implicit line continuation tokens
+                if i < tokens.count-1 and tokens[i+1].which == 'EOL' and _isImplicitLineContinuation(tokens, i)
+                    _tokens.add(tokens[i])  # add the operator
+                    i += 1  # eat the following EOL
+                    # remove any now-hanging indents
+                    while i < tokens.count-1 and tokens[i+1].which == 'INDENT'
+                        i += 1
+                        choppedIndents += 1
+                    continue
+                # correct following dedents for removed hanging indents
+                if choppedIndents > 0 and tokens[i].which == 'DEDENT'
+                    while choppedIndents > 0 and tokens[i].which == 'DEDENT'
+                        choppedIndents -= 1
+                        i += 1
+                    assert choppedIndents == 0
+                    i -= 1
+                    continue
+
+                #print i, tokens[i]
+                _tokens.add(tokens[i])
         catch te as TokenizerError
             .recordError(te.token, te.message)
             return false
…

         _nextTokenIndex = 0
         return true
+
+    var _implicitContinuationOps = [
+        'ASSIGN', 'AND', 'OR', 'STAR', 'SLASH', 'PLUS',
+        'MINUS', 'EQ', 'NE', 'LT', 'GT', 'LE', 'GE', 'IN', 'NOTIN',
+        'PLUS_EQUALS', 'MINUS_EQUALS', 'STAR_EQUALS', 'SLASH_EQUALS', 'PERCENT_EQUALS',
+        'SLASHSLASH', 'PERCENT', 'AMPERSAND', 'VERTICAL_BAR', 'CARET', 'DOUBLE_LT',
+        'DOUBLE_GT', 'AMPERSAND_EQUALS', 'VERTICAL_BAR_EQUALS', 'CARET_EQUALS',
+        'DOUBLE_LT_EQUALS', 'DOUBLE_GT_EQUALS', 'STARSTAR', 'STARSTAR_EQUALS'
+    ]

+    def _isImplicitLineContinuation(tokens as List<of IToken>, i as int) as bool
+        # binary ops that can act as an implicit line continuation, ordered by frequency of use
+        if tokens[i].which in _implicitContinuationOps
+            if tokens[i].which == 'GT' or tokens[i].which == 'DOUBLE_GT'  # could end a generic
+                for j in 1 : 10  # look back up to 10 tokens
+                    if i-j == 0 or tokens[i-j].which == 'EOL', break
+                    if tokens[i-j].which == 'OPEN_GENERIC'
+                        return false
+            if tokens[i].which == 'STAR'  # could be a stream declaration
+                for j in 1 : 20  # look back up to 20 tokens
+                    if i-j == 0 or tokens[i-j].which == 'EOL', break
+                    if tokens[i-j].which == 'AS'  # as ...TYPE...*
+                        return false
+            return true
+        return false
+
     def optionalStringLiteral as IToken?
         """
         Gets a token if it matches one of the string literals,
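
A rough worked example of what the new loop does to the token stream. EOL, INDENT, DEDENT, PLUS and EQ are the token kind names the patch itself tests for; the names used below for the assert keyword and integer literals are only guesses:

    # Cobra source, taken from the test file below:
    #     assert 1 +    # implicit trailing '+', 1 indent
    #         2 == 3
    #
    # The tokenizer emits, roughly:
    #     ASSERT INT PLUS EOL INDENT INT EQ INT EOL DEDENT
    #
    # _tokeniseSource sees PLUS followed by EOL, and PLUS is listed in
    # _implicitContinuationOps, so it keeps PLUS, eats the EOL, drops the
    # now-hanging INDENT (choppedIndents becomes 1) and later swallows one
    # matching DEDENT. The parser therefore receives:
    #     ASSERT INT PLUS INT EQ INT EOL
    # which is the same stream it would get for: assert 1 + 2 == 3
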
Tests/110-basics-two/150-line-continuation/120-implicit-line-cont-ops.cobra
# Test for implicit forms of line continuation: round brackets and trailing binary ops
# ticket:211 (partially also addresses ticket 210)
class Program

    def main
        assert 1 + 2 == 3  # ok

        assert (1 +  # braces ok
            2) == 3
        assert 1 *  # implicit trailing '*', 0 indent
        2 == 2

        assert 1 +  # implicit trailing '+', 1 indent
            2 == 3
        #assert (1 +  # Ticket 210: Cannot mix tabs and spaces in indentation.
        #    2)
        #assert 1 +  # Both tickets.
        #    2 == 3

        assert (1 +  # braces, double indent
                12) == 13
        assert 1 +  # trailing '+', double indent after
                2 == 3
        assert 3 -  # trailing '-', double indent after
                2 == 1

        assert 3 *  # ((3*1) + 4) - (8/2)  # multiple lines
            1 +
            4 -
            8 /
            2 == 3

        a = 1
        b = 2
        assert (a == 1 and
            b == 2)
        assert a == 1 and  # implicit on 'and'
            b == 2
        assert a == 1 or  # implicit on 'or'
            b == 2

        #assert a == 1 and  # implicit on 'and' and tabs+spaces
        #    b == 2
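
For context, a minimal sketch (not part of the patch; the class and method names are made up) of the kind of code the change permits once applied, relying on '+' being in _implicitContinuationOps as listed above:

    class ContinuationDemo

        def main
            nums = [1, 2, 3, 4]
            # with the patch applied, a trailing binary operator continues the
            # statement onto the next line without an explicit '_'
            total = .sum(nums) * 10 +
                .sum(nums) - 4
            assert total == 106
            print total

        def sum(nums as List<of int>) as int
            total = 0
            for n in nums
                total += n
            return total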