Lines Matching full:token

395     def push(self, token):  argument
396 self.tokens.insert(0, token);
399 print("Last token: ", self.last)
400 print("Token queue: ", self.tokens)
403 def token(self): member in CLexer
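
The push() and token() entries above suggest a small pushback queue: a token handed back with push() is returned again by the next token() call. Below is a minimal sketch of that pattern, assuming tokens are (kind, value) tuples as in the rest of the listing; the class and attribute names are illustrative, not the real CLexer internals.

# Minimal sketch of the pushback behaviour implied by push()/token() above.
# Names are illustrative; this is not the actual CLexer implementation.
class PushbackLexer:
    def __init__(self, tokens):
        self.tokens = list(tokens)   # pending tokens; front of the list is next
        self.last = None             # last token handed out

    def push(self, token):
        # Put a token back so the next token() call returns it again.
        self.tokens.insert(0, token)

    def token(self):
        if not self.tokens:
            return None
        self.last = self.tokens.pop(0)
        return self.last

lexer = PushbackLexer([("name", "int"), ("name", "x"), ("sep", ";")])
tok = lexer.token()                  # ("name", "int")
lexer.push(tok)                      # not ready for it yet: hand it back
assert lexer.token() == ("name", "int")
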
624 def error(self, msg, token=-1): argument
629 if token != -1:
630 print("Got token ", token)
634 def debug(self, msg, token=-1): argument
636 if token != -1:
637 print("Got token ", token)
668 def parseComment(self, token): argument
670 self.top_comment = token[1]
671 if self.comment == None or token[1][0] == '*':
672 self.comment = token[1];
674 self.comment = self.comment + token[1]
675 token = self.lexer.token()
683 return token
914 def parsePreproc(self, token): argument
916 print("=> preproc ", token, self.lexer.tokens)
917 name = token[1]
919 token = self.lexer.token()
920 if token == None:
922 if token[0] == 'preproc':
923 self.index_add(token[1], self.filename, not self.is_header,
925 return self.lexer.token()
926 return token
928 token = self.lexer.token()
929 if token == None:
931 if token[0] == 'preproc':
933 name = token[1]
935 token = self.lexer.token()
936 while token != None and token[0] == 'preproc' and \
937 token[1][0] != '#':
938 lst.append(token[1])
939 token = self.lexer.token()
947 return token
1003 token = self.lexer.token()
1004 while token != None and token[0] == 'preproc' and \
1005 token[1][0] != '#':
1006 token = self.lexer.token()
1007 return token
1010 # token acquisition on top of the lexer, it handles internally
1014 def token(self): member in CParser
1017 token = self.lexer.token()
1018 while token != None:
1019 if token[0] == 'comment':
1020 token = self.parseComment(token)
1022 elif token[0] == 'preproc':
1023 token = self.parsePreproc(token)
1025 elif token[0] == "name" and token[1] == "__const":
1026 token = ("name", "const")
1027 return token
1028 elif token[0] == "name" and token[1] == "__attribute":
1029 token = self.lexer.token()
1030 while token != None and token[1] != ";":
1031 token = self.lexer.token()
1032 return token
1033 elif token[0] == "name" and token[1] in ignored_words:
1034 (n, info) = ignored_words[token[1]]
1037 token = self.lexer.token()
1039 token = self.lexer.token()
1043 print("=> ", token)
1044 return token
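
The comment and the CParser token() member above describe a wrapper that sits on top of the raw lexer and hides tokens that are not part of the program structure. The following is a hedged, simplified sketch of that filtering; the comment and preprocessor handling is deliberately cruder than the real parseComment/parsePreproc, and 'lexer' is assumed to be any object whose token() method yields (kind, value) tuples.

# Sketch only: simplified version of the filtering the token() wrapper
# above appears to perform.
def next_code_token(lexer):
    tok = lexer.token()
    while tok is not None:
        kind, value = tok
        if kind in ("comment", "preproc"):
            tok = lexer.token()          # not part of the program structure
            continue
        if kind == "name" and value == "__const":
            return ("name", "const")     # normalize the alternate spelling
        if kind == "name" and value == "__attribute":
            while tok is not None and tok[1] != ";":
                tok = lexer.token()      # skip the whole annotation
            return tok
        return tok
    return None
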
1050 def parseTypedef(self, token): argument
1051 if token == None:
1053 token = self.parseType(token)
1054 if token == None:
1059 #self.debug("end typedef type", token)
1060 while token != None:
1061 if token[0] == "name":
1062 name = token[1]
1080 token = self.token()
1083 return token
1084 #self.debug("end typedef", token)
1085 if token != None and token[0] == 'sep' and token[1] == ',':
1087 token = self.token()
1088 while token != None and token[0] == "op":
1089 type = type + token[1]
1090 token = self.token()
1091 elif token != None and token[0] == 'sep' and token[1] == ';':
1093 elif token != None and token[0] == 'name':
1097 self.error("parsing typedef: expecting ';'", token)
1098 return token
1099 token = self.token()
1100 return token
1106 def parseBlock(self, token): argument
1107 while token != None:
1108 if token[0] == "sep" and token[1] == "{":
1109 token = self.token()
1110 token = self.parseBlock(token)
1111 elif token[0] == "sep" and token[1] == "}":
1112 token = self.token()
1113 return token
1116 oldtok = token
1117 token = self.token()
1119 if token[0] == "sep" and token[1] == "(":
1122 token = self.token()
1123 elif token[0] == "name":
1124 token = self.token()
1125 if token[0] == "sep" and (token[1] == ";" or
1126 token[1] == "," or token[1] == "="):
1137 token = self.token()
1138 return token
1143 def parseStruct(self, token): argument
1145 #self.debug("start parseStruct", token)
1146 while token != None:
1147 if token[0] == "sep" and token[1] == "{":
1148 token = self.token()
1149 token = self.parseTypeBlock(token)
1150 elif token[0] == "sep" and token[1] == "}":
1152 #self.debug("end parseStruct", token)
1154 token = self.token()
1155 return token
1158 #self.debug("before parseType", token)
1159 token = self.parseType(token)
1160 #self.debug("after parseType", token)
1161 if token != None and token[0] == "name":
1162 fname = token[1]
1163 token = self.token()
1164 if token[0] == "sep" and token[1] == ";":
1165 token = self.token()
1168 self.error("parseStruct: expecting ;", token)
1169 elif token != None and token[0] == "sep" and token[1] == "{":
1170 token = self.token()
1171 token = self.parseTypeBlock(token)
1172 if token != None and token[0] == "name":
1173 token = self.token()
1174 if token != None and token[0] == "sep" and token[1] == ";":
1175 token = self.token()
1177 self.error("parseStruct: expecting ;", token)
1179 self.error("parseStruct: name", token)
1180 token = self.token()
1183 #self.debug("end parseStruct", token)
1185 return token
1190 def parseEnumBlock(self, token): argument
1196 while token != None:
1197 if token[0] == "sep" and token[1] == "{":
1198 token = self.token()
1199 token = self.parseTypeBlock(token)
1200 elif token[0] == "sep" and token[1] == "}":
1206 token = self.token()
1207 return token
1208 elif token[0] == "name":
1214 name = token[1]
1216 token = self.token()
1217 if token[0] == "op" and token[1][0] == "=":
1219 if len(token[1]) > 1:
1220 value = token[1][1:]
1221 token = self.token()
1222 while token[0] != "sep" or (token[1] != ',' and
1223 token[1] != '}'):
1224 value = value + token[1]
1225 token = self.token()
1232 if token[0] == "sep" and token[1] == ",":
1233 token = self.token()
1235 token = self.token()
1236 return token
1242 def parseTypeBlock(self, token): argument
1243 while token != None:
1244 if token[0] == "sep" and token[1] == "{":
1245 token = self.token()
1246 token = self.parseTypeBlock(token)
1247 elif token[0] == "sep" and token[1] == "}":
1248 token = self.token()
1249 return token
1251 token = self.token()
1252 return token
1257 # if inside, the name token is pushed back before returning
1259 def parseType(self, token): argument
1263 if token == None:
1264 return token
1269 while token[0] == "name" and (
1270 token[1] == "const" or \
1271 token[1] == "unsigned" or \
1272 token[1] == "signed"):
1273 if token[1] == "unsigned" or token[1] == "signed":
1276 self.type = token[1]
1278 self.type = self.type + " " + token[1]
1279 token = self.token()
1281 if token[0] == "name" and token[1] in ("char", "short", "int", "long"):
1283 self.type = token[1]
1285 self.type = self.type + " " + token[1]
1290 elif token[0] == "name" and token[1] == "struct":
1292 self.type = token[1]
1294 self.type = self.type + " " + token[1]
1295 token = self.token()
1297 if token[0] == "name":
1298 nametok = token
1299 token = self.token()
1300 if token != None and token[0] == "sep" and token[1] == "{":
1301 token = self.token()
1302 token = self.parseStruct(token)
1303 elif token != None and token[0] == "op" and token[1] == "*":
1305 token = self.token()
1306 while token != None and token[0] == "op" and token[1] == "*":
1308 token = self.token()
1309 if token[0] == "name":
1310 nametok = token
1311 token = self.token()
1313 self.error("struct : expecting name", token)
1314 return token
1315 elif token != None and token[0] == "name" and nametok != None:
1317 return token
1320 self.lexer.push(token)
1321 token = nametok
1322 return token
1324 elif token[0] == "name" and token[1] == "enum":
1326 self.type = token[1]
1328 self.type = self.type + " " + token[1]
1330 token = self.token()
1331 if token != None and token[0] == "sep" and token[1] == "{":
1332 token = self.token()
1333 token = self.parseEnumBlock(token)
1335 self.error("parsing enum: expecting '{'", token)
1337 if token != None and token[0] != "name":
1338 self.lexer.push(token)
1339 token = ("name", "enum")
1341 enum_type = token[1]
1346 return token
1348 elif token[0] == "name":
1350 self.type = token[1]
1352 self.type = self.type + " " + token[1]
1355 token)
1356 return token
1358 token = self.token()
1359 while token != None and (token[0] == "op" or
1360 token[0] == "name" and token[1] == "const"):
1361 self.type = self.type + " " + token[1]
1362 token = self.token()
1367 if token != None and token[0] == "sep" and token[1] == '(':
1368 self.type = self.type + token[1]
1369 token = self.token()
1370 while token != None and token[0] == "op" and token[1] == '*':
1371 self.type = self.type + token[1]
1372 token = self.token()
1373 if token == None or token[0] != "name" :
1374 self.error("parsing function type, name expected", token);
1375 return token
1376 self.type = self.type + token[1]
1377 nametok = token
1378 token = self.token()
1379 if token != None and token[0] == "sep" and token[1] == ')':
1380 self.type = self.type + token[1]
1381 token = self.token()
1382 if token != None and token[0] == "sep" and token[1] == '(':
1383 token = self.token()
1385 token = self.parseSignature(token);
1388 self.error("parsing function type, '(' expected", token);
1389 return token
1391 self.error("parsing function type, ')' expected", token);
1392 return token
1393 self.lexer.push(token)
1394 token = nametok
1395 return token
1400 if token != None and token[0] == "name":
1401 nametok = token
1402 token = self.token()
1403 if token != None and token[0] == "sep" and token[1] == '[':
1405 while token != None and token[0] == "sep" and token[1] == '[':
1406 self.type = self.type + token[1]
1407 token = self.token()
1408 while token != None and token[0] != 'sep' and \
1409 token[1] != ']' and token[1] != ';':
1410 self.type = self.type + token[1]
1411 token = self.token()
1412 if token != None and token[0] == 'sep' and token[1] == ']':
1413 self.type = self.type + token[1]
1414 token = self.token()
1416 self.error("parsing array type, ']' expected", token);
1417 return token
1418 elif token != None and token[0] == "sep" and token[1] == ':':
1420 token = self.token()
1421 token = self.token()
1422 self.lexer.push(token)
1423 token = nametok
1425 return token
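
The parseType() entries above rely on the pattern named in the earlier comment: once the type has been read and the declared name seen, the extra lookahead token is pushed back onto the lexer so the caller resumes at the name. The sketch below shows that hand-off under the PushbackLexer interface sketched earlier; the accepted keywords and function name are illustrative only.

# Sketch of the "name token is pushed back before returning" pattern,
# assuming a lexer with token() and push() as in the earlier sketch.
def parse_simple_type(lexer):
    type_words = []
    tok = lexer.token()
    # Collect the leading type keywords.
    while tok is not None and tok[0] == "name" and tok[1] in (
            "const", "unsigned", "signed", "char", "short", "int", "long"):
        type_words.append(tok[1])
        tok = lexer.token()
    if tok is not None and tok[0] == "name":
        nametok = tok                # the declared identifier
        lookahead = lexer.token()    # ';', '[', '=', ... belongs to the caller
        if lookahead is not None:
            lexer.push(lookahead)    # give it back to the lexer
        tok = nametok                # caller sees the name as current token
    return " ".join(type_words), tok

lexer = PushbackLexer([("name", "unsigned"), ("name", "int"),
                       ("name", "counter"), ("sep", ";")])
ctype, tok = parse_simple_type(lexer)   # ("unsigned int", ("name", "counter"))
assert lexer.token() == ("sep", ";")    # the pushed-back token is still queued
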
1430 def parseSignature(self, token): argument
1432 if token != None and token[0] == "sep" and token[1] == ')':
1434 token = self.token()
1435 return token
1436 while token != None:
1437 token = self.parseType(token)
1438 if token != None and token[0] == "name":
1439 signature.append((self.type, token[1], None))
1440 token = self.token()
1441 elif token != None and token[0] == "sep" and token[1] == ',':
1442 token = self.token()
1444 elif token != None and token[0] == "sep" and token[1] == ')':
1450 if token != None and token[0] == "sep":
1451 if token[1] == ',':
1452 token = self.token()
1454 elif token[1] == ')':
1455 token = self.token()
1458 return token
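
parseSignature() above accumulates one (type, name, info) triple per formal parameter, as the signature.append(...) line shows. As a rough illustration only (the exact spelling of the accumulated type string is an assumption), a declaration such as int write(int fd, const char *buf, size_t count); would yield a list shaped like this:

# Illustrative data only, not produced by running the real parser.
signature = [
    ("int", "fd", None),
    ("const char *", "buf", None),
    ("size_t", "count", None),
]
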
1464 def parseGlobal(self, token): argument
1466 if token[1] == 'extern':
1467 token = self.token()
1468 if token == None:
1469 return token
1470 if token[0] == 'string':
1471 if token[1] == 'C':
1472 token = self.token()
1473 if token == None:
1474 return token
1475 if token[0] == 'sep' and token[1] == "{":
1476 token = self.token()
1478 while token != None and (token[0] != 'sep' or
1479 token[1] != "}"):
1480 if token[0] == 'name':
1481 token = self.parseGlobal(token)
1484 "token %s %s unexpected at the top level" % (
1485 token[0], token[1]))
1486 token = self.parseGlobal(token)
1488 token = self.token()
1489 return token
1491 return token
1492 elif token[1] == 'static':
1494 token = self.token()
1495 if token == None or token[0] != 'name':
1496 return token
1498 if token[1] == 'typedef':
1499 token = self.token()
1500 return self.parseTypedef(token)
1502 token = self.parseType(token)
1504 if token == None or token[0] != "name":
1505 return token
1507 self.name = token[1]
1508 token = self.token()
1509 while token != None and (token[0] == "sep" or token[0] == "op"):
1510 if token[0] == "sep":
1511 if token[1] == "[":
1512 type = type + token[1]
1513 token = self.token()
1514 while token != None and (token[0] != "sep" or \
1515 token[1] != ";"):
1516 type = type + token[1]
1517 token = self.token()
1519 if token != None and token[0] == "op" and token[1] == "=":
1523 token = self.token()
1524 if token[0] == 'sep' and token[1] == '{':
1525 token = self.token()
1526 token = self.parseBlock(token)
1528 while token != None and (token[0] != "sep" or \
1529 (token[1] != ';' and token[1] != ',')):
1530 token = self.token()
1531 if token == None or token[0] != "sep" or (token[1] != ';' and
1532 token[1] != ','):
1535 if token != None and token[0] == "sep":
1536 if token[1] == ";":
1545 token = self.token()
1547 elif token[1] == "(":
1548 token = self.token()
1549 token = self.parseSignature(token)
1550 if token == None:
1552 if token[0] == "sep" and token[1] == ";":
1558 token = self.token()
1559 elif token[0] == "sep" and token[1] == "{":
1565 token = self.token()
1566 token = self.parseBlock(token);
1567 elif token[1] == ',':
1572 token = self.token()
1573 while token != None and token[0] == "sep":
1574 type = type + token[1]
1575 token = self.token()
1576 if token != None and token[0] == "name":
1577 self.name = token[1]
1578 token = self.token()
1582 return token
1586 token = self.token()
1587 while token != None:
1588 if token[0] == 'name':
1589 token = self.parseGlobal(token)
1591 self.error("token %s %s unexpected at the top level" % (
1592 token[0], token[1]))
1593 token = self.parseGlobal(token)