# Yapps 3.0 Runtime (by Kronuz)
#
# This module is needed to run generated parsers.

import re

try:
    from _scss import Scanner, NoMoreTokens
except ImportError:
    Scanner = None

################################################################################

# Use the accelerated C scanner from _scss when it is available; otherwise fall
# back to the pure-Python implementations below.
if not Scanner:
    class NoMoreTokens(Exception):
        """
        Another exception object, for when we run out of tokens
        """
        pass
    class Scanner(object):
        def __init__(self, patterns, ignore, input=None):
            """
            Patterns is [(terminal,regex)...]
            Ignore is [terminal,...];
            Input is a string
            """
            self.reset(input)
            self.ignore = ignore
            # The stored patterns are a pair (terminal, compiled regex).  If the
            # patterns variable passed in to the constructor is None, we assume
            # that the class already has a proper .patterns list constructed
            if patterns is not None:
                self.patterns = []
                for k, r in patterns:
                    self.patterns.append((k, re.compile(r)))

        def reset(self, input):
            self.tokens = []
            self.restrictions = []
            self.input = input
            self.pos = 0

        def __repr__(self):
            """
            Print the last 10 tokens that have been scanned in
            """
            output = ''
            for t in self.tokens[-10:]:
                output = "%s\n (@%s) %s = %s" % (output, t[0], t[2], repr(t[3]))
            return output
        def _scan(self, restrict):
            """
            Should scan another token and add it to the list, self.tokens,
            and add the restriction to self.restrictions
            """
            # Keep looking for a token, ignoring any in self.ignore
            token = None
            while True:
                best_pat = None
                # Search the patterns for a match, with earlier
                # tokens in the list having preference
                best_pat_len = 0
                for p, regexp in self.patterns:
                    # First check to see if we're restricting to this token
                    if restrict and p not in restrict and p not in self.ignore:
                        continue
                    m = regexp.match(self.input, self.pos)
                    if m:
                        # We got a match
                        best_pat = p
                        best_pat_len = len(m.group(0))
                        break

                # If we didn't find anything, raise an error
                if best_pat is None:
                    msg = "Bad Token"
                    if restrict:
                        msg = "Trying to find one of " + ", ".join(restrict)
                    raise SyntaxError("SyntaxError[@ char %s: %s]" % (repr(self.pos), msg))

                # If we found something that isn't to be ignored, return it
                if best_pat in self.ignore:
                    # This token should be ignored...
                    self.pos += best_pat_len
                else:
                    end_pos = self.pos + best_pat_len
                    # Create a token with this data
                    token = (
                        self.pos,
                        end_pos,
                        best_pat,
                        self.input[self.pos:end_pos],
                    )
                    break

            if token is not None:
                self.pos = token[1]
                # Only add this token if it's not in the list
                # (to prevent looping)
                if not self.tokens or token != self.tokens[-1]:
                    self.tokens.append(token)
                    self.restrictions.append(restrict)
                    # Report how many tokens were added (token() relies on this)
                    return 1
            return 0
        def token(self, i, restrict=None):
            """
            Get the i'th token, and if i is one past the end, then scan
            for another token; restrict is a list of tokens that
            are allowed, or 0 for any token.
            """
            tokens_len = len(self.tokens)
            if i == tokens_len:  # We are at the end, get the next...
                tokens_len += self._scan(restrict)
            if i < tokens_len:
                if restrict and self.restrictions[i] and restrict > self.restrictions[i]:
                    raise NotImplementedError("Unimplemented: restriction set changed")
                return self.tokens[i]
            raise NoMoreTokens

        def rewind(self, i):
            # Forget every token from position i onwards and move the scan
            # position back to where token i started.
            tokens_len = len(self.tokens)
            if i < tokens_len:
                token = self.tokens[i]
                self.tokens = self.tokens[:i]
                self.restrictions = self.restrictions[:i]
                self.pos = token[0]
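    # Illustrative sketch (not part of the original runtime): a generated parser
    # normally drives the Scanner, but it can also be used by hand.  The pattern
    # names below are made up for this example; `ignore` drops whitespace between
    # tokens, and each token is a (start, end, type, text) tuple.
    #
    #     scanner = Scanner([('WS', r'\s+'), ('NUM', r'[0-9]+'), ('OP', r'[-+*/]')],
    #                       ['WS'], '1 + 2')
    #     scanner.token(0)    # -> (0, 1, 'NUM', '1')
    #     scanner.token(1)    # -> (2, 3, 'OP', '+')
    #     scanner.token(2)    # -> (4, 5, 'NUM', '2')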

class CachedScanner(Scanner):
    """
    Same as Scanner, but keeps cached tokens for any given input
    """
    _cache_ = {}
    _goals_ = ['END']  # token types whose appearance marks a completed scan

    def __init__(self, patterns, ignore, input=None):
        try:
            self._tokens = self._cache_[input]
        except KeyError:
            self._tokens = None
            self.__tokens = {}
            self.__input = input
            super(CachedScanner, self).__init__(patterns, ignore, input)

    def reset(self, input):
        try:
            self._tokens = self._cache_[input]
        except KeyError:
            self._tokens = None
            self.__tokens = {}
            self.__input = input
            super(CachedScanner, self).reset(input)

    def __repr__(self):
        if self._tokens is None:
            return super(CachedScanner, self).__repr__()
        output = ''
        for t in self._tokens[-10:]:
            output = "%s\n (@%s) %s = %s" % (output, t[0], t[2], repr(t[3]))
        return output

    def token(self, i, restrict=None):
        if self._tokens is None:
            # Not cached yet: scan as usual and remember the token; once a goal
            # token is seen the whole run is stored in the class-level cache.
            token = super(CachedScanner, self).token(i, restrict)
            self.__tokens[i] = token
            if token[2] in self._goals_:  # goal tokens
                self._cache_[self.__input] = self._tokens = self.__tokens
            return token
        else:
            token = self._tokens.get(i)
            if token is None:
                raise NoMoreTokens
            return token

    def rewind(self, i):
        if self._tokens is None:
            super(CachedScanner, self).rewind(i)
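# Illustrative note (names assumed for the example): once a goal token such as
# 'END' has been scanned, the token stream for that input string lives in
# CachedScanner._cache_, so a second scanner over the same string skips the
# regular expressions entirely:
#
#     s1 = CachedScanner(patterns, ['WS'], some_input)
#     ... drive s1 until it produces an 'END' token ...
#     s2 = CachedScanner(patterns, ['WS'], some_input)
#     s2.token(0)    # served from the cache, no matching performed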

class Parser(object):
    def __init__(self, scanner):
        self._scanner = scanner
        self._pos = 0

    def reset(self, input):
        self._scanner.reset(input)
        self._pos = 0

    def _peek(self, types):
        """
        Returns the token type for lookahead; if there are any args
        then the list of args is the set of token types to allow
        """
        tok = self._scanner.token(self._pos, types)
        return tok[2]

    def _scan(self, type):
        """
        Returns the matched text, and moves to the next token
        """
        tok = self._scanner.token(self._pos, set([type]))
        if tok[2] != type:
            raise SyntaxError("SyntaxError[@ char %s: %s]" % (repr(tok[0]), "Trying to find " + type))
        self._pos += 1
        return tok[3]

    def _rewind(self, n=1):
        self._pos -= min(n, self._pos)
        self._scanner.rewind(self._pos)
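# Illustrative sketch (hypothetical class, not produced by this module): a parser
# generated by Yapps subclasses Parser and turns each grammar rule into a method
# built from _peek() and _scan() calls, roughly like this:
#
#     class Calculator(Parser):
#         def expr(self):
#             total = int(self._scan('NUM'))
#             while self._peek(set(['PLUS', 'END'])) == 'PLUS':
#                 self._scan('PLUS')
#                 total += int(self._scan('NUM'))
#             return total
#
#     # with `patterns` defining NUM, PLUS, WS and END terminals:
#     Calculator(Scanner(patterns, ['WS'], '1 + 2')).expr()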
################################################################################

def print_error(input, err, scanner):
    """This is a really dumb long function to print error messages nicely."""
    p = err.pos
    # Figure out the line number
    line = input[:p].count('\n')
    print err.msg + " on line " + repr(line + 1) + ":"

    # Now try printing part of the line
    text = input[max(p - 80, 0):p + 80]
    p = p - max(p - 80, 0)

    # Strip to the left
    i = text[:p].rfind('\n')
    j = text[:p].rfind('\r')
    if i < 0 or (0 <= j < i):
        i = j
    if 0 <= i < p:
        p = p - i - 1
        text = text[i + 1:]

    # Strip to the right
    i = text.find('\n', p)
    j = text.find('\r', p)
    if i < 0 or (0 <= j < i):
        i = j
    if i >= 0:
        text = text[:i]

    # Now shorten the text
    while len(text) > 70 and p > 60:
        # Cut off 10 chars
        text = "..." + text[10:]
        p = p - 7

    # Now print the string, along with an indicator
    print '> ', text
    print '> ', ' ' * p + '^'
    print 'List of nearby tokens:', scanner

def wrap_error_reporter(parser, rule, *args):
    try:
        return getattr(parser, rule)(*args)
    except SyntaxError, s:
        input = parser._scanner.input
        try:
            print_error(input, s, parser._scanner)
        except ImportError:
            print "Syntax Error %s on line %d" % (s.msg, input[:s.pos].count('\n') + 1)
    except NoMoreTokens:
        print "Could not complete parsing; stopped around here:"
        print parser._scanner
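
# The block below is an illustrative self-test sketch, not part of the original
# runtime: it drives the Scanner directly with a made-up pattern list so the
# (start, end, type, text) token tuples can be inspected from the command line.
if __name__ == '__main__':
    demo_patterns = [
        ('WS', r'\s+'),       # ignored between tokens
        ('NUM', r'[0-9]+'),
        ('OP', r'[-+*/]'),
        ('END', r'$'),        # matches the empty string at the end of input
    ]
    demo_scanner = Scanner(demo_patterns, ['WS'], '1 + 22 * 333')
    i = 0
    while True:
        tok = demo_scanner.token(i)
        print "%3d-%3d %-4s %r" % (tok[0], tok[1], tok[2], tok[3])
        if tok[2] == 'END':
            break
        i += 1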