3
# WARNING! Do not edit! http://waf.googlecode.com/svn/docs/wafbook/single.html#_obtaining_the_waf_file
6
if sys.hexversion < 0x020400f0: from sets import Set as set
7
import re,sys,os,string,traceback
8
from waflib import Logs,Build,Utils,Errors
9
from waflib.Logs import debug,error
10
class PreprocError(Errors.WafError):
15
# NOTE(review): this chunk is interleaved with bare original-line-number
# artifacts (the lone '20', '21', ... lines). They parse as harmless no-op
# expression statements but should be removed when the file is restored.
# Default system include path consulted when resolving headers.
standard_includes=['/usr/include']
20
# C/C++ alternative operator spellings ("and", "bitor", ...) -> punctuator form.
g_optrans={'not':'!','and':'&&','bitand':'&','and_eq':'&=','or':'||','bitor':'|','or_eq':'|=','xor':'^','xor_eq':'^=','compl':'~',}
21
# One preprocessor directive per match: '#' (or the '%:' digraph), the
# directive keyword (group 2), then the remainder of the line (group 3).
re_lines=re.compile('^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',re.IGNORECASE|re.MULTILINE)
22
# A macro identifier anchored at the start of a string.
re_mac=re.compile("^[a-zA-Z_]\w*")
23
# Identifier immediately followed by '(' -> function-like macro definition.
re_fun=re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]')
24
# Body of "#pragma once" (the 'pragma' keyword is already stripped by re_lines).
re_pragma_once=re.compile('^\s*once\s*',re.IGNORECASE)
25
# Backslash line continuations, tolerating '\r' from DOS line endings.
re_nl=re.compile('\\\\\r*\n',re.MULTILINE)
26
# C comments (/*...*/ and //...) plus string/char literals, so comment
# stripping does not recurse into quoted text.
re_cpp=re.compile(r"""(/\*[^*]*\*+([^/*][^*]*\*+)*/)|//[^\n]*|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^/"'\\]*)""",re.MULTILINE)
27
# Trigraph table as (sequence, replacement) pairs: '??=' -> '#', '??-' -> '~', etc.
trig_def=[('??'+a,b)for a,b in zip("=-/!'()<>",r'#~\|^[]{}')]
28
# Escape character -> ASCII code, for evaluating char literals such as '\n'.
chr_esc={'0':0,'a':7,'b':8,'t':9,'n':10,'f':11,'v':12,'r':13,'\\':92,"'":39}
34
# Token category tags; NUM/STR/IDENT/OP are defined on lines not visible in
# this chunk (presumably short marker strings) -- TODO confirm when restoring.
tok_types=[NUM,STR,IDENT,OP]
35
# One regex alternative per token category, same order as tok_types:
# numbers (hex/char/float/octal/decimal), string literals, identifiers, operators.
exp_types=[r"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""",r'L?"([^"\\]|\\.)*"',r'[a-zA-Z_]\w*',r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',]
36
# Combined lexer: one named group per token category, joined with '|'.
re_clexer=re.compile('|'.join(["(?P<%s>%s)"%(name,part)for name,part in zip(tok_types,exp_types)]),re.M)
46
def filter_comments(filename):
47
code=Utils.readf(filename)
49
for(a,b)in trig_def:code=code.split(a).join(b)
50
code=re_nl.sub('',code)
51
code=re_cpp.sub(repl,code)
52
return[(m.group(2),m.group(3))for m in re.finditer(re_lines,code)]
54
ops=['* / %','+ -','<< >>','< <= >= >','== !=','& | ^','&& ||',',']
55
for x in range(len(ops)):
57
for u in syms.split():
62
if s[0]=="'"and s[-1]=="'":return s[1:-1]
64
def reduce_nums(val_1,val_2,val_op):
66
except TypeError:a=int(val_1)
68
except TypeError:b=int(val_2)
77
elif d=='||':c=int(a or b)
79
elif d=='&&':c=int(a and b)
80
elif d=='==':c=int(a==b)
81
elif d=='!=':c=int(a!=b)
82
elif d=='<=':c=int(a<=b)
83
elif d=='<':c=int(a<b)
84
elif d=='>':c=int(a>b)
85
elif d=='>=':c=int(a>=b)
86
elif d=='^':c=int(a^b)
92
if not lst:raise PreprocError("empty list for get_num")
109
raise PreprocError("rparen expected %r"%lst)
110
(num,_)=get_term(lst[1:i])
111
return(num,lst[i+1:])
113
return get_num(lst[1:])
115
num,lst=get_num(lst[1:])
116
return(reduce_nums('-1',num,'*'),lst)
118
num,lst=get_num(lst[1:])
119
return(int(not int(num)),lst)
121
return(~int(num),lst)
123
raise PreprocError("Invalid op token %r for get_num"%lst)
129
raise PreprocError("Invalid token %r for get_num"%lst)
131
if not lst:raise PreprocError("empty list for get_term")
137
if v=='&&'and not num:
142
return get_term(lst[1:])
158
raise PreprocError("rparen expected %r"%lst)
160
return get_term(lst[1:i])
162
return get_term(lst[i+1:])
164
num2,lst=get_num(lst[1:])
166
num2=reduce_nums(num,num2,v)
167
return get_term([(NUM,num2)]+lst)
170
raise PreprocError("op expected %r"%lst)
171
if prec[v2]>=prec[v]:
172
num2=reduce_nums(num,num2,v)
173
return get_term([(NUM,num2)]+lst)
175
num3,lst=get_num(lst[1:])
176
num3=reduce_nums(num2,num3,v2)
177
return get_term([(NUM,num),(p,v),(NUM,num3)]+lst)
178
raise PreprocError("cannot reduce %r"%lst)
179
def reduce_eval(lst):
180
num,lst=get_term(lst)
183
lst=[str(v2)for(p2,v2)in lst]
185
def paste_tokens(t1,t2):
187
if t1[0]==OP and t2[0]==OP:
189
elif t1[0]==IDENT and(t2[0]==IDENT or t2[0]==NUM):
191
elif t1[0]==NUM and t2[0]==NUM:
194
raise PreprocError('tokens do not make a valid paste %r and %r'%(t1,t2))
195
return(p1,t1[1]+t2[1])
196
def reduce_tokens(lst,defs,ban=[]):
200
if p==IDENT and v=="defined":
209
elif p2==OP and v2=='(':
218
raise PreprocError("Invalid define expression %r"%lst)
219
elif p==IDENT and v in defs:
220
if isinstance(defs[v],str):
221
a,b=extract_macro(defs[v])
225
if isinstance(macro_def[0],list):
227
for x in range(len(to_add)):
228
lst.insert(i,to_add[x])
234
raise PreprocError("expected '(' after %r (got nothing)"%v)
236
if p2!=OP or v2!='(':
237
raise PreprocError("expected '(' after %r"%v)
244
if p2==OP and count_paren==0:
246
one_param.append((p2,v2))
249
if one_param:args.append(one_param)
252
if not one_param:raise PreprocError("empty param in funcall %s"%p)
253
args.append(one_param)
256
one_param.append((p2,v2))
258
one_param.append((p2,v2))
259
if v2=='(':count_paren+=1
260
elif v2==')':count_paren-=1
262
raise PreprocError('malformed macro')
264
arg_table=macro_def[0]
268
if p2==OP and v2=='#':
269
if j+1<len(to_add)and to_add[j+1][0]==IDENT and to_add[j+1][1]in arg_table:
270
toks=args[arg_table[to_add[j+1][1]]]
271
accu.append((STR,stringize(toks)))
275
elif p2==OP and v2=='##':
276
if accu and j+1<len(to_add):
278
if to_add[j+1][0]==IDENT and to_add[j+1][1]in arg_table:
279
toks=args[arg_table[to_add[j+1][1]]]
281
accu[-1]=paste_tokens(t1,toks[0])
282
accu.extend(toks[1:])
286
elif to_add[j+1][0]==IDENT and to_add[j+1][1]=='__VA_ARGS__':
290
for x in args[pt-st+1:]:
292
va_toks.append((OP,','))
293
if va_toks:va_toks.pop()
303
accu[-1]=paste_tokens(t1,to_add[j+1])
307
elif p2==IDENT and v2 in arg_table:
308
toks=args[arg_table[v2]]
309
reduce_tokens(toks,defs,ban+[v])
314
reduce_tokens(accu,defs,ban+[v])
315
for x in range(len(accu)-1,-1,-1):
316
lst.insert(i,accu[x])
318
def eval_macro(lst,defs):
319
reduce_tokens(lst,defs,[])
320
if not lst:raise PreprocError("missing tokens to evaluate")
321
(p,v)=reduce_eval(lst)
323
def extract_macro(txt):
325
if re_fun.search(txt):
328
if p!=OP:raise PreprocError("expected open parenthesis")
341
elif p==OP and v==')':
344
raise PreprocError("unexpected token (3)")
348
elif p==OP and v==')':
351
raise PreprocError("comma or ... expected")
357
elif p==OP and v=='...':
358
raise PreprocError("not implemented (1)")
360
raise PreprocError("comma or ... expected (2)")
362
raise PreprocError("not implemented (2)")
364
raise PreprocError("unexpected else")
365
return(name,[params,t[i+1:]])
369
re_include=re.compile('^\s*(<(?P<a>.*)>|"(?P<b>.*)")')
370
def extract_include(txt,defs):
371
m=re_include.search(txt)
373
if m.group('a'):return'<',m.group('a')
374
if m.group('b'):return'"',m.group('b')
376
reduce_tokens(toks,defs,['waf_include'])
378
raise PreprocError("could not parse include %s"%txt)
383
if toks[0][1]=='<'and toks[-1][1]=='>':
384
return stringize(toks).lstrip('<').rstrip('>')
385
raise PreprocError("could not parse include %s."%txt)
387
if not txt:raise PreprocError("attempted to parse a null char")
392
if len(txt)==4 and txt[3]in string.hexdigits:return int(txt[2:],16)
393
return int(txt[2:],16)
395
if c=='0'and len(txt)==2:return 0
397
if len(txt)>i and txt[1:1+i].isdigit():
398
return(1+i,int(txt[1:1+i],8))
400
try:return chr_esc[c]
401
except KeyError:raise PreprocError("could not parse char literal '%s'"%txt)
404
for match in re_clexer.finditer(s):
406
for name in tok_types:
410
try:v=g_optrans[v];name=OP
412
if v.lower()=="true":
415
elif v.lower()=="false":
419
if m('oct'):v=int(v,8)
420
elif m('hex'):v=int(m('hex'),16)
421
elif m('n0'):v=m('n0')
425
else:v=m('n2')or m('n4')
428
elif v=='%:%:':v='##'
434
def define_name(line):
	"""Return the macro name at the start of a '#define' body *line*."""
	# re_mac matches a C identifier ([a-zA-Z_]\w*) anchored at the start;
	# a line that does not begin with one raises AttributeError here.
	matched = re_mac.match(line)
	return matched.group(0)
436
class c_parser(object):
437
def __init__(self,nodepaths=None,defines=None):
442
self.defs=dict(defines)
445
self.currentnode_stack=[]
446
self.nodepaths=nodepaths or[]
450
self.ban_includes=set([])
451
def cached_find_resource(self,node,filename):
455
nd=node.ctx.cache_nd={}
460
ret=node.find_resource(filename)
462
if getattr(ret,'children',None):
464
elif ret.is_child_of(node.ctx.bldnode):
465
tmp=node.ctx.srcnode.search(ret.path_from(node.ctx.bldnode))
466
if tmp and getattr(tmp,'children',None):
470
def tryfind(self,filename):
471
self.curfile=filename
472
found=self.cached_find_resource(self.currentnode_stack[-1],filename)
473
for n in self.nodepaths:
476
found=self.cached_find_resource(n,filename)
478
self.nodes.append(found)
479
if filename[-4:]!='.moc':
482
if not filename in self.names:
483
self.names.append(filename)
485
def addlines(self,node):
486
self.currentnode_stack.append(node.parent)
487
filepath=node.abspath()
489
if self.count_files>recursion_limit:
490
raise PreprocError("recursion limit exceeded")
492
debug('preproc: reading file %r',filepath)
498
self.lines.extend(lns)
501
lines=filter_comments(filepath)
502
lines.append((POPFILE,''))
505
self.lines.extend(lines)
507
raise PreprocError("could not read the file %s"%filepath)
510
error("parsing %s failed"%filepath)
511
traceback.print_exc()
512
def start(self,node,env):
513
debug('preproc: scanning %s (in %s)',node.name,node.parent.name)
516
self.parse_cache=bld.parse_cache
517
except AttributeError:
519
self.parse_cache=bld.parse_cache
523
lst=['%s %s'%(x[0],trimquotes('='.join(x[1:])))for x in[y.split('=')for y in env['DEFINES']]]
525
self.lines.extend([('define',x)for x in lst])
526
except AttributeError:
529
(token,line)=self.lines.pop()
532
self.currentnode_stack.pop()
536
if ve:debug('preproc: line is %s - %s state is %s',token,line,self.state)
539
state.append(undefined)
543
if skipped in self.state or ignored in self.state:
546
ret=eval_macro(tokenize(line),self.defs)
547
if ret:state[-1]=accepted
548
else:state[-1]=ignored
551
if m and m.group(0)in self.defs:state[-1]=accepted
552
else:state[-1]=ignored
553
elif token=='ifndef':
555
if m and m.group(0)in self.defs:state[-1]=ignored
556
else:state[-1]=accepted
557
elif token=='include'or token=='import':
558
(kind,inc)=extract_include(line,self.defs)
559
if inc in self.ban_includes:
561
if token=='import':self.ban_includes.add(inc)
562
if ve:debug('preproc: include found %s (%s) ',inc,kind)
563
if kind=='"'or not strict_quotes:
566
if state[-1]==accepted:
568
elif state[-1]==ignored:
569
if eval_macro(tokenize(line),self.defs):
572
if state[-1]==accepted:state[-1]=skipped
573
elif state[-1]==ignored:state[-1]=accepted
574
elif token=='define':
576
self.defs[define_name(line)]=line
578
raise PreprocError("Invalid define line %s"%line)
581
if m and m.group(0)in self.defs:
582
self.defs.__delitem__(m.group(0))
583
elif token=='pragma':
584
if re_pragma_once.match(line.lower()):
585
self.ban_includes.add(self.curfile)
588
debug('preproc: line parsing failed (%s): %s %s',e,line,Utils.ex_stack())
592
incn=task.generator.includes_nodes
593
except AttributeError:
594
raise Errors.WafError('%r is missing a feature such as "c", "cxx" or "includes": '%task.generator)
598
nodepaths=[x for x in incn if x.is_child_of(x.ctx.srcnode)or x.is_child_of(x.ctx.bldnode)]
599
tmp=c_parser(nodepaths)
600
tmp.start(task.inputs[0],task.env)
602
debug('deps: deps for %r: %r; unresolved %r'%(task.inputs,tmp.nodes,tmp.names))
603
return(tmp.nodes,tmp.names)
605
# NOTE(review): Utils.run_once presumably wraps a function with per-argument
# memoization, but the return value is discarded here, so as written the
# wrapping may have no effect -- confirm against waflib.Utils (later waf
# versions rebind, e.g. "tokenize = Utils.run_once(tokenize)").
Utils.run_once(tokenize)
606
Utils.run_once(define_name)
b'\\ No newline at end of file'