4
import re,sys,os,string
5
import Logs,Build,Utils
6
from Logs import debug,error
8
class PreprocError(Utils.WafError):
13
# default system include path searched for headers; presumably cleared on
# win32 by the platform check just below — the guarded body is not visible here
standard_includes=['/usr/include']
14
if sys.platform=="win32":
17
'apply the trigraph rules first'
19
# ISO C++ alternative operator spellings (iso646-style keywords) mapped to
# their symbolic form; applied during tokenization to turn these IDENT
# tokens into OP tokens
g_optrans={'not':'!','and':'&&','bitand':'&','and_eq':'&=','or':'||','bitor':'|','or_eq':'|=','xor':'^','xor_eq':'^=','compl':'~',}
20
# matches one full preprocessor directive line (accepts the %: digraph for #);
# group 2 captures the directive keyword, group 3 the rest of the line —
# filter_comments() returns exactly those two groups per match
re_lines=re.compile('^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',re.IGNORECASE|re.MULTILINE)
21
# matches a macro/identifier name at the start of a string (used to pull the
# macro name out of #ifdef/#ifndef/#define/#undef lines)
re_mac=re.compile("^[a-zA-Z_]\w*")
22
# identifier immediately followed by '(' — detects a function-like macro
# definition (extract_macro() tests this before parsing parameters)
re_fun=re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]')
23
# matches 'once' (any case, optional surrounding whitespace) in the text
# following a #pragma directive
re_pragma_once=re.compile('^\s*once\s*',re.IGNORECASE)
24
# backslash-newline line continuations (tolerating \r before \n); these are
# deleted outright so logical lines are joined before directive matching
re_nl=re.compile('\\\\\r*\n',re.MULTILINE)
25
# matches, in one pass: /* */ comments, // comments, string/char literals, or
# plain code runs; used with a replacement callback so comments can be blanked
# while literals are kept intact (the callback itself is outside this view)
re_cpp=re.compile(r"""(/\*[^*]*\*+([^/*][^*]*\*+)*/)|//[^\n]*|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^/"'\\]*)""",re.MULTILINE)
26
# the nine C trigraph pairs: ('??=', '#'), ('??-', '~'), ('??/', '\\'), etc.
trig_def=[('??'+a,b)for a,b in zip("=-/!'()<>",r'#~\|^[]{}')]
27
# simple escape characters mapped to their ordinal values, used when
# evaluating character literals (looked up as chr_esc[c])
chr_esc={'0':0,'a':7,'b':8,'t':9,'n':10,'f':11,'v':12,'r':13,'\\':92,"'":39}
33
# token category tags; NUM/STR/IDENT/OP are defined earlier in the file
# (outside this chunk) — the order here must stay aligned with exp_types,
# since the two lists are zipped to build the lexer regex
tok_types=[NUM,STR,IDENT,OP]
34
# one sub-regex per token category, in tok_types order: numeric constants
# (hex/char/float/octal/decimal, with named groups for each form), string
# literals, identifiers, and multi-/single-character operators
exp_types=[r"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""",r'L?"([^"\\]|\\.)*"',r'[a-zA-Z_]\w*',r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',]
35
# combined lexer: each token-category regex wrapped in a named group, so a
# match's lastgroup-style lookup tells the tokenizer which category fired
re_clexer=re.compile('|'.join(["(?P<%s>%s)"%(name,part)for name,part in zip(tok_types,exp_types)]),re.M)
42
if s is not None:return' '
46
def filter_comments(filename):
47
code=Utils.readf(filename)
49
for(a,b)in trig_def:code=code.split(a).join(b)
50
code=re_nl.sub('',code)
51
code=re_cpp.sub(repl,code)
52
return[(m.group(2),m.group(3))for m in re.finditer(re_lines,code)]
54
# binary operators grouped by precedence, strongest group first; presumably
# the loop just below records each operator's group index as its precedence
# (the dict-building line is not visible in this chunk — confirm)
ops=['* / %','+ -','<< >>','< <= >= >','== !=','& | ^','&& ||',',']
55
for x in range(len(ops)):
57
for u in syms.split():
59
def reduce_nums(val_1,val_2,val_op):
61
except TypeError:a=int(val_1)
63
except TypeError:b=int(val_2)
72
elif d=='||':c=int(a or b)
74
elif d=='&&':c=int(a and b)
75
elif d=='==':c=int(a==b)
76
elif d=='!=':c=int(a!=b)
77
elif d=='<=':c=int(a<=b)
78
elif d=='<':c=int(a<b)
79
elif d=='>':c=int(a>b)
80
elif d=='>=':c=int(a>=b)
81
elif d=='^':c=int(a^b)
87
if not lst:raise PreprocError("empty list for get_num")
104
raise PreprocError("rparen expected %r"%lst)
105
(num,_)=get_term(lst[1:i])
106
return(num,lst[i+1:])
108
return get_num(lst[1:])
110
num,lst=get_num(lst[1:])
111
return(reduce_nums('-1',num,'*'),lst)
113
num,lst=get_num(lst[1:])
114
return(int(not int(num)),lst)
116
return(~int(num),lst)
118
raise PreprocError("invalid op token %r for get_num"%lst)
124
raise PreprocError("invalid token %r for get_num"%lst)
126
if not lst:raise PreprocError("empty list for get_term")
132
if v=='&&'and not num:
137
return get_term(lst[1:])
153
raise PreprocError("rparen expected %r"%lst)
155
return get_term(lst[1:i])
157
return get_term(lst[i+1:])
159
num2,lst=get_num(lst[1:])
161
num2=reduce_nums(num,num2,v)
162
return get_term([(NUM,num2)]+lst)
165
raise PreprocError("op expected %r"%lst)
166
if prec[v2]>=prec[v]:
167
num2=reduce_nums(num,num2,v)
168
return get_term([(NUM,num2)]+lst)
170
num3,lst=get_num(lst[1:])
171
num3=reduce_nums(num2,num3,v2)
172
return get_term([(NUM,num),(p,v),(NUM,num3)]+lst)
173
raise PreprocError("cannot reduce %r"%lst)
174
def reduce_eval(lst):
175
num,lst=get_term(lst)
178
lst=[str(v2)for(p2,v2)in lst]
180
def paste_tokens(t1,t2):
182
if t1[0]==OP and t2[0]==OP:
184
elif t1[0]==IDENT and(t2[0]==IDENT or t2[0]==NUM):
186
elif t1[0]==NUM and t2[0]==NUM:
189
raise PreprocError('tokens do not make a valid paste %r and %r'%(t1,t2))
190
return(p1,t1[1]+t2[1])
191
def reduce_tokens(lst,defs,ban=[]):
195
if p==IDENT and v=="defined":
204
elif p2==OP and v2=='(':
213
raise PreprocError("invalid define expression %r"%lst)
214
elif p==IDENT and v in defs:
215
if isinstance(defs[v],str):
216
a,b=extract_macro(defs[v])
220
if isinstance(macro_def[0],list):
222
for x in xrange(len(to_add)):
223
lst.insert(i,to_add[x])
229
raise PreprocError("expected '(' after %r (got nothing)"%v)
231
if p2!=OP or v2!='(':
232
raise PreprocError("expected '(' after %r"%v)
239
if p2==OP and count_paren==0:
241
one_param.append((p2,v2))
244
if one_param:args.append(one_param)
247
if not one_param:raise PreprocError("empty param in funcall %s"%p)
248
args.append(one_param)
251
one_param.append((p2,v2))
253
one_param.append((p2,v2))
254
if v2=='(':count_paren+=1
255
elif v2==')':count_paren-=1
257
raise PreprocError('malformed macro')
259
arg_table=macro_def[0]
263
if p2==OP and v2=='#':
264
if j+1<len(to_add)and to_add[j+1][0]==IDENT and to_add[j+1][1]in arg_table:
265
toks=args[arg_table[to_add[j+1][1]]]
266
accu.append((STR,stringize(toks)))
270
elif p2==OP and v2=='##':
271
if accu and j+1<len(to_add):
273
if to_add[j+1][0]==IDENT and to_add[j+1][1]in arg_table:
274
toks=args[arg_table[to_add[j+1][1]]]
276
accu[-1]=paste_tokens(t1,toks[0])
277
accu.extend(toks[1:])
281
elif to_add[j+1][0]==IDENT and to_add[j+1][1]=='__VA_ARGS__':
285
for x in args[pt-st+1:]:
287
va_toks.append((OP,','))
288
if va_toks:va_toks.pop()
298
accu[-1]=paste_tokens(t1,to_add[j+1])
302
elif p2==IDENT and v2 in arg_table:
303
toks=args[arg_table[v2]]
304
reduce_tokens(toks,defs,ban+[v])
309
reduce_tokens(accu,defs,ban+[v])
310
for x in xrange(len(accu)-1,-1,-1):
311
lst.insert(i,accu[x])
313
def eval_macro(lst,adefs):
314
reduce_tokens(lst,adefs,[])
315
if not lst:raise PreprocError("missing tokens to evaluate")
316
(p,v)=reduce_eval(lst)
318
def extract_macro(txt):
320
if re_fun.search(txt):
323
if p!=OP:raise PreprocError("expected open parenthesis")
336
elif p==OP and v==')':
339
raise PreprocError("unexpected token (3)")
343
elif p==OP and v==')':
346
raise PreprocError("comma or ... expected")
352
elif p==OP and v=='...':
353
raise PreprocError("not implemented (1)")
355
raise PreprocError("comma or ... expected (2)")
357
raise PreprocError("not implemented (2)")
359
raise PreprocError("unexpected else")
360
return(name,[params,t[i+1:]])
364
# extracts the argument of an #include: named group 'a' is a <system> path,
# named group 'b' is a "quoted" path
re_include=re.compile('^\s*(<(?P<a>.*)>|"(?P<b>.*)")')
365
def extract_include(txt,defs):
366
m=re_include.search(txt)
368
if m.group('a'):return'<',m.group('a')
369
if m.group('b'):return'"',m.group('b')
371
reduce_tokens(toks,defs,['waf_include'])
373
raise PreprocError("could not parse include %s"%txt)
378
if toks[0][1]=='<'and toks[-1][1]=='>':
379
return stringize(toks).lstrip('<').rstrip('>')
380
raise PreprocError("could not parse include %s."%txt)
382
if not txt:raise PreprocError("attempted to parse a null char")
387
if len(txt)==4 and txt[3]in string.hexdigits:return int(txt[2:],16)
388
return int(txt[2:],16)
390
if c=='0'and len(txt)==2:return 0
392
if len(txt)>i and txt[1:1+i].isdigit():
393
return(1+i,int(txt[1:1+i],8))
395
try:return chr_esc[c]
396
except KeyError:raise PreprocError("could not parse char literal '%s'"%txt)
399
for match in re_clexer.finditer(s):
401
for name in tok_types:
405
try:v=g_optrans[v];name=OP
407
if v.lower()=="true":
410
elif v.lower()=="false":
414
if m('oct'):v=int(v,8)
415
elif m('hex'):v=int(m('hex'),16)
416
elif m('n0'):v=m('n0')
420
else:v=m('n2')or m('n4')
423
elif v=='%:%:':v='##'
429
class c_parser(object):
430
def __init__(self,nodepaths=None,defines=None):
435
self.defs=dict(defines)
439
self.currentnode_stack=[]
440
self.nodepaths=nodepaths or[]
445
def tryfind(self,filename):
446
self.curfile=filename
447
found=self.currentnode_stack[-1].find_resource(filename)
448
for n in self.nodepaths:
451
found=n.find_resource(filename)
453
if not filename in self.names:
454
self.names.append(filename)
456
self.nodes.append(found)
457
if filename[-4:]!='.moc':
460
def addlines(self,node):
461
self.currentnode_stack.append(node.parent)
462
filepath=node.abspath(self.env)
464
if self.count_files>recursion_limit:raise PreprocError("recursion limit exceeded")
466
debug('preproc: reading file %r',filepath)
472
self.lines=lns+self.lines
475
lines=filter_comments(filepath)
476
lines.append((POPFILE,''))
478
self.lines=lines+self.lines
480
raise PreprocError("could not read the file %s"%filepath)
483
error("parsing %s failed"%filepath)
484
traceback.print_exc()
485
def start(self,node,env):
486
debug('preproc: scanning %s (in %s)',node.name,node.parent.name)
488
variant=node.variant(env)
489
bld=node.__class__.bld
491
self.parse_cache=bld.parse_cache
492
except AttributeError:
494
self.parse_cache=bld.parse_cache
497
self.lines=[('define',x)for x in env['DEFLINES']]+self.lines
499
(kind,line)=self.lines.pop(0)
501
self.currentnode_stack.pop()
504
self.process_line(kind,line)
507
debug('preproc: line parsing failed (%s): %s %s',e,line,Utils.ex_stack())
508
def process_line(self,token,line):
510
if ve:debug('preproc: line is %s - %s state is %s',token,line,self.state)
512
if token in['ifdef','ifndef','if']:
513
state.append(undefined)
516
if not token in['else','elif','endif']:
517
if skipped in self.state or ignored in self.state:
520
ret=eval_macro(tokenize(line),self.defs)
521
if ret:state[-1]=accepted
522
else:state[-1]=ignored
524
m=re_mac.search(line)
525
if m and m.group(0)in self.defs:state[-1]=accepted
526
else:state[-1]=ignored
527
elif token=='ifndef':
528
m=re_mac.search(line)
529
if m and m.group(0)in self.defs:state[-1]=ignored
530
else:state[-1]=accepted
531
elif token=='include'or token=='import':
532
(kind,inc)=extract_include(line,self.defs)
533
if inc in self.ban_includes:return
534
if token=='import':self.ban_includes.append(inc)
535
if ve:debug('preproc: include found %s (%s) ',inc,kind)
536
if kind=='"'or not strict_quotes:
539
if state[-1]==accepted:
541
elif state[-1]==ignored:
542
if eval_macro(tokenize(line),self.defs):
545
if state[-1]==accepted:state[-1]=skipped
546
elif state[-1]==ignored:state[-1]=accepted
547
elif token=='define':
548
m=re_mac.search(line)
551
if ve:debug('preproc: define %s %s',name,line)
554
raise PreprocError("invalid define line %s"%line)
556
m=re_mac.search(line)
557
if m and m.group(0)in self.defs:
558
self.defs.__delitem__(m.group(0))
559
elif token=='pragma':
560
if re_pragma_once.search(line.lower()):
561
self.ban_includes.append(self.curfile)
562
def get_deps(node,env,nodepaths=None):
	"""Scan a source file node for its preprocessor dependencies.

	Runs a full c_parser scan starting at *node*.

	:param node: file node to scan
	:param env: build environment used for the scan
	:param nodepaths: optional list of include-path nodes to search
	:return: tuple (nodes, names) — resolved dependency nodes, and include
		names that could not be resolved to nodes
	"""
	# default changed from a mutable [] to None: c_parser.__init__ already
	# normalizes falsy values via 'nodepaths or []', so behavior is identical
	# while avoiding the shared-mutable-default pitfall
	gruik=c_parser(nodepaths)
	gruik.start(node,env)
	return(gruik.nodes,gruik.names)
566
# like re_lines but restricted to #include directives only; used by the
# simplified lines_includes()/get_deps_simple() scanning path
re_inc=re.compile('^[ \t]*(#|%:)[ \t]*(include)[ \t]*(.*)\r*$',re.IGNORECASE|re.MULTILINE)
567
def lines_includes(filename):
568
code=Utils.readf(filename)
570
for(a,b)in trig_def:code=code.split(a).join(b)
571
code=re_nl.sub('',code)
572
code=re_cpp.sub(repl,code)
573
return[(m.group(2),m.group(3))for m in re.finditer(re_inc,code)]
574
def get_deps_simple(node,env,nodepaths=[],defines={}):
578
lst=lines_includes(node.abspath(env))
580
(t,filename)=extract_include(line,defines)
581
if filename in names:
583
if filename.endswith('.moc'):
584
names.append(filename)
589
found=n.find_resource(filename)
591
if not filename in names:
592
names.append(filename)
593
elif not found in nodes: