# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from compiler.ast import Const
from compiler.ast import Dict
from compiler.ast import Discard
from compiler.ast import List
from compiler.ast import Module
from compiler.ast import Node
from compiler.ast import Stmt
import compiler
import copy
import gyp.common
import os.path
import re
import shlex
import subprocess
import sys

# A list of types that are treated as linkable.
linkable_types = ['executable', 'shared_library', 'loadable_module']

# A list of sections that contain links to other targets.
dependency_sections = ['dependencies', 'export_dependent_settings']

# base_path_sections is a list of sections defined by GYP that contain
# pathnames. The generators can provide more keys, the two lists are merged
# into path_sections, but you should call IsPathSection instead of using either
# list directly.
base_path_sections = [

def IsPathSection(section):
  # If section ends in one of these characters, it's applied to a section
  # without the trailing characters. '/' is notably absent from this list,
  # because there's no way for a regular expression to be treated as a path.
  while section[-1:] in ('=', '+', '?', '!'):
    section = section[0:-1]
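
  # For example, a key written as 'include_dirs+' has been reduced to
  # 'include_dirs' by the loop above, so a list-merge suffix never affects
  # whether a section is treated as a path section.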

  if section in path_sections or \
     section.endswith('_dir') or section.endswith('_dirs') or \
     section.endswith('_file') or section.endswith('_files') or \
     section.endswith('_path') or section.endswith('_paths'):
    return True
  return False

# base_non_configuration_keys is a list of key names that belong in the target
# itself and should not be propagated into its configurations.  It is merged
# with a list that can come from the generator to
# create non_configuration_keys.
base_non_configuration_keys = [
  # Sections that must exist inside targets and not configurations.
  'default_configuration',
  'dependencies_original',
  # Sections that can be found inside targets or configurations, but that
  # should not be propagated from targets into their configurations.

non_configuration_keys = []

# Controls how the generator wants the build file paths.
absolute_build_file_paths = False

# Controls whether or not the generator supports multiple toolsets.
multiple_toolsets = False


def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
  """Return a list of all build files included into build_file_path.

  The returned list will contain build_file_path as well as all other files
  that it included, either directly or indirectly. Note that the list may
  contain files that were included into a conditional section that evaluated
  to false and was not merged into build_file_path's dict.

  aux_data is a dict containing a key for each build file or included build
  file. Those keys provide access to dicts whose "included" keys contain
  lists of all other files included by the build file.

  included should be left at its default None value by external callers. It
  is used for recursion.

  The returned list will not contain any duplicate entries. Each build file
  in the list will be relative to the current directory.
  """

  if included == None:
    included = []

  if build_file_path in included:
    return included

  included.append(build_file_path)

  for included_build_file in aux_data[build_file_path].get('included', []):
    GetIncludedBuildFiles(included_build_file, aux_data, included)

  return included
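
# For example, with aux_data such as
#   {'foo.gyp': {'included': ['common.gypi']}, 'common.gypi': {}}
# GetIncludedBuildFiles('foo.gyp', aux_data) returns
# ['foo.gyp', 'common.gypi'].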


def CheckedEval(file_contents):
  """Return the eval of a gyp file.

  The gyp file is restricted to dictionaries and lists only, and
  repeated keys are not allowed.

  Note that this is slower than eval() is.
  """

  ast = compiler.parse(file_contents)
  assert isinstance(ast, Module)
  c1 = ast.getChildren()
  assert isinstance(c1[1], Stmt)
  c2 = c1[1].getChildren()
  assert isinstance(c2[0], Discard)
  c3 = c2[0].getChildren()
  return CheckNode(c3[0], [])
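
# For example, CheckedEval("{'a': 1, 'a': 2}") raises KeyError because the
# key 'a' is repeated, whereas plain eval() would silently keep only one of
# the values.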


def CheckNode(node, keypath):
  if isinstance(node, Dict):
    c = node.getChildren()
    dict = {}
    for n in range(0, len(c), 2):
      assert isinstance(c[n], Const)
      key = c[n].getChildren()[0]
      if key in dict:
        raise KeyError, "Key '" + key + "' repeated at level " + \
              repr(len(keypath) + 1) + " with key path '" + \
              '.'.join(keypath) + "'"
      kp = list(keypath)  # Make a copy of the list for descending this node.
      kp.append(key)
      dict[key] = CheckNode(c[n + 1], kp)
    return dict
  elif isinstance(node, List):
    c = node.getChildren()
    children = []
    for index, child in enumerate(c):
      kp = list(keypath)  # Copy list.
      kp.append(repr(index))
      children.append(CheckNode(child, kp))
    return children
  elif isinstance(node, Const):
    return node.getChildren()[0]
  else:
    raise TypeError, "Unknown AST node at key path '" + '.'.join(keypath) + \


def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes,
  if build_file_path in data:
    return data[build_file_path]

  if os.path.exists(build_file_path):
    build_file_contents = open(build_file_path).read()
  else:
    raise Exception("%s not found (cwd: %s)" % (build_file_path, os.getcwd()))

  build_file_data = None
  build_file_data = CheckedEval(build_file_contents)
  build_file_data = eval(build_file_contents, {'__builtins__': None},
  except SyntaxError, e:
    e.filename = build_file_path
  gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path)

  data[build_file_path] = build_file_data
  aux_data[build_file_path] = {}

  # Scan for includes and merge them in.
  LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
                                aux_data, variables, includes, check)
  LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
                                aux_data, variables, None, check)
  gyp.common.ExceptionAppend(e,
                             'while reading includes of ' + build_file_path)

  return build_file_data


def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
                                  variables, includes, check):
  includes_list.extend(includes)
  if 'includes' in subdict:
    for include in subdict['includes']:
      # "include" is specified relative to subdict_path, so compute the real
      # path to include by appending the provided "include" to the directory
      # in which subdict_path resides.
      relative_include = \
          os.path.normpath(os.path.join(os.path.dirname(subdict_path), include))
      includes_list.append(relative_include)
    # Unhook the includes list, it's no longer needed.
    del subdict['includes']
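
  # For example, if a/b.gyp contains 'includes': ['../common.gypi'], the
  # entry is resolved above to 'a/../common.gypi' and normalized to
  # 'common.gypi' before being merged in below.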

  # Merge in the included files.
  for include in includes_list:
    if not 'included' in aux_data[subdict_path]:
      aux_data[subdict_path]['included'] = []
    aux_data[subdict_path]['included'].append(include)

    gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'" % include)

    LoadOneBuildFile(include, data, aux_data, variables, None,
               subdict_path, include)

  # Recurse into subdictionaries.
  for k, v in subdict.iteritems():
    if v.__class__ == dict:
      LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, variables,
    elif v.__class__ == list:
      LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data, variables,


# This recurses into lists so that it can look for dicts.
def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data,
    if item.__class__ == dict:
      LoadBuildFileIncludesIntoDict(item, sublist_path, data, aux_data,
                                    variables, None, check)
    elif item.__class__ == list:
      LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data,


# Processes toolsets in all the targets. This recurses into condition entries
# since they can contain toolsets as well.
def ProcessToolsetsInDict(data):
  if 'targets' in data:
    target_list = data['targets']
    new_target_list = []
    for target in target_list:
      global multiple_toolsets
      if multiple_toolsets:
        toolsets = target.get('toolsets', ['target'])
      else:
        toolsets = ['target']
      if len(toolsets) > 0:
        # Optimization: only do copies if more than one toolset is specified.
        for build in toolsets[1:]:
          new_target = copy.deepcopy(target)
          new_target['toolset'] = build
          new_target_list.append(new_target)
        target['toolset'] = toolsets[0]
        new_target_list.append(target)
    data['targets'] = new_target_list
  if 'conditions' in data:
    for condition in data['conditions']:
      if isinstance(condition, list):
        for condition_dict in condition[1:]:
          ProcessToolsetsInDict(condition_dict)
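
# For example, when multiple_toolsets is True, a target declaring
# 'toolsets': ['host', 'target'] is split here into two target dicts, one
# with 'toolset': 'host' and one with 'toolset': 'target'.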


# TODO(mark): I don't love this name.  It just means that it's going to load
# a build file that contains targets and is expected to provide a targets dict
# that contains the targets...
def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
  global absolute_build_file_paths

  # If depth is set, predefine the DEPTH variable to be a relative path from
  # this build file's directory to the directory identified by depth.
  # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
  # temporary measure. This should really be addressed by keeping all paths
  # in POSIX until actual project generation.
  d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path))
  variables['DEPTH'] = '.'
  variables['DEPTH'] = d.replace('\\', '/')

  # If the generator needs absolute paths, convert the build file path now.
  if absolute_build_file_paths:
    build_file_path = os.path.abspath(build_file_path)

  if build_file_path in data['target_build_files']:
    return
  data['target_build_files'].add(build_file_path)

  gyp.DebugOutput(gyp.DEBUG_INCLUDES,
                  "Loading Target Build File '%s'" % build_file_path)

  build_file_data = LoadOneBuildFile(build_file_path, data, aux_data, variables,
                                     includes, True, check)

  # Store DEPTH for later use in generators.
  build_file_data['_DEPTH'] = depth

  # Set up the included_files key indicating which .gyp files contributed to
  # this build file.
  if 'included_files' in build_file_data:
    raise KeyError, build_file_path + ' must not contain included_files key'

  included = GetIncludedBuildFiles(build_file_path, aux_data)
  build_file_data['included_files'] = []
  for included_file in included:
    # included_file is relative to the current directory, but it needs to
    # be made relative to build_file_path's directory.
    included_relative = \
        gyp.common.RelativePath(included_file,
                                os.path.dirname(build_file_path))
    build_file_data['included_files'].append(included_relative)

  ProcessToolsetsInDict(build_file_data)

  # Apply "pre"/"early" variable expansions and condition evaluations.
  ProcessVariablesAndConditionsInDict(build_file_data, False, variables,

  # Look at each project's target_defaults dict, and merge settings into
  # targets.
  if 'target_defaults' in build_file_data:
    if 'targets' in build_file_data:
      while index < len(build_file_data['targets']):
        # This procedure needs to give the impression that target_defaults is
        # used as defaults, and the individual targets inherit from that.
        # The individual targets need to be merged into the defaults.  Make
        # a deep copy of the defaults for each target, merge the target dict
        # as found in the input file into that copy, and then hook up the
        # copy with the target-specific data merged into it as the replacement
        # target dict.
        old_target_dict = build_file_data['targets'][index]
        new_target_dict = copy.deepcopy(build_file_data['target_defaults'])
        MergeDicts(new_target_dict, old_target_dict,
                   build_file_path, build_file_path)
        build_file_data['targets'][index] = new_target_dict
      "Unable to find targets in build file %s" % build_file_path
    del build_file_data['target_defaults']
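
  # In the loop above, for example, if target_defaults provides
  # 'cflags': ['-Wall'] and a target lists 'cflags': ['-Werror'], the merged
  # target dict ends up with ['-Wall', '-Werror']: the target is merged on
  # top of a deep copy of the defaults, and lists append by default.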

  # Look for dependencies.  This means that dependency resolution occurs
  # after "pre" conditionals and variable expansion, but before "post" -
  # in other words, you can't put a "dependencies" section inside a "post"
  # conditional within a target.
  if 'targets' in build_file_data:
    for target_dict in build_file_data['targets']:
      if 'dependencies' not in target_dict:
        continue
      for dependency in target_dict['dependencies']:
        other_build_file = \
            gyp.common.ResolveTarget(build_file_path, dependency, None)[0]
        LoadTargetBuildFile(other_build_file, data, aux_data, variables,
                            includes, depth, check)
      gyp.common.ExceptionAppend(
          e, 'while loading dependencies of %s' % build_file_path)


# Look for the bracket that matches the first bracket seen in a
# string, and return the start and end as a tuple.  For example, if
# the input is something like "<(foo <(bar)) blah", then it would
# return (1, 13), indicating the entire string except for the leading
# "<" and trailing " blah".
def FindEnclosingBracketGroup(input):
  brackets = { '}': '{',
  if char in brackets.values():
  if char in brackets.keys():
    last_bracket = stack.pop()
  if last_bracket != brackets[char]:
  return (start, count + 1)


canonical_int_re = re.compile('^(0|-?[1-9][0-9]*)$')
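
# For example, '0', '5', and '-12' match canonical_int_re, while '05', '+5',
# '5.0', and '' do not.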


def IsStrCanonicalInt(string):
  """Returns True if |string| is in its canonical integer form.

  The canonical form is such that str(int(string)) == string.
  """
  if not isinstance(string, str) or not canonical_int_re.match(string):
    return False

  return True


early_variable_re = re.compile('(?P<replace>(?P<type><((!?@?)|\|)?)'
                               '\((?P<is_array>\s*\[?)'
                               '(?P<content>.*?)(\]?)\))')
late_variable_re = re.compile('(?P<replace>(?P<type>>((!?@?)|\|)?)'
                              '\((?P<is_array>\s*\[?)'
                              '(?P<content>.*?)(\]?)\))')
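
# For example, '<(foo)' and '>(foo)' are early and late variable references,
# '<!(echo hi)' runs a command, '<@(list)' expands into list context, and
# '<|(name ...)' names a generated file list; the modifiers combine, as in
# '<!@(cmd)'.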

# Global cache of results from running commands so they don't have to be run
# more than once.
cached_command_results = {}


def FixupPlatformCommand(cmd):
  if sys.platform == 'win32':
    if type(cmd) == list:
      cmd = [re.sub('^cat ', 'type ', cmd[0])] + cmd[1:]
    else:
      cmd = re.sub('^cat ', 'type ', cmd)
  return cmd
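
# For example, on win32 a command such as 'cat foo.txt' is rewritten above to
# 'type foo.txt'; on other platforms commands are returned unchanged.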


def ExpandVariables(input, is_late, variables, build_file):
  # Look for the pattern that gets expanded into variables.
  if not is_late:
    variable_re = early_variable_re
    expansion_symbol = '<'
  else:
    variable_re = late_variable_re
    expansion_symbol = '>'

  input_str = str(input)
  # Do a quick scan to determine if an expensive regex search is warranted.
  if expansion_symbol in input_str:
    # Get the entire list of matches as a list of MatchObject instances.
    # (using findall here would return strings instead of MatchObjects).
    matches = [match for match in variable_re.finditer(input_str)]

    # Reverse the list of matches so that replacements are done right-to-left.
    # That ensures that earlier replacements won't mess up the string in a
    # way that causes later calls to find the earlier substituted text instead
    # of what's intended for replacement.
    for match_group in matches:
      match = match_group.groupdict()
      gyp.DebugOutput(gyp.DEBUG_VARIABLES,
                      "Matches: %s" % repr(match))
      # match['replace'] is the substring to look for, match['type']
      # is the character code for the replacement type (< > <! >! <| >| <@
      # >@ <!@ >!@), match['is_array'] contains a '[' for command
      # arrays, and match['content'] is the name of the variable (< >)
      # or command to run (<! >!).

      # run_command is true if a ! variant is used.
      run_command = '!' in match['type']

      # file_list is true if a | variant is used.
      file_list = '|' in match['type']

      # Capture these now so we can adjust them later.
      replace_start = match_group.start('replace')
      replace_end = match_group.end('replace')

      # Find the ending paren, and re-evaluate the contained string.
      (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:])

      # Adjust the replacement range to match the entire command
      # found by FindEnclosingBracketGroup (since the variable_re
      # probably doesn't match the entire command if it contained
      # nested variables).
      replace_end = replace_start + c_end

      # Find the "real" replacement, matching the appropriate closing
      # paren, and adjust the replacement start and end.
      replacement = input_str[replace_start:replace_end]

      # Figure out what the contents of the variable parens are.
      contents_start = replace_start + c_start + 1
      contents_end = replace_end - 1
      contents = input_str[contents_start:contents_end]

      # Do filter substitution now for <|().
      # Admittedly, this is different than the evaluation order in other
      # contexts. However, since filtration has no chance to run on <|(),
      # this seems like the only obvious way to give them access to filters.
      if file_list:
        processed_variables = copy.deepcopy(variables)
        ProcessListFiltersInDict(contents, processed_variables)
        # Recurse to expand variables in the contents.
        contents = ExpandVariables(contents, is_late,
                                   processed_variables, build_file)
      else:
        # Recurse to expand variables in the contents.
        contents = ExpandVariables(contents, is_late, variables, build_file)

      # Strip off leading/trailing whitespace so that variable matches are
      # simpler below (and because they are rarely needed).
      contents = contents.strip()

      # expand_to_list is true if an @ variant is used.  In that case,
      # the expansion should result in a list.  Note that the caller is then
      # expected to handle a list in return, and not all callers do
      # because not all are working in list context.  Also, for list
      # expansions, there can be no other text besides the variable
      # expansion in the input string.
      expand_to_list = '@' in match['type'] and input_str == replacement
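
      # For example, 'sources': ['<@(extra_sources)'] expands the variable
      # into individual list items, while a reference like 'foo_<(bar)'
      # expands in place to a single string.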

      if run_command or file_list:
        # Find the build file's directory, so commands can be run or file lists
        # generated relative to it.
        build_file_dir = os.path.dirname(build_file)
        if build_file_dir == '':
          # If build_file is just a leaf filename indicating a file in the
          # current directory, build_file_dir might be an empty string.  Set
          # it to None to signal to subprocess.Popen that it should run the
          # command in the current directory.
          build_file_dir = None

        # Support <|(listfile.txt ...) which generates a file
        # containing items from a gyp list, generated at gyp time.
        # This works around actions/rules which have more inputs than will
        # fit on the command line.
        if type(contents) == list:
          contents_list = contents
        else:
          contents_list = contents.split(' ')
        replacement = contents_list[0]
        if not os.path.isabs(path):
          path = os.path.join(build_file_dir, path)
        f = gyp.common.WriteOnDiff(path)
        for i in contents_list[1:]:

        if match['is_array']:
          contents = eval(contents)

        # Check for a cached value to avoid executing commands, or generating
        # file lists more than once.
        # TODO(http://code.google.com/p/gyp/issues/detail?id=112): It is
        # possible that the command being invoked depends on the current
        # directory. For that case the syntax needs to be extended so that the
        # directory is also used in cache_key (it becomes a tuple).
        # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory,
        # someone could author a set of GYP files where each time the command
        # is invoked it produces different output by design. When the need
        # arises, the syntax should be extended to support turning off caching
        # of a command's output so it is run every time.
        cache_key = str(contents)
        cached_value = cached_command_results.get(cache_key, None)
        if cached_value is None:
          gyp.DebugOutput(gyp.DEBUG_VARIABLES,
                          "Executing command '%s' in directory '%s'" %
                          (contents, build_file_dir))

          # Fix up command with platform-specific workarounds.
          contents = FixupPlatformCommand(contents)
          p = subprocess.Popen(contents, shell=use_shell,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               stdin=subprocess.PIPE,

          (p_stdout, p_stderr) = p.communicate('')

          if p.wait() != 0 or p_stderr:
            sys.stderr.write(p_stderr)
            # Simulate check_call behavior, since check_call only exists
            # in python 2.5 and later.
            raise Exception("Call to '%s' returned exit status %d." %
                            (contents, p.returncode))
          replacement = p_stdout.rstrip()

          cached_command_results[cache_key] = replacement
        else:
          gyp.DebugOutput(gyp.DEBUG_VARIABLES,
                          "Had cache value for command '%s' in directory '%s'" %
                          (contents, build_file_dir))
          replacement = cached_value
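
        # For example, '<!(python -c "print 2+2")' runs the command once,
        # yields the string '4', and any identical command text seen later in
        # this gyp run reuses the cached result instead of re-running it.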

      if not contents in variables:
        raise KeyError, 'Undefined variable ' + contents + \
      replacement = variables[contents]

      if isinstance(replacement, list):
        for item in replacement:
          if not isinstance(item, str) and not isinstance(item, int):
            raise TypeError, 'Variable ' + contents + \
                             ' must expand to a string or list of strings; ' + \
                             'list contains a ' + \
                             item.__class__.__name__
        # Run through the list and handle variable expansions in it.  Since
        # the list is guaranteed not to contain dicts, this won't do anything
        # with conditions sections.
        ProcessVariablesAndConditionsInList(replacement, is_late, variables,
      elif not isinstance(replacement, str) and \
           not isinstance(replacement, int):
        raise TypeError, 'Variable ' + contents + \
                         ' must expand to a string or list of strings; ' + \
                         'found a ' + replacement.__class__.__name__

      if expand_to_list:
        # Expanding in list context.  It's guaranteed that there's only one
        # replacement to do in |input_str| and that it's this replacement.  See
        # above.
        if isinstance(replacement, list):
          # If it's already a list, make a copy.
          output = replacement[:]
        else:
          # Split it the same way sh would split arguments.
          output = shlex.split(str(replacement))
      else:
        # Expanding in string context.
        encoded_replacement = ''
        if isinstance(replacement, list):
          # When expanding a list into string context, turn the list items
          # into a string in a way that will work with a subprocess call.
          #
          # TODO(mark): This isn't completely correct.  This should
          # call a generator-provided function that observes the
          # proper list-to-argument quoting rules on a specific
          # platform instead of just calling the POSIX encoding
          # routine.
          encoded_replacement = gyp.common.EncodePOSIXShellList(replacement)
        else:
          encoded_replacement = replacement

        output = output[:replace_start] + str(encoded_replacement) + \
                 output[replace_end:]

      # Prepare for the next match iteration.

    # Look for more matches now that we've replaced some, to deal with
    # expanding local variables (variables defined in the same
    # variables block as this one).
    gyp.DebugOutput(gyp.DEBUG_VARIABLES,
                    "Found output %s, recursing." % repr(output))
    if isinstance(output, list):
      new_output = []
      for item in output:
        new_output.append(ExpandVariables(item, is_late, variables, build_file))
      output = new_output
    else:
      output = ExpandVariables(output, is_late, variables, build_file)

  # Convert all strings that are canonically-represented integers into integers.
  if isinstance(output, list):
    for index in xrange(0, len(output)):
      if IsStrCanonicalInt(output[index]):
        output[index] = int(output[index])
  elif IsStrCanonicalInt(output):
    output = int(output)

  gyp.DebugOutput(gyp.DEBUG_VARIABLES,
                  "Expanding %s to %s" % (repr(input), repr(output)))

  return output


def ProcessConditionsInDict(the_dict, is_late, variables, build_file):
  # Process a 'conditions' or 'target_conditions' section in the_dict,
  # depending on is_late.  If is_late is False, 'conditions' is used.
  #
  # Each item in a conditions list consists of cond_expr, a string expression
  # evaluated as the condition, and true_dict, a dict that will be merged into
  # the_dict if cond_expr evaluates to true.  Optionally, a third item,
  # false_dict, may be present.  false_dict is merged into the_dict if
  # cond_expr evaluates to false.
  #
  # Any dict merged into the_dict will be recursively processed for nested
  # conditionals and other expansions, also according to is_late, immediately
  # prior to being merged.
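  #
  # For example:
  #   'conditions': [
  #     ['OS=="mac"', {'defines': ['MAC']}, {'defines': ['NOT_MAC']}],
  #   ]
  # merges the first dict into the_dict when the expression evaluates to
  # true, and the optional second dict when it evaluates to false.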

  if not is_late:
    conditions_key = 'conditions'
  else:
    conditions_key = 'target_conditions'

  if not conditions_key in the_dict:
    return

  conditions_list = the_dict[conditions_key]
  # Unhook the conditions list, it's no longer needed.
  del the_dict[conditions_key]

  for condition in conditions_list:
    if not isinstance(condition, list):
      raise TypeError, conditions_key + ' must be a list'
    if len(condition) != 2 and len(condition) != 3:
      # It's possible that condition[0] won't work in which case this
      # attempt will raise its own IndexError.  That's probably fine.
      raise IndexError, conditions_key + ' ' + condition[0] + \
                        ' must be length 2 or 3, not ' + str(len(condition))

    [cond_expr, true_dict] = condition[0:2]
    false_dict = None
    if len(condition) == 3:
      false_dict = condition[2]

    # Do expansions on the condition itself.  Since the condition can naturally
    # contain variable references without needing to resort to GYP expansion
    # syntax, this is of dubious value for variables, but someone might want to
    # use a command expansion directly inside a condition.
    cond_expr_expanded = ExpandVariables(cond_expr, is_late, variables,
                                         build_file)
    if not isinstance(cond_expr_expanded, str) and \
       not isinstance(cond_expr_expanded, int):
      raise ValueError, \
            'Variable expansion in this context permits str and int ' + \
            'only, found ' + cond_expr_expanded.__class__.__name__

    try:
      ast_code = compile(cond_expr_expanded, '<string>', 'eval')

      if eval(ast_code, {'__builtins__': None}, variables):
        merge_dict = true_dict
      else:
        merge_dict = false_dict
    except SyntaxError, e:
      syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
                                 (str(e.args[0]), e.text, build_file, e.offset),
                                 e.filename, e.lineno, e.offset, e.text)
    except NameError, e:
      gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
                                 (cond_expr_expanded, build_file))

    if merge_dict != None:
      # Expand variables and nested conditionals in the merge_dict before
      # merging it into the current dict.
      ProcessVariablesAndConditionsInDict(merge_dict, is_late,
                                          variables, build_file)

      MergeDicts(the_dict, merge_dict, build_file, build_file)


def LoadAutomaticVariablesFromDict(variables, the_dict):
  # Any keys with plain string, int, or list values in the_dict become
  # automatic variables.  The variable name is the key name with a "_"
  # character prepended.
  for key, value in the_dict.iteritems():
    if isinstance(value, str) or isinstance(value, int) or \
       isinstance(value, list):
      variables['_' + key] = value


def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
  # Any keys in the_dict's "variables" dict, if it has one, become variables.
  # The variable name is the key name in the "variables" dict.
  # Variables that end with the % character are set only if they are unset in
  # the variables dict.  the_dict_key is the name of the key that accesses
  # the_dict in the_dict's parent dict.  If the_dict's parent is not a dict
  # (it could be a list or it could be parentless because it is a root dict),
  # the_dict_key will be None.
  for key, value in the_dict.get('variables', {}).iteritems():
    if not isinstance(value, str) and not isinstance(value, int) and \
       not isinstance(value, list):
      continue

    if key.endswith('%'):
      variable_name = key[:-1]
      if variable_name in variables:
        # If the variable is already set, don't set it.
        continue
      if the_dict_key == 'variables' and variable_name in the_dict:
        # If the variable is set without a % in the_dict, and the_dict is a
        # variables dict (making |variables| a variables sub-dict of a
        # variables dict), use the_dict's definition.
        value = the_dict[variable_name]
    else:
      variable_name = key

    variables[variable_name] = value
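
# For example, a 'variables' dict entry of {'use_foo%': 0} only supplies a
# default: if use_foo was already set by an enclosing scope or an earlier
# definition, the existing value wins; otherwise use_foo becomes 0.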


def ProcessVariablesAndConditionsInDict(the_dict, is_late, variables_in,
                                        build_file, the_dict_key=None):
  """Handle all variable and command expansion and conditional evaluation.

  This function is the public entry point for all variable expansions and
  conditional evaluations.  The variables_in dictionary will not be modified.
  """

  # Make a copy of the variables_in dict that can be modified during the
  # loading of automatics and the loading of the variables dict.
  variables = variables_in.copy()
  LoadAutomaticVariablesFromDict(variables, the_dict)

  if 'variables' in the_dict:
    # Make sure all the local variables are added to the variables
    # list before we process them so that you can reference one
    # variable from another.  They will be fully expanded by recursion
    # in ExpandVariables.
    for key, value in the_dict['variables'].iteritems():
      variables[key] = value

    # Handle the associated variables dict first, so that any variable
    # references within can be resolved prior to using them as variables.
    # Pass a copy of the variables dict to avoid having it be tainted.
    # Otherwise, it would have extra automatics added for everything that
    # should just be an ordinary variable in this scope.
    ProcessVariablesAndConditionsInDict(the_dict['variables'], is_late,
                                        variables, build_file, 'variables')

  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)

  for key, value in the_dict.iteritems():
    # Skip "variables", which was already processed if present.
    if key != 'variables' and isinstance(value, str):
      expanded = ExpandVariables(value, is_late, variables, build_file)
      if not isinstance(expanded, str) and not isinstance(expanded, int):
        raise ValueError, \
              'Variable expansion in this context permits str and int ' + \
              'only, found ' + expanded.__class__.__name__ + ' for ' + key
      the_dict[key] = expanded

  # Variable expansion may have resulted in changes to automatics.  Reload.
  # TODO(mark): Optimization: only reload if changes were made.
  variables = variables_in.copy()
  LoadAutomaticVariablesFromDict(variables, the_dict)
  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)

  # Process conditions in this dict.  This is done after variable expansion
  # so that conditions may take advantage of expanded variables.  For example,
  # if the_dict contains:
  #   {'type':       '<(library_type)',
  #    'conditions': [['_type=="static_library"', { ... }]]},
  # _type, as used in the condition, will only be set to the value of
  # library_type if variable expansion is performed before condition
  # processing.  However, condition processing should occur prior to recursion
  # so that variables (both automatic and "variables" dict type) may be
  # adjusted by conditions sections, merged into the_dict, and have the
  # intended impact on contained dicts.
  #
  # This arrangement means that a "conditions" section containing a "variables"
  # section will only have those variables effective in subdicts, not in
  # the_dict.  The workaround is to put a "conditions" section within a
  # "variables" section.  For example:
  #   {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]],
  #    'defines':    ['<(define)'],
  #    'my_subdict': {'defines': ['<(define)']}},
  # will not result in "IS_MAC" being appended to the "defines" list in the
  # current scope but would result in it being appended to the "defines" list
  # within "my_subdict".  By comparison:
  #   {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]},
  #    'defines':    ['<(define)'],
  #    'my_subdict': {'defines': ['<(define)']}},
  # will append "IS_MAC" to both "defines" lists.

  # Evaluate conditions sections, allowing variable expansions within them
  # as well as nested conditionals.  This will process a 'conditions' or
  # 'target_conditions' section, perform appropriate merging and recursive
  # conditional and variable processing, and then remove the conditions section
  # from the_dict if it is present.
  ProcessConditionsInDict(the_dict, is_late, variables, build_file)

  # Conditional processing may have resulted in changes to automatics or the
  # variables dict.  Reload.
  variables = variables_in.copy()
  LoadAutomaticVariablesFromDict(variables, the_dict)
  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)

  # Recurse into child dicts, or process child lists which may result in
  # further recursion into descendant dicts.
  for key, value in the_dict.iteritems():
    # Skip "variables" and string values, which were already processed if
    # present.
    if key == 'variables' or isinstance(value, str):
      continue
    if isinstance(value, dict):
      # Pass a copy of the variables dict so that subdicts can't influence
      # parents.
      ProcessVariablesAndConditionsInDict(value, is_late, variables,
    elif isinstance(value, list):
      # The list itself can't influence the variables dict, and
      # ProcessVariablesAndConditionsInList will make copies of the variables
      # dict if it needs to pass it to something that can influence it.  No
      # copy is necessary here.
      ProcessVariablesAndConditionsInList(value, is_late, variables,
    elif not isinstance(value, int):
      raise TypeError, 'Unknown type ' + value.__class__.__name__ + \


def ProcessVariablesAndConditionsInList(the_list, is_late, variables,
  # Iterate using an index so that new values can be assigned into the_list.
  while index < len(the_list):
    item = the_list[index]
    if isinstance(item, dict):
      # Make a copy of the variables dict so that it won't influence anything
      # outside of its own scope.
      ProcessVariablesAndConditionsInDict(item, is_late, variables, build_file)
    elif isinstance(item, list):
      ProcessVariablesAndConditionsInList(item, is_late, variables, build_file)
    elif isinstance(item, str):
      expanded = ExpandVariables(item, is_late, variables, build_file)
      if isinstance(expanded, str) or isinstance(expanded, int):
        the_list[index] = expanded
      elif isinstance(expanded, list):
        for expanded_item in expanded:
          the_list.insert(index, expanded_item)

        # index now identifies the next item to examine.  Continue right now
        # without falling into the index increment below.
        'Variable expansion in this context permits strings and ' + \
        'lists only, found ' + expanded.__class__.__name__ + ' at ' + \
    elif not isinstance(item, int):
      raise TypeError, 'Unknown type ' + item.__class__.__name__ + \


def BuildTargetsDict(data):
  """Builds a dict mapping fully-qualified target names to their target dicts.

  |data| is a dict of loaded build files, keyed by pathname relative to the
  current directory.  Values in |data| are build file contents.  For each
  |data| value with a "targets" key, the value of the "targets" key is taken
  as a list containing target dicts.  Each target's fully-qualified name is
  constructed from the pathname of the build file (|data| key) and its
  "target_name" property.  These fully-qualified names are used as the keys
  in the returned dict.  These keys provide access to the target dicts,
  the dicts in the "targets" lists.
  """

  targets = {}
  for build_file in data['target_build_files']:
    for target in data[build_file].get('targets', []):
      target_name = gyp.common.QualifiedTarget(build_file,
                                               target['target_name'],
      if target_name in targets:
        raise KeyError, 'Duplicate target definitions for ' + target_name
      targets[target_name] = target

  return targets


def QualifyDependencies(targets):
  """Make dependency links fully-qualified relative to the current directory.

  |targets| is a dict mapping fully-qualified target names to their target
  dicts.  For each target in this dict, keys known to contain dependency
  links are examined, and any dependencies referenced will be rewritten
  so that they are fully-qualified and relative to the current directory.
  All rewritten dependencies are suitable for use as keys to |targets| or a
  similar dict.
  """

  for target, target_dict in targets.iteritems():
    target_build_file = gyp.common.BuildFile(target)
    toolset = target_dict['toolset']
    for dependency_key in dependency_sections:
      dependencies = target_dict.get(dependency_key, [])
      for index in xrange(0, len(dependencies)):
        dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
            target_build_file, dependencies[index], toolset)
        global multiple_toolsets
        if not multiple_toolsets:
          # Ignore toolset specification in the dependency if it is specified.
          dep_toolset = toolset
        dependency = gyp.common.QualifiedTarget(dep_file,
                                                dep_target,
                                                dep_toolset)
        dependencies[index] = dependency

        # Make sure anything appearing in a list other than "dependencies" also
        # appears in the "dependencies" list.
        if dependency_key != 'dependencies' and \
           dependency not in target_dict['dependencies']:
          raise KeyError, 'Found ' + dependency + ' in ' + dependency_key + \
                          ' of ' + target + ', but not in dependencies'
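
# For example, a dependency written as 'other.gyp:lib' inside a/build.gyp is
# rewritten here to a fully-qualified form such as 'a/other.gyp:lib#target'
# (the exact toolset suffix depends on the target's toolset).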


def ExpandWildcardDependencies(targets, data):
  """Expands dependencies specified as build_file:*.

  For each target in |targets|, examines sections containing links to other
  targets.  If any such section contains a link of the form build_file:*, it
  is taken as a wildcard link, and is expanded to list each target in
  build_file.  The |data| dict provides access to build file dicts.

  Any target that does not wish to be included by wildcard can provide an
  optional "suppress_wildcard" key in its target dict.  When present and
  true, a wildcard dependency link will not include such targets.

  All dependency names, including the keys to |targets| and the values in each
  dependency list, must be qualified when this function is called.
  """

  for target, target_dict in targets.iteritems():
    toolset = target_dict['toolset']
    target_build_file = gyp.common.BuildFile(target)
    for dependency_key in dependency_sections:
      dependencies = target_dict.get(dependency_key, [])

      # Loop this way instead of "for dependency in" or "for index in xrange"
      # because the dependencies list will be modified within the loop body.
      index = 0
      while index < len(dependencies):
        (dependency_build_file, dependency_target, dependency_toolset) = \
            gyp.common.ParseQualifiedTarget(dependencies[index])
        if dependency_target != '*' and dependency_toolset != '*':
          # Not a wildcard.  Keep it moving.

        if dependency_build_file == target_build_file:
          # It's an error for a target to depend on all other targets in
          # the same file, because a target cannot depend on itself.
          raise KeyError, 'Found wildcard in ' + dependency_key + ' of ' + \
                          target + ' referring to same build file'

        # Take the wildcard out and adjust the index so that the next
        # dependency in the list will be processed the next time through the
        # loop.
        del dependencies[index]

        # Loop through the targets in the other build file, adding them to
        # this target's list of dependencies in place of the removed
        # wildcard.
        dependency_target_dicts = data[dependency_build_file]['targets']
        for dependency_target_dict in dependency_target_dicts:
          if int(dependency_target_dict.get('suppress_wildcard', False)):
            continue
          dependency_target_name = dependency_target_dict['target_name']
          if (dependency_target != '*' and
              dependency_target != dependency_target_name):
            continue
          dependency_target_toolset = dependency_target_dict['toolset']
          if (dependency_toolset != '*' and
              dependency_toolset != dependency_target_toolset):
            continue
          dependency = gyp.common.QualifiedTarget(dependency_build_file,
                                                  dependency_target_name,
                                                  dependency_target_toolset)

          dependencies.insert(index, dependency)


class DependencyGraphNode(object):
  """

  Attributes:
    ref: A reference to an object that this DependencyGraphNode represents.
    dependencies: List of DependencyGraphNodes on which this one depends.
    dependents: List of DependencyGraphNodes that depend on this one.
  """

  class CircularException(Exception):
    pass

  def __init__(self, ref):
    self.ref = ref
    self.dependencies = []
    self.dependents = []

  def FlattenToList(self):
    # flat_list is the sorted list of dependencies - actually, the list items
    # are the "ref" attributes of DependencyGraphNodes.  Every target will
    # appear in flat_list after all of its dependencies, and before all of its
    # dependents.
    flat_list = []

    # in_degree_zeros is the list of DependencyGraphNodes that have no
    # dependencies not in flat_list.  Initially, it is a copy of the children
    # of this node, because when the graph was built, nodes with no
    # dependencies were made implicit dependents of the root node.
    in_degree_zeros = self.dependents[:]
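
    # For example, with targets a -> b -> c (a depends on b, b depends on c),
    # c enters flat_list first, then b, then a, so each ref appears only
    # after all of its dependencies.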

    while in_degree_zeros:
      # Nodes in in_degree_zeros have no dependencies not in flat_list, so they
      # can be appended to flat_list.  Take these nodes out of in_degree_zeros
      # as work progresses, so that the next node to process from the list can
      # always be accessed at a consistent position.
      node = in_degree_zeros.pop(0)
      flat_list.append(node.ref)

      # Look at dependents of the node just added to flat_list.  Some of them
      # may now belong in in_degree_zeros.
      for node_dependent in node.dependents:
        is_in_degree_zero = True
        for node_dependent_dependency in node_dependent.dependencies:
          if not node_dependent_dependency.ref in flat_list:
            # The dependent has one or more dependencies not in flat_list.
            # There will be more chances to add it to flat_list when examining
            # it again as a dependent of those other dependencies, provided
            # that there are no cycles.
            is_in_degree_zero = False

        if is_in_degree_zero:
          # All of the dependent's dependencies are already in flat_list.  Add
          # it to in_degree_zeros where it will be processed in a future
          # iteration of the outer loop.
          in_degree_zeros.append(node_dependent)

  def DirectDependencies(self, dependencies=None):
    """Returns a list of just direct dependencies."""
    if dependencies == None:
      dependencies = []

    for dependency in self.dependencies:
      # Check for None, corresponding to the root node.
      if dependency.ref != None and dependency.ref not in dependencies:
        dependencies.append(dependency.ref)

    return dependencies

  def _AddImportedDependencies(self, targets, dependencies=None):
    """Given a list of direct dependencies, adds indirect dependencies that
    other dependencies have declared to export their settings.

    This method does not operate on self.  Rather, it operates on the list
    of dependencies in the |dependencies| argument.  For each dependency in
    that list, if any declares that it exports the settings of one of its
    own dependencies, those dependencies whose settings are "passed through"
    are added to the list.  As new items are added to the list, they too will
    be processed, so it is possible to import settings through multiple levels
    of dependencies.

    This method is not terribly useful on its own; it depends on being
    "primed" with a list of direct dependencies such as one provided by
    DirectDependencies.  DirectAndImportedDependencies is intended to be the
    public entry point.
    """

    if dependencies == None:
      dependencies = []

    index = 0
    while index < len(dependencies):
      dependency = dependencies[index]
      dependency_dict = targets[dependency]
      # Add any dependencies whose settings should be imported to the list
      # if not already present.  Newly-added items will be checked for
      # their own imports when the list iteration reaches them.
      # Rather than simply appending new items, insert them after the
      # dependency that exported them.  This is done to more closely match
      # the depth-first method used by DeepDependencies.
      add_index = 1
      for imported_dependency in \
          dependency_dict.get('export_dependent_settings', []):
        if imported_dependency not in dependencies:
          dependencies.insert(index + add_index, imported_dependency)
          add_index = add_index + 1
      index = index + 1

    return dependencies

  def DirectAndImportedDependencies(self, targets, dependencies=None):
    """Returns a list of a target's direct dependencies and all indirect
    dependencies that a dependency has advertised settings should be exported
    through the dependency for.
    """

    dependencies = self.DirectDependencies(dependencies)
    return self._AddImportedDependencies(targets, dependencies)

  def DeepDependencies(self, dependencies=None):
    """Returns a list of all of a target's dependencies, recursively."""
    if dependencies == None:
      dependencies = []

    for dependency in self.dependencies:
      # Check for None, corresponding to the root node.
      if dependency.ref != None and dependency.ref not in dependencies:
        dependencies.append(dependency.ref)
        dependency.DeepDependencies(dependencies)

    return dependencies

  def LinkDependencies(self, targets, dependencies=None, initial=True):
    """Returns a list of dependency targets that are linked into this target.

    This function has a split personality, depending on the setting of
    |initial|.  Outside callers should always leave |initial| at its default
    setting.

    When adding a target to the list of dependencies, this function will
    recurse into itself with |initial| set to False, to collect dependencies
    that are linked into the linkable target for which the list is being built.
    """

    if dependencies == None:
      dependencies = []

    # Check for None, corresponding to the root node.
    if self.ref == None:
      return dependencies

    # It's kind of sucky that |targets| has to be passed into this function,
    # but that's presently the easiest way to access the target dicts so that
    # this function can find target types.

    if not 'target_name' in targets[self.ref]:
      raise Exception("Missing 'target_name' field in target.")

    target_type = targets[self.ref]['type']
      raise Exception("Missing 'type' field in target %s" %
                      targets[self.ref]['target_name'])

    is_linkable = target_type in linkable_types

    if initial and not is_linkable:
      # If this is the first target being examined and it's not linkable,
      # return an empty list of link dependencies, because the link
      # dependencies are intended to apply to the target itself (initial is
      # True) and this target won't be linked.
      return dependencies

    # Executables and loadable modules are already fully and finally linked.
    # Nothing else can be a link dependency of them, there can only be
    # dependencies in the sense that a dependent target might run an
    # executable or load the loadable_module.
    if not initial and target_type in ('executable', 'loadable_module'):
      return dependencies

    # The target is linkable, add it to the list of link dependencies.
    if self.ref not in dependencies:
      if target_type != 'none':
        # Special case: "none" type targets don't produce any linkable products
        # and shouldn't be exposed as link dependencies, although dependencies
        # of "none" type targets may still be link dependencies.
        dependencies.append(self.ref)
      if initial or not is_linkable:
        # If this is a subsequent target and it's linkable, don't look any
        # further for linkable dependencies, as they'll already be linked into
        # this linkable target.  Always look at dependencies of the initial
        # target, and always look at dependencies of non-linkables.
        for dependency in self.dependencies:
          dependency.LinkDependencies(targets, dependencies, False)

    return dependencies


def BuildDependencyList(targets):
  # Create a DependencyGraphNode for each target.  Put it into a dict for easy
  # access.
  dependency_nodes = {}
  for target, spec in targets.iteritems():
    if not target in dependency_nodes:
      dependency_nodes[target] = DependencyGraphNode(target)

  # Set up the dependency links.  Targets that have no dependencies are treated
  # as dependent on root_node.
  root_node = DependencyGraphNode(None)
  for target, spec in targets.iteritems():
    target_node = dependency_nodes[target]
    target_build_file = gyp.common.BuildFile(target)
    if not 'dependencies' in spec or len(spec['dependencies']) == 0:
      target_node.dependencies = [root_node]
      root_node.dependents.append(target_node)
    else:
      dependencies = spec['dependencies']
      for index in xrange(0, len(dependencies)):
        try:
          dependency = dependencies[index]
          dependency_node = dependency_nodes[dependency]
          target_node.dependencies.append(dependency_node)
          dependency_node.dependents.append(target_node)
        except KeyError, e:
          gyp.common.ExceptionAppend(e,
                                     'while trying to load target %s' % target)
          raise

  flat_list = root_node.FlattenToList()

  # If there's anything left unvisited, there must be a circular dependency
  # (cycle).  If you need to figure out what's wrong, look for elements of
  # targets that are not in flat_list.
  if len(flat_list) != len(targets):
    raise DependencyGraphNode.CircularException, \
        'Some targets not reachable, cycle in dependency graph detected'

  return [dependency_nodes, flat_list]


def VerifyNoGYPFileCircularDependencies(targets):
  # Create a DependencyGraphNode for each gyp file containing a target.  Put
  # it into a dict for easy access.
  dependency_nodes = {}
  for target in targets.iterkeys():
    build_file = gyp.common.BuildFile(target)
    if not build_file in dependency_nodes:
      dependency_nodes[build_file] = DependencyGraphNode(build_file)

  # Set up the dependency links.
  for target, spec in targets.iteritems():
    build_file = gyp.common.BuildFile(target)
    build_file_node = dependency_nodes[build_file]
    target_dependencies = spec.get('dependencies', [])
    for dependency in target_dependencies:
      try:
        dependency_build_file = gyp.common.BuildFile(dependency)
        if dependency_build_file == build_file:
          # A .gyp file is allowed to refer back to itself.
          continue
        dependency_node = dependency_nodes[dependency_build_file]
        if dependency_node not in build_file_node.dependencies:
          build_file_node.dependencies.append(dependency_node)
          dependency_node.dependents.append(build_file_node)
      except KeyError, e:
        gyp.common.ExceptionAppend(
            e, 'while computing dependencies of .gyp file %s' % build_file)
        raise

  # Files that have no dependencies are treated as dependent on root_node.
  root_node = DependencyGraphNode(None)
  for build_file_node in dependency_nodes.itervalues():
    if len(build_file_node.dependencies) == 0:
      build_file_node.dependencies.append(root_node)
      root_node.dependents.append(build_file_node)

  flat_list = root_node.FlattenToList()

  # If there's anything left unvisited, there must be a circular dependency
  # (cycle).
  if len(flat_list) != len(dependency_nodes):
    bad_files = []
    for file in dependency_nodes.iterkeys():
      if not file in flat_list:
        bad_files.append(file)
    raise DependencyGraphNode.CircularException, \
        'Some files not reachable, cycle in .gyp file dependency graph ' + \
        'detected involving some or all of: ' + \


def DoDependentSettings(key, flat_list, targets, dependency_nodes):
  # key should be one of all_dependent_settings, direct_dependent_settings,
  # or link_settings.

  for target in flat_list:
    target_dict = targets[target]
    build_file = gyp.common.BuildFile(target)

    if key == 'all_dependent_settings':
      dependencies = dependency_nodes[target].DeepDependencies()
    elif key == 'direct_dependent_settings':
      dependencies = \
          dependency_nodes[target].DirectAndImportedDependencies(targets)
    elif key == 'link_settings':
      dependencies = dependency_nodes[target].LinkDependencies(targets)
    else:
      raise KeyError, "DoDependentSettings doesn't know how to determine " + \
                      'dependencies for ' + key

    for dependency in dependencies:
      dependency_dict = targets[dependency]
      if not key in dependency_dict:
        continue
      dependency_build_file = gyp.common.BuildFile(dependency)
      MergeDicts(target_dict, dependency_dict[key],
                 build_file, dependency_build_file)


def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes):
  # Recompute target "dependencies" properties.  For each static library
  # target, remove "dependencies" entries referring to other static libraries,
  # unless the dependency has the "hard_dependency" attribute set.  For each
  # linkable target, add a "dependencies" entry referring to all of the
  # target's computed list of link dependencies (including static libraries)
  # if no such entry is already present.
  for target in flat_list:
    target_dict = targets[target]
    target_type = target_dict['type']

    if target_type == 'static_library':
      if not 'dependencies' in target_dict:
        continue

      target_dict['dependencies_original'] = target_dict.get(
          'dependencies', [])[:]

      index = 0
      while index < len(target_dict['dependencies']):
        dependency = target_dict['dependencies'][index]
        dependency_dict = targets[dependency]
        if dependency_dict['type'] == 'static_library' and \
           (not 'hard_dependency' in dependency_dict or \
            not dependency_dict['hard_dependency']):
          # A static library should not depend on another static library unless
          # the dependency relationship is "hard," which should only be done
          # when a dependent relies on some side effect other than just the
          # build product, like a rule or action output.  Take the dependency
          # out of the list, and don't increment index because the next
          # dependency to analyze will shift into the index formerly occupied
          # by the one being removed.
          del target_dict['dependencies'][index]
        else:
          index = index + 1

      # If the dependencies list is empty, it's not needed, so unhook it.
      if len(target_dict['dependencies']) == 0:
        del target_dict['dependencies']

    elif target_type in linkable_types:
      # Get a list of dependency targets that should be linked into this
      # target.  Add them to the dependencies list if they're not already
      # present.

      link_dependencies = dependency_nodes[target].LinkDependencies(targets)
      for dependency in link_dependencies:
        if dependency == target:
          continue
        if not 'dependencies' in target_dict:
          target_dict['dependencies'] = []
        if not dependency in target_dict['dependencies']:
          target_dict['dependencies'].append(dependency)


# Initialize this here to speed up MakePathRelative.
exception_re = re.compile(r'''["']?[-/$<>]''')


def MakePathRelative(to_file, fro_file, item):
  # If item is a relative path, it's relative to the build file dict that it's
  # coming from.  Fix it up to make it relative to the build file dict that
  # it's going into.
  # Exception: any |item| that begins with these special characters is
  # returned without modification.
  #   /   Used when a path is already absolute (shortcut optimization;
  #       such paths would be returned as absolute anyway)
  #   $   Used for build environment variables
  #   -   Used for some build environment flags (such as -lapr-1 in a
  #       "libraries" section)
  #   <   Used for our own variable and command expansions (see ExpandVariables)
  #   >   Used for our own variable and command expansions (see ExpandVariables)
  #   "/' Used when a value is quoted.  If these are present, then we
  #       check the second character instead.
  if to_file == fro_file or exception_re.match(item):
    return item
  else:
    # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
    # temporary measure. This should really be addressed by keeping all paths
    # in POSIX until actual project generation.
    return os.path.normpath(os.path.join(
        gyp.common.RelativePath(os.path.dirname(fro_file),
                                os.path.dirname(to_file)),
        item)).replace('\\', '/')
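
# For example, MakePathRelative('chrome/chrome.gyp', 'base/base.gyp',
# 'foo/bar.c') returns '../base/foo/bar.c': the item is re-expressed relative
# to the directory of the file it is being merged into.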


def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
  prepend_index = 0

  for item in fro:
    singleton = False
    if isinstance(item, str) or isinstance(item, int):
      # The cheap and easy case.
      if is_paths:
        to_item = MakePathRelative(to_file, fro_file, item)
      else:
        to_item = item

      if not isinstance(item, str) or not item.startswith('-'):
        # Any string that doesn't begin with a "-" is a singleton - it can
        # only appear once in a list, to be enforced by the list merge append
        # or prepend.
        singleton = True
    elif isinstance(item, dict):
      # Make a copy of the dictionary, continuing to look for paths to fix.
      # The other intelligent aspects of merge processing won't apply because
      # item is being merged into an empty dict.
      to_item = {}
      MergeDicts(to_item, item, to_file, fro_file)
    elif isinstance(item, list):
      # Recurse, making a copy of the list.  If the list contains any
      # descendant dicts, path fixing will occur.  Note that here, custom
      # values for is_paths and append are dropped; those are only to be
      # applied to |to| and |fro|, not sublists of |fro|.  append shouldn't
      # matter anyway because the new |to_item| list is empty.
      to_item = []
      MergeLists(to_item, item, to_file, fro_file)
    else:
      raise TypeError, \
          'Attempt to merge list item of unsupported type ' + \
          item.__class__.__name__

    if append:
      # If appending a singleton that's already in the list, don't append.
      # This ensures that the earliest occurrence of the item will stay put.
      if not singleton or not to_item in to:
        to.append(to_item)
    else:
      # If prepending a singleton that's already in the list, remove the
      # existing instance and proceed with the prepend.  This ensures that the
      # item appears at the earliest possible position in the list.
      while singleton and to_item in to:
        to.remove(to_item)

      # Don't just insert everything at index 0.  That would prepend the new
      # items to the list in reverse order, which would be an unwelcome
      # surprise.
      to.insert(prepend_index, to_item)
      prepend_index = prepend_index + 1


def MergeDicts(to, fro, to_file, fro_file):
  # I wanted to name the parameter "from" but it's a Python keyword...
  for k, v in fro.iteritems():
    # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give
    # copy semantics.  Something else may want to merge from the |fro| dict
    # later, and having the same dict ref pointed to twice in the tree isn't
    # what anyone wants considering that the dicts may subsequently be
    # modified.
    if isinstance(v, str) or isinstance(v, int):
      if not (isinstance(to[k], str) or isinstance(to[k], int)):
    elif v.__class__ != to[k].__class__:
          'Attempt to merge dict value of type ' + v.__class__.__name__ + \
          ' into incompatible type ' + to[k].__class__.__name__ + \

    if isinstance(v, str) or isinstance(v, int):
      # Overwrite the existing value, if any.  Cheap and easy.
      is_path = IsPathSection(k)
      if is_path:
        to[k] = MakePathRelative(to_file, fro_file, v)
      else:
        to[k] = v
    elif isinstance(v, dict):
      # Recurse, guaranteeing copies will be made of objects that require it.
      if not k in to:
        to[k] = {}
      MergeDicts(to[k], v, to_file, fro_file)
    elif isinstance(v, list):
      # Lists in dicts can be merged with different policies, depending on
      # how the key in the "from" dict (k, the from-key) is written.
      #
      # If the from-key has          ...the to-list will have this action
      # this character appended:...  applied when receiving the from-list:
      #   ?                          set, only if to-list does not yet exist
      #
      # This logic is list-specific, but since it relies on the associated
      # dict key, it's checked in this dict-oriented function.
        lists_incompatible = [list_base, list_base + '?']
        lists_incompatible = [list_base + '=', list_base + '?']
        lists_incompatible = [list_base, list_base + '=', list_base + '+']
        lists_incompatible = [list_base + '=', list_base + '?']

      # Some combinations of merge policies appearing together are meaningless.
      # It's stupid to replace and append simultaneously, for example.  Append
      # and prepend are the only policies that can coexist.
1654
for list_incompatible in lists_incompatible:
1655
if list_incompatible in fro:
1656
raise KeyError, 'Incompatible list policies ' + k + ' and ' + \
1661
# If the key ends in "?", the list will only be merged if it doesn't
1664
if not isinstance(to[list_base], list):
1665
# This may not have been checked above if merging in a list with an
1666
# extension character.
1668
'Attempt to merge dict value of type ' + v.__class__.__name__ + \
1669
' into incompatible type ' + to[list_base].__class__.__name__ + \
1670
' for key ' + list_base + '(' + k + ')'
1674
# Call MergeLists, which will make copies of objects that require it.
1675
# MergeLists can recurse back into MergeDicts, although this will be
1676
# to make copies of dicts (with paths fixed), there will be no
1677
# subsequent dict "merging" once entering a list because lists are
1678
# always replaced, appended to, or prepended to.
1679
is_paths = IsPathSection(list_base)
1680
MergeLists(to[list_base], v, to_file, fro_file, is_paths, append)
1683
'Attempt to merge dict value of unsupported type ' + \
1684
v.__class__.__name__ + ' for key ' + k
1687
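# Illustrative sketch (not part of the original file): list merge policies in
# MergeDicts, selected by the suffix on the from-key.  Keys and values below
# are hypothetical.
#
#   to = {'defines': ['A']}
#   MergeDicts(to, {'defines': ['B']}, 'x.gyp', 'x.gyp')    # append
#   # to['defines'] == ['A', 'B']
#   MergeDicts(to, {'defines=': ['C']}, 'x.gyp', 'x.gyp')   # replace
#   # to['defines'] == ['C']
#   MergeDicts(to, {'defines+': ['D']}, 'x.gyp', 'x.gyp')   # prepend
#   # to['defines'] == ['D', 'C']
#   MergeDicts(to, {'defines?': ['E']}, 'x.gyp', 'x.gyp')   # only if missing
#   # to['defines'] == ['D', 'C']  (already present, left alone)

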
def MergeConfigWithInheritance(new_configuration_dict, build_file,
                               target_dict, configuration, visited):
  # Skip if previously visited.
  if configuration in visited:
    return

  # Look at this configuration.
  configuration_dict = target_dict['configurations'][configuration]

  # Merge in parents.
  for parent in configuration_dict.get('inherit_from', []):
    MergeConfigWithInheritance(new_configuration_dict, build_file,
                               target_dict, parent, visited + [configuration])

  # Merge it into the new config.
  MergeDicts(new_configuration_dict, configuration_dict,
             build_file, build_file)

  # Drop abstract.
  if 'abstract' in new_configuration_dict:
    del new_configuration_dict['abstract']


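# Illustrative sketch (not part of the original file): configuration
# inheritance as consumed by MergeConfigWithInheritance.  The configuration
# names and defines are hypothetical.
#
#   'configurations': {
#     'Common': {'abstract': 1, 'defines': ['COMMON']},
#     'Debug': {'inherit_from': ['Common'], 'defines': ['DEBUG']},
#   },
#
# When 'Debug' is built up, 'Common' is merged in first, so 'Debug' ends up
# with defines ['COMMON', 'DEBUG'] and no 'abstract' key.

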
def SetUpConfigurations(target, target_dict):
  global non_configuration_keys
  # key_suffixes is a list of key suffixes that might appear on key names.
  # These suffixes are handled in conditional evaluations (for =, +, and ?)
  # and rules/exclude processing (for ! and /).  Keys with these suffixes
  # should be treated the same as keys without.
  key_suffixes = ['=', '+', '?', '!', '/']

  build_file = gyp.common.BuildFile(target)

  # Provide a single configuration by default if none exists.
  # TODO(mark): Signal an error if default_configurations exists but
  # configurations does not.
  if not 'configurations' in target_dict:
    target_dict['configurations'] = {'Default': {}}
  if not 'default_configuration' in target_dict:
    concrete = [i for i in target_dict['configurations'].keys()
                if not target_dict['configurations'][i].get('abstract')]
    target_dict['default_configuration'] = sorted(concrete)[0]

  for configuration in target_dict['configurations'].keys():
    old_configuration_dict = target_dict['configurations'][configuration]
    # Skip abstract configurations (saves work only).
    if old_configuration_dict.get('abstract'):
      continue
    # Configurations inherit (most) settings from the enclosing target scope.
    # Get the inheritance relationship right by making a copy of the target
    # dict.
    new_configuration_dict = copy.deepcopy(target_dict)

    # Take out the bits that don't belong in a "configurations" section.
    # Since configuration setup is done before conditional, exclude, and rules
    # processing, be careful with handling of the suffix characters used in
    # conditionals and rules.
    delete_keys = []
    for key in new_configuration_dict:
      key_ext = key[-1:]
      if key_ext in key_suffixes:
        key_base = key[:-1]
      else:
        key_base = key
      if key_base in non_configuration_keys:
        delete_keys.append(key)

    for key in delete_keys:
      del new_configuration_dict[key]

    # Merge in configuration (with all its parents first).
    MergeConfigWithInheritance(new_configuration_dict, build_file,
                               target_dict, configuration, [])

    # Put the new result back into the target dict as a configuration.
    target_dict['configurations'][configuration] = new_configuration_dict

  # Now drop all the abstract ones.
  for configuration in target_dict['configurations'].keys():
    old_configuration_dict = target_dict['configurations'][configuration]
    if old_configuration_dict.get('abstract'):
      del target_dict['configurations'][configuration]

  # Now that all of the target's configurations have been built, go through
  # the target dict's keys and remove everything that's been moved into a
  # "configurations" section.
  delete_keys = []
  for key in target_dict:
    key_ext = key[-1:]
    if key_ext in key_suffixes:
      key_base = key[:-1]
    else:
      key_base = key
    if not key_base in non_configuration_keys:
      delete_keys.append(key)
  for key in delete_keys:
    del target_dict[key]


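# Illustrative sketch (not part of the original file): the effect of
# SetUpConfigurations on a hypothetical target dict, assuming the usual
# non-configuration keys ('target_name', 'type', 'configurations', ...).
#
#   Before: {'target_name': 'foo', 'type': 'executable',
#            'defines': ['X'],
#            'configurations': {'Debug': {'defines': ['DEBUG']}}}
#   After:  the target-level 'defines' moves into each configuration, so
#           'configurations' becomes {'Debug': {'defines': ['X', 'DEBUG']}},
#           'default_configuration' is set to 'Debug', and the target itself
#           keeps only its non-configuration keys.

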
def ProcessListFiltersInDict(name, the_dict):
  """Process regular expression and exclusion-based filters on lists.

  An exclusion list is in a dict key named with a trailing "!", like
  "sources!".  Every item in such a list is removed from the associated
  main list, which in this example, would be "sources".  Removed items are
  placed into a "sources_excluded" list in the dict.

  Regular expression (regex) filters are contained in dict keys named with a
  trailing "/", such as "sources/" to operate on the "sources" list.  Regex
  filters in a dict take the form:
    'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'],
                  ['include', '_mac\\.cc$'] ],
  The first filter says to exclude all files ending in _linux.cc, _mac.cc, and
  _win.cc.  The second filter then includes all files ending in _mac.cc that
  are now or were once in the "sources" list.  Items matching an "exclude"
  filter are subject to the same processing as would occur if they were listed
  by name in an exclusion list (ending in "!").  Items matching an "include"
  filter are brought back into the main list if previously excluded by an
  exclusion list or exclusion regex filter.  Subsequent matching "exclude"
  patterns can still cause items to be excluded after matching an "include".
  """

  # Look through the dictionary for any lists whose keys end in "!" or "/".
  # These are lists that will be treated as exclude lists and regular
  # expression-based exclude/include lists.  Collect the lists that are
  # needed first, looking for the lists that they operate on, and assemble
  # them into |lists|.  This is done in a separate loop up front, because
  # the _included and _excluded keys need to be added to the_dict, and that
  # can't be done while iterating through it.

  lists = []
  del_lists = []
  for key, value in the_dict.iteritems():
    operation = key[-1]
    if operation != '!' and operation != '/':
      continue

    if not isinstance(value, list):
      raise ValueError, name + ' key ' + key + ' must be list, not ' + \
                        value.__class__.__name__

    list_key = key[:-1]
    if list_key not in the_dict:
      # This happens when there's a list like "sources!" but no corresponding
      # "sources" list.  Since there's nothing for it to operate on, queue up
      # the "sources!" list for deletion now.
      del_lists.append(key)
      continue

    if not isinstance(the_dict[list_key], list):
      raise ValueError, name + ' key ' + list_key + \
                        ' must be list, not ' + \
                        value.__class__.__name__ + ' when applying ' + \
                        {'!': 'exclusion', '/': 'regex'}[operation]

    if not list_key in lists:
      lists.append(list_key)

  # Delete the lists that are known to be unneeded at this point.
  for del_list in del_lists:
    del the_dict[del_list]

  for list_key in lists:
    the_list = the_dict[list_key]

    # Initialize the list_actions list, which is parallel to the_list.  Each
    # item in list_actions identifies whether the corresponding item in
    # the_list should be excluded, unconditionally preserved (included), or
    # whether no exclusion or inclusion has been applied.  Items for which
    # no exclusion or inclusion has been applied (yet) have value -1, items
    # excluded have value 0, and items included have value 1.  Includes and
    # excludes override previous actions.  All items in list_actions are
    # initialized to -1 because no excludes or includes have been processed
    # yet.
    list_actions = list((-1,) * len(the_list))

    exclude_key = list_key + '!'
    if exclude_key in the_dict:
      for exclude_item in the_dict[exclude_key]:
        for index in xrange(0, len(the_list)):
          if exclude_item == the_list[index]:
            # This item matches the exclude_item, so set its action to 0
            # (exclude).
            list_actions[index] = 0

      # The "whatever!" list is no longer needed, dump it.
      del the_dict[exclude_key]

    regex_key = list_key + '/'
    if regex_key in the_dict:
      for regex_item in the_dict[regex_key]:
        [action, pattern] = regex_item
        pattern_re = re.compile(pattern)

        for index in xrange(0, len(the_list)):
          list_item = the_list[index]
          if pattern_re.search(list_item):
            # Regular expression match.

            if action == 'exclude':
              # This item matches an exclude regex, so set its value to 0
              # (exclude).
              list_actions[index] = 0
            elif action == 'include':
              # This item matches an include regex, so set its value to 1
              # (include).
              list_actions[index] = 1
            else:
              # This is an action that doesn't make any sense.
              raise ValueError, 'Unrecognized action ' + action + ' in ' + \
                                name + ' key ' + key

      # The "whatever/" list is no longer needed, dump it.
      del the_dict[regex_key]

    # Add excluded items to the excluded list.
    #
    # Note that exclude_key ("sources!") is different from excluded_key
    # ("sources_excluded").  The exclude_key list is input and it was already
    # processed and deleted; the excluded_key list is output and it's about
    # to be created.
    excluded_key = list_key + '_excluded'
    if excluded_key in the_dict:
      raise KeyError, \
          name + ' key ' + excluded_key + ' must not be present prior ' + \
          ' to applying exclusion/regex filters for ' + list_key

    excluded_list = []

    # Go backwards through the list_actions list so that as items are deleted,
    # the indices of items that haven't been seen yet don't shift.  That means
    # that things need to be prepended to excluded_list to maintain them in the
    # same order that they existed in the_list.
    for index in xrange(len(list_actions) - 1, -1, -1):
      if list_actions[index] == 0:
        # Dump anything with action 0 (exclude).  Keep anything with action 1
        # (include) or -1 (no include or exclude seen for the item).
        excluded_list.insert(0, the_list[index])
        del the_list[index]

    # If anything was excluded, put the excluded list into the_dict at
    # excluded_key.
    if len(excluded_list) > 0:
      the_dict[excluded_key] = excluded_list

  # Now recurse into subdicts and lists that may contain dicts.
  for key, value in the_dict.iteritems():
    if isinstance(value, dict):
      ProcessListFiltersInDict(key, value)
    elif isinstance(value, list):
      ProcessListFiltersInList(key, value)


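# Illustrative sketch (not part of the original file): exclusion and regex
# filters as processed by ProcessListFiltersInDict.  The file names and
# patterns below are hypothetical.
#
#   the_dict = {'sources': ['a.cc', 'b_win.cc', 'c_mac.cc'],
#               'sources!': ['a.cc'],
#               'sources/': [['exclude', '_(win|mac)\\.cc$'],
#                            ['include', '_mac\\.cc$']]}
#   ProcessListFiltersInDict('example', the_dict)
#   # the_dict == {'sources': ['c_mac.cc'],
#   #              'sources_excluded': ['a.cc', 'b_win.cc']}

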
def ProcessListFiltersInList(name, the_list):
  for item in the_list:
    if isinstance(item, dict):
      ProcessListFiltersInDict(name, item)
    elif isinstance(item, list):
      ProcessListFiltersInList(name, item)


def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
  """Ensures that the rules sections in target_dict are valid and consistent,
  and determines which sources they apply to.

  Arguments:
    target: string, name of target.
    target_dict: dict, target spec containing "rules" and "sources" lists.
    extra_sources_for_rules: a list of keys to scan for rule matches in
        addition to 'sources'.
  """

  # Dicts to map between values found in rules' 'rule_name' and 'extension'
  # keys and the rule dicts themselves.
  rule_names = {}
  rule_extensions = {}

  rules = target_dict.get('rules', [])
  for rule in rules:
    # Make sure that there's no conflict among rule names and extensions.
    rule_name = rule['rule_name']
    if rule_name in rule_names:
      raise KeyError, 'rule %s exists in duplicate, target %s' % \
                      (rule_name, target)
    rule_names[rule_name] = rule

    rule_extension = rule['extension']
    if rule_extension in rule_extensions:
      raise KeyError, ('extension %s associated with multiple rules, ' +
                       'target %s rules %s and %s') % \
                      (rule_extension, target,
                       rule_extensions[rule_extension]['rule_name'],
                       rule_name)
    rule_extensions[rule_extension] = rule

    # Make sure rule_sources isn't already there.  It's going to be
    # created below if needed.
    if 'rule_sources' in rule:
      raise KeyError, \
          'rule_sources must not exist in input, target %s rule %s' % \
          (target, rule_name)
    extension = rule['extension']

    rule_sources = []
    source_keys = ['sources']
    source_keys.extend(extra_sources_for_rules)
    for source_key in source_keys:
      for source in target_dict.get(source_key, []):
        (source_root, source_extension) = os.path.splitext(source)
        if source_extension.startswith('.'):
          source_extension = source_extension[1:]
        if source_extension == extension:
          rule_sources.append(source)

    if len(rule_sources) > 0:
      rule['rule_sources'] = rule_sources


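# Illustrative sketch (not part of the original file): a hypothetical rule and
# the rule_sources list that ValidateRulesInTarget builds for it.
#
#   target_dict = {'sources': ['parser.y', 'main.cc'],
#                  'rules': [{'rule_name': 'bison',
#                             'extension': 'y',
#                             'outputs': ['<(RULE_INPUT_ROOT).cc']}]}
#   ValidateRulesInTarget('my_target', target_dict, [])
#   # target_dict['rules'][0]['rule_sources'] == ['parser.y']

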
def ValidateActionsInTarget(target, target_dict, build_file):
  '''Validates the inputs to the actions in a target.'''
  target_name = target_dict.get('target_name')
  actions = target_dict.get('actions', [])
  for action in actions:
    action_name = action.get('action_name')
    if not action_name:
      raise Exception("Anonymous action in target %s.  "
                      "An action must have an 'action_name' field." %
                      target_name)
    inputs = action.get('inputs', [])


def ValidateRunAsInTarget(target, target_dict, build_file):
  target_name = target_dict.get('target_name')
  run_as = target_dict.get('run_as')
  if not run_as:
    return
  if not isinstance(run_as, dict):
    raise Exception("The 'run_as' in target %s from file %s should be a "
                    "dictionary." %
                    (target_name, build_file))
  action = run_as.get('action')
  if not action:
    raise Exception("The 'run_as' in target %s from file %s must have an "
                    "'action' section." %
                    (target_name, build_file))
  if not isinstance(action, list):
    raise Exception("The 'action' for 'run_as' in target %s from file %s "
                    "must be a list." %
                    (target_name, build_file))
  working_directory = run_as.get('working_directory')
  if working_directory and not isinstance(working_directory, str):
    raise Exception("The 'working_directory' for 'run_as' in target %s "
                    "in file %s should be a string." %
                    (target_name, build_file))
  environment = run_as.get('environment')
  if environment and not isinstance(environment, dict):
    raise Exception("The 'environment' for 'run_as' in target %s "
                    "in file %s should be a dictionary." %
                    (target_name, build_file))


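# Illustrative sketch (not part of the original file): the shape of a 'run_as'
# section that passes ValidateRunAsInTarget.  The values are hypothetical.
#
#   'run_as': {
#     'action': ['python', 'run_tests.py', '--verbose'],
#     'working_directory': 'test',
#     'environment': {'LANG': 'en_US.UTF-8'},
#   },

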
def TurnIntIntoStrInDict(the_dict):
  """Given dict the_dict, recursively converts all integers into strings.
  """
  # Use items instead of iteritems because there's no need to try to look at
  # reinserted keys and their associated values.
  for k, v in the_dict.items():
    if isinstance(v, int):
      v = str(v)
      the_dict[k] = v
    elif isinstance(v, dict):
      TurnIntIntoStrInDict(v)
    elif isinstance(v, list):
      TurnIntIntoStrInList(v)

    if isinstance(k, int):
      the_dict[str(k)] = v
      del the_dict[k]


def TurnIntIntoStrInList(the_list):
  """Given list the_list, recursively converts all integers into strings.
  """
  for index in xrange(0, len(the_list)):
    item = the_list[index]
    if isinstance(item, int):
      the_list[index] = str(item)
    elif isinstance(item, dict):
      TurnIntIntoStrInDict(item)
    elif isinstance(item, list):
      TurnIntIntoStrInList(item)


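# Note (not part of the original file): Load below expects the generator to
# supply a generator_input_info dict with at least these keys, all of which
# are read in the setup code that follows:
#   'path_sections', 'non_configuration_keys', 'generator_handles_variants',
#   'generator_wants_absolute_build_file_paths',
#   'generator_supports_multiple_toolsets', 'extra_sources_for_rules'

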
def Load(build_files, variables, includes, depth, generator_input_info, check,
         circular_check):
  # Set up path_sections and non_configuration_keys with the default data plus
  # the generator-specific data.
  global path_sections
  path_sections = base_path_sections[:]
  path_sections.extend(generator_input_info['path_sections'])

  global non_configuration_keys
  non_configuration_keys = base_non_configuration_keys[:]
  non_configuration_keys.extend(generator_input_info['non_configuration_keys'])

  # TODO(mark) handle variants if the generator doesn't want them directly.
  generator_handles_variants = \
      generator_input_info['generator_handles_variants']

  global absolute_build_file_paths
  absolute_build_file_paths = \
      generator_input_info['generator_wants_absolute_build_file_paths']

  global multiple_toolsets
  multiple_toolsets = generator_input_info[
      'generator_supports_multiple_toolsets']

  # A generator can have other lists (in addition to sources) be processed
  # for rules.
  extra_sources_for_rules = generator_input_info['extra_sources_for_rules']

  # Load build files.  This loads every target-containing build file into
  # the |data| dictionary such that the keys to |data| are build file names,
  # and the values are the entire build file contents after "early" or "pre"
  # processing has been done and includes have been resolved.
  # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as
  # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps
  # track of the keys corresponding to "target" files.
  data = {'target_build_files': set()}
  aux_data = {}
  for build_file in build_files:
    # Normalize paths everywhere.  This is important because paths will be
    # used as keys to the data dict and for references between input files.
    build_file = os.path.normpath(build_file)
    try:
      LoadTargetBuildFile(build_file, data, aux_data, variables, includes,
                          depth, check)
    except Exception, e:
      gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file)
      raise

  # Build a dict to access each target's subdict by qualified name.
  targets = BuildTargetsDict(data)

  # Fully qualify all dependency links.
  QualifyDependencies(targets)

  # Expand dependencies specified as build_file:*.
  ExpandWildcardDependencies(targets, data)

  if circular_check:
    # Make sure that any targets in a.gyp don't contain dependencies in other
    # .gyp files that further depend on a.gyp.
    VerifyNoGYPFileCircularDependencies(targets)

  [dependency_nodes, flat_list] = BuildDependencyList(targets)

  # Handle dependent settings of various types.
  for settings_type in ['all_dependent_settings',
                        'direct_dependent_settings',
                        'link_settings']:
    DoDependentSettings(settings_type, flat_list, targets, dependency_nodes)

    # Take out the dependent settings now that they've been published to all
    # of the targets that require them.
    for target in flat_list:
      if settings_type in targets[target]:
        del targets[target][settings_type]

  # Make sure static libraries don't declare dependencies on other static
  # libraries, but that linkables depend on all unlinked static libraries
  # that they need so that their link steps will be correct.
  AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes)

  # Apply "post"/"late"/"target" variable expansions and condition evaluations.
  for target in flat_list:
    target_dict = targets[target]
    build_file = gyp.common.BuildFile(target)
    ProcessVariablesAndConditionsInDict(target_dict, True, variables,
                                        build_file)

  # Move everything that can go into a "configurations" section into one.
  for target in flat_list:
    target_dict = targets[target]
    SetUpConfigurations(target, target_dict)

  # Apply exclude (!) and regex (/) list filters.
  for target in flat_list:
    target_dict = targets[target]
    ProcessListFiltersInDict(target, target_dict)

  # Make sure that the rules make sense, and build up rule_sources lists as
  # needed.  Not all generators will need to use the rule_sources lists, but
  # some may, and it seems best to build the list in a common spot.
  # Also validate actions and run_as elements in targets.
  for target in flat_list:
    target_dict = targets[target]
    build_file = gyp.common.BuildFile(target)
    ValidateRulesInTarget(target, target_dict, extra_sources_for_rules)
    ValidateRunAsInTarget(target, target_dict, build_file)
    ValidateActionsInTarget(target, target_dict, build_file)

  # Generators might not expect ints.  Turn them into strs.
  TurnIntIntoStrInDict(data)

  # TODO(mark): Return |data| for now because the generator needs a list of
  # build files that came in.  In the future, maybe it should just accept
  # a list, and not the whole data dict.
  return [flat_list, targets, data]