From 389a0df0eb72f44b57d68eb10acf896c4a9279d2 Mon Sep 17 00:00:00 2001 From: Ben Noordhuis Date: Sat, 8 Oct 2016 10:30:04 +0200 Subject: [PATCH 1/4] tools: update gyp to 702ac58 --- tools/gyp/AUTHORS | 2 + tools/gyp/DEPS | 9 +- tools/gyp/README.md | 4 + tools/gyp/pylib/gyp/MSVSSettings.py | 1 + tools/gyp/pylib/gyp/MSVSUtil.py | 3 +- tools/gyp/pylib/gyp/MSVSVersion.py | 24 +- tools/gyp/pylib/gyp/common.py | 13 +- tools/gyp/pylib/gyp/generator/analyzer.py | 293 ++++++++++++++++------ tools/gyp/pylib/gyp/generator/cmake.py | 65 +++-- tools/gyp/pylib/gyp/generator/make.py | 22 +- tools/gyp/pylib/gyp/generator/msvs.py | 69 ++++- tools/gyp/pylib/gyp/generator/ninja.py | 141 ++++++++--- tools/gyp/pylib/gyp/generator/xcode.py | 61 ++++- tools/gyp/pylib/gyp/input.py | 35 ++- tools/gyp/pylib/gyp/mac_tool.py | 186 ++++++++++---- tools/gyp/pylib/gyp/msvs_emulation.py | 13 +- tools/gyp/pylib/gyp/win_tool.py | 18 +- tools/gyp/pylib/gyp/xcode_emulation.py | 238 +++++++++++++++--- tools/gyp/pylib/gyp/xcode_ninja.py | 25 +- tools/gyp/pylib/gyp/xcodeproj_file.py | 120 +++++++-- tools/gyp/tools/pretty_gyp.py | 27 +- 21 files changed, 1069 insertions(+), 300 deletions(-) create mode 100644 tools/gyp/README.md diff --git a/tools/gyp/AUTHORS b/tools/gyp/AUTHORS index 9389ca0a23e48f..727df6d30fb325 100644 --- a/tools/gyp/AUTHORS +++ b/tools/gyp/AUTHORS @@ -9,3 +9,5 @@ Steven Knight Ryan Norton David J. Sankel Eric N. Vander Weele +Tom Freudenberg +Julien Brianceau diff --git a/tools/gyp/DEPS b/tools/gyp/DEPS index 2e1120f274687d..167fb779b0e1be 100644 --- a/tools/gyp/DEPS +++ b/tools/gyp/DEPS @@ -3,8 +3,7 @@ # (You don't need to use gclient for normal GYP development work.) vars = { - "chrome_trunk": "http://src.chromium.org/svn/trunk", - "googlecode_url": "http://%s.googlecode.com/svn", + "chromium_git": "https://chromium.googlesource.com/", } deps = { @@ -13,12 +12,12 @@ deps = { deps_os = { "win": { "third_party/cygwin": - Var("chrome_trunk") + "/deps/third_party/cygwin@66844", + Var("chromium_git") + "chromium/deps/cygwin@4fbd5b9", "third_party/python_26": - Var("chrome_trunk") + "/tools/third_party/python_26@89111", + Var("chromium_git") + "chromium/deps/python_26@5bb4080", "src/third_party/pefile": - (Var("googlecode_url") % "pefile") + "/trunk@63", + Var("chromium_git") + "external/pefile@72c6ae4", }, } diff --git a/tools/gyp/README.md b/tools/gyp/README.md new file mode 100644 index 00000000000000..c0d73ac9587af3 --- /dev/null +++ b/tools/gyp/README.md @@ -0,0 +1,4 @@ +GYP can Generate Your Projects. +=================================== + +Documents are available at [gyp.gsrc.io](https://gyp.gsrc.io), or you can check out ```md-pages``` branch to read those documents offline. 
diff --git a/tools/gyp/pylib/gyp/MSVSSettings.py b/tools/gyp/pylib/gyp/MSVSSettings.py index 4985756bdde76a..8ae19180ea271b 100644 --- a/tools/gyp/pylib/gyp/MSVSSettings.py +++ b/tools/gyp/pylib/gyp/MSVSSettings.py @@ -592,6 +592,7 @@ def _ValidateSettings(validators, settings, stderr): _Same(_compile, 'UseFullPaths', _boolean) # /FC _Same(_compile, 'WholeProgramOptimization', _boolean) # /GL _Same(_compile, 'XMLDocumentationFileName', _file_name) +_Same(_compile, 'CompileAsWinRT', _boolean) # /ZW _Same(_compile, 'AssemblerOutput', _Enumeration(['NoListing', diff --git a/tools/gyp/pylib/gyp/MSVSUtil.py b/tools/gyp/pylib/gyp/MSVSUtil.py index 0b32e911807844..96dea6c2c9d27f 100644 --- a/tools/gyp/pylib/gyp/MSVSUtil.py +++ b/tools/gyp/pylib/gyp/MSVSUtil.py @@ -14,6 +14,7 @@ 'loadable_module': 'dll', 'shared_library': 'dll', 'static_library': 'lib', + 'windows_driver': 'sys', } @@ -110,7 +111,7 @@ def ShardTargets(target_list, target_dicts): else: new_target_dicts[t] = target_dicts[t] # Shard dependencies. - for t in new_target_dicts: + for t in sorted(new_target_dicts): for deptype in ('dependencies', 'dependencies_original'): dependencies = copy.copy(new_target_dicts[t].get(deptype, [])) new_dependencies = [] diff --git a/tools/gyp/pylib/gyp/MSVSVersion.py b/tools/gyp/pylib/gyp/MSVSVersion.py index d9bfa684fa30c2..edaf6eed001847 100644 --- a/tools/gyp/pylib/gyp/MSVSVersion.py +++ b/tools/gyp/pylib/gyp/MSVSVersion.py @@ -68,17 +68,19 @@ def DefaultToolset(self): of a user override.""" return self.default_toolset - def SetupScript(self, target_arch): + def _SetupScriptInternal(self, target_arch): """Returns a command (with arguments) to be used to set up the environment.""" - # Check if we are running in the SDK command line environment and use - # the setup script from the SDK if so. |target_arch| should be either - # 'x86' or 'x64'. + # If WindowsSDKDir is set and SetEnv.Cmd exists then we are using the + # depot_tools build tools and should run SetEnv.Cmd to set up the + # environment. The check for WindowsSDKDir alone is not sufficient because + # this is set by running vcvarsall.bat. assert target_arch in ('x86', 'x64') sdk_dir = os.environ.get('WindowsSDKDir') - if self.sdk_based and sdk_dir: - return [os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.Cmd')), - '/' + target_arch] + if sdk_dir: + setup_path = os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.Cmd')) + if self.sdk_based and sdk_dir and os.path.exists(setup_path): + return [setup_path, '/' + target_arch] else: # We don't use VC/vcvarsall.bat for x86 because vcvarsall calls # vcvars32, which it can only find if VS??COMNTOOLS is set, which it @@ -106,6 +108,14 @@ def SetupScript(self, target_arch): return [os.path.normpath( os.path.join(self.path, 'VC/vcvarsall.bat')), arg] + def SetupScript(self, target_arch): + script_data = self._SetupScriptInternal(target_arch) + script_path = script_data[0] + if not os.path.exists(script_path): + raise Exception('%s is missing - make sure VC++ tools are installed.' % + script_path) + return script_data + def _RegistryQueryBase(sysdir, key, value): """Use reg.exe to read a particular key. 
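A minimal usage sketch for the `SetupScript()` wrapper introduced above (illustrative only, not part of the patch): it assumes gyp/pylib is importable, a Windows host with VC++ installed, and gyp's existing `MSVSVersion.SelectVisualStudioVersion()` helper, which this hunk does not touch.

```python
# Illustrative sketch, not part of the patch. Assumes gyp/pylib is on
# sys.path and that SelectVisualStudioVersion() resolves an installed
# Visual Studio version on this machine.
from gyp import MSVSVersion

version = MSVSVersion.SelectVisualStudioVersion()
try:
  # With the change above, SetupScript() verifies the resolved script exists,
  # returning e.g. ['C:\\...\\VC\\vcvarsall.bat', 'amd64'] for an x64 build.
  setup_cmd = version.SetupScript('x64')
except Exception as e:
  # Raised by the new wrapper when the VC++ setup script is missing on disk.
  print(e)
```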
diff --git a/tools/gyp/pylib/gyp/common.py b/tools/gyp/pylib/gyp/common.py index d482a20df3cdd9..a1e1db5f1239ff 100644 --- a/tools/gyp/pylib/gyp/common.py +++ b/tools/gyp/pylib/gyp/common.py @@ -425,13 +425,15 @@ def GetFlavor(params): return 'freebsd' if sys.platform.startswith('openbsd'): return 'openbsd' + if sys.platform.startswith('netbsd'): + return 'netbsd' if sys.platform.startswith('aix'): return 'aix' return 'linux' -def CopyTool(flavor, out_path): +def CopyTool(flavor, out_path, generator_flags={}): """Finds (flock|mac|win)_tool.gyp in the gyp directory and copies it to |out_path|.""" # aix and solaris just need flock emulation. mac and win use more complicated @@ -451,11 +453,18 @@ def CopyTool(flavor, out_path): with open(source_path) as source_file: source = source_file.readlines() + # Set custom header flags. + header = '# Generated by gyp. Do not edit.\n' + mac_toolchain_dir = generator_flags.get('mac_toolchain_dir', None) + if flavor == 'mac' and mac_toolchain_dir: + header += "import os;\nos.environ['DEVELOPER_DIR']='%s'\n" \ + % mac_toolchain_dir + # Add header and write it out. tool_path = os.path.join(out_path, 'gyp-%s-tool' % prefix) with open(tool_path, 'w') as tool_file: tool_file.write( - ''.join([source[0], '# Generated by gyp. Do not edit.\n'] + source[1:])) + ''.join([source[0], header] + source[1:])) # Make file executable. os.chmod(tool_path, 0755) diff --git a/tools/gyp/pylib/gyp/generator/analyzer.py b/tools/gyp/pylib/gyp/generator/analyzer.py index f403d4e266b048..921c1a6b714328 100644 --- a/tools/gyp/pylib/gyp/generator/analyzer.py +++ b/tools/gyp/pylib/gyp/generator/analyzer.py @@ -7,23 +7,59 @@ the generator flag config_path) the path of a json file that dictates the files and targets to search for. The following keys are supported: files: list of paths (relative) of the files to search for. -targets: list of targets to search for. The target names are unqualified. +test_targets: unqualified target names to search for. Any target in this list +that depends upon a file in |files| is output regardless of the type of target +or chain of dependencies. +additional_compile_targets: Unqualified targets to search for in addition to +test_targets. Targets in the combined list that depend upon a file in |files| +are not necessarily output. For example, if the target is of type none then the +target is not output (but one of the descendants of the target will be). The following is output: error: only supplied if there is an error. -targets: the set of targets passed in via targets that either directly or - indirectly depend upon the set of paths supplied in files. -build_targets: minimal set of targets that directly depend on the changed - files and need to be built. The expectation is this set of targets is passed - into a build step. +compile_targets: minimal set of targets that directly or indirectly (for + targets of type none) depend on the files in |files| and is one of the + supplied targets or a target that one of the supplied targets depends on. + The expectation is this set of targets is passed into a build step. This list + always contains the output of test_targets as well. +test_targets: set of targets from the supplied |test_targets| that either + directly or indirectly depend upon a file in |files|. This list if useful + if additional processing needs to be done for certain targets after the + build, such as running tests. 
status: outputs one of three values: none of the supplied files were found, one of the include files changed so that it should be assumed everything - changed (in this case targets and build_targets are not output) or at + changed (in this case test_targets and compile_targets are not output) or at least one file was found. -invalid_targets: list of supplied targets thare were not found. +invalid_targets: list of supplied targets that were not found. + +Example: +Consider a graph like the following: + A D + / \ +B C +A depends upon both B and C, A is of type none and B and C are executables. +D is an executable, has no dependencies and nothing depends on it. +If |additional_compile_targets| = ["A"], |test_targets| = ["B", "C"] and +files = ["b.cc", "d.cc"] (B depends upon b.cc and D depends upon d.cc), then +the following is output: +|compile_targets| = ["B"] B must built as it depends upon the changed file b.cc +and the supplied target A depends upon it. A is not output as a build_target +as it is of type none with no rules and actions. +|test_targets| = ["B"] B directly depends upon the change file b.cc. + +Even though the file d.cc, which D depends upon, has changed D is not output +as it was not supplied by way of |additional_compile_targets| or |test_targets|. If the generator flag analyzer_output_path is specified, output is written there. Otherwise output is written to stdout. + +In Gyp the "all" target is shorthand for the root targets in the files passed +to gyp. For example, if file "a.gyp" contains targets "a1" and +"a2", and file "b.gyp" contains targets "b1" and "b2" and "a2" has a dependency +on "b2" and gyp is supplied "a.gyp" then "all" consists of "a1" and "a2". +Notice that "b1" and "b2" are not in the "all" target as "b.gyp" was not +directly supplied to gyp. OTOH if both "a.gyp" and "b.gyp" are supplied to gyp +then the "all" target includes "b1" and "b2". """ import gyp.common @@ -210,6 +246,8 @@ class Config(object): def __init__(self): self.files = [] self.targets = set() + self.additional_compile_target_names = set() + self.test_target_names = set() def Init(self, params): """Initializes Config. This is a separate method as it raises an exception @@ -229,7 +267,9 @@ def Init(self, params): if not isinstance(config, dict): raise Exception('config_path must be a JSON file containing a dictionary') self.files = config.get('files', []) - self.targets = set(config.get('targets', [])) + self.additional_compile_target_names = set( + config.get('additional_compile_targets', [])) + self.test_target_names = set(config.get('test_targets', [])) def _WasBuildFileModified(build_file, data, files, toplevel_dir): @@ -280,12 +320,13 @@ def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files, """Returns a tuple of the following: . A dictionary mapping from fully qualified name to Target. . A list of the targets that have a source file in |files|. - . Set of root Targets reachable from the the files |build_files|. + . Targets that constitute the 'all' target. See description at top of file + for details on the 'all' target. This sets the |match_status| of the targets that contain any of the source files in |files| to MATCH_STATUS_MATCHES. |toplevel_dir| is the root of the source tree.""" # Maps from target name to Target. - targets = {} + name_to_target = {} # Targets that matched. 
matching_targets = [] @@ -305,7 +346,8 @@ def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files, while len(targets_to_visit) > 0: target_name = targets_to_visit.pop() - created_target, target = _GetOrCreateTargetByName(targets, target_name) + created_target, target = _GetOrCreateTargetByName(name_to_target, + target_name) if created_target: roots.add(target) elif target.visited: @@ -348,22 +390,25 @@ def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files, for dep in target_dicts[target_name].get('dependencies', []): targets_to_visit.append(dep) - created_dep_target, dep_target = _GetOrCreateTargetByName(targets, dep) + created_dep_target, dep_target = _GetOrCreateTargetByName(name_to_target, + dep) if not created_dep_target: roots.discard(dep_target) target.deps.add(dep_target) dep_target.back_deps.add(target) - return targets, matching_targets, roots & build_file_targets + return name_to_target, matching_targets, roots & build_file_targets def _GetUnqualifiedToTargetMapping(all_targets, to_find): - """Returns a mapping (dictionary) from unqualified name to Target for all the - Targets in |to_find|.""" + """Returns a tuple of the following: + . mapping (dictionary) from unqualified name to Target for all the + Targets in |to_find|. + . any target names not found. If this is empty all targets were found.""" result = {} if not to_find: - return result + return {}, [] to_find = set(to_find) for target_name in all_targets.keys(): extracted = gyp.common.ParseQualifiedTarget(target_name) @@ -371,13 +416,14 @@ def _GetUnqualifiedToTargetMapping(all_targets, to_find): to_find.remove(extracted[1]) result[extracted[1]] = all_targets[target_name] if not to_find: - return result - return result + return result, [] + return result, [x for x in to_find] -def _DoesTargetDependOn(target): - """Returns true if |target| or any of its dependencies matches the supplied - set of paths. This updates |matches| of the Targets as it recurses. +def _DoesTargetDependOnMatchingTargets(target): + """Returns true if |target| or any of its dependencies is one of the + targets containing the files supplied as input to analyzer. This updates + |matches| of the Targets as it recurses. target: the Target to look for.""" if target.match_status == MATCH_STATUS_DOESNT_MATCH: return False @@ -385,7 +431,7 @@ def _DoesTargetDependOn(target): target.match_status == MATCH_STATUS_MATCHES_BY_DEPENDENCY: return True for dep in target.deps: - if _DoesTargetDependOn(dep): + if _DoesTargetDependOnMatchingTargets(dep): target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY print '\t', target.name, 'matches by dep', dep.name return True @@ -393,19 +439,20 @@ def _DoesTargetDependOn(target): return False -def _GetTargetsDependingOn(possible_targets): +def _GetTargetsDependingOnMatchingTargets(possible_targets): """Returns the list of Targets in |possible_targets| that depend (either - directly on indirectly) on the matched targets. + directly on indirectly) on at least one of the targets containing the files + supplied as input to analyzer. 
possible_targets: targets to search from.""" found = [] print 'Targets that matched by dependency:' for target in possible_targets: - if _DoesTargetDependOn(target): + if _DoesTargetDependOnMatchingTargets(target): found.append(target) return found -def _AddBuildTargets(target, roots, add_if_no_ancestor, result): +def _AddCompileTargets(target, roots, add_if_no_ancestor, result): """Recurses through all targets that depend on |target|, adding all targets that need to be built (and are in |roots|) to |result|. roots: set of root targets. @@ -416,10 +463,10 @@ def _AddBuildTargets(target, roots, add_if_no_ancestor, result): return target.visited = True - target.in_roots = not target.back_deps and target in roots + target.in_roots = target in roots for back_dep_target in target.back_deps: - _AddBuildTargets(back_dep_target, roots, False, result) + _AddCompileTargets(back_dep_target, roots, False, result) target.added_to_compile_targets |= back_dep_target.added_to_compile_targets target.in_roots |= back_dep_target.in_roots target.is_or_has_linked_ancestor |= ( @@ -437,7 +484,7 @@ def _AddBuildTargets(target, roots, add_if_no_ancestor, result): (add_if_no_ancestor or target.requires_build)) or (target.is_static_library and add_if_no_ancestor and not target.is_or_has_linked_ancestor)): - print '\t\tadding to build targets', target.name, 'executable', \ + print '\t\tadding to compile targets', target.name, 'executable', \ target.is_executable, 'added_to_compile_targets', \ target.added_to_compile_targets, 'add_if_no_ancestor', \ add_if_no_ancestor, 'requires_build', target.requires_build, \ @@ -447,14 +494,14 @@ def _AddBuildTargets(target, roots, add_if_no_ancestor, result): target.added_to_compile_targets = True -def _GetBuildTargets(matching_targets, roots): +def _GetCompileTargets(matching_targets, supplied_targets): """Returns the set of Targets that require a build. matching_targets: targets that changed and need to be built. 
- roots: set of root targets in the build files to search from.""" + supplied_targets: set of targets supplied to analyzer to search from.""" result = set() for target in matching_targets: - print '\tfinding build targets for match', target.name - _AddBuildTargets(target, roots, True, result) + print 'finding compile targets for match', target.name + _AddCompileTargets(target, supplied_targets, True, result) return result @@ -479,6 +526,16 @@ def _WriteOutput(params, **values): print 'Targets that require a build:' for target in values['build_targets']: print '\t', target + if 'compile_targets' in values: + values['compile_targets'].sort() + print 'Targets that need to be built:' + for target in values['compile_targets']: + print '\t', target + if 'test_targets' in values: + values['test_targets'].sort() + print 'Test targets:' + for target in values['test_targets']: + print '\t', target output_path = params.get('generator_flags', {}).get( 'analyzer_output_path', None) @@ -538,11 +595,104 @@ def CalculateVariables(default_variables, params): default_variables.setdefault('OS', operating_system) +class TargetCalculator(object): + """Calculates the matching test_targets and matching compile_targets.""" + def __init__(self, files, additional_compile_target_names, test_target_names, + data, target_list, target_dicts, toplevel_dir, build_files): + self._additional_compile_target_names = set(additional_compile_target_names) + self._test_target_names = set(test_target_names) + self._name_to_target, self._changed_targets, self._root_targets = ( + _GenerateTargets(data, target_list, target_dicts, toplevel_dir, + frozenset(files), build_files)) + self._unqualified_mapping, self.invalid_targets = ( + _GetUnqualifiedToTargetMapping(self._name_to_target, + self._supplied_target_names_no_all())) + + def _supplied_target_names(self): + return self._additional_compile_target_names | self._test_target_names + + def _supplied_target_names_no_all(self): + """Returns the supplied test targets without 'all'.""" + result = self._supplied_target_names(); + result.discard('all') + return result + + def is_build_impacted(self): + """Returns true if the supplied files impact the build at all.""" + return self._changed_targets + + def find_matching_test_target_names(self): + """Returns the set of output test targets.""" + assert self.is_build_impacted() + # Find the test targets first. 'all' is special cased to mean all the + # root targets. To deal with all the supplied |test_targets| are expanded + # to include the root targets during lookup. If any of the root targets + # match, we remove it and replace it with 'all'. 
+ test_target_names_no_all = set(self._test_target_names) + test_target_names_no_all.discard('all') + test_targets_no_all = _LookupTargets(test_target_names_no_all, + self._unqualified_mapping) + test_target_names_contains_all = 'all' in self._test_target_names + if test_target_names_contains_all: + test_targets = [x for x in (set(test_targets_no_all) | + set(self._root_targets))] + else: + test_targets = [x for x in test_targets_no_all] + print 'supplied test_targets' + for target_name in self._test_target_names: + print '\t', target_name + print 'found test_targets' + for target in test_targets: + print '\t', target.name + print 'searching for matching test targets' + matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets) + matching_test_targets_contains_all = (test_target_names_contains_all and + set(matching_test_targets) & + set(self._root_targets)) + if matching_test_targets_contains_all: + # Remove any of the targets for all that were not explicitly supplied, + # 'all' is subsequentely added to the matching names below. + matching_test_targets = [x for x in (set(matching_test_targets) & + set(test_targets_no_all))] + print 'matched test_targets' + for target in matching_test_targets: + print '\t', target.name + matching_target_names = [gyp.common.ParseQualifiedTarget(target.name)[1] + for target in matching_test_targets] + if matching_test_targets_contains_all: + matching_target_names.append('all') + print '\tall' + return matching_target_names + + def find_matching_compile_target_names(self): + """Returns the set of output compile targets.""" + assert self.is_build_impacted(); + # Compile targets are found by searching up from changed targets. + # Reset the visited status for _GetBuildTargets. + for target in self._name_to_target.itervalues(): + target.visited = False + + supplied_targets = _LookupTargets(self._supplied_target_names_no_all(), + self._unqualified_mapping) + if 'all' in self._supplied_target_names(): + supplied_targets = [x for x in (set(supplied_targets) | + set(self._root_targets))] + print 'Supplied test_targets & compile_targets' + for target in supplied_targets: + print '\t', target.name + print 'Finding compile targets' + compile_targets = _GetCompileTargets(self._changed_targets, + supplied_targets) + return [gyp.common.ParseQualifiedTarget(target.name)[1] + for target in compile_targets] + + def GenerateOutput(target_list, target_dicts, data, params): """Called by gyp as the final stage. 
Outputs results.""" config = Config() try: config.Init(params) + if not config.files: raise Exception('Must specify files to analyze via config_path generator ' 'flag') @@ -553,55 +703,38 @@ def GenerateOutput(target_list, target_dicts, data, params): if _WasGypIncludeFileModified(params, config.files): result_dict = { 'status': all_changed_string, - 'targets': list(config.targets) } + 'test_targets': list(config.test_target_names), + 'compile_targets': list( + config.additional_compile_target_names | + config.test_target_names) } _WriteOutput(params, **result_dict) return - all_targets, matching_targets, roots = _GenerateTargets( - data, target_list, target_dicts, toplevel_dir, frozenset(config.files), - params['build_files']) - - print 'roots:' - for root in roots: - print '\t', root.name - - unqualified_mapping = _GetUnqualifiedToTargetMapping(all_targets, - config.targets) - invalid_targets = None - if len(unqualified_mapping) != len(config.targets): - invalid_targets = _NamesNotIn(config.targets, unqualified_mapping) - - if matching_targets: - search_targets = _LookupTargets(config.targets, unqualified_mapping) - print 'supplied targets' - for target in config.targets: - print '\t', target - print 'expanded supplied targets' - for target in search_targets: - print '\t', target.name - matched_search_targets = _GetTargetsDependingOn(search_targets) - print 'raw matched search targets:' - for target in matched_search_targets: - print '\t', target.name - # Reset the visited status for _GetBuildTargets. - for target in all_targets.itervalues(): - target.visited = False - print 'Finding build targets' - build_targets = _GetBuildTargets(matching_targets, roots) - matched_search_targets = [gyp.common.ParseQualifiedTarget(target.name)[1] - for target in matched_search_targets] - build_targets = [gyp.common.ParseQualifiedTarget(target.name)[1] - for target in build_targets] - else: - matched_search_targets = [] - build_targets = [] - - result_dict = { 'targets': matched_search_targets, - 'status': found_dependency_string if matching_targets else - no_dependency_string, - 'build_targets': build_targets} - if invalid_targets: - result_dict['invalid_targets'] = invalid_targets + calculator = TargetCalculator(config.files, + config.additional_compile_target_names, + config.test_target_names, data, + target_list, target_dicts, toplevel_dir, + params['build_files']) + if not calculator.is_build_impacted(): + result_dict = { 'status': no_dependency_string, + 'test_targets': [], + 'compile_targets': [] } + if calculator.invalid_targets: + result_dict['invalid_targets'] = calculator.invalid_targets + _WriteOutput(params, **result_dict) + return + + test_target_names = calculator.find_matching_test_target_names() + compile_target_names = calculator.find_matching_compile_target_names() + found_at_least_one_target = compile_target_names or test_target_names + result_dict = { 'test_targets': test_target_names, + 'status': found_dependency_string if + found_at_least_one_target else no_dependency_string, + 'compile_targets': list( + set(compile_target_names) | + set(test_target_names)) } + if calculator.invalid_targets: + result_dict['invalid_targets'] = calculator.invalid_targets _WriteOutput(params, **result_dict) except Exception as e: diff --git a/tools/gyp/pylib/gyp/generator/cmake.py b/tools/gyp/pylib/gyp/generator/cmake.py index eece6ea98ddd00..a2b96291aa526a 100644 --- a/tools/gyp/pylib/gyp/generator/cmake.py +++ b/tools/gyp/pylib/gyp/generator/cmake.py @@ -34,6 +34,7 @@ import string import 
subprocess import gyp.common +import gyp.xcode_emulation generator_default_variables = { 'EXECUTABLE_PREFIX': '', @@ -55,7 +56,7 @@ 'CONFIGURATION_NAME': '${configuration}', } -FULL_PATH_VARS = ('${CMAKE_SOURCE_DIR}', '${builddir}', '${obj}') +FULL_PATH_VARS = ('${CMAKE_CURRENT_LIST_DIR}', '${builddir}', '${obj}') generator_supports_multiple_toolsets = True generator_wants_static_library_dependencies_adjusted = True @@ -103,7 +104,7 @@ def NormjoinPathForceCMakeSource(base_path, rel_path): if any([rel_path.startswith(var) for var in FULL_PATH_VARS]): return rel_path # TODO: do we need to check base_path for absolute variables as well? - return os.path.join('${CMAKE_SOURCE_DIR}', + return os.path.join('${CMAKE_CURRENT_LIST_DIR}', os.path.normpath(os.path.join(base_path, rel_path))) @@ -293,7 +294,7 @@ def WriteActions(target_name, actions, extra_sources, extra_deps, WriteVariable(output, inputs_name) output.write('\n') - output.write(' WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/') + output.write(' WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/') output.write(path_to_gyp) output.write('\n') @@ -398,9 +399,9 @@ def WriteRules(target_name, rules, extra_sources, extra_deps, output.write(NormjoinPath(path_to_gyp, rule_source)) output.write('\n') - # CMAKE_SOURCE_DIR is where the CMakeLists.txt lives. + # CMAKE_CURRENT_LIST_DIR is where the CMakeLists.txt lives. # The cwd is the current build directory. - output.write(' WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/') + output.write(' WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/') output.write(path_to_gyp) output.write('\n') @@ -522,7 +523,7 @@ def __init__(self, ext, command): WriteVariable(output, copy.inputs_name, ' ') output.write('\n') - output.write('WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/') + output.write('WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/') output.write(path_to_gyp) output.write('\n') @@ -608,8 +609,8 @@ def CreateCMakeTargetName(self, qualified_target): def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, - options, generator_flags, all_qualified_targets, output): - + options, generator_flags, all_qualified_targets, flavor, + output): # The make generator does this always. # TODO: It would be nice to be able to tell CMake all dependencies. circular_libs = generator_flags.get('circular', True) @@ -633,6 +634,10 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, spec = target_dicts.get(qualified_target, {}) config = spec.get('configurations', {}).get(config_to_use, {}) + xcode_settings = None + if flavor == 'mac': + xcode_settings = gyp.xcode_emulation.XcodeSettings(spec) + target_name = spec.get('target_name', '') target_type = spec.get('type', '') target_toolset = spec.get('toolset') @@ -904,10 +909,10 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, defines = config.get('defines') if defines is not None: SetTargetProperty(output, - cmake_target_name, - 'COMPILE_DEFINITIONS', - defines, - ';') + cmake_target_name, + 'COMPILE_DEFINITIONS', + defines, + ';') # Compile Flags - http://www.cmake.org/Bug/view.php?id=6493 # CMake currently does not have target C and CXX flags. 
@@ -927,6 +932,13 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, cflags = config.get('cflags', []) cflags_c = config.get('cflags_c', []) cflags_cxx = config.get('cflags_cc', []) + if xcode_settings: + cflags = xcode_settings.GetCflags(config_to_use) + cflags_c = xcode_settings.GetCflagsC(config_to_use) + cflags_cxx = xcode_settings.GetCflagsCC(config_to_use) + #cflags_objc = xcode_settings.GetCflagsObjC(config_to_use) + #cflags_objcc = xcode_settings.GetCflagsObjCC(config_to_use) + if (not cflags_c or not c_sources) and (not cflags_cxx or not cxx_sources): SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', cflags, ' ') @@ -965,6 +977,13 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, if ldflags is not None: SetTargetProperty(output, cmake_target_name, 'LINK_FLAGS', ldflags, ' ') + # XCode settings + xcode_settings = config.get('xcode_settings', {}) + for xcode_setting, xcode_value in xcode_settings.viewitems(): + SetTargetProperty(output, cmake_target_name, + "XCODE_ATTRIBUTE_%s" % xcode_setting, xcode_value, + '' if isinstance(xcode_value, str) else ' ') + # Note on Dependencies and Libraries: # CMake wants to handle link order, resolving the link line up front. # Gyp does not retain or enforce specifying enough information to do so. @@ -1029,7 +1048,7 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, output.write(cmake_target_name) output.write('\n') if static_deps: - write_group = circular_libs and len(static_deps) > 1 + write_group = circular_libs and len(static_deps) > 1 and flavor != 'mac' if write_group: output.write('-Wl,--start-group\n') for dep in gyp.common.uniquer(static_deps): @@ -1045,9 +1064,9 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, output.write('\n') if external_libs: for lib in gyp.common.uniquer(external_libs): - output.write(' ') - output.write(lib) - output.write('\n') + output.write(' "') + output.write(RemovePrefix(lib, "$(SDKROOT)")) + output.write('"\n') output.write(')\n') @@ -1059,6 +1078,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, config_to_use): options = params['options'] generator_flags = params['generator_flags'] + flavor = gyp.common.GetFlavor(params) # generator_dir: relative path from pwd to where make puts build files. # Makes migrating from make to cmake easier, cmake doesn't put anything here. @@ -1126,7 +1146,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, if cc: SetVariable(output, 'CMAKE_ASM_COMPILER', cc) - SetVariable(output, 'builddir', '${CMAKE_BINARY_DIR}') + SetVariable(output, 'builddir', '${CMAKE_CURRENT_BINARY_DIR}') SetVariable(output, 'obj', '${builddir}/obj') output.write('\n') @@ -1141,7 +1161,9 @@ def GenerateOutputForConfig(target_list, target_dicts, data, # Force ninja to use rsp files. Otherwise link and ar lines can get too long, # resulting in 'Argument list too long' errors. - output.write('set(CMAKE_NINJA_FORCE_RESPONSE_FILE 1)\n') + # However, rsp files don't work correctly on Mac. 
+ if flavor != 'mac': + output.write('set(CMAKE_NINJA_FORCE_RESPONSE_FILE 1)\n') output.write('\n') namer = CMakeNamer(target_list) @@ -1156,8 +1178,13 @@ def GenerateOutputForConfig(target_list, target_dicts, data, all_qualified_targets.add(qualified_target) for qualified_target in target_list: + if flavor == 'mac': + gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target) + spec = target_dicts[qualified_target] + gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[gyp_file], spec) + WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, - options, generator_flags, all_qualified_targets, output) + options, generator_flags, all_qualified_targets, flavor, output) output.close() diff --git a/tools/gyp/pylib/gyp/generator/make.py b/tools/gyp/pylib/gyp/generator/make.py index 4a6b283f152329..b7da768fb33f5a 100644 --- a/tools/gyp/pylib/gyp/generator/make.py +++ b/tools/gyp/pylib/gyp/generator/make.py @@ -31,8 +31,6 @@ from gyp.common import GetEnvironFallback from gyp.common import GypError -import hashlib - generator_default_variables = { 'EXECUTABLE_PREFIX': '', 'EXECUTABLE_SUFFIX': '', @@ -1581,7 +1579,7 @@ def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, for link_dep in link_deps: assert ' ' not in link_dep, ( "Spaces in alink input filenames not supported (%s)" % link_dep) - if (self.flavor not in ('mac', 'openbsd', 'win') and not + if (self.flavor not in ('mac', 'openbsd', 'netbsd', 'win') and not self.is_standalone_static_library): self.WriteDoCmd([self.output_binary], link_deps, 'alink_thin', part_of_all, postbuilds=postbuilds) @@ -1745,10 +1743,7 @@ def WriteMakeRule(self, outputs, inputs, actions=None, comment=None, # actual command. # - The intermediate recipe will 'touch' the intermediate file. # - The multi-output rule will have an do-nothing recipe. - - # Hash the target name to avoid generating overlong filenames. 
- cmddigest = hashlib.sha1(command if command else self.target).hexdigest() - intermediate = "%s.intermediate" % cmddigest + intermediate = "%s.intermediate" % (command if command else self.target) self.WriteLn('%s: %s' % (' '.join(outputs), intermediate)) self.WriteLn('\t%s' % '@:'); self.WriteLn('%s: %s' % ('.INTERMEDIATE', intermediate)) @@ -2049,6 +2044,11 @@ def CalculateMakefilePath(build_file, base_name): header_params.update({ 'flock': 'lockf', }) + elif flavor == 'openbsd': + copy_archive_arguments = '-pPRf' + header_params.update({ + 'copy_archive_args': copy_archive_arguments, + }) elif flavor == 'aix': copy_archive_arguments = '-pPRf' header_params.update({ @@ -2063,10 +2063,10 @@ def CalculateMakefilePath(build_file, base_name): 'AR.target': GetEnvironFallback(('AR_target', 'AR'), '$(AR)'), 'CXX.target': GetEnvironFallback(('CXX_target', 'CXX'), '$(CXX)'), 'LINK.target': GetEnvironFallback(('LINK_target', 'LINK'), '$(LINK)'), - 'CC.host': GetEnvironFallback(('CC_host', 'CC'), 'gcc'), - 'AR.host': GetEnvironFallback(('AR_host', 'AR'), 'ar'), - 'CXX.host': GetEnvironFallback(('CXX_host', 'CXX'), 'g++'), - 'LINK.host': GetEnvironFallback(('LINK_host', 'LINK'), '$(CXX.host)'), + 'CC.host': GetEnvironFallback(('CC_host',), 'gcc'), + 'AR.host': GetEnvironFallback(('AR_host',), 'ar'), + 'CXX.host': GetEnvironFallback(('CXX_host',), 'g++'), + 'LINK.host': GetEnvironFallback(('LINK_host',), '$(CXX.host)'), }) build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0]) diff --git a/tools/gyp/pylib/gyp/generator/msvs.py b/tools/gyp/pylib/gyp/generator/msvs.py index 44cc1304a2e8ed..7924aa1bcfe27b 100644 --- a/tools/gyp/pylib/gyp/generator/msvs.py +++ b/tools/gyp/pylib/gyp/generator/msvs.py @@ -46,6 +46,8 @@ def _import_OrderedDict(): generator_default_variables = { + 'DRIVER_PREFIX': '', + 'DRIVER_SUFFIX': '.sys', 'EXECUTABLE_PREFIX': '', 'EXECUTABLE_SUFFIX': '.exe', 'STATIC_LIB_PREFIX': '', @@ -91,6 +93,7 @@ def _import_OrderedDict(): 'msvs_target_platform_minversion', ] +generator_filelist_paths = None # List of precompiled header related keys. precomp_keys = [ @@ -256,6 +259,8 @@ def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False): if not tools.get(tool_name): tools[tool_name] = dict() tool = tools[tool_name] + if 'CompileAsWinRT' == setting: + return if tool.get(setting): if only_if_unset: return if type(tool[setting]) == list and type(value) == list: @@ -269,6 +274,10 @@ def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False): tool[setting] = value +def _ConfigTargetVersion(config_data): + return config_data.get('msvs_target_version', 'Windows7') + + def _ConfigPlatform(config_data): return config_data.get('msvs_configuration_platform', 'Win32') @@ -285,6 +294,21 @@ def _ConfigFullName(config_name, config_data): return '%s|%s' % (_ConfigBaseName(config_name, platform_name), platform_name) +def _ConfigWindowsTargetPlatformVersion(config_data): + ver = config_data.get('msvs_windows_sdk_version') + + for key in [r'HKLM\Software\Microsoft\Microsoft SDKs\Windows\%s', + r'HKLM\Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows\%s']: + sdk_dir = MSVSVersion._RegistryGetValue(key % ver, 'InstallationFolder') + if not sdk_dir: + continue + version = MSVSVersion._RegistryGetValue(key % ver, 'ProductVersion') or '' + # Find a matching entry in sdk_dir\include. 
+ names = sorted([x for x in os.listdir(r'%s\include' % sdk_dir) + if x.startswith(version)], reverse=True) + return names[0] + + def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path, quote_cmd, do_setup_env): @@ -901,6 +925,8 @@ def _GetMsbuildToolsetOfProject(proj_path, spec, version): toolset = default_config.get('msbuild_toolset') if not toolset and version.DefaultToolset(): toolset = version.DefaultToolset() + if spec['type'] == 'windows_driver': + toolset = 'WindowsKernelModeDriver10.0' return toolset @@ -1084,6 +1110,7 @@ def _GetMSVSConfigurationType(spec, build_file): 'shared_library': '2', # .dll 'loadable_module': '2', # .dll 'static_library': '4', # .lib + 'windows_driver': '5', # .sys 'none': '10', # Utility type }[spec['type']] except KeyError: @@ -1268,6 +1295,7 @@ def _GetOutputFilePathAndTool(spec, msbuild): 'executable': ('VCLinkerTool', 'Link', '$(OutDir)', '.exe'), 'shared_library': ('VCLinkerTool', 'Link', '$(OutDir)', '.dll'), 'loadable_module': ('VCLinkerTool', 'Link', '$(OutDir)', '.dll'), + 'windows_driver': ('VCLinkerTool', 'Link', '$(OutDir)', '.sys'), 'static_library': ('VCLibrarianTool', 'Lib', '$(OutDir)lib\\', '.lib'), } output_file_props = output_file_map.get(spec['type']) @@ -1330,7 +1358,8 @@ def _GetDisabledWarnings(config): def _GetModuleDefinition(spec): def_file = '' - if spec['type'] in ['shared_library', 'loadable_module', 'executable']: + if spec['type'] in ['shared_library', 'loadable_module', 'executable', + 'windows_driver']: def_files = [s for s in spec.get('sources', []) if s.endswith('.def')] if len(def_files) == 1: def_file = _FixPath(def_files[0]) @@ -1937,6 +1966,19 @@ def PerformBuild(data, configurations, params): rtn = subprocess.check_call(arguments) +def CalculateGeneratorInputInfo(params): + if params.get('flavor') == 'ninja': + toplevel = params['options'].toplevel_dir + qualified_out_dir = os.path.normpath(os.path.join( + toplevel, ninja_generator.ComputeOutputDir(params), + 'gypfiles-msvs-ninja')) + + global generator_filelist_paths + generator_filelist_paths = { + 'toplevel': toplevel, + 'qualified_out_dir': qualified_out_dir, + } + def GenerateOutput(target_list, target_dicts, data, params): """Generate .sln and .vcproj files. 
@@ -2662,16 +2704,37 @@ def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name): else: properties[0].append(['ApplicationType', 'Windows Store']) + platform_name = None + msvs_windows_sdk_version = None + for configuration in spec['configurations'].itervalues(): + platform_name = platform_name or _ConfigPlatform(configuration) + msvs_windows_sdk_version = (msvs_windows_sdk_version or + _ConfigWindowsTargetPlatformVersion(configuration)) + if platform_name and msvs_windows_sdk_version: + break + + if platform_name == 'ARM': + properties[0].append(['WindowsSDKDesktopARMSupport', 'true']) + if msvs_windows_sdk_version: + properties[0].append(['WindowsTargetPlatformVersion', + str(msvs_windows_sdk_version)]) + return properties + def _GetMSBuildConfigurationDetails(spec, build_file): properties = {} for name, settings in spec['configurations'].iteritems(): msbuild_attributes = _GetMSBuildAttributes(spec, settings, build_file) condition = _GetConfigurationCondition(name, settings) character_set = msbuild_attributes.get('CharacterSet') + config_type = msbuild_attributes.get('ConfigurationType') _AddConditionalProperty(properties, condition, 'ConfigurationType', - msbuild_attributes['ConfigurationType']) + config_type) + if config_type == 'Driver': + _AddConditionalProperty(properties, condition, 'DriverType', 'WDM') + _AddConditionalProperty(properties, condition, 'TargetVersion', + _ConfigTargetVersion(settings)) if character_set: if 'msvs_enable_winrt' not in spec : _AddConditionalProperty(properties, condition, 'CharacterSet', @@ -2770,6 +2833,7 @@ def _ConvertMSVSConfigurationType(config_type): '1': 'Application', '2': 'DynamicLibrary', '4': 'StaticLibrary', + '5': 'Driver', '10': 'Utility' }[config_type] return config_type @@ -2809,6 +2873,7 @@ def _GetMSBuildAttributes(spec, config, build_file): 'executable': 'Link', 'shared_library': 'Link', 'loadable_module': 'Link', + 'windows_driver': 'Link', 'static_library': 'Lib', } msbuild_tool = msbuild_tool_map.get(spec['type']) diff --git a/tools/gyp/pylib/gyp/generator/ninja.py b/tools/gyp/pylib/gyp/generator/ninja.py index 0e8ae9790853ea..0555a4a90d3885 100644 --- a/tools/gyp/pylib/gyp/generator/ninja.py +++ b/tools/gyp/pylib/gyp/generator/ninja.py @@ -139,12 +139,18 @@ def __init__(self, type): self.bundle = None # On Windows, incremental linking requires linking against all the .objs # that compose a .lib (rather than the .lib itself). That list is stored - # here. + # here. In this case, we also need to save the compile_deps for the target, + # so that the the target that directly depends on the .objs can also depend + # on those. self.component_objs = None + self.compile_deps = None # Windows only. The import .lib is the output of a build step, but # because dependents only link against the lib (not both the lib and the # dll) we keep track of the import library here. self.import_lib = None + # Track if this target contains any C++ files, to decide if gcc or g++ + # should be used for linking. + self.uses_cpp = False def Linkable(self): """Return true if this is a target that can be linked against.""" @@ -372,14 +378,17 @@ def WriteSpec(self, spec, config_name, generator_flags): self.target = Target(spec['type']) self.is_standalone_static_library = bool( spec.get('standalone_static_library', 0)) - # Track if this target contains any C++ files, to decide if gcc or g++ - # should be used for linking. 
- self.uses_cpp = False + + self.target_rpath = generator_flags.get('target_rpath', r'\$$ORIGIN/lib/') self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec) self.xcode_settings = self.msvs_settings = None if self.flavor == 'mac': self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec) + mac_toolchain_dir = generator_flags.get('mac_toolchain_dir', None) + if mac_toolchain_dir: + self.xcode_settings.mac_toolchain_dir = mac_toolchain_dir + if self.flavor == 'win': self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, generator_flags) @@ -416,6 +425,8 @@ def WriteSpec(self, spec, config_name, generator_flags): target = self.target_outputs[dep] actions_depends.append(target.PreActionInput(self.flavor)) compile_depends.append(target.PreCompileInput()) + if target.uses_cpp: + self.target.uses_cpp = True actions_depends = filter(None, actions_depends) compile_depends = filter(None, compile_depends) actions_depends = self.WriteCollapsedDependencies('actions_depends', @@ -441,7 +452,12 @@ def WriteSpec(self, spec, config_name, generator_flags): # Write out the compilation steps, if any. link_deps = [] - sources = extra_sources + spec.get('sources', []) + try: + sources = extra_sources + spec.get('sources', []) + except TypeError: + print 'extra_sources: ', str(extra_sources) + print 'spec.get("sources"): ', str(spec.get('sources')) + raise if sources: if self.flavor == 'mac' and len(self.archs) > 1: # Write subninja file containing compile and link commands scoped to @@ -474,16 +490,17 @@ def WriteSpec(self, spec, config_name, generator_flags): elif self.flavor == 'mac' and len(self.archs) > 1: link_deps = collections.defaultdict(list) - + compile_deps = self.target.actions_stamp or actions_depends if self.flavor == 'win' and self.target.type == 'static_library': self.target.component_objs = link_deps + self.target.compile_deps = compile_deps # Write out a link step, if needed. output = None is_empty_bundle = not link_deps and not mac_bundle_depends if link_deps or self.target.actions_stamp or actions_depends: output = self.WriteTarget(spec, config_name, config, link_deps, - self.target.actions_stamp or actions_depends) + compile_deps) if self.is_mac_bundle: mac_bundle_depends.append(output) @@ -555,6 +572,9 @@ def WriteActionsRulesCopies(self, spec, extra_sources, prebuild, if 'sources' in spec and self.flavor == 'win': outputs += self.WriteWinIdlFiles(spec, prebuild) + if self.xcode_settings and self.xcode_settings.IsIosFramework(): + self.WriteiOSFrameworkHeaders(spec, outputs, prebuild) + stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs) if self.is_mac_bundle: @@ -652,6 +672,7 @@ def WriteRules(self, rules, extra_sources, prebuild, for var in special_locals: if '${%s}' % var in argument: needed_variables.add(var) + needed_variables = sorted(needed_variables) def cygwin_munge(path): # pylint: disable=cell-var-from-loop @@ -725,6 +746,7 @@ def cygwin_munge(path): # WriteNewNinjaRule uses unique_name for creating an rsp file on win. 
extra_bindings.append(('unique_name', hashlib.md5(outputs[0]).hexdigest())) + self.ninja.build(outputs, rule_name, self.GypPathToNinja(source), implicit=inputs, order_only=prebuild, @@ -736,7 +758,11 @@ def cygwin_munge(path): def WriteCopies(self, copies, prebuild, mac_bundle_depends): outputs = [] - env = self.GetToolchainEnv() + if self.xcode_settings: + extra_env = self.xcode_settings.GetPerTargetSettings() + env = self.GetToolchainEnv(additional_settings=extra_env) + else: + env = self.GetToolchainEnv() for copy in copies: for path in copy['files']: # Normalize the path so trailing slashes don't confuse us. @@ -758,18 +784,38 @@ def WriteCopies(self, copies, prebuild, mac_bundle_depends): return outputs + def WriteiOSFrameworkHeaders(self, spec, outputs, prebuild): + """Prebuild steps to generate hmap files and copy headers to destination.""" + framework = self.ComputeMacBundleOutput() + all_sources = spec['sources'] + copy_headers = spec['mac_framework_headers'] + output = self.GypPathToUniqueOutput('headers.hmap') + self.xcode_settings.header_map_path = output + all_headers = map(self.GypPathToNinja, + filter(lambda x:x.endswith(('.h')), all_sources)) + variables = [('framework', framework), + ('copy_headers', map(self.GypPathToNinja, copy_headers))] + outputs.extend(self.ninja.build( + output, 'compile_ios_framework_headers', all_headers, + variables=variables, order_only=prebuild)) + def WriteMacBundleResources(self, resources, bundle_depends): """Writes ninja edges for 'mac_bundle_resources'.""" xcassets = [] + + extra_env = self.xcode_settings.GetPerTargetSettings() + env = self.GetSortedXcodeEnv(additional_settings=extra_env) + env = self.ComputeExportEnvString(env) + isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name) + for output, res in gyp.xcode_emulation.GetMacBundleResources( generator_default_variables['PRODUCT_DIR'], self.xcode_settings, map(self.GypPathToNinja, resources)): output = self.ExpandSpecial(output) if os.path.splitext(output)[-1] != '.xcassets': - isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name) self.ninja.build(output, 'mac_tool', res, variables=[('mactool_cmd', 'copy-bundle-resource'), \ - ('binary', isBinary)]) + ('env', env), ('binary', isBinary)]) bundle_depends.append(output) else: xcassets.append(res) @@ -988,7 +1034,7 @@ def WriteSourcesForArch(self, ninja_file, config_name, config, sources, obj_ext = self.obj_ext if ext in ('cc', 'cpp', 'cxx'): command = 'cxx' - self.uses_cpp = True + self.target.uses_cpp = True elif ext == 'c' or (ext == 'S' and self.flavor != 'win'): command = 'cc' elif ext == 's' and self.flavor != 'win': # Doesn't generate .o.d files. @@ -1003,7 +1049,7 @@ def WriteSourcesForArch(self, ninja_file, config_name, config, sources, command = 'objc' elif self.flavor == 'mac' and ext == 'mm': command = 'objcxx' - self.uses_cpp = True + self.target.uses_cpp = True elif self.flavor == 'win' and ext == 'rc': command = 'rc' obj_ext = '.res' @@ -1054,16 +1100,16 @@ def WritePchTargets(self, ninja_file, pch_commands): cmd = map.get(lang) ninja_file.build(gch, cmd, input, variables=[(var_name, lang_flag)]) - def WriteLink(self, spec, config_name, config, link_deps): + def WriteLink(self, spec, config_name, config, link_deps, compile_deps): """Write out a link step. Fills out target.binary. 
""" if self.flavor != 'mac' or len(self.archs) == 1: return self.WriteLinkForArch( - self.ninja, spec, config_name, config, link_deps) + self.ninja, spec, config_name, config, link_deps, compile_deps) else: output = self.ComputeOutput(spec) inputs = [self.WriteLinkForArch(self.arch_subninjas[arch], spec, config_name, config, link_deps[arch], - arch=arch) + compile_deps, arch=arch) for arch in self.archs] extra_bindings = [] build_output = output @@ -1082,7 +1128,7 @@ def WriteLink(self, spec, config_name, config, link_deps): return output def WriteLinkForArch(self, ninja_file, spec, config_name, config, - link_deps, arch=None): + link_deps, compile_deps, arch=None): """Write out a link step. Fills out target.binary. """ command = { 'executable': 'link', @@ -1093,6 +1139,15 @@ def WriteLinkForArch(self, ninja_file, spec, config_name, config, implicit_deps = set() solibs = set() + order_deps = set() + + if compile_deps: + # Normally, the compiles of the target already depend on compile_deps, + # but a shared_library target might have no sources and only link together + # a few static_library deps, so the link step also needs to depend + # on compile_deps to make sure actions in the shared_library target + # get run before the link. + order_deps.add(compile_deps) if 'dependencies' in spec: # Two kinds of dependencies: @@ -1111,6 +1166,8 @@ def WriteLinkForArch(self, ninja_file, spec, config_name, config, target.component_objs and self.msvs_settings.IsUseLibraryDependencyInputs(config_name)): new_deps = target.component_objs + if target.compile_deps: + order_deps.add(target.compile_deps) elif self.flavor == 'win' and target.import_lib: new_deps = [target.import_lib] elif target.UsesToc(self.flavor): @@ -1128,7 +1185,7 @@ def WriteLinkForArch(self, ninja_file, spec, config_name, config, implicit_deps.add(final_output) extra_bindings = [] - if self.uses_cpp and self.flavor != 'win': + if self.target.uses_cpp and self.flavor != 'win': extra_bindings.append(('ld', '$ldxx')) output = self.ComputeOutput(spec, arch) @@ -1171,7 +1228,9 @@ def WriteLinkForArch(self, ninja_file, spec, config_name, config, rpath = 'lib/' if self.toolset != 'target': rpath += self.toolset - ldflags.append(r'-Wl,-rpath=\$$ORIGIN/%s' % rpath) + ldflags.append(r'-Wl,-rpath=\$$ORIGIN/%s' % rpath) + else: + ldflags.append('-Wl,-rpath=%s' % self.target_rpath) ldflags.append('-Wl,-rpath-link=%s' % rpath) self.WriteVariableList(ninja_file, 'ldflags', map(self.ExpandSpecial, ldflags)) @@ -1245,10 +1304,12 @@ def WriteLinkForArch(self, ninja_file, spec, config_name, config, if len(solibs): - extra_bindings.append(('solibs', gyp.common.EncodePOSIXShellList(solibs))) + extra_bindings.append(('solibs', + gyp.common.EncodePOSIXShellList(sorted(solibs)))) ninja_file.build(output, command + command_suffix, link_deps, - implicit=list(implicit_deps), + implicit=sorted(implicit_deps), + order_only=list(order_deps), variables=extra_bindings) return linked_binary @@ -1263,7 +1324,7 @@ def WriteTarget(self, spec, config_name, config, link_deps, compile_deps): self.target.type = 'none' elif spec['type'] == 'static_library': self.target.binary = self.ComputeOutput(spec) - if (self.flavor not in ('mac', 'openbsd', 'win') and not + if (self.flavor not in ('mac', 'openbsd', 'netbsd', 'win') and not self.is_standalone_static_library): self.ninja.build(self.target.binary, 'alink_thin', link_deps, order_only=compile_deps) @@ -1300,7 +1361,8 @@ def WriteTarget(self, spec, config_name, config, link_deps, compile_deps): # needed. 
variables=variables) else: - self.target.binary = self.WriteLink(spec, config_name, config, link_deps) + self.target.binary = self.WriteLink(spec, config_name, config, link_deps, + compile_deps) return self.target.binary def WriteMacBundle(self, spec, mac_bundle_depends, is_empty): @@ -1313,9 +1375,13 @@ def WriteMacBundle(self, spec, mac_bundle_depends, is_empty): self.AppendPostbuildVariable(variables, spec, output, self.target.binary, is_command_start=not package_framework) if package_framework and not is_empty: - variables.append(('version', self.xcode_settings.GetFrameworkVersion())) - self.ninja.build(output, 'package_framework', mac_bundle_depends, - variables=variables) + if spec['type'] == 'shared_library' and self.xcode_settings.isIOS: + self.ninja.build(output, 'package_ios_framework', mac_bundle_depends, + variables=variables) + else: + variables.append(('version', self.xcode_settings.GetFrameworkVersion())) + self.ninja.build(output, 'package_framework', mac_bundle_depends, + variables=variables) else: self.ninja.build(output, 'stamp', mac_bundle_depends, variables=variables) @@ -1802,7 +1868,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, master_ninja = ninja_syntax.Writer(master_ninja_file, width=120) # Put build-time support tools in out/{config_name}. - gyp.common.CopyTool(flavor, toplevel_build) + gyp.common.CopyTool(flavor, toplevel_build, generator_flags) # Grab make settings for CC/CXX. # The rules are @@ -1828,7 +1894,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, ld_host = '$cc_host' ldxx_host = '$cxx_host' - ar_host = 'ar' + ar_host = ar cc_host = None cxx_host = None cc_host_global_setting = None @@ -1883,6 +1949,10 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, key_prefix = re.sub(r'\.HOST$', '.host', key_prefix) wrappers[key_prefix] = os.path.join(build_to_root, value) + mac_toolchain_dir = generator_flags.get('mac_toolchain_dir', None) + if mac_toolchain_dir: + wrappers['LINK'] = "export DEVELOPER_DIR='%s' &&" % mac_toolchain_dir + if flavor == 'win': configs = [target_dicts[qualified_target]['configurations'][config_name] for qualified_target in target_list] @@ -1893,7 +1963,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, configs, generator_flags) cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles( toplevel_build, generator_flags, shared_system_includes, OpenOutput) - for arch, path in cl_paths.iteritems(): + for arch, path in sorted(cl_paths.iteritems()): if clang_cl: # If we have selected clang-cl, use that instead. 
path = clang_cl @@ -2216,6 +2286,12 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, 'compile_xcassets', description='COMPILE XCASSETS $in', command='$env ./gyp-mac-tool compile-xcassets $keys $in') + master_ninja.rule( + 'compile_ios_framework_headers', + description='COMPILE HEADER MAPS AND COPY FRAMEWORK HEADERS $in', + command='$env ./gyp-mac-tool compile-ios-framework-header-map $out ' + '$framework $in && $env ./gyp-mac-tool ' + 'copy-ios-framework-headers $framework $copy_headers') master_ninja.rule( 'mac_tool', description='MACTOOL $mactool_cmd $in', @@ -2225,6 +2301,11 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, description='PACKAGE FRAMEWORK $out, POSTBUILDS', command='./gyp-mac-tool package-framework $out $version$postbuilds ' '&& touch $out') + master_ninja.rule( + 'package_ios_framework', + description='PACKAGE IOS FRAMEWORK $out, POSTBUILDS', + command='./gyp-mac-tool package-ios-framework $out $postbuilds ' + '&& touch $out') if flavor == 'win': master_ninja.rule( 'stamp', @@ -2329,7 +2410,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, # able to run actions and build libraries by their short name. master_ninja.newline() master_ninja.comment('Short names for targets.') - for short_name in target_short_names: + for short_name in sorted(target_short_names): master_ninja.build(short_name, 'phony', [x.FinalOutput() for x in target_short_names[short_name]]) @@ -2345,7 +2426,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, if all_outputs: master_ninja.newline() - master_ninja.build('all', 'phony', list(all_outputs)) + master_ninja.build('all', 'phony', sorted(all_outputs)) master_ninja.default(generator_flags.get('default_target', 'all')) master_ninja_file.close() diff --git a/tools/gyp/pylib/gyp/generator/xcode.py b/tools/gyp/pylib/gyp/generator/xcode.py index 482b53ac8ad9ec..db99d6ab81ed5c 100644 --- a/tools/gyp/pylib/gyp/generator/xcode.py +++ b/tools/gyp/pylib/gyp/generator/xcode.py @@ -77,6 +77,7 @@ 'mac_framework_headers', 'mac_framework_private_headers', 'mac_xctest_bundle', + 'mac_xcuitest_bundle', 'xcode_create_dependents_test_runner', ] @@ -87,6 +88,8 @@ 'mac_framework_private_headers', ] +generator_filelist_paths = None + # Xcode's standard set of library directories, which don't need to be duplicated # in LIBRARY_SEARCH_PATHS. This list is not exhaustive, but that's okay. xcode_standard_library_dirs = frozenset([ @@ -578,6 +581,26 @@ def PerformBuild(data, configurations, params): subprocess.check_call(arguments) +def CalculateGeneratorInputInfo(params): + toplevel = params['options'].toplevel_dir + if params.get('flavor') == 'ninja': + generator_dir = os.path.relpath(params['options'].generator_output or '.') + output_dir = params.get('generator_flags', {}).get('output_dir', 'out') + output_dir = os.path.normpath(os.path.join(generator_dir, output_dir)) + qualified_out_dir = os.path.normpath(os.path.join( + toplevel, output_dir, 'gypfiles-xcode-ninja')) + else: + output_dir = os.path.normpath(os.path.join(toplevel, 'xcodebuild')) + qualified_out_dir = os.path.normpath(os.path.join( + toplevel, output_dir, 'gypfiles')) + + global generator_filelist_paths + generator_filelist_paths = { + 'toplevel': toplevel, + 'qualified_out_dir': qualified_out_dir, + } + + def GenerateOutput(target_list, target_dicts, data, params): # Optionally configure each spec to use ninja as the external builder. 
ninja_wrapper = params.get('flavor') == 'ninja' @@ -590,6 +613,15 @@ def GenerateOutput(target_list, target_dicts, data, params): parallel_builds = generator_flags.get('xcode_parallel_builds', True) serialize_all_tests = \ generator_flags.get('xcode_serialize_all_test_runs', True) + upgrade_check_project_version = \ + generator_flags.get('xcode_upgrade_check_project_version', None) + + # Format upgrade_check_project_version with leading zeros as needed. + if upgrade_check_project_version: + upgrade_check_project_version = str(upgrade_check_project_version) + while len(upgrade_check_project_version) < 4: + upgrade_check_project_version = '0' + upgrade_check_project_version + skip_excluded_files = \ not generator_flags.get('xcode_list_excluded_files', True) xcode_projects = {} @@ -604,9 +636,17 @@ def GenerateOutput(target_list, target_dicts, data, params): xcode_projects[build_file] = xcp pbxp = xcp.project + # Set project-level attributes from multiple options + project_attributes = {}; if parallel_builds: - pbxp.SetProperty('attributes', - {'BuildIndependentTargetsInParallel': 'YES'}) + project_attributes['BuildIndependentTargetsInParallel'] = 'YES' + if upgrade_check_project_version: + project_attributes['LastUpgradeCheck'] = upgrade_check_project_version + project_attributes['LastTestingUpgradeCheck'] = \ + upgrade_check_project_version + project_attributes['LastSwiftUpdateCheck'] = \ + upgrade_check_project_version + pbxp.SetProperty('attributes', project_attributes) # Add gyp/gypi files to project if not generator_flags.get('standalone'): @@ -648,14 +688,18 @@ def GenerateOutput(target_list, target_dicts, data, params): 'loadable_module': 'com.googlecode.gyp.xcode.bundle', 'shared_library': 'com.apple.product-type.library.dynamic', 'static_library': 'com.apple.product-type.library.static', + 'mac_kernel_extension': 'com.apple.product-type.kernel-extension', 'executable+bundle': 'com.apple.product-type.application', 'loadable_module+bundle': 'com.apple.product-type.bundle', 'loadable_module+xctest': 'com.apple.product-type.bundle.unit-test', + 'loadable_module+xcuitest': 'com.apple.product-type.bundle.ui-testing', 'shared_library+bundle': 'com.apple.product-type.framework', 'executable+extension+bundle': 'com.apple.product-type.app-extension', 'executable+watch+extension+bundle': 'com.apple.product-type.watchkit-extension', - 'executable+watch+bundle': 'com.apple.product-type.application.watchapp', + 'executable+watch+bundle': + 'com.apple.product-type.application.watchapp', + 'mac_kernel_extension+bundle': 'com.apple.product-type.kernel-extension', } target_properties = { @@ -665,13 +709,19 @@ def GenerateOutput(target_list, target_dicts, data, params): type = spec['type'] is_xctest = int(spec.get('mac_xctest_bundle', 0)) + is_xcuitest = int(spec.get('mac_xcuitest_bundle', 0)) is_bundle = int(spec.get('mac_bundle', 0)) or is_xctest is_app_extension = int(spec.get('ios_app_extension', 0)) is_watchkit_extension = int(spec.get('ios_watchkit_extension', 0)) is_watch_app = int(spec.get('ios_watch_app', 0)) if type != 'none': type_bundle_key = type - if is_xctest: + if is_xcuitest: + type_bundle_key += '+xcuitest' + assert type == 'loadable_module', ( + 'mac_xcuitest_bundle targets must have type loadable_module ' + '(target %s)' % target_name) + elif is_xctest: type_bundle_key += '+xctest' assert type == 'loadable_module', ( 'mac_xctest_bundle targets must have type loadable_module ' @@ -703,6 +753,9 @@ def GenerateOutput(target_list, target_dicts, data, params): assert not is_bundle, ( 
'mac_bundle targets cannot have type none (target "%s")' % target_name) + assert not is_xcuitest, ( + 'mac_xcuitest_bundle targets cannot have type none (target "%s")' % + target_name) assert not is_xctest, ( 'mac_xctest_bundle targets cannot have type none (target "%s")' % target_name) diff --git a/tools/gyp/pylib/gyp/input.py b/tools/gyp/pylib/gyp/input.py index bc68c3765dba98..a046a15cc1d2d6 100644 --- a/tools/gyp/pylib/gyp/input.py +++ b/tools/gyp/pylib/gyp/input.py @@ -28,7 +28,13 @@ # A list of types that are treated as linkable. -linkable_types = ['executable', 'shared_library', 'loadable_module'] +linkable_types = [ + 'executable', + 'shared_library', + 'loadable_module', + 'mac_kernel_extension', + 'windows_driver', +] # A list of sections that contain links to other targets. dependency_sections = ['dependencies', 'export_dependent_settings'] @@ -1534,11 +1540,15 @@ def FlattenToList(self): # dependents. flat_list = OrderedSet() + def ExtractNodeRef(node): + """Extracts the object that the node represents from the given node.""" + return node.ref + # in_degree_zeros is the list of DependencyGraphNodes that have no # dependencies not in flat_list. Initially, it is a copy of the children # of this node, because when the graph was built, nodes with no # dependencies were made implicit dependents of the root node. - in_degree_zeros = set(self.dependents[:]) + in_degree_zeros = sorted(self.dependents[:], key=ExtractNodeRef) while in_degree_zeros: # Nodes in in_degree_zeros have no dependencies not in flat_list, so they @@ -1550,12 +1560,13 @@ def FlattenToList(self): # Look at dependents of the node just added to flat_list. Some of them # may now belong in in_degree_zeros. - for node_dependent in node.dependents: + for node_dependent in sorted(node.dependents, key=ExtractNodeRef): is_in_degree_zero = True # TODO: We want to check through the # node_dependent.dependencies list but if it's long and we # always start at the beginning, then we get O(n^2) behaviour. - for node_dependent_dependency in node_dependent.dependencies: + for node_dependent_dependency in (sorted(node_dependent.dependencies, + key=ExtractNodeRef)): if not node_dependent_dependency.ref in flat_list: # The dependent one or more dependencies not in flat_list. There # will be more chances to add it to flat_list when examining @@ -1568,7 +1579,7 @@ def FlattenToList(self): # All of the dependent's dependencies are already in flat_list. Add # it to in_degree_zeros where it will be processed in a future # iteration of the outer loop. - in_degree_zeros.add(node_dependent) + in_degree_zeros += [node_dependent] return list(flat_list) @@ -1724,11 +1735,13 @@ def _LinkDependenciesInternal(self, targets, include_shared_libraries, dependencies.add(self.ref) return dependencies - # Executables and loadable modules are already fully and finally linked. - # Nothing else can be a link dependency of them, there can only be - # dependencies in the sense that a dependent target might run an - # executable or load the loadable_module. - if not initial and target_type in ('executable', 'loadable_module'): + # Executables, mac kernel extensions, windows drivers and loadable modules + # are already fully and finally linked. Nothing else can be a link + # dependency of them, there can only be dependencies in the sense that a + # dependent target might run an executable or load the loadable_module. 
+ if not initial and target_type in ('executable', 'loadable_module', + 'mac_kernel_extension', + 'windows_driver'): return dependencies # Shared libraries are already fully linked. They should only be included @@ -2479,7 +2492,7 @@ def ValidateTargetType(target, target_dict): """ VALID_TARGET_TYPES = ('executable', 'loadable_module', 'static_library', 'shared_library', - 'none') + 'mac_kernel_extension', 'none', 'windows_driver') target_type = target_dict.get('type', None) if target_type not in VALID_TARGET_TYPES: raise GypError("Target %s has an invalid target type '%s'. " diff --git a/tools/gyp/pylib/gyp/mac_tool.py b/tools/gyp/pylib/gyp/mac_tool.py index eeeaceb0c7aa23..055d79cf226be9 100755 --- a/tools/gyp/pylib/gyp/mac_tool.py +++ b/tools/gyp/pylib/gyp/mac_tool.py @@ -17,6 +17,7 @@ import re import shutil import string +import struct import subprocess import sys import tempfile @@ -48,6 +49,7 @@ def _CommandifyName(self, name_string): def ExecCopyBundleResource(self, source, dest, convert_to_binary): """Copies a resource file to the bundle/Resources directory, performing any necessary compilation on each resource.""" + convert_to_binary = convert_to_binary == 'True' extension = os.path.splitext(source)[1].lower() if os.path.isdir(source): # Copy tree. @@ -61,11 +63,16 @@ def ExecCopyBundleResource(self, source, dest, convert_to_binary): return self._CopyXIBFile(source, dest) elif extension == '.storyboard': return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest, convert_to_binary) + elif extension == '.strings' and not convert_to_binary: + self._CopyStringsFile(source, dest) else: + if os.path.exists(dest): + os.unlink(dest) shutil.copy(source, dest) + if convert_to_binary and extension in ('.plist', '.strings'): + self._ConvertToBinary(dest) + def _CopyXIBFile(self, source, dest): """Compiles a XIB file with ibtool into a binary plist in the bundle.""" @@ -76,8 +83,26 @@ def _CopyXIBFile(self, source, dest): if os.path.relpath(dest): dest = os.path.join(base, dest) - args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', - '--output-format', 'human-readable-text', '--compile', dest, source] + args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices'] + + if os.environ['XCODE_VERSION_ACTUAL'] > '0700': + args.extend(['--auto-activate-custom-fonts']) + if 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ: + args.extend([ + '--target-device', 'iphone', '--target-device', 'ipad', + '--minimum-deployment-target', + os.environ['IPHONEOS_DEPLOYMENT_TARGET'], + ]) + else: + args.extend([ + '--target-device', 'mac', + '--minimum-deployment-target', + os.environ['MACOSX_DEPLOYMENT_TARGET'], + ]) + + args.extend(['--output-format', 'human-readable-text', '--compile', dest, + source]) + ibtool_section_re = re.compile(r'/\*.*\*/') ibtool_re = re.compile(r'.*note:.*is clipping its content') ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) @@ -96,7 +121,7 @@ def _ConvertToBinary(self, dest): subprocess.check_call([ 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) - def _CopyStringsFile(self, source, dest, convert_to_binary): + def _CopyStringsFile(self, source, dest): """Copies a .strings file using iconv to reconvert the input into UTF-16.""" input_code = self._DetectInputEncoding(source) or "UTF-8" @@ -116,9 +141,6 @@ def _CopyStringsFile(self, source, dest, convert_to_binary): fp.write(s.decode(input_code).encode('UTF-16')) fp.close() - if convert_to_binary == 'True': - self._ConvertToBinary(dest) - def 
_DetectInputEncoding(self, file_name): """Reads the first few bytes from file_name and tries to guess the text encoding. Returns None as a guess if it can't detect it.""" @@ -153,7 +175,7 @@ def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): # Go through all the environment variables and replace them as variables in # the file. - IDENT_RE = re.compile(r'[/\s]') + IDENT_RE = re.compile(r'[_/\s]') for key in os.environ: if key.startswith('_'): continue @@ -228,7 +250,8 @@ def ExecFlock(self, lockfile, *cmd_list): def ExecFilterLibtool(self, *cmd_list): """Calls libtool and filters out '/path/to/libtool: file: foo.o has no symbols'.""" - libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') + libtool_re = re.compile(r'^.*libtool: (?:for architecture: \S* )?' + r'file: .* has no symbols$') libtool_re5 = re.compile( r'^.*libtool: warning for library: ' + r'.* the table of contents is empty ' + @@ -253,6 +276,23 @@ def ExecFilterLibtool(self, *cmd_list): break return libtoolout.returncode + def ExecPackageIosFramework(self, framework): + # Find the name of the binary based on the part before the ".framework". + binary = os.path.basename(framework).split('.')[0] + module_path = os.path.join(framework, 'Modules'); + if not os.path.exists(module_path): + os.mkdir(module_path) + module_template = 'framework module %s {\n' \ + ' umbrella header "%s.h"\n' \ + '\n' \ + ' export *\n' \ + ' module * { export * }\n' \ + '}\n' % (binary, binary) + + module_file = open(os.path.join(module_path, 'module.modulemap'), "w") + module_file.write(module_template) + module_file.close() + def ExecPackageFramework(self, framework, version): """Takes a path to Something.framework and the Current version of that and sets up all the symlinks.""" @@ -289,6 +329,23 @@ def _Relink(self, dest, link): os.remove(link) os.symlink(dest, link) + def ExecCompileIosFrameworkHeaderMap(self, out, framework, *all_headers): + framework_name = os.path.basename(framework).split('.')[0] + all_headers = map(os.path.abspath, all_headers) + filelist = {} + for header in all_headers: + filename = os.path.basename(header) + filelist[filename] = header + filelist[os.path.join(framework_name, filename)] = header + WriteHmap(out, filelist) + + def ExecCopyIosFrameworkHeaders(self, framework, *copy_headers): + header_path = os.path.join(framework, 'Headers'); + if not os.path.exists(header_path): + os.makedirs(header_path) + for header in copy_headers: + shutil.copy(header, os.path.join(header_path, os.path.basename(header))) + def ExecCompileXcassets(self, keys, *inputs): """Compiles multiple .xcassets files into a single .car file. @@ -349,49 +406,28 @@ def ExecMergeInfoPlist(self, output, *inputs): self._MergePlist(merged_plist, plist) plistlib.writePlist(merged_plist, output) - def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): + def ExecCodeSignBundle(self, key, entitlements, provisioning, path, preserve): """Code sign a bundle. This function tries to code sign an iOS bundle, following the same algorithm as Xcode: - 1. copy ResourceRules.plist from the user or the SDK into the bundle, - 2. pick the provisioning profile that best match the bundle identifier, + 1. pick the provisioning profile that best match the bundle identifier, and copy it into the bundle as embedded.mobileprovision, - 3. copy Entitlements.plist from user or SDK next to the bundle, - 4. code sign the bundle. + 2. copy Entitlements.plist from user or SDK next to the bundle, + 3. code sign the bundle. 
""" - resource_rules_path = self._InstallResourceRules(resource_rules) substitutions, overrides = self._InstallProvisioningProfile( provisioning, self._GetCFBundleIdentifier()) entitlements_path = self._InstallEntitlements( entitlements, substitutions, overrides) - subprocess.check_call([ - 'codesign', '--force', '--sign', key, '--resource-rules', - resource_rules_path, '--entitlements', entitlements_path, - os.path.join( - os.environ['TARGET_BUILD_DIR'], - os.environ['FULL_PRODUCT_NAME'])]) - def _InstallResourceRules(self, resource_rules): - """Installs ResourceRules.plist from user or SDK into the bundle. - - Args: - resource_rules: string, optional, path to the ResourceRules.plist file - to use, default to "${SDKROOT}/ResourceRules.plist" - - Returns: - Path to the copy of ResourceRules.plist into the bundle. - """ - source_path = resource_rules - target_path = os.path.join( - os.environ['BUILT_PRODUCTS_DIR'], - os.environ['CONTENTS_FOLDER_PATH'], - 'ResourceRules.plist') - if not source_path: - source_path = os.path.join( - os.environ['SDKROOT'], 'ResourceRules.plist') - shutil.copy2(source_path, target_path) - return target_path + args = ['codesign', '--force', '--sign', key] + if preserve == 'True': + args.extend(['--deep', '--preserve-metadata=identifier,entitlements']) + else: + args.extend(['--entitlements', entitlements_path]) + args.extend(['--timestamp=none', path]) + subprocess.check_call(args) def _InstallProvisioningProfile(self, profile, bundle_identifier): """Installs embedded.mobileprovision into the bundle. @@ -606,5 +642,71 @@ def _ExpandVariables(self, data, substitutions): return {k: self._ExpandVariables(data[k], substitutions) for k in data} return data +def NextGreaterPowerOf2(x): + return 2**(x).bit_length() + +def WriteHmap(output_name, filelist): + """Generates a header map based on |filelist|. + + Per Mark Mentovai: + A header map is structured essentially as a hash table, keyed by names used + in #includes, and providing pathnames to the actual files. 
+ + The implementation below and the comment above comes from inspecting: + http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt + while also looking at the implementation in clang in: + https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp + """ + magic = 1751998832 + version = 1 + _reserved = 0 + count = len(filelist) + capacity = NextGreaterPowerOf2(count) + strings_offset = 24 + (12 * capacity) + max_value_length = len(max(filelist.items(), key=lambda (k,v):len(v))[1]) + + out = open(output_name, "wb") + out.write(struct.pack('= '0430': + sdk_version = self._GetSdkVersionInfoItem(sdk_root, '--show-sdk-version') + cache['DTSDKName'] = sdk_root + (sdk_version or '') + if xcode >= '0720': cache['DTSDKBuild'] = self._GetSdkVersionInfoItem( - sdk_root, 'ProductBuildVersion') + sdk_root, '--show-sdk-build-version') + elif xcode >= '0430': + cache['DTSDKBuild'] = sdk_version else: cache['DTSDKBuild'] = cache['BuildMachineOSBuild'] if self.isIOS: - cache['DTPlatformName'] = cache['DTSDKName'] + cache['MinimumOSVersion'] = self.xcode_settings[configname].get( + 'IPHONEOS_DEPLOYMENT_TARGET') + cache['DTPlatformName'] = sdk_root + cache['DTPlatformVersion'] = sdk_version + if configname.endswith("iphoneos"): - cache['DTPlatformVersion'] = self._GetSdkVersionInfoItem( - sdk_root, 'ProductVersion') cache['CFBundleSupportedPlatforms'] = ['iPhoneOS'] + cache['DTPlatformBuild'] = cache['DTSDKBuild'] else: cache['CFBundleSupportedPlatforms'] = ['iPhoneSimulator'] + # This is weird, but Xcode sets DTPlatformBuild to an empty field + # for simulator builds. + cache['DTPlatformBuild'] = "" XcodeSettings._plist_cache[configname] = cache # Include extra plist items that are per-target, not per global @@ -1334,7 +1475,10 @@ def IsMacBundle(flavor, spec): Bundles are directories with a certain subdirectory structure, instead of just a single file. Bundle rules do not produce a binary but also package resources into that directory.""" - is_mac_bundle = (int(spec.get('mac_bundle', 0)) != 0 and flavor == 'mac') + is_mac_bundle = int(spec.get('mac_xctest_bundle', 0)) != 0 or \ + int(spec.get('mac_xcuitest_bundle', 0)) != 0 or \ + (int(spec.get('mac_bundle', 0)) != 0 and flavor == 'mac') + if is_mac_bundle: assert spec['type'] != 'none', ( 'mac_bundle targets cannot have type none (target "%s")' % @@ -1444,14 +1588,16 @@ def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration, additional_settings: An optional dict with more values to add to the result. """ + if not xcode_settings: return {} # This function is considered a friend of XcodeSettings, so let it reach into # its implementation details. spec = xcode_settings.spec - # These are filled in on a as-needed basis. + # These are filled in on an as-needed basis. 
env = { + 'BUILT_FRAMEWORKS_DIR' : built_products_dir, 'BUILT_PRODUCTS_DIR' : built_products_dir, 'CONFIGURATION' : configuration, 'PRODUCT_NAME' : xcode_settings.GetProductName(), @@ -1462,12 +1608,16 @@ def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration, # written for bundles: 'TARGET_BUILD_DIR' : built_products_dir, 'TEMP_DIR' : '${TMPDIR}', + 'XCODE_VERSION_ACTUAL' : XcodeVersion()[0], } if xcode_settings.GetPerConfigSetting('SDKROOT', configuration): env['SDKROOT'] = xcode_settings._SdkPath(configuration) else: env['SDKROOT'] = '' + if xcode_settings.mac_toolchain_dir: + env['DEVELOPER_DIR'] = xcode_settings.mac_toolchain_dir + if spec['type'] in ( 'executable', 'static_library', 'shared_library', 'loadable_module'): env['EXECUTABLE_NAME'] = xcode_settings.GetExecutableName() @@ -1478,10 +1628,27 @@ def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration, env['MACH_O_TYPE'] = mach_o_type env['PRODUCT_TYPE'] = xcode_settings.GetProductType() if xcode_settings._IsBundle(): + # xcodeproj_file.py sets the same Xcode subfolder value for this as for + # FRAMEWORKS_FOLDER_PATH so Xcode builds will actually use FFP's value. + env['BUILT_FRAMEWORKS_DIR'] = \ + os.path.join(built_products_dir + os.sep \ + + xcode_settings.GetBundleFrameworksFolderPath()) env['CONTENTS_FOLDER_PATH'] = \ - xcode_settings.GetBundleContentsFolderPath() + xcode_settings.GetBundleContentsFolderPath() + env['EXECUTABLE_FOLDER_PATH'] = \ + xcode_settings.GetBundleExecutableFolderPath() env['UNLOCALIZED_RESOURCES_FOLDER_PATH'] = \ xcode_settings.GetBundleResourceFolder() + env['JAVA_FOLDER_PATH'] = xcode_settings.GetBundleJavaFolderPath() + env['FRAMEWORKS_FOLDER_PATH'] = \ + xcode_settings.GetBundleFrameworksFolderPath() + env['SHARED_FRAMEWORKS_FOLDER_PATH'] = \ + xcode_settings.GetBundleSharedFrameworksFolderPath() + env['SHARED_SUPPORT_FOLDER_PATH'] = \ + xcode_settings.GetBundleSharedSupportFolderPath() + env['PLUGINS_FOLDER_PATH'] = xcode_settings.GetBundlePlugInsFolderPath() + env['XPCSERVICES_FOLDER_PATH'] = \ + xcode_settings.GetBundleXPCServicesFolderPath() env['INFOPLIST_PATH'] = xcode_settings.GetBundlePlistPath() env['WRAPPER_NAME'] = xcode_settings.GetWrapperName() @@ -1495,8 +1662,6 @@ def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration, sdk_root = xcode_settings._SdkRoot(configuration) if not sdk_root: sdk_root = xcode_settings._XcodeSdkPath('') - if sdk_root is None: - sdk_root = '' env['SDKROOT'] = sdk_root if not additional_settings: @@ -1612,11 +1777,12 @@ def _AddIOSDeviceConfigurations(targets): for target_dict in targets.itervalues(): toolset = target_dict['toolset'] configs = target_dict['configurations'] - for config_name, config_dict in dict(configs).iteritems(): - iphoneos_config_dict = copy.deepcopy(config_dict) + for config_name, simulator_config_dict in dict(configs).iteritems(): + iphoneos_config_dict = copy.deepcopy(simulator_config_dict) configs[config_name + '-iphoneos'] = iphoneos_config_dict - configs[config_name + '-iphonesimulator'] = config_dict + configs[config_name + '-iphonesimulator'] = simulator_config_dict if toolset == 'target': + simulator_config_dict['xcode_settings']['SDKROOT'] = 'iphonesimulator' iphoneos_config_dict['xcode_settings']['SDKROOT'] = 'iphoneos' return targets diff --git a/tools/gyp/pylib/gyp/xcode_ninja.py b/tools/gyp/pylib/gyp/xcode_ninja.py index 3820d6bf04817f..bc76ffff4e9b58 100644 --- a/tools/gyp/pylib/gyp/xcode_ninja.py +++ b/tools/gyp/pylib/gyp/xcode_ninja.py @@ -92,11 +92,16 @@ def 
_TargetFromSpec(old_spec, params): new_xcode_settings['CODE_SIGNING_REQUIRED'] = "NO" new_xcode_settings['IPHONEOS_DEPLOYMENT_TARGET'] = \ old_xcode_settings['IPHONEOS_DEPLOYMENT_TARGET'] + for key in ['BUNDLE_LOADER', 'TEST_HOST']: + if key in old_xcode_settings: + new_xcode_settings[key] = old_xcode_settings[key] + ninja_target['configurations'][config] = {} ninja_target['configurations'][config]['xcode_settings'] = \ new_xcode_settings ninja_target['mac_bundle'] = old_spec.get('mac_bundle', 0) + ninja_target['mac_xctest_bundle'] = old_spec.get('mac_xctest_bundle', 0) ninja_target['ios_app_extension'] = old_spec.get('ios_app_extension', 0) ninja_target['ios_watchkit_extension'] = \ old_spec.get('ios_watchkit_extension', 0) @@ -138,9 +143,10 @@ def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec): if target_extras is not None and re.search(target_extras, target_name): return True - # Otherwise just show executable targets. - if spec.get('type', '') == 'executable' and \ - spec.get('product_extension', '') != 'bundle': + # Otherwise just show executable targets and xc_tests. + if (int(spec.get('mac_xctest_bundle', 0)) != 0 or + (spec.get('type', '') == 'executable' and + spec.get('product_extension', '') != 'bundle')): # If there is a filter and the target does not match, exclude the target. if executable_target_pattern is not None: @@ -227,13 +233,26 @@ def CreateWrapper(target_list, target_dicts, data, params): # Tell Xcode to look everywhere for headers. sources_target['configurations'] = {'Default': { 'include_dirs': [ depth ] } } + # Put excluded files into the sources target so they can be opened in Xcode. + skip_excluded_files = \ + not generator_flags.get('xcode_ninja_list_excluded_files', True) + sources = [] for target, target_dict in target_dicts.iteritems(): base = os.path.dirname(target) files = target_dict.get('sources', []) + \ target_dict.get('mac_bundle_resources', []) + + if not skip_excluded_files: + files.extend(target_dict.get('sources_excluded', []) + + target_dict.get('mac_bundle_resources_excluded', [])) + for action in target_dict.get('actions', []): files.extend(action.get('inputs', [])) + + if not skip_excluded_files: + files.extend(action.get('inputs_excluded', [])) + # Remove files starting with $. These are mostly intermediate files for the # build system. files = [ file for file in files if not file.startswith('$')] diff --git a/tools/gyp/pylib/gyp/xcodeproj_file.py b/tools/gyp/pylib/gyp/xcodeproj_file.py index 034a0d2d4fcc23..e69235f7241ffe 100644 --- a/tools/gyp/pylib/gyp/xcodeproj_file.py +++ b/tools/gyp/pylib/gyp/xcodeproj_file.py @@ -1492,6 +1492,7 @@ def __init__(self, properties=None, id=None, parent=None): 'icns': 'image.icns', 'java': 'sourcecode.java', 'js': 'sourcecode.javascript', + 'kext': 'wrapper.kext', 'm': 'sourcecode.c.objc', 'mm': 'sourcecode.cpp.objcpp', 'nib': 'wrapper.nib', @@ -1944,24 +1945,40 @@ class PBXCopyFilesBuildPhase(XCBuildPhase): 'name': [0, str, 0, 0], }) - # path_tree_re matches "$(DIR)/path" or just "$(DIR)". Match group 1 is - # "DIR", match group 3 is "path" or None. - path_tree_re = re.compile('^\\$\\((.*)\\)(/(.*)|)$') - - # path_tree_to_subfolder maps names of Xcode variables to the associated - # dstSubfolderSpec property value used in a PBXCopyFilesBuildPhase object. - path_tree_to_subfolder = { - 'BUILT_PRODUCTS_DIR': 16, # Products Directory - # Other types that can be chosen via the Xcode UI. - # TODO(mark): Map Xcode variable names to these. 
- # : 1, # Wrapper - # : 6, # Executables: 6 - # : 7, # Resources - # : 15, # Java Resources - # : 10, # Frameworks - # : 11, # Shared Frameworks - # : 12, # Shared Support - # : 13, # PlugIns + # path_tree_re matches "$(DIR)/path", "$(DIR)/$(DIR2)/path" or just "$(DIR)". + # Match group 1 is "DIR", group 3 is "path" or "$(DIR2") or "$(DIR2)/path" + # or None. If group 3 is "path", group 4 will be None otherwise group 4 is + # "DIR2" and group 6 is "path". + path_tree_re = re.compile(r'^\$\((.*?)\)(/(\$\((.*?)\)(/(.*)|)|(.*)|)|)$') + + # path_tree_{first,second}_to_subfolder map names of Xcode variables to the + # associated dstSubfolderSpec property value used in a PBXCopyFilesBuildPhase + # object. + path_tree_first_to_subfolder = { + # Types that can be chosen via the Xcode UI. + 'BUILT_PRODUCTS_DIR': 16, # Products Directory + 'BUILT_FRAMEWORKS_DIR': 10, # Not an official Xcode macro. + # Existed before support for the + # names below was added. Maps to + # "Frameworks". + } + + path_tree_second_to_subfolder = { + 'WRAPPER_NAME': 1, # Wrapper + # Although Xcode's friendly name is "Executables", the destination + # is demonstrably the value of the build setting + # EXECUTABLE_FOLDER_PATH not EXECUTABLES_FOLDER_PATH. + 'EXECUTABLE_FOLDER_PATH': 6, # Executables. + 'UNLOCALIZED_RESOURCES_FOLDER_PATH': 7, # Resources + 'JAVA_FOLDER_PATH': 15, # Java Resources + 'FRAMEWORKS_FOLDER_PATH': 10, # Frameworks + 'SHARED_FRAMEWORKS_FOLDER_PATH': 11, # Shared Frameworks + 'SHARED_SUPPORT_FOLDER_PATH': 12, # Shared Support + 'PLUGINS_FOLDER_PATH': 13, # PlugIns + # For XPC Services, Xcode sets both dstPath and dstSubfolderSpec. + # Note that it re-uses the BUILT_PRODUCTS_DIR value for + # dstSubfolderSpec. dstPath is set below. + 'XPCSERVICES_FOLDER_PATH': 16, # XPC Services. } def Name(self): @@ -1982,14 +1999,61 @@ def SetDestination(self, path): path_tree_match = self.path_tree_re.search(path) if path_tree_match: - # Everything else needs to be relative to an Xcode variable. - path_tree = path_tree_match.group(1) - relative_path = path_tree_match.group(3) - - if path_tree in self.path_tree_to_subfolder: - subfolder = self.path_tree_to_subfolder[path_tree] + path_tree = path_tree_match.group(1); + if path_tree in self.path_tree_first_to_subfolder: + subfolder = self.path_tree_first_to_subfolder[path_tree] + relative_path = path_tree_match.group(3) if relative_path is None: relative_path = '' + + if subfolder == 16 and path_tree_match.group(4) is not None: + # BUILT_PRODUCTS_DIR (16) is the first element in a path whose + # second element is possibly one of the variable names in + # path_tree_second_to_subfolder. Xcode sets the values of all these + # variables to relative paths so .gyp files must prefix them with + # BUILT_PRODUCTS_DIR, e.g. + # $(BUILT_PRODUCTS_DIR)/$(PLUGINS_FOLDER_PATH). Then + # xcode_emulation.py can export these variables with the same values + # as Xcode yet make & ninja files can determine the absolute path + # to the target. Xcode uses the dstSubfolderSpec value set here + # to determine the full path. + # + # An alternative of xcode_emulation.py setting the values to absolute + # paths when exporting these variables has been ruled out because + # then the values would be different depending on the build tool. + # + # Another alternative is to invent new names for the variables used + # to match to the subfolder indices in the second table. 
.gyp files + # then will not need to prepend $(BUILT_PRODUCTS_DIR) because + # xcode_emulation.py can set the values of those variables to + # the absolute paths when exporting. This is possibly the thinking + # behind BUILT_FRAMEWORKS_DIR which is used in exactly this manner. + # + # Requiring prepending BUILT_PRODUCTS_DIR has been chosen because + # this same way could be used to specify destinations in .gyp files + # that pre-date this addition to GYP. However they would only work + # with the Xcode generator. The previous version of xcode_emulation.py + # does not export these variables. Such files will get the benefit + # of the Xcode UI showing the proper destination name simply by + # regenerating the projects with this version of GYP. + path_tree = path_tree_match.group(4) + relative_path = path_tree_match.group(6) + separator = '/' + + if path_tree in self.path_tree_second_to_subfolder: + subfolder = self.path_tree_second_to_subfolder[path_tree] + if relative_path is None: + relative_path = '' + separator = '' + if path_tree == 'XPCSERVICES_FOLDER_PATH': + relative_path = '$(CONTENTS_FOLDER_PATH)/XPCServices' \ + + separator + relative_path + else: + # subfolder = 16 from above + # The second element of the path is an unrecognized variable. + # Include it and any remaining elements in relative_path. + relative_path = path_tree_match.group(3); + else: # The path starts with an unrecognized Xcode variable # name like $(SRCROOT). Xcode will still handle this @@ -2260,8 +2324,12 @@ class PBXNativeTarget(XCTarget): '', ''], 'com.apple.product-type.bundle.unit-test': ['wrapper.cfbundle', '', '.xctest'], + 'com.apple.product-type.bundle.ui-testing': ['wrapper.cfbundle', + '', '.xctest'], 'com.googlecode.gyp.xcode.bundle': ['compiled.mach-o.dylib', '', '.so'], + 'com.apple.product-type.kernel-extension': ['wrapper.kext', + '', '.kext'], } def __init__(self, properties=None, id=None, parent=None, @@ -2314,7 +2382,9 @@ def __init__(self, properties=None, id=None, parent=None, force_extension = suffix[1:] if self._properties['productType'] == \ - 'com.apple.product-type-bundle.unit.test': + 'com.apple.product-type-bundle.unit.test' or \ + self._properties['productType'] == \ + 'com.apple.product-type-bundle.ui-testing': if force_extension is None: force_extension = suffix[1:] diff --git a/tools/gyp/tools/pretty_gyp.py b/tools/gyp/tools/pretty_gyp.py index c51d35872cce6d..d5736bbd4a6009 100755 --- a/tools/gyp/tools/pretty_gyp.py +++ b/tools/gyp/tools/pretty_gyp.py @@ -118,24 +118,23 @@ def prettyprint_input(lines): basic_offset = 2 last_line = "" for line in lines: - if COMMENT_RE.match(line): - print line - else: - line = line.strip('\r\n\t ') # Otherwise doesn't strip \r on Unix. - if len(line) > 0: + line = line.strip('\r\n\t ') # Otherwise doesn't strip \r on Unix. 
+    if len(line) > 0:
+      brace_diff = 0
+      if not COMMENT_RE.match(line):
         (brace_diff, after) = count_braces(line)
-        if brace_diff != 0:
-          if after:
-            print " " * (basic_offset * indent) + line
-            indent += brace_diff
-          else:
-            indent += brace_diff
-            print " " * (basic_offset * indent) + line
+      if brace_diff != 0:
+        if after:
+          print " " * (basic_offset * indent) + line
+          indent += brace_diff
         else:
+          indent += brace_diff
           print " " * (basic_offset * indent) + line
       else:
-        print ""
-      last_line = line
+        print " " * (basic_offset * indent) + line
+    else:
+      print ""
+    last_line = line


 def main():

From c2f1525913d442a933a12a0bf6e6e9e9ffc20c9a Mon Sep 17 00:00:00 2001
From: Shigeki Ohtsu
Date: Tue, 10 Feb 2015 09:27:52 +0900
Subject: [PATCH 2/4] tools: fix gyp to work on MacOSX without XCode

This issue has already been submitted upstream in
https://code.google.com/p/gyp/issues/detail?id=477

Use this commit until the upstream fix lands.

PR-URL: https://github.com/iojs/io.js/pull/1325
Reviewed-By: Fedor Indutny
Reviewed-By: Ben Noordhuis
---
 tools/gyp/pylib/gyp/xcode_emulation.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/tools/gyp/pylib/gyp/xcode_emulation.py b/tools/gyp/pylib/gyp/xcode_emulation.py
index dba8e7699e520f..667c53695a12d8 100644
--- a/tools/gyp/pylib/gyp/xcode_emulation.py
+++ b/tools/gyp/pylib/gyp/xcode_emulation.py
@@ -1662,6 +1662,8 @@ def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
   sdk_root = xcode_settings._SdkRoot(configuration)
   if not sdk_root:
     sdk_root = xcode_settings._XcodeSdkPath('')
+  if sdk_root is None:
+    sdk_root = ''
   env['SDKROOT'] = sdk_root

   if not additional_settings:

From c0141d83f7d6e301e6190f698bf781a3c4dbcf63 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Johan=20Bergstr=C3=B6m?=
Date: Wed, 13 Apr 2016 11:34:22 +0900
Subject: [PATCH 3/4] gyp: inherit parent for `*.host`
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Gyp defaults to gcc/g++ if CC.host/CXX.host is unset. This is not
suitable for environments that only use the clang toolchain. Since we
already assume that the user will provide clang/clang++ through CC/CXX,
prefer it (then fall back to gcc/g++). Also apply the same logic to
link/ar for consistency, although it doesn't affect us.
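[Editor's note, illustration only, not part of the patch: a minimal sketch of the fallback order this change is assumed to produce for the host toolchain. get_environ_fallback below is a hypothetical stand-in for gyp.common.GetEnvironFallback, which is expected to return the first environment variable in the list that is set, otherwise the default.]

    import os

    def get_environ_fallback(var_list, default):
      # Return the first environment variable in var_list that is set,
      # otherwise fall back to the supplied default.
      for var in var_list:
        if var in os.environ:
          return os.environ[var]
      return default

    # Previously the host compiler ignored $CC and fell straight back to gcc:
    #   cc_host = get_environ_fallback(('CC_host',), 'gcc')
    # With this change, an environment that only sets CC=clang is honoured:
    cc_host = get_environ_fallback(('CC_host', 'CC'), 'gcc')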
PR-URL: https://github.com/nodejs/node/pull/6173 Fixes: https://github.com/nodejs/node/issues/6152 Reviewed-By: João Reis Reviewed-By: Ben Noordhuis --- tools/gyp/pylib/gyp/generator/make.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tools/gyp/pylib/gyp/generator/make.py b/tools/gyp/pylib/gyp/generator/make.py index b7da768fb33f5a..91f3df77f0bcbd 100644 --- a/tools/gyp/pylib/gyp/generator/make.py +++ b/tools/gyp/pylib/gyp/generator/make.py @@ -2063,10 +2063,10 @@ def CalculateMakefilePath(build_file, base_name): 'AR.target': GetEnvironFallback(('AR_target', 'AR'), '$(AR)'), 'CXX.target': GetEnvironFallback(('CXX_target', 'CXX'), '$(CXX)'), 'LINK.target': GetEnvironFallback(('LINK_target', 'LINK'), '$(LINK)'), - 'CC.host': GetEnvironFallback(('CC_host',), 'gcc'), - 'AR.host': GetEnvironFallback(('AR_host',), 'ar'), - 'CXX.host': GetEnvironFallback(('CXX_host',), 'g++'), - 'LINK.host': GetEnvironFallback(('LINK_host',), '$(CXX.host)'), + 'CC.host': GetEnvironFallback(('CC_host', 'CC'), 'gcc'), + 'AR.host': GetEnvironFallback(('AR_host', 'AR'), 'ar'), + 'CXX.host': GetEnvironFallback(('CXX_host', 'CXX'), 'g++'), + 'LINK.host': GetEnvironFallback(('LINK_host', 'LINK'), '$(CXX.host)'), }) build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0]) From 369b9d574bdbc7cde51ec4f4a7f432ded4f14531 Mon Sep 17 00:00:00 2001 From: Anna Henningsen Date: Sat, 28 May 2016 00:10:28 +0200 Subject: [PATCH 4/4] deps: float gyp patch for long filenames MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Pulling in https://codereview.chromium.org/2019133002/ in its current state, as gyp seems to be largely abandoned as a project. Original commit message: Hash intermediate file name to avoid ENAMETOOLONG Hash the intermediate Makefile target used for multi-output rules so that it still works when the involved file names are very long. Since the intermediate file's name is effectively arbitrary, this does not come with notable behavioural changes. The `import hashlib` boilerplate is taken directly from `xcodeproj_file.py`. Concretely, this makes the V8 inspector build currently fail when long pathnames are involved, notably when using ecryptfs which has a lower file name length limit. Fixes: https://github.com/nodejs/node/issues/7959 Ref: https://github.com/nodejs/node/issues/7510 PR-URL: https://github.com/nodejs/node/pull/7963 Reviewed-By: Sakthipriyan Vairamani Reviewed-By: Ben Noordhuis Reviewed-By: James M Snell Reviewed-By: Saúl Ibarra Corretgé --- tools/gyp/pylib/gyp/generator/make.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tools/gyp/pylib/gyp/generator/make.py b/tools/gyp/pylib/gyp/generator/make.py index 91f3df77f0bcbd..ca7c742d9a5bfe 100644 --- a/tools/gyp/pylib/gyp/generator/make.py +++ b/tools/gyp/pylib/gyp/generator/make.py @@ -31,6 +31,8 @@ from gyp.common import GetEnvironFallback from gyp.common import GypError +import hashlib + generator_default_variables = { 'EXECUTABLE_PREFIX': '', 'EXECUTABLE_SUFFIX': '', @@ -1743,7 +1745,10 @@ def WriteMakeRule(self, outputs, inputs, actions=None, comment=None, # actual command. # - The intermediate recipe will 'touch' the intermediate file. # - The multi-output rule will have an do-nothing recipe. - intermediate = "%s.intermediate" % (command if command else self.target) + + # Hash the target name to avoid generating overlong filenames. 
+ cmddigest = hashlib.sha1(command if command else self.target).hexdigest() + intermediate = "%s.intermediate" % cmddigest self.WriteLn('%s: %s' % (' '.join(outputs), intermediate)) self.WriteLn('\t%s' % '@:'); self.WriteLn('%s: %s' % ('.INTERMEDIATE', intermediate))
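[Editor's note, illustration only, not part of the patch: a minimal sketch of the renaming scheme the hunk above introduces. The intermediate Makefile target takes its name from a SHA-1 digest of the rule's command (or, failing that, the target name), so the generated filename has a fixed length no matter how long the involved paths are and can no longer trigger ENAMETOOLONG.]

    import hashlib

    def intermediate_target_name(command, target):
      # Same idea as the patched make.py: hash the command line (or the target
      # name when there is no command) and use the digest as the file name.
      # .encode() keeps this sketch runnable on Python 3; the in-tree generator
      # runs under Python 2 and hashes the str directly.
      data = command if command else target
      return '%s.intermediate' % hashlib.sha1(data.encode('utf-8')).hexdigest()

    # Even a very long command yields 40 hex characters plus '.intermediate':
    print(len(intermediate_target_name('x' * 10000, 'mytarget')))  # 53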