'Merge multiple .plist files into a single .plist file.'
def ExecMergeInfoPlist(self, output, *inputs):
merged_plist = {}
for path in inputs:
  plist = self._LoadPlistMaybeBinary(path)
  self._MergePlist(merged_plist, plist)
plistlib.writePlist(merged_plist, output)
'Code sign a bundle. This function tries to code sign an iOS bundle, following the same algorithm as Xcode: 1. copy ResourceRules.plist from the user or the SDK into the bundle, 2. pick the provisioning profile that best matches the bundle identifier, and copy it into the bundle as embedded.mobileprovision, 3. copy Entitlements.plist from the user or the SDK next to the bundle, 4. code sign the bundle.'
def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
resource_rules_path = self._InstallResourceRules(resource_rules)
substitutions, overrides = self._InstallProvisioningProfile(
    provisioning, self._GetCFBundleIdentifier())
entitlements_path = self._InstallEntitlements(
    entitlements, substitutions, overrides)
subprocess.check_call([
    'codesign', '--force', '--sign', key,
    '--resource-rules', resource_rules_path,
    '--entitlements', entitlements_path,
    os.path.join(os.environ['TARGET_BUILD_DIR'],
                 os.environ['FULL_PRODUCT_NAME'])])
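To make the final step concrete, here is a small sketch, with invented paths and an invented signing identity, of the codesign invocation that ExecCodeSignBundle assembles once the resource rules, provisioning profile and entitlements are in place; the real values come from Xcode's environment.

import os

env = {                                            # illustrative values only
    'TARGET_BUILD_DIR': '/tmp/build/Release-iphoneos',
    'FULL_PRODUCT_NAME': 'Example.app',
}
key = 'iPhone Developer: Example (ABCDEF1234)'     # invented identity
resource_rules_path = '/tmp/build/Example.app/ResourceRules.plist'
entitlements_path = '/tmp/build/Example.xcent'

# The final call made by ExecCodeSignBundle is equivalent to running:
codesign_cmd = ['codesign', '--force', '--sign', key,
                '--resource-rules', resource_rules_path,
                '--entitlements', entitlements_path,
                os.path.join(env['TARGET_BUILD_DIR'],
                             env['FULL_PRODUCT_NAME'])]
print(' '.join(codesign_cmd))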
'Installs ResourceRules.plist from the user or the SDK into the bundle. Args: resource_rules: string, optional, path to the ResourceRules.plist file to use, defaults to "${SDKROOT}/ResourceRules.plist" Returns: Path to the copy of ResourceRules.plist in the bundle.'
def _InstallResourceRules(self, resource_rules):
source_path = resource_rules
target_path = os.path.join(os.environ['BUILT_PRODUCTS_DIR'],
                           os.environ['CONTENTS_FOLDER_PATH'],
                           'ResourceRules.plist')
if not source_path:
  source_path = os.path.join(os.environ['SDKROOT'], 'ResourceRules.plist')
shutil.copy2(source_path, target_path)
return target_path
'Installs embedded.mobileprovision into the bundle. Args: profile: string, optional, short name of the .mobileprovision file to use; if empty or the file is missing, the best installed file will be used bundle_identifier: string, value of CFBundleIdentifier from Info.plist Returns: A tuple containing two dictionaries: variable substitutions and values to override when generating the entitlements file.'
def _InstallProvisioningProfile(self, profile, bundle_identifier):
source_path, provisioning_data, team_id = self._FindProvisioningProfile(
    profile, bundle_identifier)
target_path = os.path.join(os.environ['BUILT_PRODUCTS_DIR'],
                           os.environ['CONTENTS_FOLDER_PATH'],
                           'embedded.mobileprovision')
shutil.copy2(source_path, target_path)
substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
return (substitutions, provisioning_data['Entitlements'])
'Finds the .mobileprovision file to use for signing the bundle. Checks all the installed provisioning profiles (or, if the user specified the PROVISIONING_PROFILE variable, only consults it) and selects the most specific one that corresponds to the bundle identifier. Args: profile: string, optional, short name of the .mobileprovision file to use; if empty or the file is missing, the best installed file will be used bundle_identifier: string, value of CFBundleIdentifier from Info.plist Returns: A tuple of the path to the selected provisioning profile, the data of the embedded plist in the provisioning profile and the team identifier to use for code signing. Raises: SystemExit: if no .mobileprovision can be used to sign the bundle.'
def _FindProvisioningProfile(self, profile, bundle_identifier):
profiles_dir = os.path.join(os.environ['HOME'], 'Library', 'MobileDevice',
                            'Provisioning Profiles')
if not os.path.isdir(profiles_dir):
  print >>sys.stderr, (
      'cannot find mobile provisioning for %s' % bundle_identifier)
  sys.exit(1)
provisioning_profiles = None
if profile:
  profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
  if os.path.exists(profile_path):
    provisioning_profiles = [profile_path]
if not provisioning_profiles:
  provisioning_profiles = glob.glob(
      os.path.join(profiles_dir, '*.mobileprovision'))
valid_provisioning_profiles = {}
for profile_path in provisioning_profiles:
  profile_data = self._LoadProvisioningProfile(profile_path)
  app_id_pattern = profile_data.get(
      'Entitlements', {}).get('application-identifier', '')
  for team_identifier in profile_data.get('TeamIdentifier', []):
    app_id = '%s.%s' % (team_identifier, bundle_identifier)
    if fnmatch.fnmatch(app_id, app_id_pattern):
      valid_provisioning_profiles[app_id_pattern] = (
          profile_path, profile_data, team_identifier)
if not valid_provisioning_profiles:
  print >>sys.stderr, (
      'cannot find mobile provisioning for %s' % bundle_identifier)
  sys.exit(1)
selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
return valid_provisioning_profiles[selected_key]
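A minimal standalone sketch (not from the source) of the selection rule implemented above: each installed profile's application-identifier pattern is matched with fnmatch against "team id.bundle id", and the longest matching pattern, being the most specific, wins. The team ID, bundle ID and file names below are made up.

import fnmatch

bundle_identifier = 'com.example.app'
team_identifier = 'ABCDEF1234'                      # invented
candidates = {                                      # pattern -> profile path (invented)
    'ABCDEF1234.*': 'wildcard.mobileprovision',
    'ABCDEF1234.com.example.*': 'prefix.mobileprovision',
    'ABCDEF1234.com.example.app': 'exact.mobileprovision',
}

app_id = '%s.%s' % (team_identifier, bundle_identifier)
matching = dict((pattern, path) for pattern, path in candidates.items()
                if fnmatch.fnmatch(app_id, pattern))
best_pattern = max(matching, key=len)
print(matching[best_pattern])  # exact.mobileprovision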
'Extracts the plist embedded in a provisioning profile. Args: profile_path: string, path to the .mobileprovision file Returns: Content of the plist embedded in the provisioning profile as a dictionary.'
def _LoadProvisioningProfile(self, profile_path):
with tempfile.NamedTemporaryFile() as temp:
  subprocess.check_call(
      ['security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
  return self._LoadPlistMaybeBinary(temp.name)
'Merge |plist| into |merged_plist|.'
def _MergePlist(self, merged_plist, plist):
for key, value in plist.iteritems():
  if isinstance(value, dict):
    merged_value = merged_plist.get(key, {})
    if isinstance(merged_value, dict):
      self._MergePlist(merged_value, value)
      merged_plist[key] = merged_value
    else:
      merged_plist[key] = value
  else:
    merged_plist[key] = value
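A small illustration (assumed values, simplified helper) of the deep-merge semantics _MergePlist implements: nested dictionaries are merged key by key, while any other value from the incoming plist overwrites the accumulated one.

def merge_plist(merged, plist):
  # Simplified mirror of _MergePlist for demonstration purposes.
  for key, value in plist.items():
    if isinstance(value, dict) and isinstance(merged.get(key, {}), dict):
      merged_value = merged.get(key, {})
      merge_plist(merged_value, value)
      merged[key] = merged_value
    else:
      merged[key] = value

base = {'CFBundleName': 'Example', 'Nested': {'a': 1}}
extra = {'Nested': {'b': 2}, 'CFBundleVersion': '1.0'}
merge_plist(base, extra)
print(base)
# {'CFBundleName': 'Example', 'Nested': {'a': 1, 'b': 2},
#  'CFBundleVersion': '1.0'}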
'Loads into memory a plist possibly encoded in binary format. This is a wrapper around plistlib.readPlist that tries to convert the plist to the XML format if it can\'t be parsed (assuming that it is in the binary format). Args: plist_path: string, path to a plist file, in XML or binary format Returns: Content of the plist as a dictionary.'
def _LoadPlistMaybeBinary(self, plist_path):
try:
  return plistlib.readPlist(plist_path)
except:
  pass
with tempfile.NamedTemporaryFile() as temp:
  shutil.copy2(plist_path, temp.name)
  subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
  return plistlib.readPlist(temp.name)
'Constructs a dictionary of variable substitutions for Entitlements.plist. Args: bundle_identifier: string, value of CFBundleIdentifier from Info.plist app_identifier_prefix: string, value for AppIdentifierPrefix Returns: Dictionary of substitutions to apply when generating Entitlements.plist.'
def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
return {'CFBundleIdentifier': bundle_identifier, 'AppIdentifierPrefix': app_identifier_prefix}
'Extracts CFBundleIdentifier value from Info.plist in the bundle. Returns: Value of CFBundleIdentifier in the Info.plist located in the bundle.'
def _GetCFBundleIdentifier(self):
info_plist_path = os.path.join(os.environ['TARGET_BUILD_DIR'],
                               os.environ['INFOPLIST_PATH'])
info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
return info_plist_data['CFBundleIdentifier']
'Generates and installs the ${BundleName}.xcent entitlements file. Expands the "$(variable)" pattern in the source entitlements file, adds extra entitlements defined in the .mobileprovision file and copies the generated plist to "${BundlePath}.xcent". Args: entitlements: string, optional, path to the Entitlements.plist template to use, defaults to "${SDKROOT}/Entitlements.plist" substitutions: dictionary, variable substitutions overrides: dictionary, values to add to the entitlements Returns: Path to the generated entitlements file.'
def _InstallEntitlements(self, entitlements, substitutions, overrides):
source_path = entitlements
target_path = os.path.join(os.environ['BUILT_PRODUCTS_DIR'],
                           os.environ['PRODUCT_NAME'] + '.xcent')
if not source_path:
  source_path = os.path.join(os.environ['SDKROOT'], 'Entitlements.plist')
shutil.copy2(source_path, target_path)
data = self._LoadPlistMaybeBinary(target_path)
data = self._ExpandVariables(data, substitutions)
if overrides:
  for key in overrides:
    if key not in data:
      data[key] = overrides[key]
plistlib.writePlist(data, target_path)
return target_path
'Expands variables "$(variable)" in data. Args: data: object, can be either string, list or dictionary substitutions: dictionary, variable substitutions to perform Returns: Copy of data where each reference to "$(variable)" has been replaced by the corresponding value found in substitutions, or left intact if the key was not found.'
def _ExpandVariables(self, data, substitutions):
if isinstance(data, str):
  for key, value in substitutions.iteritems():
    data = data.replace('$(%s)' % key, value)
  return data
if isinstance(data, list):
  return [self._ExpandVariables(v, substitutions) for v in data]
if isinstance(data, dict):
  return {k: self._ExpandVariables(data[k], substitutions) for k in data}
return data
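A worked example with assumed values showing the substitution _ExpandVariables performs on entitlements data, using the two keys produced by _GetSubstitutions; the helper below is a standalone mirror for demonstration, not the original method.

substitutions = {                      # as produced by _GetSubstitutions
    'CFBundleIdentifier': 'com.example.app',
    'AppIdentifierPrefix': 'ABCDEF1234.',
}
entitlements = {
    'application-identifier': '$(AppIdentifierPrefix)$(CFBundleIdentifier)',
    'keychain-access-groups': ['$(AppIdentifierPrefix)$(CFBundleIdentifier)'],
}

def expand(data, substitutions):
  # Standalone mirror of _ExpandVariables for demonstration.
  if isinstance(data, str):
    for key, value in substitutions.items():
      data = data.replace('$(%s)' % key, value)
    return data
  if isinstance(data, list):
    return [expand(v, substitutions) for v in data]
  if isinstance(data, dict):
    return {k: expand(data[k], substitutions) for k in data}
  return data

expanded = expand(entitlements, substitutions)
print(expanded['application-identifier'])   # ABCDEF1234.com.example.app
print(expanded['keychain-access-groups'])   # ['ABCDEF1234.com.example.app']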
'Make a copy of this object. The new object will have its own copy of lists and dicts. Any XCObject objects owned by this object (marked "strong") will be copied in the new object, even those found in lists. If this object has any weak references to other XCObjects, the same references are added to the new object without making a copy.'
def Copy(self):
that = self.__class__(id=self.id, parent=self.parent)
for key, value in self._properties.iteritems():
  is_strong = self._schema[key][2]
  if isinstance(value, XCObject):
    if is_strong:
      new_value = value.Copy()
      new_value.parent = that
      that._properties[key] = new_value
    else:
      that._properties[key] = value
  elif (isinstance(value, str) or isinstance(value, unicode) or
        isinstance(value, int)):
    that._properties[key] = value
  elif isinstance(value, list):
    if is_strong:
      that._properties[key] = []
      for item in value:
        new_item = item.Copy()
        new_item.parent = that
        that._properties[key].append(new_item)
    else:
      that._properties[key] = value[:]
  elif isinstance(value, dict):
    if is_strong:
      raise TypeError('Strong dict for key ' + key + ' in ' +
                      self.__class__.__name__)
    else:
      that._properties[key] = value.copy()
  else:
    raise TypeError('Unexpected type ' + value.__class__.__name__ +
                    ' for key ' + key + ' in ' + self.__class__.__name__)
return that
'Return the name corresponding to an object. Not all objects necessarily need to be nameable, and not all that do have a "name" property. Override as needed.'
def Name(self):
if ('name' in self._properties or
    ('name' in self._schema and self._schema['name'][3])):
  return self._properties['name']
raise NotImplementedError(self.__class__.__name__ + ' must implement Name')
'Return a comment string for the object. Most objects just use their name as the comment, but PBXProject uses different values. The returned comment is not escaped and does not have any comment marker strings applied to it.'
def Comment(self):
return self.Name()
'Set "id" properties deterministically. An object\'s "id" property is set based on a hash of its class type and name, as well as the class type and name of all ancestor objects. As such, it is only advisable to call ComputeIDs once an entire project file tree is built. If recursive is True, recurse into all descendant objects and update their hashes. If overwrite is True, any existing value set in the "id" property will be replaced.'
def ComputeIDs(self, recursive=True, overwrite=True, seed_hash=None):
def _HashUpdate(hash, data):
  """Update hash with data's length and contents.

  If the hash were updated only with the value of data, it would be
  possible for clowns to induce collisions by manipulating the names of
  their objects.  By adding the length, it's exceedingly less likely that
  ID collisions will be encountered, intentionally or not.
  """
  hash.update(struct.pack('>i', len(data)))
  hash.update(data)

if seed_hash is None:
  seed_hash = _new_sha1()
hash = seed_hash.copy()
hashables = self.Hashables()
assert len(hashables) > 0
for hashable in hashables:
  _HashUpdate(hash, hashable)
if recursive:
  hashables_for_child = self.HashablesForChild()
  if hashables_for_child is None:
    child_hash = hash
  else:
    assert len(hashables_for_child) > 0
    child_hash = seed_hash.copy()
    for hashable in hashables_for_child:
      _HashUpdate(child_hash, hashable)
  for child in self.Children():
    child.ComputeIDs(recursive, overwrite, child_hash)
if overwrite or self.id is None:
  assert hash.digest_size % 4 == 0
  digest_int_count = hash.digest_size / 4
  digest_ints = struct.unpack('>' + 'I' * digest_int_count, hash.digest())
  id_ints = [0, 0, 0]
  for index in xrange(0, digest_int_count):
    id_ints[index % 3] ^= digest_ints[index]
  self.id = '%08X%08X%08X' % tuple(id_ints)
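A standalone sketch of the ID derivation used above: the SHA-1 digest (160 bits) is split into 32-bit big-endian words and XOR-folded down to the 96 bits (24 hex characters) that Xcode uses for object IDs. The hashable strings below are invented for illustration.

import hashlib
import struct

hash = hashlib.sha1()
for hashable in ['PBXFileReference', 'path', 'main.cc']:   # invented inputs
  hash.update(struct.pack('>i', len(hashable)))
  hash.update(hashable.encode('utf-8'))

digest_int_count = hash.digest_size // 4
digest_ints = struct.unpack('>' + 'I' * digest_int_count, hash.digest())
id_ints = [0, 0, 0]
for index in range(digest_int_count):
  id_ints[index % 3] ^= digest_ints[index]
print('%08X%08X%08X' % tuple(id_ints))  # a deterministic 24-character ID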
'Verifies that no two objects have the same ID. Checks all descendants.'
def EnsureNoIDCollisions(self):
ids = {}
descendants = self.Descendants()
for descendant in descendants:
  if descendant.id in ids:
    other = ids[descendant.id]
    raise KeyError(
        'Duplicate ID %s, objects "%s" and "%s" in "%s"' % (
            descendant.id, str(descendant._properties),
            str(other._properties), self._properties['rootObject'].Name()))
  ids[descendant.id] = descendant
'Returns a list of all of this object\'s owned (strong) children.'
def Children(self):
children = []
for property, attributes in self._schema.iteritems():
  (is_list, property_type, is_strong) = attributes[0:3]
  if is_strong and property in self._properties:
    if not is_list:
      children.append(self._properties[property])
    else:
      children.extend(self._properties[property])
return children
'Returns a list of all of this object\'s descendants, including this object.'
def Descendants(self):
children = self.Children()
descendants = [self]
for child in children:
  descendants.extend(child.Descendants())
return descendants
'Encodes a comment to be placed in the project file output, mimicking Xcode behavior.'
def _EncodeComment(self, comment):
return (('/* ' + comment.replace('*/', '(*)/')) + ' */')
'Encodes a string to be placed in the project file output, mimicking Xcode behavior.'
def _EncodeString(self, value):
if _unquoted.search(value) and not _quoted.search(value):
  return value
return '"' + _escaped.sub(self._EncodeTransform, value) + '"'
'Returns a representation of value that may be printed in a project file, mimicking Xcode\'s behavior. _XCPrintableValue can handle str and int values, XCObjects (which are made printable by returning their id property), and list and dict objects composed of any of the above types. When printing a list or dict, and _should_print_single_line is False, the tabs parameter is used to determine how much to indent the lines corresponding to the items in the list or dict. If flatten_list is True, single-element lists will be transformed into strings.'
def _XCPrintableValue(self, tabs, value, flatten_list=False):
printable = ''
comment = None
if self._should_print_single_line:
  sep = ' '
  element_tabs = ''
  end_tabs = ''
else:
  sep = '\n'
  element_tabs = '\t' * (tabs + 1)
  end_tabs = '\t' * tabs
if isinstance(value, XCObject):
  printable += value.id
  comment = value.Comment()
elif isinstance(value, str):
  printable += self._EncodeString(value)
elif isinstance(value, unicode):
  printable += self._EncodeString(value.encode('utf-8'))
elif isinstance(value, int):
  printable += str(value)
elif isinstance(value, list):
  if flatten_list and len(value) <= 1:
    if len(value) == 0:
      printable += self._EncodeString('')
    else:
      printable += self._EncodeString(value[0])
  else:
    printable = '(' + sep
    for item in value:
      printable += (element_tabs +
                    self._XCPrintableValue(tabs + 1, item, flatten_list) +
                    ',' + sep)
    printable += end_tabs + ')'
elif isinstance(value, dict):
  printable = '{' + sep
  for item_key, item_value in sorted(value.iteritems()):
    printable += (element_tabs +
                  self._XCPrintableValue(tabs + 1, item_key, flatten_list) +
                  ' = ' +
                  self._XCPrintableValue(tabs + 1, item_value, flatten_list) +
                  ';' + sep)
  printable += end_tabs + '}'
else:
  raise TypeError("Can't make " + value.__class__.__name__ + ' printable')
if comment != None:
  printable += ' ' + self._EncodeComment(comment)
return printable
'Prints a key and value, members of an XCObject\'s _properties dictionary, to file. tabs is an int identifying the indentation level. If the class\' _should_print_single_line variable is True, tabs is ignored and the key-value pair will be followed by a space instead of a newline.'
def _XCKVPrint(self, file, tabs, key, value):
if self._should_print_single_line:
  printable = ''
  after_kv = ' '
else:
  printable = '\t' * tabs
  after_kv = '\n'
if key == 'remoteGlobalIDString' and isinstance(self, PBXContainerItemProxy):
  value_to_print = value.id
else:
  value_to_print = value
if key == 'settings' and isinstance(self, PBXBuildFile):
  strip_value_quotes = True
else:
  strip_value_quotes = False
if key == 'buildSettings' and isinstance(self, XCBuildConfiguration):
  flatten_list = True
else:
  flatten_list = False
try:
  printable_key = self._XCPrintableValue(tabs, key, flatten_list)
  printable_value = self._XCPrintableValue(tabs, value_to_print, flatten_list)
  if (strip_value_quotes and len(printable_value) > 1 and
      printable_value[0] == '"' and printable_value[-1] == '"'):
    printable_value = printable_value[1:-1]
  printable += printable_key + ' = ' + printable_value + ';' + after_kv
except TypeError as e:
  gyp.common.ExceptionAppend(e, 'while printing key "%s"' % key)
  raise
self._XCPrint(file, 0, printable)
'Prints a representation of this object to file, adhering to Xcode output formatting.'
def Print(self, file=sys.stdout):
self.VerifyHasRequiredProperties()
if self._should_print_single_line:
  sep = ''
  end_tabs = 0
else:
  sep = '\n'
  end_tabs = 2
self._XCPrint(file, 2, self._XCPrintableValue(2, self) + ' = {' + sep)
self._XCKVPrint(file, 3, 'isa', self.__class__.__name__)
for property, value in sorted(self._properties.iteritems()):
  self._XCKVPrint(file, 3, property, value)
self._XCPrint(file, end_tabs, '};\n')
'Merge the supplied properties into the _properties dictionary. The input properties must adhere to the class schema or a KeyError or TypeError exception will be raised. If adding an object of an XCObject subclass and the schema indicates a strong relationship, the object\'s parent will be set to this object. If do_copy is True, then lists, dicts, strong-owned XCObjects, and strong-owned XCObjects in lists will be copied instead of having their references added.'
def UpdateProperties(self, properties, do_copy=False):
if properties is None:
  return
for property, value in properties.iteritems():
  if not property in self._schema:
    raise KeyError(property + ' not in ' + self.__class__.__name__)
  (is_list, property_type, is_strong) = self._schema[property][0:3]
  if is_list:
    if value.__class__ != list:
      raise TypeError(property + ' of ' + self.__class__.__name__ +
                      ' must be list, not ' + value.__class__.__name__)
    for item in value:
      if (not isinstance(item, property_type) and
          not (item.__class__ == unicode and property_type == str)):
        raise TypeError('item of ' + property + ' of ' +
                        self.__class__.__name__ + ' must be ' +
                        property_type.__name__ + ', not ' +
                        item.__class__.__name__)
  elif (not isinstance(value, property_type) and
        not (value.__class__ == unicode and property_type == str)):
    raise TypeError(property + ' of ' + self.__class__.__name__ +
                    ' must be ' + property_type.__name__ + ', not ' +
                    value.__class__.__name__)
  if do_copy:
    if isinstance(value, XCObject):
      if is_strong:
        self._properties[property] = value.Copy()
      else:
        self._properties[property] = value
    elif (isinstance(value, str) or isinstance(value, unicode) or
          isinstance(value, int)):
      self._properties[property] = value
    elif isinstance(value, list):
      if is_strong:
        self._properties[property] = []
        for item in value:
          self._properties[property].append(item.Copy())
      else:
        self._properties[property] = value[:]
    elif isinstance(value, dict):
      self._properties[property] = value.copy()
    else:
      raise TypeError("Don't know how to copy a " +
                      value.__class__.__name__ + ' object for ' +
                      property + ' in ' + self.__class__.__name__)
  else:
    self._properties[property] = value
  if is_strong:
    if not is_list:
      self._properties[property].parent = self
    else:
      for item in self._properties[property]:
        item.parent = self
'Ensure that all properties identified as required by the schema are set.'
def VerifyHasRequiredProperties(self):
for property, attributes in self._schema.iteritems():
  (is_list, property_type, is_strong, is_required) = attributes[0:4]
  if is_required and not property in self._properties:
    raise KeyError(self.__class__.__name__ + ' requires ' + property)
'Assign object default values according to the schema. This will not overwrite properties that have already been set.'
def _SetDefaultsFromSchema(self):
defaults = {}
for property, attributes in self._schema.iteritems():
  (is_list, property_type, is_strong, is_required) = attributes[0:4]
  if (is_required and len(attributes) >= 5 and
      not property in self._properties):
    default = attributes[4]
    defaults[property] = default
if len(defaults) > 0:
  self.UpdateProperties(defaults, do_copy=True)
'Custom hashables for XCHierarchicalElements. XCHierarchicalElements are special. Generally, their hashes shouldn\'t change if the paths don\'t change. The normal XCObject implementation of Hashables adds a hashable for each object, which means that if the hierarchical structure changes (possibly due to changes caused when TakeOverOnlyChild runs and encounters slight changes in the hierarchy), the hashes will change. For example, if a project file initially contains a/b/f1 and a and b become collapsed into a/b, f1 will have a single parent a/b. If someone later adds a/f2 to the project file, a/b can no longer be collapsed, and f1 winds up with parent b and grandparent a. That would be sufficient to change f1\'s hash. To counteract this problem, hashables for all XCHierarchicalElements except for the main group (which has neither a name nor a path) are taken to be just the set of path components. Because hashables are inherited from parents, this provides assurance that a/b/f1 has the same set of hashables whether its parent is b or a/b. The main group is a special case. As it is permitted to have no name or path, it is permitted to use the standard XCObject hash mechanism. This is not considered a problem because there can be only one main group.'
def Hashables(self):
if self == self.PBXProjectAncestor()._properties['mainGroup']:
  return XCObject.Hashables(self)
hashables = []
if 'name' in self._properties:
  hashables.append(self.__class__.__name__ + '.name')
  hashables.append(self._properties['name'])
path = self.PathFromSourceTreeAndPath()
if path != None:
  components = path.split(posixpath.sep)
  for component in components:
    hashables.append(self.__class__.__name__ + '.path')
    hashables.append(component)
hashables.extend(self._hashables)
return hashables
'Returns an existing or new file reference corresponding to path. If hierarchical is True, this method will create or use the necessary hierarchical group structure corresponding to path. Otherwise, it will look in and create an item in the current group only. If an existing matching reference is found, it is returned, otherwise, a new one will be created, added to the correct group, and returned. If path identifies a directory by virtue of carrying a trailing slash, this method returns a PBXFileReference of "folder" type. If path identifies a variant, by virtue of it identifying a file inside a directory with an ".lproj" extension, this method returns a PBXVariantGroup containing the variant named by path, and possibly other variants. For all other paths, a "normal" PBXFileReference will be returned.'
def AddOrGetFileByPath(self, path, hierarchical):
is_dir = False
if path.endswith('/'):
  is_dir = True
path = posixpath.normpath(path)
if is_dir:
  path = path + '/'
variant_name = None
parent = posixpath.dirname(path)
grandparent = posixpath.dirname(parent)
parent_basename = posixpath.basename(parent)
(parent_root, parent_ext) = posixpath.splitext(parent_basename)
if parent_ext == '.lproj':
  variant_name = parent_root
if grandparent == '':
  grandparent = None
assert not is_dir or variant_name is None
path_split = path.split(posixpath.sep)
if (len(path_split) == 1 or
    ((is_dir or variant_name != None) and len(path_split) == 2) or
    not hierarchical):
  if variant_name is None:
    file_ref = self.GetChildByPath(path)
    if file_ref != None:
      assert file_ref.__class__ == PBXFileReference
    else:
      file_ref = PBXFileReference({'path': path})
      self.AppendChild(file_ref)
  else:
    variant_group_name = posixpath.basename(path)
    variant_group_ref = self.AddOrGetVariantGroupByNameAndPath(
        variant_group_name, grandparent)
    variant_path = posixpath.sep.join(path_split[-2:])
    variant_ref = variant_group_ref.GetChildByPath(variant_path)
    if variant_ref != None:
      assert variant_ref.__class__ == PBXFileReference
    else:
      variant_ref = PBXFileReference({'name': variant_name,
                                      'path': variant_path})
      variant_group_ref.AppendChild(variant_ref)
    file_ref = variant_group_ref
  return file_ref
else:
  next_dir = path_split[0]
  group_ref = self.GetChildByPath(next_dir)
  if group_ref != None:
    assert group_ref.__class__ == PBXGroup
  else:
    group_ref = PBXGroup({'path': next_dir})
    self.AppendChild(group_ref)
  return group_ref.AddOrGetFileByPath(posixpath.sep.join(path_split[1:]),
                                      hierarchical)
'Returns an existing or new PBXVariantGroup for name and path. If a PBXVariantGroup identified by the name and path arguments is already present as a child of this object, it is returned. Otherwise, a new PBXVariantGroup with the correct properties is created, added as a child, and returned. This method will generally be called by AddOrGetFileByPath, which knows when to create a variant group based on the structure of the pathnames passed to it.'
def AddOrGetVariantGroupByNameAndPath(self, name, path):
key = (name, path)
if key in self._variant_children_by_name_and_path:
  variant_group_ref = self._variant_children_by_name_and_path[key]
  assert variant_group_ref.__class__ == PBXVariantGroup
  return variant_group_ref
variant_group_properties = {'name': name}
if path != None:
  variant_group_properties['path'] = path
variant_group_ref = PBXVariantGroup(variant_group_properties)
self.AppendChild(variant_group_ref)
return variant_group_ref
'If this PBXGroup has only one child and it\'s also a PBXGroup, take it over by making all of its children this object\'s children. This function will continue to take over only children when those children are groups. If there are three PBXGroups representing a, b, and c, with c inside b and b inside a, and a and b have no other children, this will result in a taking over both b and c, forming a PBXGroup for a/b/c. If recurse is True, this function will recurse into children and ask them to collapse themselves by taking over only children as well. Assuming an example hierarchy with files at a/b/c/d1, a/b/c/d2, and a/b/c/d3/e/f (d1, d2, and f are files, the rest are groups), recursion will result in a group for a/b/c containing a group for d3/e.'
def TakeOverOnlyChild(self, recurse=False):
while (len(self._properties['children']) == 1 and
       self._properties['children'][0].__class__ == PBXGroup):
  child = self._properties['children'][0]
  old_properties = self._properties
  self._properties = child._properties
  self._children_by_path = child._children_by_path
  if (not 'sourceTree' in self._properties or
      self._properties['sourceTree'] == '<group>'):
    if 'path' in old_properties:
      if 'path' in self._properties:
        self._properties['path'] = posixpath.join(old_properties['path'],
                                                  self._properties['path'])
      else:
        self._properties['path'] = old_properties['path']
    if 'sourceTree' in old_properties:
      self._properties['sourceTree'] = old_properties['sourceTree']
  if ('name' in old_properties and old_properties['name'] != None and
      old_properties['name'] != self.Name()):
    self._properties['name'] = old_properties['name']
  if ('name' in self._properties and 'path' in self._properties and
      self._properties['name'] == self._properties['path']):
    del self._properties['name']
  for child in self._properties['children']:
    child.parent = self
if recurse:
  for child in self._properties['children']:
    if child.__class__ == PBXGroup:
      child.TakeOverOnlyChild(recurse)
'Convenience accessor to obtain an XCBuildConfiguration by name.'
def ConfigurationNamed(self, name):
for configuration in self._properties['buildConfigurations']:
  if configuration._properties['name'] == name:
    return configuration
raise KeyError(name)
'Convenience accessor to obtain the default XCBuildConfiguration.'
def DefaultConfiguration(self):
return self.ConfigurationNamed(self._properties['defaultConfigurationName'])
'Determines the state of a build setting in all XCBuildConfiguration child objects. If all child objects have key in their build settings, and the value is the same in all child objects, returns 1. If no child objects have the key in their build settings, returns 0. If some, but not all, child objects have the key in their build settings, or if any children have different values for the key, returns -1.'
def HasBuildSetting(self, key):
has = None
value = None
for configuration in self._properties['buildConfigurations']:
  configuration_has = configuration.HasBuildSetting(key)
  if has is None:
    has = configuration_has
  elif has != configuration_has:
    return -1
  if configuration_has:
    configuration_value = configuration.GetBuildSetting(key)
    if value is None:
      value = configuration_value
    elif value != configuration_value:
      return -1
if not has:
  return 0
return 1
'Gets the build setting for key. All child XCBuildConfiguration objects must have the same value set for the setting, or a ValueError will be raised.'
def GetBuildSetting(self, key):
value = None
for configuration in self._properties['buildConfigurations']:
  configuration_value = configuration.GetBuildSetting(key)
  if value is None:
    value = configuration_value
  elif value != configuration_value:
    raise ValueError('Variant values for ' + key)
return value
'Sets the build setting for key to value in all child XCBuildConfiguration objects.'
def SetBuildSetting(self, key, value):
for configuration in self._properties['buildConfigurations']:
  configuration.SetBuildSetting(key, value)
'Appends value to the build setting for key, which is treated as a list, in all child XCBuildConfiguration objects.'
def AppendBuildSetting(self, key, value):
for configuration in self._properties['buildConfigurations']:
  configuration.AppendBuildSetting(key, value)
'Deletes the build setting key from all child XCBuildConfiguration objects.'
def DelBuildSetting(self, key):
for configuration in self._properties['buildConfigurations']:
  configuration.DelBuildSetting(key)
'Sets the build configuration in all child XCBuildConfiguration objects.'
def SetBaseConfiguration(self, value):
for configuration in self._properties['buildConfigurations']:
  configuration.SetBaseConfiguration(value)
'Adds path to the dict tracking paths belonging to this build phase. If the path is already a member of this build phase, raises an exception.'
def _AddPathToDict(self, pbxbuildfile, path):
if path in self._files_by_path:
  raise ValueError('Found multiple build files with path ' + path)
self._files_by_path[path] = pbxbuildfile
'Maintains the _files_by_path and _files_by_xcfilelikeelement dicts. If path is specified, then it is the path that is being added to the phase, and pbxbuildfile must contain either a PBXFileReference directly referencing that path, or it must contain a PBXVariantGroup that itself contains a PBXFileReference referencing the path. If path is not specified, either the PBXFileReference\'s path or the paths of all children of the PBXVariantGroup are taken as being added to the phase. If the path is already present in the phase, raises an exception. If the PBXFileReference or PBXVariantGroup referenced by pbxbuildfile are already present in the phase, referenced by a different PBXBuildFile object, raises an exception. This does not raise an exception when a PBXFileReference or PBXVariantGroup reappear and are referenced by the same PBXBuildFile that has already introduced them, because in the case of PBXVariantGroup objects, they may correspond to multiple paths that are not all added simultaneously. When this situation occurs, the path needs to be added to _files_by_path, but nothing needs to change in _files_by_xcfilelikeelement, and the caller should have avoided adding the PBXBuildFile if it is already present in the list of children.'
def _AddBuildFileToDicts(self, pbxbuildfile, path=None):
xcfilelikeelement = pbxbuildfile._properties['fileRef']
paths = []
if path != None:
  if isinstance(xcfilelikeelement, PBXVariantGroup):
    paths.append(path)
elif isinstance(xcfilelikeelement, PBXVariantGroup):
  for variant in xcfilelikeelement._properties['children']:
    paths.append(variant.FullPath())
else:
  paths.append(xcfilelikeelement.FullPath())
for a_path in paths:
  self._AddPathToDict(pbxbuildfile, a_path)
if (xcfilelikeelement in self._files_by_xcfilelikeelement and
    self._files_by_xcfilelikeelement[xcfilelikeelement] != pbxbuildfile):
  raise ValueError('Found multiple build files for ' +
                   xcfilelikeelement.Name())
self._files_by_xcfilelikeelement[xcfilelikeelement] = pbxbuildfile
'Set the dstSubfolderSpec and dstPath properties from path. path may be specified in the same notation used for XCHierarchicalElements, specifically, "$(DIR)/path".'
def SetDestination(self, path):
path_tree_match = self.path_tree_re.search(path)
if path_tree_match:
  path_tree = path_tree_match.group(1)
  relative_path = path_tree_match.group(3)
  if path_tree in self.path_tree_to_subfolder:
    subfolder = self.path_tree_to_subfolder[path_tree]
    if relative_path is None:
      relative_path = ''
  else:
    subfolder = 0
    relative_path = path
elif path.startswith('/'):
  subfolder = 0
  relative_path = path[1:]
else:
  raise ValueError("Can't use path %s in a %s" %
                   (path, self.__class__.__name__))
self._properties['dstPath'] = relative_path
self._properties['dstSubfolderSpec'] = subfolder
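A rough standalone sketch of the parsing SetDestination relies on. The real class supplies path_tree_re and path_tree_to_subfolder, which are not part of this excerpt; the regex and the single mapping entry below are illustrative stand-ins, not the project's actual tables.

import re

path_tree_re = re.compile(r'^\$\((.*?)\)(/(.*)|)$')   # assumed shape
path_tree_to_subfolder = {'BUILT_PRODUCTS_DIR': 16}   # assumed entry

def parse_destination(path):
  # Mirrors the branching above: known Xcode variable, unknown variable
  # treated as an "absolute" destination, or a true absolute path.
  match = path_tree_re.search(path)
  if match:
    path_tree = match.group(1)
    relative_path = match.group(3)
    if path_tree in path_tree_to_subfolder:
      return (path_tree_to_subfolder[path_tree], relative_path or '')
    return (0, path)
  if path.startswith('/'):
    return (0, path[1:])
  raise ValueError("Can't use path %s" % path)

print(parse_destination('$(BUILT_PRODUCTS_DIR)/Example.app'))  # (16, 'Example.app')
print(parse_destination('/usr/local/lib'))                     # (0, 'usr/local/lib')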
'Returns a PBXGroup child of this object to which path should be added. This method is intended to choose between SourceGroup and IntermediatesGroup on the basis of whether path is present in a source directory or an intermediates directory. For the purposes of this determination, any path located within a derived file directory such as PROJECT_DERIVED_FILE_DIR is treated as being in an intermediates directory. The returned value is a two-element tuple. The first element is the PBXGroup, and the second element specifies whether that group should be organized hierarchically (True) or as a single flat list (False).'
def RootGroupForPath(self, path):
source_tree_groups = {
    'DERIVED_FILE_DIR': (self.IntermediatesGroup, True),
    'INTERMEDIATE_DIR': (self.IntermediatesGroup, True),
    'PROJECT_DERIVED_FILE_DIR': (self.IntermediatesGroup, True),
    'SHARED_INTERMEDIATE_DIR': (self.IntermediatesGroup, True),
}
(source_tree, path) = SourceTreeAndPathFromPath(path)
if source_tree != None and source_tree in source_tree_groups:
  (group_func, hierarchical) = source_tree_groups[source_tree]
  group = group_func()
  return (group, hierarchical)
return (self.SourceGroup(), True)
'Returns a PBXFileReference corresponding to path in the correct group according to RootGroupForPath\'s heuristics. If an existing PBXFileReference for path exists, it will be returned. Otherwise, one will be created and returned.'
def AddOrGetFileInRootGroup(self, path):
(group, hierarchical) = self.RootGroupForPath(path)
return group.AddOrGetFileByPath(path, hierarchical)
'Calls TakeOverOnlyChild for all groups in the main group.'
def RootGroupsTakeOverOnlyChildren(self, recurse=False):
for group in self._properties['mainGroup']._properties['children']:
  if isinstance(group, PBXGroup):
    group.TakeOverOnlyChild(recurse)
'Add a reference to another project file (via PBXProject object) to this one. Returns [ProductGroup, ProjectRef]. ProductGroup is a PBXGroup object in this project file that contains a PBXReferenceProxy object for each product of each PBXNativeTarget in the other project file. ProjectRef is a PBXFileReference to the other project file. If this project file already references the other project file, the existing ProductGroup and ProjectRef are returned. The ProductGroup will still be updated if necessary.'
def AddOrGetProjectReference(self, other_pbxproject):
if not 'projectReferences' in self._properties:
  self._properties['projectReferences'] = []
product_group = None
project_ref = None
if not other_pbxproject in self._other_pbxprojects:
  product_group = PBXGroup({'name': 'Products'})
  product_group.parent = self
  product_group._hashables.extend(other_pbxproject.Hashables())
  this_path = posixpath.dirname(self.Path())
  projectDirPath = self.GetProperty('projectDirPath')
  if projectDirPath:
    if posixpath.isabs(projectDirPath[0]):
      this_path = projectDirPath
    else:
      this_path = posixpath.join(this_path, projectDirPath)
  other_path = gyp.common.RelativePath(other_pbxproject.Path(), this_path)
  project_ref = PBXFileReference({'lastKnownFileType': 'wrapper.pb-project',
                                  'path': other_path,
                                  'sourceTree': 'SOURCE_ROOT'})
  self.ProjectsGroup().AppendChild(project_ref)
  ref_dict = {'ProductGroup': product_group, 'ProjectRef': project_ref}
  self._other_pbxprojects[other_pbxproject] = ref_dict
  self.AppendProperty('projectReferences', ref_dict)
  self._properties['projectReferences'] = sorted(
      self._properties['projectReferences'],
      cmp=lambda x, y: cmp(x['ProjectRef'].Name().lower(),
                           y['ProjectRef'].Name().lower()))
else:
  project_ref_dict = self._other_pbxprojects[other_pbxproject]
  product_group = project_ref_dict['ProductGroup']
  project_ref = project_ref_dict['ProjectRef']
self._SetUpProductReferences(other_pbxproject, product_group, project_ref)
inherit_unique_symroot = self._AllSymrootsUnique(other_pbxproject, False)
targets = other_pbxproject.GetProperty('targets')
if all(self._AllSymrootsUnique(t, inherit_unique_symroot) for t in targets):
  dir_path = project_ref._properties['path']
  product_group._hashables.extend(dir_path)
return [product_group, project_ref]
'Returns the MSBuild equivalent of the MSVS value given. Args: value: the MSVS value to convert. Returns: the MSBuild equivalent. Raises: ValueError if value is not valid.'
def ConvertToMSBuild(self, value):
return value
'Dispatches a string command to a method.'
def Dispatch(self, args):
if len(args) < 1:
  raise Exception('Not enough arguments')
method = 'Exec%s' % self._CommandifyName(args[0])
getattr(self, method)(*args[1:])
'Transforms a tool name like copy-info-plist to CopyInfoPlist'
def _CommandifyName(self, name_string):
return name_string.title().replace('-', '')
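A quick illustration of how Dispatch and _CommandifyName cooperate: the hyphenated command name is title-cased, stripped of hyphens and prefixed with Exec to locate the handler method.

name = 'merge-info-plist'
method = 'Exec' + name.title().replace('-', '')
print(method)  # ExecMergeInfoPlist
# Dispatch(['merge-info-plist', 'out.plist', 'a.plist', 'b.plist']) would
# therefore call self.ExecMergeInfoPlist('out.plist', 'a.plist', 'b.plist').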
'Emulates the most basic behavior of Linux\'s flock(1).'
def ExecFlock(self, lockfile, *cmd_list):
fd = os.open(lockfile, os.O_WRONLY | os.O_NOCTTY | os.O_CREAT, 0o666)
if sys.platform.startswith('aix'):
  op = struct.pack('hhIllqq', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
else:
  op = struct.pack('hhllhhl', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
fcntl.fcntl(fd, fcntl.F_SETLK, op)
return subprocess.call(cmd_list)
'Initializes Config. This is a separate method as it raises an exception if there is a parse error.'
def Init(self, params):
generator_flags = params.get('generator_flags', {})
config_path = generator_flags.get('config_path', None)
if not config_path:
  return
try:
  f = open(config_path, 'r')
  config = json.load(f)
  f.close()
except IOError:
  raise Exception('Unable to open file ' + config_path)
except ValueError as e:
  raise Exception('Unable to parse config file ' + config_path + str(e))
if not isinstance(config, dict):
  raise Exception('config_path must be a JSON file containing a dictionary')
self.files = config.get('files', [])
self.additional_compile_target_names = set(
    config.get('additional_compile_targets', []))
self.test_target_names = set(config.get('test_targets', []))
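An example, with hypothetical paths and target names, of the JSON file config_path is expected to point at, based on the keys read above; the trailing comment shows one plausible way to pass the path through generator_flags (the exact invocation depends on the build setup).

import json

config = {
    'files': ['foo/foo.cc', 'bar/bar.h'],        # changed files
    'test_targets': ['foo_unittests', 'all'],     # hypothetical names
    'additional_compile_targets': ['bar'],
}
with open('analyzer_config.json', 'w') as f:
  json.dump(config, f, indent=2)
# The path is then handed to gyp through generator_flags, typically
# something like: gyp -f analyzer -G config_path=analyzer_config.json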
'Returns the supplied test targets without \'all\'.'
def _supplied_target_names_no_all(self):
result = self._supplied_target_names()
result.discard('all')
return result
'Returns true if the supplied files impact the build at all.'
def is_build_impacted(self):
return self._changed_targets
'Returns the set of output test targets.'
def find_matching_test_target_names(self):
assert self.is_build_impacted()
test_target_names_no_all = set(self._test_target_names)
test_target_names_no_all.discard('all')
test_targets_no_all = _LookupTargets(test_target_names_no_all,
                                     self._unqualified_mapping)
test_target_names_contains_all = 'all' in self._test_target_names
if test_target_names_contains_all:
  test_targets = [x for x in (set(test_targets_no_all) |
                              set(self._root_targets))]
else:
  test_targets = [x for x in test_targets_no_all]
print 'supplied test_targets'
for target_name in self._test_target_names:
  print '\t', target_name
print 'found test_targets'
for target in test_targets:
  print '\t', target.name
print 'searching for matching test targets'
matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets)
matching_test_targets_contains_all = (test_target_names_contains_all and
                                      (set(matching_test_targets) &
                                       set(self._root_targets)))
if matching_test_targets_contains_all:
  matching_test_targets = [x for x in (set(matching_test_targets) &
                                       set(test_targets_no_all))]
print 'matched test_targets'
for target in matching_test_targets:
  print '\t', target.name
matching_target_names = [gyp.common.ParseQualifiedTarget(target.name)[1]
                         for target in matching_test_targets]
if matching_test_targets_contains_all:
  matching_target_names.append('all')
  print '\tall'
return matching_target_names
'Returns the set of output compile targets.'
def find_matching_compile_target_names(self):
assert self.is_build_impacted()
for target in self._name_to_target.itervalues():
  target.visited = False
supplied_targets = _LookupTargets(self._supplied_target_names_no_all(),
                                  self._unqualified_mapping)
if 'all' in self._supplied_target_names():
  supplied_targets = [x for x in (set(supplied_targets) |
                                  set(self._root_targets))]
print 'Supplied test_targets & compile_targets'
for target in supplied_targets:
  print '\t', target.name
print 'Finding compile targets'
compile_targets = _GetCompileTargets(self._changed_targets, supplied_targets)
return [gyp.common.ParseQualifiedTarget(target.name)[1]
        for target in compile_targets]
'The main entry point: writes a .mk file for a single target. Arguments: qualified_target: target we\'re generating relative_target: qualified target name relative to the root base_path: path relative to source root we\'re building in, used to resolve target-relative paths output_filename: output .mk file name to write spec, configs: gyp info part_of_all: flag indicating this target is part of \'all\' write_alias_target: flag indicating whether to create short aliases for this target sdk_version: what to emit for LOCAL_SDK_VERSION in output'
def Write(self, qualified_target, relative_target, base_path, output_filename, spec, configs, part_of_all, write_alias_target, sdk_version):
gyp.common.EnsureDirExists(output_filename)
self.fp = open(output_filename, 'w')
self.fp.write(header)
self.qualified_target = qualified_target
self.relative_target = relative_target
self.path = base_path
self.target = spec['target_name']
self.type = spec['type']
self.toolset = spec['toolset']

(deps, link_deps) = self.ComputeDeps(spec)

extra_outputs = []
extra_sources = []

self.android_class = MODULE_CLASSES.get(self.type, 'GYP')
self.android_module = self.ComputeAndroidModule(spec)
(self.android_stem, self.android_suffix) = self.ComputeOutputParts(spec)
self.output = self.output_binary = self.ComputeOutput(spec)

self.WriteLn('include $(CLEAR_VARS)\n')
self.WriteLn('LOCAL_MODULE_CLASS := ' + self.android_class)
self.WriteLn('LOCAL_MODULE := ' + self.android_module)
if self.android_stem != self.android_module:
  self.WriteLn('LOCAL_MODULE_STEM := ' + self.android_stem)
self.WriteLn('LOCAL_MODULE_SUFFIX := ' + self.android_suffix)
if self.toolset == 'host':
  self.WriteLn('LOCAL_IS_HOST_MODULE := true')
  self.WriteLn('LOCAL_MULTILIB := $(GYP_HOST_MULTILIB)')
elif sdk_version > 0:
  self.WriteLn('LOCAL_MODULE_TARGET_ARCH := $(TARGET_$(GYP_VAR_PREFIX)ARCH)')
  self.WriteLn('LOCAL_SDK_VERSION := %s' % sdk_version)

if self.toolset == 'host':
  self.WriteLn('gyp_intermediate_dir := $(call local-intermediates-dir,,$(GYP_HOST_VAR_PREFIX))')
else:
  self.WriteLn('gyp_intermediate_dir := $(call local-intermediates-dir,,$(GYP_VAR_PREFIX))')
self.WriteLn('gyp_shared_intermediate_dir := $(call intermediates-dir-for,GYP,shared,,,$(GYP_VAR_PREFIX))')
self.WriteLn()

target_dependencies = [x[1] for x in deps if x[0] == 'path']
self.WriteLn('# Make sure our deps are built first.')
self.WriteList(target_dependencies, 'GYP_TARGET_DEPENDENCIES',
               local_pathify=True)

if 'actions' in spec:
  self.WriteActions(spec['actions'], extra_sources, extra_outputs)
if 'rules' in spec:
  self.WriteRules(spec['rules'], extra_sources, extra_outputs)
if 'copies' in spec:
  self.WriteCopies(spec['copies'], extra_outputs)

self.WriteList(extra_outputs, 'GYP_GENERATED_OUTPUTS', local_pathify=True)
self.WriteLn('# Make sure our deps and generated files are built first.')
self.WriteLn('LOCAL_ADDITIONAL_DEPENDENCIES := $(GYP_TARGET_DEPENDENCIES) $(GYP_GENERATED_OUTPUTS)')
self.WriteLn()

if spec.get('sources', []) or extra_sources:
  self.WriteSources(spec, configs, extra_sources)

self.WriteTarget(spec, configs, deps, link_deps, part_of_all,
                 write_alias_target)

target_outputs[qualified_target] = ('path', self.output_binary)
if self.type == 'static_library':
  target_link_deps[qualified_target] = ('static', self.android_module)
elif self.type == 'shared_library':
  target_link_deps[qualified_target] = ('shared', self.android_module)

self.fp.close()
return self.android_module
'Write Makefile code for any \'actions\' from the gyp input. extra_sources: a list that will be filled in with newly generated source files, if any extra_outputs: a list that will be filled in with any outputs of these actions (used to make other pieces dependent on these actions)'
def WriteActions(self, actions, extra_sources, extra_outputs):
for action in actions:
  name = make.StringToMakefileVariable(
      '%s_%s' % (self.relative_target, action['action_name']))
  self.WriteLn('### Rules for action "%s":' % action['action_name'])
  inputs = action['inputs']
  outputs = action['outputs']

  dirs = set()
  for out in outputs:
    if not out.startswith('$'):
      print ('WARNING: Action for target "%s" writes output to local path '
             '"%s".' % (self.target, out))
    dir = os.path.split(out)[0]
    if dir:
      dirs.add(dir)
  if int(action.get('process_outputs_as_sources', False)):
    extra_sources += outputs

  command = gyp.common.EncodePOSIXShellList(action['action'])
  if 'message' in action:
    quiet_cmd = 'Gyp action: %s ($@)' % action['message']
  else:
    quiet_cmd = 'Gyp action: %s ($@)' % name
  if len(dirs) > 0:
    command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command

  cd_action = 'cd $(gyp_local_path)/%s; ' % self.path
  command = cd_action + command

  main_output = make.QuoteSpaces(self.LocalPathify(outputs[0]))
  self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
  self.WriteLn('%s: gyp_var_prefix := $(GYP_VAR_PREFIX)' % main_output)
  self.WriteLn('%s: gyp_intermediate_dir := '
               '$(abspath $(gyp_intermediate_dir))' % main_output)
  self.WriteLn('%s: gyp_shared_intermediate_dir := '
               '$(abspath $(gyp_shared_intermediate_dir))' % main_output)
  self.WriteLn('%s: export PATH := '
               '$(subst $(ANDROID_BUILD_PATHS),,$(PATH))' % main_output)

  for input in inputs:
    if not input.startswith('$(') and ' ' in input:
      raise gyp.common.GypError(
          'Action input filename "%s" in target %s contains a space' %
          (input, self.target))
  for output in outputs:
    if not output.startswith('$(') and ' ' in output:
      raise gyp.common.GypError(
          'Action output filename "%s" in target %s contains a space' %
          (output, self.target))

  self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' %
               (main_output, ' '.join(map(self.LocalPathify, inputs))))
  self.WriteLn('\t@echo "%s"' % quiet_cmd)
  self.WriteLn('\t$(hide)%s\n' % command)
  for output in outputs[1:]:
    self.WriteLn('%s: %s ;' % (self.LocalPathify(output), main_output))

  extra_outputs += outputs
  self.WriteLn()

self.WriteLn()
'Write Makefile code for any \'rules\' from the gyp input. extra_sources: a list that will be filled in with newly generated source files, if any extra_outputs: a list that will be filled in with any outputs of these rules (used to make other pieces dependent on these rules)'
def WriteRules(self, rules, extra_sources, extra_outputs):
if len(rules) == 0:
  return

for rule in rules:
  if len(rule.get('rule_sources', [])) == 0:
    continue
  name = make.StringToMakefileVariable(
      '%s_%s' % (self.relative_target, rule['rule_name']))
  self.WriteLn('\n### Generated for rule "%s":' % name)
  self.WriteLn('# "%s":' % rule)

  inputs = rule.get('inputs')
  for rule_source in rule.get('rule_sources', []):
    (rule_source_dirname, rule_source_basename) = os.path.split(rule_source)
    (rule_source_root, rule_source_ext) = os.path.splitext(
        rule_source_basename)

    outputs = [self.ExpandInputRoot(out, rule_source_root,
                                    rule_source_dirname)
               for out in rule['outputs']]

    dirs = set()
    for out in outputs:
      if not out.startswith('$'):
        print ('WARNING: Rule for target %s writes output to local path %s'
               % (self.target, out))
      dir = os.path.dirname(out)
      if dir:
        dirs.add(dir)
    extra_outputs += outputs
    if int(rule.get('process_outputs_as_sources', False)):
      extra_sources.extend(outputs)

    components = []
    for component in rule['action']:
      component = self.ExpandInputRoot(component, rule_source_root,
                                       rule_source_dirname)
      if '$(RULE_SOURCES)' in component:
        component = component.replace('$(RULE_SOURCES)', rule_source)
      components.append(component)

    command = gyp.common.EncodePOSIXShellList(components)
    cd_action = 'cd $(gyp_local_path)/%s; ' % self.path
    command = cd_action + command
    if dirs:
      command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command

    outputs = map(self.LocalPathify, outputs)
    main_output = outputs[0]
    self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
    self.WriteLn('%s: gyp_var_prefix := $(GYP_VAR_PREFIX)' % main_output)
    self.WriteLn('%s: gyp_intermediate_dir := '
                 '$(abspath $(gyp_intermediate_dir))' % main_output)
    self.WriteLn('%s: gyp_shared_intermediate_dir := '
                 '$(abspath $(gyp_shared_intermediate_dir))' % main_output)
    self.WriteLn('%s: export PATH := '
                 '$(subst $(ANDROID_BUILD_PATHS),,$(PATH))' % main_output)

    main_output_deps = self.LocalPathify(rule_source)
    if inputs:
      main_output_deps += ' '
      main_output_deps += ' '.join([self.LocalPathify(f) for f in inputs])

    self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' %
                 (main_output, main_output_deps))
    self.WriteLn('\t%s\n' % command)
    for output in outputs[1:]:
      self.WriteLn('%s: %s ;' % (output, main_output))
    self.WriteLn()

  self.WriteLn()
'Write Makefile code for any \'copies\' from the gyp input. extra_outputs: a list that will be filled in with any outputs of this action (used to make other pieces dependent on this action)'
def WriteCopies(self, copies, extra_outputs):
self.WriteLn('### Generated for copy rule.')

variable = make.StringToMakefileVariable(self.relative_target + '_copies')
outputs = []
for copy in copies:
  for path in copy['files']:
    if not copy['destination'].startswith('$'):
      print ('WARNING: Copy rule for target %s writes output to '
             'local path %s' % (self.target, copy['destination']))
    path = Sourceify(self.LocalPathify(path))
    filename = os.path.split(path)[1]
    output = Sourceify(self.LocalPathify(os.path.join(copy['destination'],
                                                      filename)))

    self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES) | $(ACP)' %
                 (output, path))
    self.WriteLn('\t@echo Copying: $@')
    self.WriteLn('\t$(hide) mkdir -p $(dir $@)')
    self.WriteLn('\t$(hide) $(ACP) -rpf $< $@')
    self.WriteLn()
    outputs.append(output)
self.WriteLn('%s = %s' % (variable, ' '.join(map(make.QuoteSpaces, outputs))))
extra_outputs.append('$(%s)' % variable)
self.WriteLn()
'Write out the flags and include paths used to compile source files for the current target. Args: spec, configs: input from gyp.'
def WriteSourceFlags(self, spec, configs):
for configname, config in sorted(configs.iteritems()):
  extracted_includes = []

  self.WriteLn('\n# Flags passed to both C and C++ files.')
  (cflags, includes_from_cflags) = self.ExtractIncludesFromCFlags(
      config.get('cflags', []) + config.get('cflags_c', []))
  extracted_includes.extend(includes_from_cflags)
  self.WriteList(cflags, 'MY_CFLAGS_%s' % configname)

  self.WriteList(config.get('defines'), 'MY_DEFS_%s' % configname,
                 prefix='-D', quoter=make.EscapeCppDefine)

  self.WriteLn('\n# Include paths placed before CFLAGS/CPPFLAGS')
  includes = list(config.get('include_dirs', []))
  includes.extend(extracted_includes)
  includes = map(Sourceify, map(self.LocalPathify, includes))
  includes = self.NormalizeIncludePaths(includes)
  self.WriteList(includes, 'LOCAL_C_INCLUDES_%s' % configname)

  self.WriteLn('\n# Flags passed to only C++ (and not C) files.')
  self.WriteList(config.get('cflags_cc'), 'LOCAL_CPPFLAGS_%s' % configname)

self.WriteLn('\nLOCAL_CFLAGS := $(MY_CFLAGS_$(GYP_CONFIGURATION)) $(MY_DEFS_$(GYP_CONFIGURATION))')
if self.toolset == 'host':
  self.WriteLn('# Undefine ANDROID for host modules')
  self.WriteLn('LOCAL_CFLAGS += -UANDROID')
self.WriteLn('LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) $(LOCAL_C_INCLUDES_$(GYP_CONFIGURATION))')
self.WriteLn('LOCAL_CPPFLAGS := $(LOCAL_CPPFLAGS_$(GYP_CONFIGURATION))')
self.WriteLn('LOCAL_ASFLAGS := $(LOCAL_CFLAGS)')
'Write Makefile code for any \'sources\' from the gyp input. These are source files necessary to build the current target. We need to handle shared_intermediate directory source files as a special case by copying them to the intermediate directory and treating them as generated sources. Otherwise the Android build rules won\'t pick them up. Args: spec, configs: input from gyp. extra_sources: Sources generated from Actions or Rules.'
def WriteSources(self, spec, configs, extra_sources):
sources = filter(make.Compilable, spec.get('sources', []))
generated_not_sources = [x for x in extra_sources if not make.Compilable(x)]
extra_sources = filter(make.Compilable, extra_sources)

all_sources = sources + extra_sources
local_cpp_extension = '.cpp'
for source in all_sources:
  (root, ext) = os.path.splitext(source)
  if IsCPPExtension(ext):
    local_cpp_extension = ext
    break
if local_cpp_extension != '.cpp':
  self.WriteLn('LOCAL_CPP_EXTENSION := %s' % local_cpp_extension)

local_files = []
for source in sources:
  (root, ext) = os.path.splitext(source)
  if '$(gyp_shared_intermediate_dir)' in source:
    extra_sources.append(source)
  elif '$(gyp_intermediate_dir)' in source:
    extra_sources.append(source)
  elif IsCPPExtension(ext) and ext != local_cpp_extension:
    extra_sources.append(source)
  else:
    local_files.append(os.path.normpath(os.path.join(self.path, source)))

final_generated_sources = []
origin_src_dirs = []
for source in extra_sources:
  local_file = source
  if not '$(gyp_intermediate_dir)/' in local_file:
    basename = os.path.basename(local_file)
    local_file = '$(gyp_intermediate_dir)/' + basename
  (root, ext) = os.path.splitext(local_file)
  if IsCPPExtension(ext) and ext != local_cpp_extension:
    local_file = root + local_cpp_extension
  if local_file != source:
    self.WriteLn('%s: %s' % (local_file, self.LocalPathify(source)))
    self.WriteLn('\tmkdir -p $(@D); cp $< $@')
    origin_src_dirs.append(os.path.dirname(source))
  final_generated_sources.append(local_file)

final_generated_sources.extend(generated_not_sources)
self.WriteList(final_generated_sources, 'LOCAL_GENERATED_SOURCES')

origin_src_dirs = gyp.common.uniquer(origin_src_dirs)
origin_src_dirs = map(Sourceify, map(self.LocalPathify, origin_src_dirs))
self.WriteList(origin_src_dirs, 'GYP_COPIED_SOURCE_ORIGIN_DIRS')

self.WriteList(local_files, 'LOCAL_SRC_FILES')

self.WriteSourceFlags(spec, configs)
'Return the Android module name used for a gyp spec. We use the complete qualified target name to avoid collisions between duplicate targets in different directories. We also add a suffix to distinguish gyp-generated module names.'
def ComputeAndroidModule(self, spec):
if int(spec.get('android_unmangled_name', 0)):
  assert self.type != 'shared_library' or self.target.startswith('lib')
  return self.target

if self.type == 'shared_library':
  prefix = 'lib_'
else:
  prefix = ''

if spec['toolset'] == 'host':
  suffix = '_$(TARGET_$(GYP_VAR_PREFIX)ARCH)_host_gyp'
else:
  suffix = '_gyp'

if self.path:
  middle = make.StringToMakefileVariable('%s_%s' % (self.path, self.target))
else:
  middle = make.StringToMakefileVariable(self.target)

return ''.join([prefix, middle, suffix])
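A worked example (made-up target) of the mangling above: a target "foo" at path "base/bar" built for the target toolset becomes base_bar_foo_gyp, and a shared_library additionally gets the lib_ prefix. make.StringToMakefileVariable is approximated here by a simple regex substitution.

import re

def to_makefile_variable(s):
  # Rough stand-in for make.StringToMakefileVariable.
  return re.sub('[^a-zA-Z0-9_]', '_', s)

path, target = 'base/bar', 'foo'
middle = to_makefile_variable('%s_%s' % (path, target))
print('lib_' + middle + '_gyp')  # lib_base_bar_foo_gyp (shared_library)
print(middle + '_gyp')           # base_bar_foo_gyp (other target types)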
'Return the \'output basename\' of a gyp spec, split into filename + ext. Android libraries must be named the same thing as their module name, otherwise the linker can\'t find them, so product_name and so on must be ignored if we are building a library, and the "lib" prepending is not done for Android.'
def ComputeOutputParts(self, spec):
assert (self.type != 'loadable_module') target = spec['target_name'] target_prefix = '' target_ext = '' if (self.type == 'static_library'): target = self.ComputeAndroidModule(spec) target_ext = '.a' elif (self.type == 'shared_library'): target = self.ComputeAndroidModule(spec) target_ext = '.so' elif (self.type == 'none'): target_ext = '.stamp' elif (self.type != 'executable'): print ('ERROR: What output file should be generated?', 'type', self.type, 'target', target) if ((self.type != 'static_library') and (self.type != 'shared_library')): target_prefix = spec.get('product_prefix', target_prefix) target = spec.get('product_name', target) product_ext = spec.get('product_extension') if product_ext: target_ext = ('.' + product_ext) target_stem = (target_prefix + target) return (target_stem, target_ext)
'Return the \'output basename\' of a gyp spec. E.g., the loadable module \'foobar\' in directory \'baz\' will produce \'libfoobar.so\''
def ComputeOutputBasename(self, spec):
return ''.join(self.ComputeOutputParts(spec))
'Return the \'output\' (full output path) of a gyp spec. E.g., the loadable module \'foobar\' in directory \'baz\' will produce \'$(obj)/baz/libfoobar.so\''
def ComputeOutput(self, spec):
if (self.type == 'executable'): path = '$(gyp_shared_intermediate_dir)' elif (self.type == 'shared_library'): if (self.toolset == 'host'): path = '$($(GYP_HOST_VAR_PREFIX)HOST_OUT_INTERMEDIATE_LIBRARIES)' else: path = '$($(GYP_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)' elif (self.toolset == 'host'): path = ('$(call intermediates-dir-for,%s,%s,true,,$(GYP_HOST_VAR_PREFIX))' % (self.android_class, self.android_module)) else: path = ('$(call intermediates-dir-for,%s,%s,,,$(GYP_VAR_PREFIX))' % (self.android_class, self.android_module)) assert (spec.get('product_dir') is None) return os.path.join(path, self.ComputeOutputBasename(spec))
'Normalize include_paths. Convert absolute paths to relative to the Android top directory. Args: include_paths: A list of unprocessed include paths. Returns: A list of normalized include paths.'
def NormalizeIncludePaths(self, include_paths):
normalized = [] for path in include_paths: if (path[0] == '/'): path = gyp.common.RelativePath(path, self.android_top_dir) normalized.append(path) return normalized
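A minimal self-contained sketch of the same normalization, using os.path.relpath in place of gyp.common.RelativePath and an assumed android_top_dir; the function name and sample paths are illustrative only.

import os

def _example_normalize_includes(include_paths, android_top_dir='/src/android'):
    # Mirrors NormalizeIncludePaths above: absolute paths become relative to
    # the (assumed) Android top directory, everything else passes through.
    normalized = []
    for path in include_paths:
        if path.startswith('/'):
            path = os.path.relpath(path, android_top_dir)
        normalized.append(path)
    return normalized

# _example_normalize_includes(['/src/android/external/zlib', 'include'])
#   -> ['external/zlib', 'include']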
'Extract includes "-I..." out from cflags Args: cflags: A list of compiler flags, which may be mixed with "-I.." Returns: A tuple of lists: (clean_clfags, include_paths). "-I.." is trimmed.'
def ExtractIncludesFromCFlags(self, cflags):
clean_cflags = [] include_paths = [] for flag in cflags: if flag.startswith('-I'): include_paths.append(flag[2:]) else: clean_cflags.append(flag) return (clean_cflags, include_paths)
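A small usage sketch of the split performed above; the flag values are invented.

cflags = ['-Wall', '-Iexternal/zlib', '-O2', '-I$(gyp_shared_intermediate_dir)']
# Expected result of ExtractIncludesFromCFlags(cflags), per the loop above:
#   clean_cflags  == ['-Wall', '-O2']
#   include_paths == ['external/zlib', '$(gyp_shared_intermediate_dir)']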
'Filter the \'libraries\' key to separate things that shouldn\'t be ldflags. Library entries that look like filenames should be converted to android module names instead of being passed to the linker as flags. Args: libraries: the value of spec.get(\'libraries\') Returns: A tuple (static_lib_modules, dynamic_lib_modules, ldflags)'
def FilterLibraries(self, libraries):
static_lib_modules = [] dynamic_lib_modules = [] ldflags = [] for libs in libraries: for lib in libs.split(): if ((lib == '-lc') or (lib == '-lstdc++') or (lib == '-lm') or lib.endswith('libgcc.a')): continue match = re.search('([^/]+)\\.a$', lib) if match: static_lib_modules.append(match.group(1)) continue match = re.search('([^/]+)\\.so$', lib) if match: dynamic_lib_modules.append(match.group(1)) continue if lib.startswith('-l'): ldflags.append(lib) return (static_lib_modules, dynamic_lib_modules, ldflags)
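A hedged example of the partitioning above; the library entries are invented.

libraries = ['-lcrypto -lm', 'obj/third_party/libfoo.a', 'out/libbar.so', 'prebuilt/libgcc.a']
# Expected result of FilterLibraries(libraries), per the branches above:
#   static_lib_modules  == ['libfoo']    (basename of the .a, extension stripped)
#   dynamic_lib_modules == ['libbar']    (basename of the .so, extension stripped)
#   ldflags             == ['-lcrypto']  ('-lm' and anything ending in libgcc.a are dropped)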
'Compute the dependencies of a gyp spec. Returns a tuple (deps, link_deps), where each is a list of filenames that will need to be put in front of make for either building (deps) or linking (link_deps).'
def ComputeDeps(self, spec):
deps = [] link_deps = [] if ('dependencies' in spec): deps.extend([target_outputs[dep] for dep in spec['dependencies'] if target_outputs[dep]]) for dep in spec['dependencies']: if (dep in target_link_deps): link_deps.append(target_link_deps[dep]) deps.extend(link_deps) return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps))
'Write Makefile code to specify the link flags and library dependencies. spec, configs: input from gyp. link_deps: link dependency list; see ComputeDeps()'
def WriteTargetFlags(self, spec, configs, link_deps):
libraries = gyp.common.uniquer(spec.get('libraries', [])) (static_libs, dynamic_libs, ldflags_libs) = self.FilterLibraries(libraries) if (self.type != 'static_library'): for (configname, config) in sorted(configs.iteritems()): ldflags = list(config.get('ldflags', [])) self.WriteLn('') self.WriteList(ldflags, ('LOCAL_LDFLAGS_%s' % configname)) self.WriteList(ldflags_libs, 'LOCAL_GYP_LIBS') self.WriteLn('LOCAL_LDFLAGS := $(LOCAL_LDFLAGS_$(GYP_CONFIGURATION)) $(LOCAL_GYP_LIBS)') if (self.type != 'static_library'): static_link_deps = [x[1] for x in link_deps if (x[0] == 'static')] shared_link_deps = [x[1] for x in link_deps if (x[0] == 'shared')] else: static_link_deps = [] shared_link_deps = [] if (static_libs or static_link_deps): self.WriteLn('') self.WriteList((static_libs + static_link_deps), 'LOCAL_STATIC_LIBRARIES') self.WriteLn('# Enable grouping to fix circular references') self.WriteLn('LOCAL_GROUP_STATIC_LIBRARIES := true') if (dynamic_libs or shared_link_deps): self.WriteLn('') self.WriteList((dynamic_libs + shared_link_deps), 'LOCAL_SHARED_LIBRARIES')
'Write Makefile code to produce the final target of the gyp spec. spec, configs: input from gyp. deps, link_deps: dependency lists; see ComputeDeps() part_of_all: flag indicating this target is part of \'all\' write_alias_target: flag indicating whether to create short aliases for this target'
def WriteTarget(self, spec, configs, deps, link_deps, part_of_all, write_alias_target):
self.WriteLn('### Rules for final target.') if (self.type != 'none'): self.WriteTargetFlags(spec, configs, link_deps) settings = spec.get('aosp_build_settings', {}) if settings: self.WriteLn('### Set directly by aosp_build_settings.') for (k, v) in settings.iteritems(): if isinstance(v, list): self.WriteList(v, k) else: self.WriteLn(('%s := %s' % (k, make.QuoteIfNecessary(v)))) self.WriteLn('') if (part_of_all and write_alias_target): self.WriteLn('# Add target alias to "gyp_all_modules" target.') self.WriteLn('.PHONY: gyp_all_modules') self.WriteLn(('gyp_all_modules: %s' % self.android_module)) self.WriteLn('') if ((self.target != self.android_module) and write_alias_target): self.WriteLn('# Alias gyp target name.') self.WriteLn(('.PHONY: %s' % self.target)) self.WriteLn(('%s: %s' % (self.target, self.android_module))) self.WriteLn('') modifier = '' if (self.toolset == 'host'): modifier = 'HOST_' if (self.type == 'static_library'): self.WriteLn(('include $(BUILD_%sSTATIC_LIBRARY)' % modifier)) elif (self.type == 'shared_library'): self.WriteLn('LOCAL_PRELINK_MODULE := false') self.WriteLn(('include $(BUILD_%sSHARED_LIBRARY)' % modifier)) elif (self.type == 'executable'): self.WriteLn('LOCAL_CXX_STL := libc++_static') self.WriteLn('LOCAL_MODULE_PATH := $(gyp_shared_intermediate_dir)') self.WriteLn(('include $(BUILD_%sEXECUTABLE)' % modifier)) else: self.WriteLn('LOCAL_MODULE_PATH := $(PRODUCT_OUT)/gyp_stamp') self.WriteLn('LOCAL_UNINSTALLABLE_MODULE := true') if (self.toolset == 'target'): self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_VAR_PREFIX)') else: self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_HOST_VAR_PREFIX)') self.WriteLn() self.WriteLn('include $(BUILD_SYSTEM)/base_rules.mk') self.WriteLn() self.WriteLn('$(LOCAL_BUILT_MODULE): $(LOCAL_ADDITIONAL_DEPENDENCIES)') self.WriteLn(' DCTB $(hide) echo "Gyp timestamp: $@"') self.WriteLn(' DCTB $(hide) mkdir -p $(dir $@)') self.WriteLn(' DCTB $(hide) touch $@') self.WriteLn() self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX :=')
'Write a variable definition that is a list of values. E.g. WriteList([\'a\',\'b\'], \'foo\', prefix=\'blah\') writes out foo = blaha blahb but in a pretty-printed style.'
def WriteList(self, value_list, variable=None, prefix='', quoter=make.QuoteIfNecessary, local_pathify=False):
values = '' if value_list: value_list = [quoter((prefix + l)) for l in value_list] if local_pathify: value_list = [self.LocalPathify(l) for l in value_list] values = (' \\\n DCTB ' + ' \\\n DCTB '.join(value_list)) self.fp.write(('%s :=%s\n\n' % (variable, values)))
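To make the emitted format concrete (the ' DCTB ' sequences in this dump appear to stand in for literal tab characters), the docstring's example is expected to produce the text sketched below.

# WriteList(['a', 'b'], 'foo', prefix='blah') writes to the generated
# Android.mk (a tab shown as <TAB>):
#
#   foo := \
#   <TAB>blaha \
#   <TAB>blahb
#
# followed by a blank line, since the format string ends with '\n\n'.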
'Convert a subdirectory-relative path into a normalized path which starts with the make variable $(LOCAL_PATH) (i.e. the top of the project tree). Absolute paths, or paths that contain variables, are just normalized.'
def LocalPathify(self, path):
if (('$(' in path) or os.path.isabs(path)): return os.path.normpath(path) local_path = os.path.join('$(LOCAL_PATH)', self.path, path) local_path = os.path.normpath(local_path) assert local_path.startswith('$(LOCAL_PATH)'), ('Path %s attempts to escape from gyp path %s !)' % (path, self.path)) return local_path
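A few assumed examples of the path handling above, taking self.path to be 'base'.

# LocalPathify('src/foo.c')                    -> '$(LOCAL_PATH)/base/src/foo.c'
# LocalPathify('$(gyp_intermediate_dir)/g.c')  -> unchanged apart from normpath,
#                                                 because it contains '$('
# LocalPathify('../../outside.c')              -> fails the assert, since the
#                                                 normalized path escapes $(LOCAL_PATH)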
'Return true if this is a target that can be linked against.'
def Linkable(self):
return (self.type in ('static_library', 'shared_library'))
'Return true if the target should produce a restat rule based on a TOC file.'
def UsesToc(self, flavor):
if ((flavor == 'win') or self.bundle): return False return (self.type in ('shared_library', 'loadable_module'))
'Return the path, if any, that should be used as a dependency of any dependent action step.'
def PreActionInput(self, flavor):
if self.UsesToc(flavor): return (self.FinalOutput() + '.TOC') return (self.FinalOutput() or self.preaction_stamp)
'Return the path, if any, that should be used as a dependency of any dependent compile step.'
def PreCompileInput(self):
return (self.actions_stamp or self.precompile_stamp)
'Return the last output of the target, which depends on all prior steps.'
def FinalOutput(self):
return (self.bundle or self.binary or self.actions_stamp)
'Args: base_dir: path from source root to the directory containing this gyp file; by gyp semantics, all input paths are relative to this. build_dir: path from source root to the build output directory. toplevel_dir: path to the toplevel directory.'
def __init__(self, hash_for_rules, target_outputs, base_dir, build_dir, output_file, toplevel_build, output_file_name, flavor, toplevel_dir=None):
self.hash_for_rules = hash_for_rules self.target_outputs = target_outputs self.base_dir = base_dir self.build_dir = build_dir self.ninja = ninja_syntax.Writer(output_file) self.toplevel_build = toplevel_build self.output_file_name = output_file_name self.flavor = flavor self.abs_build_dir = None if (toplevel_dir is not None): self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir, build_dir)) self.obj_ext = ('.obj' if (flavor == 'win') else '.o') if (flavor == 'win'): self.win_env = {} for arch in ('x86', 'x64'): self.win_env[arch] = ('environment.' + arch) build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir) self.build_to_base = os.path.join(build_to_top, base_dir) base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir) self.base_to_build = os.path.join(base_to_top, build_dir)
'Expand specials like $!PRODUCT_DIR in |path|. If |product_dir| is None, assumes the cwd is already the product dir. Otherwise, |product_dir| is the relative path to the product dir.'
def ExpandSpecial(self, path, product_dir=None):
PRODUCT_DIR = '$!PRODUCT_DIR' if (PRODUCT_DIR in path): if product_dir: path = path.replace(PRODUCT_DIR, product_dir) else: path = path.replace((PRODUCT_DIR + '/'), '') path = path.replace((PRODUCT_DIR + '\\'), '') path = path.replace(PRODUCT_DIR, '.') INTERMEDIATE_DIR = '$!INTERMEDIATE_DIR' if (INTERMEDIATE_DIR in path): int_dir = self.GypPathToUniqueOutput('gen') path = path.replace(INTERMEDIATE_DIR, os.path.join((product_dir or ''), int_dir)) CONFIGURATION_NAME = '$|CONFIGURATION_NAME' path = path.replace(CONFIGURATION_NAME, self.config_name) return path
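Some assumed expansions following the branches above, taking config_name to be 'Release'; the paths are invented.

# ExpandSpecial('$!PRODUCT_DIR/libfoo.so')                    -> 'libfoo.so'
# ExpandSpecial('$!PRODUCT_DIR/libfoo.so', '../out/Release')  -> '../out/Release/libfoo.so'
# ExpandSpecial('$|CONFIGURATION_NAME/gen.h')                 -> 'Release/gen.h'
# '$!INTERMEDIATE_DIR' is replaced with the unique 'gen' directory returned by
# GypPathToUniqueOutput('gen'), joined onto product_dir when one is given.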
'Translate a gyp path to a ninja path, optionally expanding environment variable references in |path| with |env|. See the above discourse on path conversions.'
def GypPathToNinja(self, path, env=None):
if env: if (self.flavor == 'mac'): path = gyp.xcode_emulation.ExpandEnvVars(path, env) elif (self.flavor == 'win'): path = gyp.msvs_emulation.ExpandMacros(path, env) if path.startswith('$!'): expanded = self.ExpandSpecial(path) if (self.flavor == 'win'): expanded = os.path.normpath(expanded) return expanded if ('$|' in path): path = self.ExpandSpecial(path) assert ('$' not in path), path return os.path.normpath(os.path.join(self.build_to_base, path))
'Translate a gyp path to a ninja path for writing output. If qualified is True, qualify the resulting filename with the name of the target. This is necessary when e.g. compiling the same path twice for two separate output targets. See the above discourse on path conversions.'
def GypPathToUniqueOutput(self, path, qualified=True):
path = self.ExpandSpecial(path) assert (not path.startswith('$')), path obj = 'obj' if (self.toolset != 'target'): obj += ('.' + self.toolset) (path_dir, path_basename) = os.path.split(path) assert (not os.path.isabs(path_dir)), ("'%s' can not be absolute path (see crbug.com/462153)." % path_dir) if qualified: path_basename = ((self.name + '.') + path_basename) return os.path.normpath(os.path.join(obj, self.base_dir, path_dir, path_basename))
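For example, assuming self.name is 'base_unittests', base_dir is 'base' and the toolset is 'target' (all values invented):

# GypPathToUniqueOutput('gen/stuff.cc')
#   -> 'obj/base/gen/base_unittests.stuff.cc'
# GypPathToUniqueOutput('gen/stuff.cc', qualified=False)
#   -> 'obj/base/gen/stuff.cc'
# For the 'host' toolset the leading directory becomes 'obj.host' instead of 'obj'.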
'Given a list of targets, return the path of a single file representing the result of building all the targets, or None if there are no targets. Uses a stamp file if necessary.'
def WriteCollapsedDependencies(self, name, targets, order_only=None):
assert (targets == filter(None, targets)), targets if (len(targets) == 0): assert (not order_only) return None if ((len(targets) > 1) or order_only): stamp = self.GypPathToUniqueOutput((name + '.stamp')) targets = self.ninja.build(stamp, 'stamp', targets, order_only=order_only) self.ninja.newline() return targets[0]
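The three cases above behave roughly as follows; the argument values are invented.

# WriteCollapsedDependencies('actions', [])           -> None (order_only must be empty)
# WriteCollapsedDependencies('actions', ['a.stamp'])  -> 'a.stamp' (no extra edge written)
# WriteCollapsedDependencies('actions', ['a', 'b'])   -> a new '<target name>.actions.stamp'
#   path under the obj directory, with a 'stamp' build edge emitted that
#   depends on 'a' and 'b'.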
'The main entry point for NinjaWriter: write the build rules for a spec. Returns a Target object, which represents the output paths for this spec. Returns None if there are no outputs (e.g. a settings-only \'none\' type target).'
def WriteSpec(self, spec, config_name, generator_flags):
self.config_name = config_name self.name = spec['target_name'] self.toolset = spec['toolset'] config = spec['configurations'][config_name] self.target = Target(spec['type']) self.is_standalone_static_library = bool(spec.get('standalone_static_library', 0)) self.uses_cpp = False self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec) self.xcode_settings = self.msvs_settings = None if (self.flavor == 'mac'): self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec) if (self.flavor == 'win'): self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, generator_flags) arch = self.msvs_settings.GetArch(config_name) self.ninja.variable('arch', self.win_env[arch]) self.ninja.variable('cc', ('$cl_' + arch)) self.ninja.variable('cxx', ('$cl_' + arch)) self.ninja.variable('cc_host', ('$cl_' + arch)) self.ninja.variable('cxx_host', ('$cl_' + arch)) self.ninja.variable('asm', ('$ml_' + arch)) if (self.flavor == 'mac'): self.archs = self.xcode_settings.GetActiveArchs(config_name) if (len(self.archs) > 1): self.arch_subninjas = dict(((arch, ninja_syntax.Writer(OpenOutput(os.path.join(self.toplevel_build, self._SubninjaNameForArch(arch)), 'w'))) for arch in self.archs)) actions_depends = [] compile_depends = [] if ('dependencies' in spec): for dep in spec['dependencies']: if (dep in self.target_outputs): target = self.target_outputs[dep] actions_depends.append(target.PreActionInput(self.flavor)) compile_depends.append(target.PreCompileInput()) actions_depends = filter(None, actions_depends) compile_depends = filter(None, compile_depends) actions_depends = self.WriteCollapsedDependencies('actions_depends', actions_depends) compile_depends = self.WriteCollapsedDependencies('compile_depends', compile_depends) self.target.preaction_stamp = actions_depends self.target.precompile_stamp = compile_depends extra_sources = [] mac_bundle_depends = [] self.target.actions_stamp = self.WriteActionsRulesCopies(spec, extra_sources, actions_depends, mac_bundle_depends) compile_depends_stamp = (self.target.actions_stamp or compile_depends) link_deps = [] sources = (extra_sources + spec.get('sources', [])) if sources: if ((self.flavor == 'mac') and (len(self.archs) > 1)): for arch in self.archs: self.ninja.subninja(self._SubninjaNameForArch(arch)) pch = None if (self.flavor == 'win'): gyp.msvs_emulation.VerifyMissingSources(sources, self.abs_build_dir, generator_flags, self.GypPathToNinja) pch = gyp.msvs_emulation.PrecompiledHeader(self.msvs_settings, config_name, self.GypPathToNinja, self.GypPathToUniqueOutput, self.obj_ext) else: pch = gyp.xcode_emulation.MacPrefixHeader(self.xcode_settings, self.GypPathToNinja, (lambda path, lang: self.GypPathToUniqueOutput(((path + '-') + lang)))) link_deps = self.WriteSources(self.ninja, config_name, config, sources, compile_depends_stamp, pch, spec) obj_outputs = [f for f in sources if f.endswith(self.obj_ext)] if obj_outputs: if ((self.flavor != 'mac') or (len(self.archs) == 1)): link_deps += [self.GypPathToNinja(o) for o in obj_outputs] else: print ("Warning: Actions/rules writing object files don't work with multiarch targets, dropping. 
(target %s)" % spec['target_name']) elif ((self.flavor == 'mac') and (len(self.archs) > 1)): link_deps = collections.defaultdict(list) compile_deps = (self.target.actions_stamp or actions_depends) if ((self.flavor == 'win') and (self.target.type == 'static_library')): self.target.component_objs = link_deps self.target.compile_deps = compile_deps output = None is_empty_bundle = ((not link_deps) and (not mac_bundle_depends)) if (link_deps or self.target.actions_stamp or actions_depends): output = self.WriteTarget(spec, config_name, config, link_deps, compile_deps) if self.is_mac_bundle: mac_bundle_depends.append(output) if self.is_mac_bundle: output = self.WriteMacBundle(spec, mac_bundle_depends, is_empty_bundle) if (not output): return None assert self.target.FinalOutput(), output return self.target
'Handle the implicit VS .idl rule for one source file. Fills |outputs| with files that are generated.'
def _WinIdlRule(self, source, prebuild, outputs):
(outdir, output, vars, flags) = self.msvs_settings.GetIdlBuildData(source, self.config_name) outdir = self.GypPathToNinja(outdir) def fix_path(path, rel=None): path = os.path.join(outdir, path) (dirname, basename) = os.path.split(source) (root, ext) = os.path.splitext(basename) path = self.ExpandRuleVariables(path, root, dirname, source, ext, basename) if rel: path = os.path.relpath(path, rel) return path vars = [(name, fix_path(value, outdir)) for (name, value) in vars] output = [fix_path(p) for p in output] vars.append(('outdir', outdir)) vars.append(('idlflags', flags)) input = self.GypPathToNinja(source) self.ninja.build(output, 'idl', input, variables=vars, order_only=prebuild) outputs.extend(output)
'Writes rules to match MSVS\'s implicit idl handling.'
def WriteWinIdlFiles(self, spec, prebuild):
assert (self.flavor == 'win') if self.msvs_settings.HasExplicitIdlRulesOrActions(spec): return [] outputs = [] for source in filter((lambda x: x.endswith('.idl')), spec['sources']): self._WinIdlRule(source, prebuild, outputs) return outputs
'Write out the Actions, Rules, and Copies steps. Return a path representing the outputs of these steps.'
def WriteActionsRulesCopies(self, spec, extra_sources, prebuild, mac_bundle_depends):
outputs = [] if self.is_mac_bundle: mac_bundle_resources = spec.get('mac_bundle_resources', [])[:] else: mac_bundle_resources = [] extra_mac_bundle_resources = [] if ('actions' in spec): outputs += self.WriteActions(spec['actions'], extra_sources, prebuild, extra_mac_bundle_resources) if ('rules' in spec): outputs += self.WriteRules(spec['rules'], extra_sources, prebuild, mac_bundle_resources, extra_mac_bundle_resources) if ('copies' in spec): outputs += self.WriteCopies(spec['copies'], prebuild, mac_bundle_depends) if (('sources' in spec) and (self.flavor == 'win')): outputs += self.WriteWinIdlFiles(spec, prebuild) stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs) if self.is_mac_bundle: xcassets = self.WriteMacBundleResources((extra_mac_bundle_resources + mac_bundle_resources), mac_bundle_depends) partial_info_plist = self.WriteMacXCassets(xcassets, mac_bundle_depends) self.WriteMacInfoPlist(partial_info_plist, mac_bundle_depends) return stamp
'Generate and return a description of a build step. |verb| is the short summary, e.g. ACTION or RULE. |message| is a hand-written description, or None if not available. |fallback| is the gyp-level name of the step, usable as a fallback.'
def GenerateDescription(self, verb, message, fallback):
if (self.toolset != 'target'): verb += ('(%s)' % self.toolset) if message: return ('%s %s' % (verb, self.ExpandSpecial(message))) else: return ('%s %s: %s' % (verb, self.name, fallback))
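Sample descriptions produced by the formatting above, with self.name taken to be 'my_target' (values invented):

# GenerateDescription('ACTION', 'Generating version header', 'version_header')
#   -> 'ACTION Generating version header'        (toolset 'target')
# GenerateDescription('ACTION', None, 'version_header')
#   -> 'ACTION my_target: version_header'
# For a non-'target' toolset the verb is suffixed, e.g. 'ACTION(host) ...'.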
'Writes ninja edges for \'mac_bundle_resources\'.'
def WriteMacBundleResources(self, resources, bundle_depends):
xcassets = [] for (output, res) in gyp.xcode_emulation.GetMacBundleResources(generator_default_variables['PRODUCT_DIR'], self.xcode_settings, map(self.GypPathToNinja, resources)): output = self.ExpandSpecial(output) if (os.path.splitext(output)[(-1)] != '.xcassets'): isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name) self.ninja.build(output, 'mac_tool', res, variables=[('mactool_cmd', 'copy-bundle-resource'), ('binary', isBinary)]) bundle_depends.append(output) else: xcassets.append(res) return xcassets
'Writes ninja edges for \'mac_bundle_resources\' .xcassets files. This adds an invocation of \'actool\' via the \'mac_tool.py\' helper script. It assumes that the asset catalogs define at least one imageset and thus an Assets.car file will be generated in the application resources directory. If this is not the case, the build step will probably be re-run at each invocation of ninja.'
def WriteMacXCassets(self, xcassets, bundle_depends):
if (not xcassets): return extra_arguments = {} settings_to_arg = {'XCASSETS_APP_ICON': 'app-icon', 'XCASSETS_LAUNCH_IMAGE': 'launch-image'} settings = self.xcode_settings.xcode_settings[self.config_name] for (settings_key, arg_name) in settings_to_arg.iteritems(): value = settings.get(settings_key) if value: extra_arguments[arg_name] = value partial_info_plist = None if extra_arguments: partial_info_plist = self.GypPathToUniqueOutput('assetcatalog_generated_info.plist') extra_arguments['output-partial-info-plist'] = partial_info_plist outputs = [] outputs.append(os.path.join(self.xcode_settings.GetBundleResourceFolder(), 'Assets.car')) if partial_info_plist: outputs.append(partial_info_plist) keys = QuoteShellArgument(json.dumps(extra_arguments), self.flavor) extra_env = self.xcode_settings.GetPerTargetSettings() env = self.GetSortedXcodeEnv(additional_settings=extra_env) env = self.ComputeExportEnvString(env) bundle_depends.extend(self.ninja.build(outputs, 'compile_xcassets', xcassets, variables=[('env', env), ('keys', keys)])) return partial_info_plist
'Write build rules for bundle Info.plist files.'
def WriteMacInfoPlist(self, partial_info_plist, bundle_depends):
(info_plist, out, defines, extra_env) = gyp.xcode_emulation.GetMacInfoPlist(generator_default_variables['PRODUCT_DIR'], self.xcode_settings, self.GypPathToNinja) if (not info_plist): return out = self.ExpandSpecial(out) if defines: intermediate_plist = self.GypPathToUniqueOutput(os.path.basename(info_plist)) defines = ' '.join([Define(d, self.flavor) for d in defines]) info_plist = self.ninja.build(intermediate_plist, 'preprocess_infoplist', info_plist, variables=[('defines', defines)]) env = self.GetSortedXcodeEnv(additional_settings=extra_env) env = self.ComputeExportEnvString(env) if partial_info_plist: intermediate_plist = self.GypPathToUniqueOutput('merged_info.plist') info_plist = self.ninja.build(intermediate_plist, 'merge_infoplist', [partial_info_plist, info_plist]) keys = self.xcode_settings.GetExtraPlistItems(self.config_name) keys = QuoteShellArgument(json.dumps(keys), self.flavor) isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name) self.ninja.build(out, 'copy_infoplist', info_plist, variables=[('env', env), ('keys', keys), ('binary', isBinary)]) bundle_depends.append(out)
'Write build rules to compile all of |sources|.'
def WriteSources(self, ninja_file, config_name, config, sources, predepends, precompiled_header, spec):
if (self.toolset == 'host'): self.ninja.variable('ar', '$ar_host') self.ninja.variable('cc', '$cc_host') self.ninja.variable('cxx', '$cxx_host') self.ninja.variable('ld', '$ld_host') self.ninja.variable('ldxx', '$ldxx_host') self.ninja.variable('nm', '$nm_host') self.ninja.variable('readelf', '$readelf_host') if ((self.flavor != 'mac') or (len(self.archs) == 1)): return self.WriteSourcesForArch(self.ninja, config_name, config, sources, predepends, precompiled_header, spec) else: return dict(((arch, self.WriteSourcesForArch(self.arch_subninjas[arch], config_name, config, sources, predepends, precompiled_header, spec, arch=arch)) for arch in self.archs))
'Write build rules to compile all of |sources|.'
def WriteSourcesForArch(self, ninja_file, config_name, config, sources, predepends, precompiled_header, spec, arch=None):
extra_defines = [] if (self.flavor == 'mac'): cflags = self.xcode_settings.GetCflags(config_name, arch=arch) cflags_c = self.xcode_settings.GetCflagsC(config_name) cflags_cc = self.xcode_settings.GetCflagsCC(config_name) cflags_objc = (['$cflags_c'] + self.xcode_settings.GetCflagsObjC(config_name)) cflags_objcc = (['$cflags_cc'] + self.xcode_settings.GetCflagsObjCC(config_name)) elif (self.flavor == 'win'): asmflags = self.msvs_settings.GetAsmflags(config_name) cflags = self.msvs_settings.GetCflags(config_name) cflags_c = self.msvs_settings.GetCflagsC(config_name) cflags_cc = self.msvs_settings.GetCflagsCC(config_name) extra_defines = self.msvs_settings.GetComputedDefines(config_name) pdbpath_c = pdbpath_cc = self.msvs_settings.GetCompilerPdbName(config_name, self.ExpandSpecial) if (not pdbpath_c): obj = 'obj' if (self.toolset != 'target'): obj += ('.' + self.toolset) pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, self.name)) pdbpath_c = (pdbpath + '.c.pdb') pdbpath_cc = (pdbpath + '.cc.pdb') self.WriteVariableList(ninja_file, 'pdbname_c', [pdbpath_c]) self.WriteVariableList(ninja_file, 'pdbname_cc', [pdbpath_cc]) self.WriteVariableList(ninja_file, 'pchprefix', [self.name]) else: cflags = config.get('cflags', []) cflags_c = config.get('cflags_c', []) cflags_cc = config.get('cflags_cc', []) if (self.toolset == 'target'): cflags_c = ((os.environ.get('CPPFLAGS', '').split() + os.environ.get('CFLAGS', '').split()) + cflags_c) cflags_cc = ((os.environ.get('CPPFLAGS', '').split() + os.environ.get('CXXFLAGS', '').split()) + cflags_cc) elif (self.toolset == 'host'): cflags_c = ((os.environ.get('CPPFLAGS_host', '').split() + os.environ.get('CFLAGS_host', '').split()) + cflags_c) cflags_cc = ((os.environ.get('CPPFLAGS_host', '').split() + os.environ.get('CXXFLAGS_host', '').split()) + cflags_cc) defines = (config.get('defines', []) + extra_defines) self.WriteVariableList(ninja_file, 'defines', [Define(d, self.flavor) for d in defines]) if (self.flavor == 'win'): self.WriteVariableList(ninja_file, 'asmflags', map(self.ExpandSpecial, asmflags)) self.WriteVariableList(ninja_file, 'rcflags', [QuoteShellArgument(self.ExpandSpecial(f), self.flavor) for f in self.msvs_settings.GetRcflags(config_name, self.GypPathToNinja)]) include_dirs = config.get('include_dirs', []) env = self.GetToolchainEnv() if (self.flavor == 'win'): include_dirs = self.msvs_settings.AdjustIncludeDirs(include_dirs, config_name) self.WriteVariableList(ninja_file, 'includes', [QuoteShellArgument(('-I' + self.GypPathToNinja(i, env)), self.flavor) for i in include_dirs]) if (self.flavor == 'win'): midl_include_dirs = config.get('midl_include_dirs', []) midl_include_dirs = self.msvs_settings.AdjustMidlIncludeDirs(midl_include_dirs, config_name) self.WriteVariableList(ninja_file, 'midl_includes', [QuoteShellArgument(('-I' + self.GypPathToNinja(i, env)), self.flavor) for i in midl_include_dirs]) pch_commands = precompiled_header.GetPchBuildCommands(arch) if (self.flavor == 'mac'): for (ext, var) in [('c', 'cflags_pch_c'), ('cc', 'cflags_pch_cc'), ('m', 'cflags_pch_objc'), ('mm', 'cflags_pch_objcc')]: include = precompiled_header.GetInclude(ext, arch) if include: ninja_file.variable(var, include) arflags = config.get('arflags', []) self.WriteVariableList(ninja_file, 'cflags', map(self.ExpandSpecial, cflags)) self.WriteVariableList(ninja_file, 'cflags_c', map(self.ExpandSpecial, cflags_c)) self.WriteVariableList(ninja_file, 'cflags_cc', map(self.ExpandSpecial, cflags_cc)) if (self.flavor == 'mac'): 
self.WriteVariableList(ninja_file, 'cflags_objc', map(self.ExpandSpecial, cflags_objc)) self.WriteVariableList(ninja_file, 'cflags_objcc', map(self.ExpandSpecial, cflags_objcc)) self.WriteVariableList(ninja_file, 'arflags', map(self.ExpandSpecial, arflags)) ninja_file.newline() outputs = [] has_rc_source = False for source in sources: (filename, ext) = os.path.splitext(source) ext = ext[1:] obj_ext = self.obj_ext if (ext in ('cc', 'cpp', 'cxx')): command = 'cxx' self.uses_cpp = True elif ((ext == 'c') or ((ext == 'S') and (self.flavor != 'win'))): command = 'cc' elif ((ext == 's') and (self.flavor != 'win')): command = 'cc_s' elif ((self.flavor == 'win') and (ext == 'asm') and (not self.msvs_settings.HasExplicitAsmRules(spec))): command = 'asm' obj_ext = '_asm.obj' elif ((self.flavor == 'mac') and (ext == 'm')): command = 'objc' elif ((self.flavor == 'mac') and (ext == 'mm')): command = 'objcxx' self.uses_cpp = True elif ((self.flavor == 'win') and (ext == 'rc')): command = 'rc' obj_ext = '.res' has_rc_source = True else: continue input = self.GypPathToNinja(source) output = self.GypPathToUniqueOutput((filename + obj_ext)) if (arch is not None): output = AddArch(output, arch) implicit = precompiled_header.GetObjDependencies([input], [output], arch) variables = [] if (self.flavor == 'win'): (variables, output, implicit) = precompiled_header.GetFlagsModifications(input, output, implicit, command, cflags_c, cflags_cc, self.ExpandSpecial) ninja_file.build(output, command, input, implicit=[gch for (_, _, gch) in implicit], order_only=predepends, variables=variables) outputs.append(output) if has_rc_source: resource_include_dirs = config.get('resource_include_dirs', include_dirs) self.WriteVariableList(ninja_file, 'resource_includes', [QuoteShellArgument(('-I' + self.GypPathToNinja(i, env)), self.flavor) for i in resource_include_dirs]) self.WritePchTargets(ninja_file, pch_commands) ninja_file.newline() return outputs
'Writes ninja rules to compile prefix headers.'
def WritePchTargets(self, ninja_file, pch_commands):
if (not pch_commands): return for (gch, lang_flag, lang, input) in pch_commands: var_name = {'c': 'cflags_pch_c', 'cc': 'cflags_pch_cc', 'm': 'cflags_pch_objc', 'mm': 'cflags_pch_objcc'}[lang] map = {'c': 'cc', 'cc': 'cxx', 'm': 'objc', 'mm': 'objcxx'} cmd = map.get(lang) ninja_file.build(gch, cmd, input, variables=[(var_name, lang_flag)])
'Write out a link step. Fills out target.binary.'
def WriteLink(self, spec, config_name, config, link_deps):
if ((self.flavor != 'mac') or (len(self.archs) == 1)): return self.WriteLinkForArch(self.ninja, spec, config_name, config, link_deps) else: output = self.ComputeOutput(spec) inputs = [self.WriteLinkForArch(self.arch_subninjas[arch], spec, config_name, config, link_deps[arch], arch=arch) for arch in self.archs] extra_bindings = [] build_output = output if (not self.is_mac_bundle): self.AppendPostbuildVariable(extra_bindings, spec, output, output) if ((spec['type'] in ('shared_library', 'loadable_module')) and (not self.is_mac_bundle)): extra_bindings.append(('lib', output)) self.ninja.build([output, (output + '.TOC')], 'solipo', inputs, variables=extra_bindings) else: self.ninja.build(build_output, 'lipo', inputs, variables=extra_bindings) return output
'Write out a link step. Fills out target.binary.'
def WriteLinkForArch(self, ninja_file, spec, config_name, config, link_deps, arch=None):
command = {'executable': 'link', 'loadable_module': 'solink_module', 'shared_library': 'solink'}[spec['type']] command_suffix = '' implicit_deps = set() solibs = set() order_deps = set() if ('dependencies' in spec): extra_link_deps = set() for dep in spec['dependencies']: target = self.target_outputs.get(dep) if (not target): continue linkable = target.Linkable() if linkable: new_deps = [] if ((self.flavor == 'win') and target.component_objs and self.msvs_settings.IsUseLibraryDependencyInputs(config_name)): new_deps = target.component_objs if target.compile_deps: order_deps.add(target.compile_deps) elif ((self.flavor == 'win') and target.import_lib): new_deps = [target.import_lib] elif target.UsesToc(self.flavor): solibs.add(target.binary) implicit_deps.add((target.binary + '.TOC')) else: new_deps = [target.binary] for new_dep in new_deps: if (new_dep not in extra_link_deps): extra_link_deps.add(new_dep) link_deps.append(new_dep) final_output = target.FinalOutput() if ((not linkable) or (final_output != target.binary)): implicit_deps.add(final_output) extra_bindings = [] if (self.uses_cpp and (self.flavor != 'win')): extra_bindings.append(('ld', '$ldxx')) output = self.ComputeOutput(spec, arch) if ((arch is None) and (not self.is_mac_bundle)): self.AppendPostbuildVariable(extra_bindings, spec, output, output) is_executable = (spec['type'] == 'executable') env_ldflags = os.environ.get('LDFLAGS', '').split() if (self.flavor == 'mac'): ldflags = self.xcode_settings.GetLdflags(config_name, self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']), self.GypPathToNinja, arch) ldflags = (env_ldflags + ldflags) elif (self.flavor == 'win'): manifest_base_name = self.GypPathToUniqueOutput(self.ComputeOutputFileName(spec)) (ldflags, intermediate_manifest, manifest_files) = self.msvs_settings.GetLdflags(config_name, self.GypPathToNinja, self.ExpandSpecial, manifest_base_name, output, is_executable, self.toplevel_build) ldflags = (env_ldflags + ldflags) self.WriteVariableList(ninja_file, 'manifests', manifest_files) implicit_deps = implicit_deps.union(manifest_files) if intermediate_manifest: self.WriteVariableList(ninja_file, 'intermediatemanifest', [intermediate_manifest]) command_suffix = _GetWinLinkRuleNameSuffix(self.msvs_settings.IsEmbedManifest(config_name)) def_file = self.msvs_settings.GetDefFile(self.GypPathToNinja) if def_file: implicit_deps.add(def_file) else: ldflags = (env_ldflags + config.get('ldflags', [])) if (is_executable and len(solibs)): rpath = 'lib/' if (self.toolset != 'target'): rpath += self.toolset ldflags.append(('-Wl,-rpath=\\$$ORIGIN/%s' % rpath)) ldflags.append(('-Wl,-rpath-link=%s' % rpath)) self.WriteVariableList(ninja_file, 'ldflags', map(self.ExpandSpecial, ldflags)) library_dirs = config.get('library_dirs', []) if (self.flavor == 'win'): library_dirs = [self.msvs_settings.ConvertVSMacros(l, config_name) for l in library_dirs] library_dirs = [('/LIBPATH:' + QuoteShellArgument(self.GypPathToNinja(l), self.flavor)) for l in library_dirs] else: library_dirs = [QuoteShellArgument(('-L' + self.GypPathToNinja(l)), self.flavor) for l in library_dirs] libraries = gyp.common.uniquer(map(self.ExpandSpecial, spec.get('libraries', []))) if (self.flavor == 'mac'): libraries = self.xcode_settings.AdjustLibraries(libraries, config_name) elif (self.flavor == 'win'): libraries = self.msvs_settings.AdjustLibraries(libraries) self.WriteVariableList(ninja_file, 'libs', (library_dirs + libraries)) linked_binary = output if (command in ('solink', 'solink_module')): 
extra_bindings.append(('soname', os.path.split(output)[1])) extra_bindings.append(('lib', gyp.common.EncodePOSIXShellArgument(output))) if (self.flavor != 'win'): link_file_list = output if self.is_mac_bundle: link_file_list = self.xcode_settings.GetWrapperName() if arch: link_file_list += ('.' + arch) link_file_list += '.rsp' link_file_list = link_file_list.replace(' ', '_') extra_bindings.append(('link_file_list', gyp.common.EncodePOSIXShellArgument(link_file_list))) if (self.flavor == 'win'): extra_bindings.append(('binary', output)) if (('/NOENTRY' not in ldflags) and (not self.msvs_settings.GetNoImportLibrary(config_name))): self.target.import_lib = (output + '.lib') extra_bindings.append(('implibflag', ('/IMPLIB:%s' % self.target.import_lib))) pdbname = self.msvs_settings.GetPDBName(config_name, self.ExpandSpecial, (output + '.pdb')) output = [output, self.target.import_lib] if pdbname: output.append(pdbname) elif (not self.is_mac_bundle): output = [output, (output + '.TOC')] else: command = (command + '_notoc') elif (self.flavor == 'win'): extra_bindings.append(('binary', output)) pdbname = self.msvs_settings.GetPDBName(config_name, self.ExpandSpecial, (output + '.pdb')) if pdbname: output = [output, pdbname] if len(solibs): extra_bindings.append(('solibs', gyp.common.EncodePOSIXShellList(solibs))) ninja_file.build(output, (command + command_suffix), link_deps, implicit=list(implicit_deps), order_only=list(order_deps), variables=extra_bindings) return linked_binary
'Returns the variables toolchain would set for build steps.'
def GetToolchainEnv(self, additional_settings=None):
env = self.GetSortedXcodeEnv(additional_settings=additional_settings) if (self.flavor == 'win'): env = self.GetMsvsToolchainEnv(additional_settings=additional_settings) return env
'Returns the variables Visual Studio would set for build steps.'
def GetMsvsToolchainEnv(self, additional_settings=None):
return self.msvs_settings.GetVSMacroEnv('$!PRODUCT_DIR', config=self.config_name)
'Returns the variables Xcode would set for build steps.'
def GetSortedXcodeEnv(self, additional_settings=None):
assert self.abs_build_dir abs_build_dir = self.abs_build_dir return gyp.xcode_emulation.GetSortedXcodeEnv(self.xcode_settings, abs_build_dir, os.path.join(abs_build_dir, self.build_to_base), self.config_name, additional_settings)
'Returns the variables Xcode would set for postbuild steps.'
def GetSortedXcodePostbuildEnv(self):
postbuild_settings = {} strip_save_file = self.xcode_settings.GetPerTargetSetting('CHROMIUM_STRIP_SAVE_FILE') if strip_save_file: postbuild_settings['CHROMIUM_STRIP_SAVE_FILE'] = strip_save_file return self.GetSortedXcodeEnv(additional_settings=postbuild_settings)