desc
stringlengths
3
26.7k
decl
stringlengths
11
7.89k
bodies
stringlengths
8
553k
def AppendPostbuildVariable(self, variables, spec, output, binary, is_command_start=False):
    """Append a ('postbuilds', ...) pair to |variables| when |output| has a
    postbuild command; do nothing otherwise."""
    command = self.GetPostbuildCommand(spec, output, binary, is_command_start)
    if command:
        variables.append(('postbuilds', command))
def GetPostbuildCommand(self, spec, output, output_binary, is_command_start):
    """Returns a shell command that runs all the postbuilds, and removes
    |output| if any of them fails.

    If |is_command_start| is False, then the returned string will start with
    ' && '.
    """
    # Postbuilds only apply to Xcode-flavored builds of real targets.
    if ((not self.xcode_settings) or (spec['type'] == 'none') or (not output)):
        return ''
    output = QuoteShellArgument(output, self.flavor)
    postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(spec, quiet=True)
    if (output_binary is not None):
        # Implicit postbuilds (e.g. stripping/signing) operate on the binary
        # inside the bundle, so both paths are rebased to the build dir.
        postbuilds = self.xcode_settings.AddImplicitPostbuilds(
            self.config_name,
            os.path.normpath(os.path.join(self.base_to_build, output)),
            QuoteShellArgument(
                os.path.normpath(os.path.join(self.base_to_build, output_binary)),
                self.flavor),
            postbuilds, quiet=True)
    if (not postbuilds):
        return ''
    # Postbuilds expect to be run in the gyp file's directory, so insert an
    # implicit postbuild to cd to there.
    postbuilds.insert(0, gyp.common.EncodePOSIXShellList(['cd', self.build_to_base]))
    env = self.ComputeExportEnvString(self.GetSortedXcodePostbuildEnv())
    commands = ((env + ' (') + ' && '.join([ninja_syntax.escape(command) for command in postbuilds]))
    # Capture the exit status, delete |output| on failure, then re-raise the
    # original status so the build step is reported as failed.
    command_string = ((commands + ('); G=$$?; ((exit $$G) || rm -rf %s) ' % output)) + '&& exit $$G)')
    if is_command_start:
        return (('(' + command_string) + ' && ')
    else:
        # '$ ' is ninja's escape for a literal space at command start.
        return ('$ && (' + command_string)
def ComputeExportEnvString(self, env):
    """Given an environment (iterable of (key, value) pairs), return a string
    looking like 'export FOO=foo; export BAR="${FOO} bar";' that exports
    |env| to the shell."""
    return ' '.join(
        'export %s=%s;' % (key, ninja_syntax.escape(gyp.common.EncodePOSIXShellArgument(value)))
        for (key, value) in env)
def ComputeMacBundleOutput(self):
    """Return the 'output' (full output path) of the bundle output
    directory."""
    assert self.is_mac_bundle
    product_dir = generator_default_variables['PRODUCT_DIR']
    wrapper_name = self.xcode_settings.GetWrapperName()
    return self.ExpandSpecial(os.path.join(product_dir, wrapper_name))
def ComputeOutputFileName(self, spec, type=None):
    """Compute the filename of the final output for the current target.

    |type| defaults to the spec's own 'type'. Raises for unhandled types.
    """
    if not type:
        type = spec['type']

    # Flavor-specific prefixes/suffixes come from the generator defaults.
    default_variables = copy.copy(generator_default_variables)
    CalculateVariables(default_variables, {'flavor': self.flavor})

    DEFAULT_PREFIX = {
        'loadable_module': default_variables['SHARED_LIB_PREFIX'],
        'shared_library': default_variables['SHARED_LIB_PREFIX'],
        'static_library': default_variables['STATIC_LIB_PREFIX'],
        'executable': default_variables['EXECUTABLE_PREFIX'],
    }
    DEFAULT_EXTENSION = {
        'loadable_module': default_variables['SHARED_LIB_SUFFIX'],
        'shared_library': default_variables['SHARED_LIB_SUFFIX'],
        'static_library': default_variables['STATIC_LIB_SUFFIX'],
        'executable': default_variables['EXECUTABLE_SUFFIX'],
    }

    prefix = spec.get('product_prefix', DEFAULT_PREFIX.get(type, ''))

    extension = spec.get('product_extension')
    extension = ('.' + extension) if extension else DEFAULT_EXTENSION.get(type, '')

    target = spec['product_name'] if ('product_name' in spec) else spec['target_name']
    if prefix == 'lib':
        # Avoid emitting 'liblibfoo' when the target name already has 'lib'.
        target = StripPrefix(target, 'lib')

    if type in ('static_library', 'loadable_module', 'shared_library', 'executable'):
        return '%s%s%s' % (prefix, target, extension)
    elif type == 'none':
        return '%s.stamp' % target
    else:
        raise Exception('Unhandled output type %s' % type)
def ComputeOutput(self, spec, arch=None):
    """Compute the path for the final output of the spec.

    When |arch| is given, the per-arch intermediate output path is returned
    instead of the fat/final one.
    """
    type = spec['type']
    if (self.flavor == 'win'):
        # MSVS settings may override the output name entirely.
        override = self.msvs_settings.GetOutputName(self.config_name, self.ExpandSpecial)
        if override:
            return override
    if ((arch is None) and (self.flavor == 'mac') and (type in ('static_library', 'executable', 'shared_library', 'loadable_module'))):
        filename = self.xcode_settings.GetExecutablePath()
    else:
        filename = self.ComputeOutputFileName(spec, type)
    if ((arch is None) and ('product_dir' in spec)):
        # Explicit product_dir wins over all the defaults below.
        path = os.path.join(spec['product_dir'], filename)
        return self.ExpandSpecial(path)
    # Some products go into the output root, libraries go into shared library
    # dir, and everything else goes into the normal place.
    type_in_output_root = ['executable', 'loadable_module']
    if ((self.flavor == 'mac') and (self.toolset == 'target')):
        type_in_output_root += ['shared_library', 'static_library']
    elif ((self.flavor == 'win') and (self.toolset == 'target')):
        type_in_output_root += ['shared_library']
    if (arch is not None):
        # Per-arch intermediates live under arch/ (or arch/<toolset>/ for
        # non-target toolsets).
        archdir = 'arch'
        if (self.toolset != 'target'):
            archdir = os.path.join('arch', ('%s' % self.toolset))
        return os.path.join(archdir, AddArch(filename, arch))
    elif ((type in type_in_output_root) or self.is_standalone_static_library):
        return filename
    elif (type == 'shared_library'):
        libdir = 'lib'
        if (self.toolset != 'target'):
            libdir = os.path.join('lib', ('%s' % self.toolset))
        return os.path.join(libdir, filename)
    else:
        return self.GypPathToUniqueOutput(filename, qualified=False)
def WriteNewNinjaRule(self, name, args, description, is_cygwin, env, pool, depfile=None):
    """Write out a new ninja "rule" statement for a given command.

    Returns the name of the new rule, and a copy of |args| with variables
    expanded.
    """
    if (self.flavor == 'win'):
        args = [self.msvs_settings.ConvertVSMacros(arg, self.base_to_build, config=self.config_name) for arg in args]
        description = self.msvs_settings.ConvertVSMacros(description, config=self.config_name)
    elif (self.flavor == 'mac'):
        args = [gyp.xcode_emulation.ExpandEnvVars(arg, env) for arg in args]
        description = gyp.xcode_emulation.ExpandEnvVars(description, env)
    # Build a unique, ninja-safe rule name from target name, toolset and the
    # caller-supplied |name|.
    rule_name = self.name
    if (self.toolset == 'target'):
        rule_name += ('.' + self.toolset)
    rule_name += ('.' + name)
    rule_name = re.sub('[^a-zA-Z0-9_]', '_', rule_name)
    # Remove variable references from the description, but keep the ones
    # in |protect| that ninja itself will expand.
    protect = ['${root}', '${dirname}', '${source}', '${ext}', '${name}']
    protect = (('(?!' + '|'.join(map(re.escape, protect))) + ')')
    description = re.sub((protect + '\\$'), '_', description)

    rspfile = None
    rspfile_content = None
    args = [self.ExpandSpecial(arg, self.base_to_build) for arg in args]
    if (self.flavor == 'win'):
        # Windows commands go through a response file to dodge cmd.exe's
        # command-line length limit; gyp-win-tool runs them.
        rspfile = (rule_name + '.$unique_name.rsp')
        run_in = ('' if is_cygwin else (' ' + self.build_to_base))
        if is_cygwin:
            rspfile_content = self.msvs_settings.BuildCygwinBashCommandLine(args, self.build_to_base)
        else:
            rspfile_content = gyp.msvs_emulation.EncodeRspFileList(args)
        command = ((('%s gyp-win-tool action-wrapper $arch ' % sys.executable) + rspfile) + run_in)
    else:
        # POSIX: cd into the gyp file's directory, export the env, then run.
        env = self.ComputeExportEnvString(env)
        command = gyp.common.EncodePOSIXShellList(args)
        command = ((('cd %s; ' % self.build_to_base) + env) + command)
    self.ninja.rule(rule_name, command, description, depfile=depfile, restat=True, pool=pool, rspfile=rspfile, rspfile_content=rspfile_content)
    self.ninja.newline()
    return (rule_name, args)
def Write(self, qualified_target, base_path, output_filename, spec, configs, part_of_all):
    """The main entry point: writes a .mk file for a single target.

    Arguments:
      qualified_target: target we're generating
      base_path: path relative to source root we're building in, used to
        resolve target-relative paths
      output_filename: output .mk file name to write
      spec, configs: gyp info
      part_of_all: flag indicating this target is part of 'all'
    """
    gyp.common.EnsureDirExists(output_filename)
    self.fp = open(output_filename, 'w')
    self.fp.write(header)
    # Stash per-target state on self; the Write* helpers below read it.
    self.qualified_target = qualified_target
    self.path = base_path
    self.target = spec['target_name']
    self.type = spec['type']
    self.toolset = spec['toolset']
    self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
    if (self.flavor == 'mac'):
        self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
    else:
        self.xcode_settings = None
    (deps, link_deps) = self.ComputeDeps(spec)
    # Accumulators filled in by the actions/rules/copies/resources writers.
    extra_outputs = []
    extra_sources = []
    extra_link_deps = []
    extra_mac_bundle_resources = []
    mac_bundle_deps = []
    if self.is_mac_bundle:
        self.output = self.ComputeMacBundleOutput(spec)
        self.output_binary = self.ComputeMacBundleBinaryOutput(spec)
    else:
        self.output = self.output_binary = self.ComputeOutput(spec)
    self.is_standalone_static_library = bool(spec.get('standalone_static_library', 0))
    self._INSTALLABLE_TARGETS = ('executable', 'loadable_module', 'shared_library')
    if (self.is_standalone_static_library or (self.type in self._INSTALLABLE_TARGETS)):
        self.alias = os.path.basename(self.output)
        install_path = self._InstallableTargetInstallPath()
    else:
        self.alias = self.output
        install_path = self.output
    self.WriteLn(('TOOLSET := ' + self.toolset))
    self.WriteLn(('TARGET := ' + self.target))
    # Actions must come first, since they can generate more OBJs for use below.
    if ('actions' in spec):
        self.WriteActions(spec['actions'], extra_sources, extra_outputs, extra_mac_bundle_resources, part_of_all)
    if ('rules' in spec):
        self.WriteRules(spec['rules'], extra_sources, extra_outputs, extra_mac_bundle_resources, part_of_all)
    if ('copies' in spec):
        self.WriteCopies(spec['copies'], extra_outputs, part_of_all)
    if self.is_mac_bundle:
        all_mac_bundle_resources = (spec.get('mac_bundle_resources', []) + extra_mac_bundle_resources)
        self.WriteMacBundleResources(all_mac_bundle_resources, mac_bundle_deps)
        self.WriteMacInfoPlist(mac_bundle_deps)
    all_sources = (spec.get('sources', []) + extra_sources)
    if all_sources:
        if (self.flavor == 'mac'):
            _ValidateSourcesForOSX(spec, all_sources)
        self.WriteSources(configs, deps, all_sources, extra_outputs, extra_link_deps, part_of_all, gyp.xcode_emulation.MacPrefixHeader(self.xcode_settings, (lambda p: Sourceify(self.Absolutify(p))), self.Pchify))
        # NOTE: py2 list-returning filter; the result is reused below.
        sources = filter(Compilable, all_sources)
        if sources:
            # Emit only the suffix rules for extensions this target uses.
            self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1)
            extensions = set([os.path.splitext(s)[1] for s in sources])
            for ext in extensions:
                if (ext in self.suffix_rules_srcdir):
                    self.WriteLn(self.suffix_rules_srcdir[ext])
            self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT2)
            for ext in extensions:
                if (ext in self.suffix_rules_objdir1):
                    self.WriteLn(self.suffix_rules_objdir1[ext])
            for ext in extensions:
                if (ext in self.suffix_rules_objdir2):
                    self.WriteLn(self.suffix_rules_objdir2[ext])
            self.WriteLn('# End of this set of suffix rules')
    if self.is_mac_bundle:
        # The bundle depends on the binary packaged inside it.
        mac_bundle_deps.append(self.output_binary)
    self.WriteTarget(spec, configs, deps, (extra_link_deps + link_deps), mac_bundle_deps, extra_outputs, part_of_all)
    # Record outputs so later targets can depend on / link against us.
    target_outputs[qualified_target] = install_path
    if (self.type in ('static_library', 'shared_library')):
        target_link_deps[qualified_target] = self.output_binary
    if self.generator_flags.get('android_ndk_version', None):
        self.WriteAndroidNdkModuleRule(self.target, all_sources, link_deps)
    self.fp.close()
def WriteSubMake(self, output_filename, makefile_path, targets, build_dir):
    """Write a "sub-project" Makefile.

    This is a small, wrapper Makefile that calls the top-level Makefile to
    build the targets from a single gyp file (i.e. a sub-project).

    Arguments:
      output_filename: sub-project Makefile name to write
      makefile_path: path to the top-level Makefile
      targets: list of "all" targets for this sub-project
      build_dir: build output directory, relative to the sub-project
    """
    gyp.common.EnsureDirExists(output_filename)
    self.fp = open(output_filename, 'w')
    self.fp.write(header)
    # The sub-make's builddir is expressed relative to this wrapper Makefile.
    builddir_name = os.path.join(os.path.dirname(output_filename), build_dir)
    self.WriteLn('export builddir_name ?= %s' % builddir_name)
    self.WriteLn('.PHONY: all')
    self.WriteLn('all:')
    if makefile_path:
        makefile_path = ' -C ' + makefile_path
    self.WriteLn(' DCTB $(MAKE)%s %s' % (makefile_path, ' '.join(targets)))
    self.fp.close()
def WriteActions(self, actions, extra_sources, extra_outputs, extra_mac_bundle_resources, part_of_all):
    """Write Makefile code for any 'actions' from the gyp input.

    extra_sources: a list that will be filled in with newly generated source
      files, if any
    extra_outputs: a list that will be filled in with any outputs of these
      actions (used to make other pieces dependent on these actions)
    part_of_all: flag indicating this target is part of 'all'
    """
    env = self.GetSortedXcodeEnv()
    for action in actions:
        name = StringToMakefileVariable(('%s_%s' % (self.qualified_target, action['action_name'])))
        self.WriteLn(('### Rules for action "%s":' % action['action_name']))
        inputs = action['inputs']
        outputs = action['outputs']
        # Collect output dirs so the command can mkdir -p them first.
        dirs = set()
        for out in outputs:
            dir = os.path.split(out)[0]
            if dir:
                dirs.add(dir)
        if int(action.get('process_outputs_as_sources', False)):
            extra_sources += outputs
        if int(action.get('process_outputs_as_mac_bundle_resources', False)):
            extra_mac_bundle_resources += outputs
        action_commands = action['action']
        if (self.flavor == 'mac'):
            action_commands = [gyp.xcode_emulation.ExpandEnvVars(command, env) for command in action_commands]
        command = gyp.common.EncodePOSIXShellList(action_commands)
        if ('message' in action):
            self.WriteLn(('quiet_cmd_%s = ACTION %s $@' % (name, action['message'])))
        else:
            self.WriteLn(('quiet_cmd_%s = ACTION %s $@' % (name, name)))
        if (len(dirs) > 0):
            command = ((('mkdir -p %s' % ' '.join(dirs)) + '; ') + command)
        cd_action = ('cd %s; ' % Sourceify((self.path or '.')))
        # The command runs from the gyp dir, where $(TARGET) as a make
        # variable is not available; substitute it textually.
        command = command.replace('$(TARGET)', self.target)
        cd_action = cd_action.replace('$(TARGET)', self.target)
        self.WriteLn(('cmd_%s = LD_LIBRARY_PATH=$(builddir)/lib.host:$(builddir)/lib.target:$$LD_LIBRARY_PATH; export LD_LIBRARY_PATH; %s%s' % (name, cd_action, command)))
        self.WriteLn()
        # NOTE: py2 list-returning map; outputs[0] is indexed just below.
        outputs = map(self.Absolutify, outputs)
        # The makefile rules are all relative to the top dir, but the gyp
        # actions are defined relative to their containing dir, so target-local
        # variables point the command at the right paths.
        self.WriteLn(('%s: obj := $(abs_obj)' % QuoteSpaces(outputs[0])))
        self.WriteLn(('%s: builddir := $(abs_builddir)' % QuoteSpaces(outputs[0])))
        self.WriteSortedXcodeEnv(outputs[0], self.GetSortedXcodeEnv())
        for input in inputs:
            assert (' ' not in input), ('Spaces in action input filenames not supported (%s)' % input)
        for output in outputs:
            assert (' ' not in output), ('Spaces in action output filenames not supported (%s)' % output)
        outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs]
        inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs]
        self.WriteDoCmd(outputs, map(Sourceify, map(self.Absolutify, inputs)), part_of_all=part_of_all, command=name)
        # Export the action's outputs so other parts can depend on them.
        outputs_variable = ('action_%s_outputs' % name)
        self.WriteLn(('%s := %s' % (outputs_variable, ' '.join(outputs))))
        extra_outputs.append(('$(%s)' % outputs_variable))
        self.WriteLn()
    self.WriteLn()
def WriteRules(self, rules, extra_sources, extra_outputs, extra_mac_bundle_resources, part_of_all):
    """Write Makefile code for any 'rules' from the gyp input.

    extra_sources: a list that will be filled in with newly generated source
      files, if any
    extra_outputs: a list that will be filled in with any outputs of these
      rules (used to make other pieces dependent on these rules)
    part_of_all: flag indicating this target is part of 'all'
    """
    env = self.GetSortedXcodeEnv()
    for rule in rules:
        name = StringToMakefileVariable(('%s_%s' % (self.qualified_target, rule['rule_name'])))
        count = 0
        self.WriteLn(('### Generated for rule %s:' % name))
        all_outputs = []
        # One numbered cmd_/quiet_cmd_ pair is emitted per rule source.
        for rule_source in rule.get('rule_sources', []):
            dirs = set()
            (rule_source_dirname, rule_source_basename) = os.path.split(rule_source)
            (rule_source_root, rule_source_ext) = os.path.splitext(rule_source_basename)
            outputs = [self.ExpandInputRoot(out, rule_source_root, rule_source_dirname) for out in rule['outputs']]
            for out in outputs:
                dir = os.path.dirname(out)
                if dir:
                    dirs.add(dir)
            if int(rule.get('process_outputs_as_sources', False)):
                extra_sources += outputs
            if int(rule.get('process_outputs_as_mac_bundle_resources', False)):
                extra_mac_bundle_resources += outputs
            # NOTE: py2 list-returning map (inputs/outputs are indexed below).
            inputs = map(Sourceify, map(self.Absolutify, ([rule_source] + rule.get('inputs', []))))
            actions = [('$(call do_cmd,%s_%d)' % (name, count))]
            if (name == 'resources_grit'):
                # HACK(review): special-cased workaround kept from upstream —
                # grit rules touch their outputs without recreating them.
                actions += ['@touch --no-create $@']
            outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs]
            inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs]
            outputs = map(self.Absolutify, outputs)
            all_outputs += outputs
            self.WriteLn(('%s: obj := $(abs_obj)' % outputs[0]))
            self.WriteLn(('%s: builddir := $(abs_builddir)' % outputs[0]))
            self.WriteMakeRule(outputs, inputs, actions, command=('%s_%d' % (name, count)))
            # Spaces are only tolerated inside make variable references such
            # as $(... $<); strip those before checking.
            variables_with_spaces = re.compile('\\$\\([^ ]* \\$<\\)')
            for output in outputs:
                output = re.sub(variables_with_spaces, '', output)
                assert (' ' not in output), ('Spaces in rule filenames not yet supported (%s)' % output)
            self.WriteLn(('all_deps += %s' % ' '.join(outputs)))
            action = [self.ExpandInputRoot(ac, rule_source_root, rule_source_dirname) for ac in rule['action']]
            mkdirs = ''
            if (len(dirs) > 0):
                mkdirs = ('mkdir -p %s; ' % ' '.join(dirs))
            cd_action = ('cd %s; ' % Sourceify((self.path or '.')))
            if (self.flavor == 'mac'):
                action = [gyp.xcode_emulation.ExpandEnvVars(command, env) for command in action]
            action = gyp.common.EncodePOSIXShellList(action)
            # $(TARGET) is not a make variable in this context; substitute it.
            action = action.replace('$(TARGET)', self.target)
            cd_action = cd_action.replace('$(TARGET)', self.target)
            mkdirs = mkdirs.replace('$(TARGET)', self.target)
            self.WriteLn(('cmd_%(name)s_%(count)d = LD_LIBRARY_PATH=$(builddir)/lib.host:$(builddir)/lib.target:$$LD_LIBRARY_PATH; export LD_LIBRARY_PATH; %(cd_action)s%(mkdirs)s%(action)s' % {'action': action, 'cd_action': cd_action, 'count': count, 'mkdirs': mkdirs, 'name': name}))
            self.WriteLn(('quiet_cmd_%(name)s_%(count)d = RULE %(name)s_%(count)d $@' % {'count': count, 'name': name}))
            self.WriteLn()
            count += 1
        # Export this rule's outputs so other parts can depend on them.
        outputs_variable = ('rule_%s_outputs' % name)
        self.WriteList(all_outputs, outputs_variable)
        extra_outputs.append(('$(%s)' % outputs_variable))
        self.WriteLn(('### Finished generating for rule: %s' % name))
        self.WriteLn()
    self.WriteLn('### Finished generating for all rules')
    self.WriteLn('')
def WriteCopies(self, copies, extra_outputs, part_of_all):
    """Write Makefile code for any 'copies' from the gyp input.

    extra_outputs: a list that will be filled in with any outputs of this
      action (used to make other pieces dependent on this action)
    part_of_all: flag indicating this target is part of 'all'
    """
    self.WriteLn('### Generated for copy rule.')
    variable = StringToMakefileVariable(self.qualified_target + '_copies')
    outputs = []
    # The Xcode env does not vary per copied file; compute it once instead of
    # inside the inner loop (matches WriteActions/WriteRules).
    env = self.GetSortedXcodeEnv()
    # Renamed from 'copy' to avoid shadowing the 'copy' module used elsewhere
    # in this file.
    for copy_entry in copies:
        for path in copy_entry['files']:
            path = Sourceify(self.Absolutify(path))
            filename = os.path.split(path)[1]
            output = Sourceify(self.Absolutify(os.path.join(copy_entry['destination'], filename)))
            output = gyp.xcode_emulation.ExpandEnvVars(output, env)
            path = gyp.xcode_emulation.ExpandEnvVars(path, env)
            self.WriteDoCmd([output], [path], 'copy', part_of_all)
            outputs.append(output)
    self.WriteLn('%s = %s' % (variable, ' '.join(map(QuoteSpaces, outputs))))
    extra_outputs.append('$(%s)' % variable)
    self.WriteLn()
def WriteMacBundleResources(self, resources, bundle_deps):
    """Writes Makefile code for 'mac_bundle_resources'."""
    self.WriteLn('### Generated for mac_bundle_resources')
    product_dir = generator_default_variables['PRODUCT_DIR']
    prepared_resources = map(Sourceify, map(self.Absolutify, resources))
    for (output, res) in gyp.xcode_emulation.GetMacBundleResources(product_dir, self.xcode_settings, prepared_resources):
        # Asset catalogs (.xcassets) are skipped here rather than copied.
        if os.path.splitext(output)[1] == '.xcassets':
            continue
        self.WriteDoCmd([output], [res], 'mac_tool,,,copy-bundle-resource', part_of_all=True)
        bundle_deps.append(output)
def WriteMacInfoPlist(self, bundle_deps):
    """Write Makefile code for bundle Info.plist files."""
    (info_plist, out, defines, extra_env) = gyp.xcode_emulation.GetMacInfoPlist(generator_default_variables['PRODUCT_DIR'], self.xcode_settings, (lambda p: Sourceify(self.Absolutify(p))))
    if (not info_plist):
        return
    if defines:
        # Preprocess the Info.plist through an intermediate file when there
        # are INFOPLIST defines, then copy that result into the bundle.
        intermediate_plist = ('$(obj).$(TOOLSET)/$(TARGET)/' + os.path.basename(info_plist))
        self.WriteList(defines, (intermediate_plist + ': INFOPLIST_DEFINES'), '-D', quoter=EscapeCppDefine)
        self.WriteMakeRule([intermediate_plist], [info_plist], ['$(call do_cmd,infoplist)', '@plutil -convert xml1 $@ $@'])
        info_plist = intermediate_plist
    # Env vars referenced by the plist are expanded by the copy-info-plist
    # tool, so write them out for this output first.
    self.WriteSortedXcodeEnv(out, self.GetSortedXcodeEnv(additional_settings=extra_env))
    self.WriteDoCmd([out], [info_plist], 'mac_tool,,,copy-info-plist', part_of_all=True)
    bundle_deps.append(out)
def WriteSources(self, configs, deps, sources, extra_outputs, extra_link_deps, part_of_all, precompiled_header):
    """Write Makefile code for any 'sources' from the gyp input.

    These are source files necessary to build the current target.

    configs, deps, sources: input from gyp.
    extra_outputs: a list of extra outputs this action should be dependent on;
      used to serialize action/rules before compilation
    extra_link_deps: a list that will be filled in with any outputs of
      compilation (to be used in link lines)
    part_of_all: flag indicating this target is part of 'all'
    """
    # Per-config flag variables (DEFS_/CFLAGS_/INCS_...) keyed by config name.
    for configname in sorted(configs.keys()):
        config = configs[configname]
        self.WriteList(config.get('defines'), ('DEFS_%s' % configname), prefix='-D', quoter=EscapeCppDefine)
        if (self.flavor == 'mac'):
            cflags = self.xcode_settings.GetCflags(configname)
            cflags_c = self.xcode_settings.GetCflagsC(configname)
            cflags_cc = self.xcode_settings.GetCflagsCC(configname)
            cflags_objc = self.xcode_settings.GetCflagsObjC(configname)
            cflags_objcc = self.xcode_settings.GetCflagsObjCC(configname)
        else:
            cflags = config.get('cflags')
            cflags_c = config.get('cflags_c')
            cflags_cc = config.get('cflags_cc')
        self.WriteLn('# Flags passed to all source files.')
        self.WriteList(cflags, ('CFLAGS_%s' % configname))
        self.WriteLn('# Flags passed to only C files.')
        self.WriteList(cflags_c, ('CFLAGS_C_%s' % configname))
        self.WriteLn('# Flags passed to only C++ files.')
        self.WriteList(cflags_cc, ('CFLAGS_CC_%s' % configname))
        if (self.flavor == 'mac'):
            self.WriteLn('# Flags passed to only ObjC files.')
            self.WriteList(cflags_objc, ('CFLAGS_OBJC_%s' % configname))
            self.WriteLn('# Flags passed to only ObjC++ files.')
            self.WriteList(cflags_objcc, ('CFLAGS_OBJCC_%s' % configname))
        includes = config.get('include_dirs')
        if includes:
            includes = map(Sourceify, map(self.Absolutify, includes))
        self.WriteList(includes, ('INCS_%s' % configname), prefix='-I')
    # NOTE: py2 list-returning filter/map; the lists are reused below.
    compilable = filter(Compilable, sources)
    objs = map(self.Objectify, map(self.Absolutify, map(Target, compilable)))
    self.WriteList(objs, 'OBJS')
    for obj in objs:
        assert (' ' not in obj), ('Spaces in object filenames not supported (%s)' % obj)
    self.WriteLn('# Add to the list of files we specially track dependencies for.')
    self.WriteLn('all_deps += $(OBJS)')
    self.WriteLn()
    # Order-only deps: deps/actions must finish before compilation starts,
    # but changing them should not by itself force recompiles.
    if deps:
        self.WriteMakeRule(['$(OBJS)'], deps, comment='Make sure our dependencies are built before any of us.', order_only=True)
    if extra_outputs:
        self.WriteMakeRule(['$(OBJS)'], extra_outputs, comment='Make sure our actions/rules run before any of us.', order_only=True)
    pchdeps = precompiled_header.GetObjDependencies(compilable, objs)
    if pchdeps:
        self.WriteLn('# Dependencies from obj files to their precompiled headers')
        for (source, obj, gch) in pchdeps:
            self.WriteLn(('%s: %s' % (obj, gch)))
        self.WriteLn('# End precompiled header dependencies')
    if objs:
        extra_link_deps.append('$(OBJS)')
        self.WriteLn('# CFLAGS et al overrides must be target-local.\n# See "Target-specific Variable Values" in the GNU Make manual.')
        self.WriteLn('$(OBJS): TOOLSET := $(TOOLSET)')
        self.WriteLn((('$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) %s ' % precompiled_header.GetInclude('c')) + '$(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))'))
        self.WriteLn((('$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) %s ' % precompiled_header.GetInclude('cc')) + '$(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))'))
        if (self.flavor == 'mac'):
            self.WriteLn((('$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) %s ' % precompiled_header.GetInclude('m')) + '$(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))'))
            self.WriteLn((('$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) %s ' % precompiled_header.GetInclude('mm')) + '$(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))'))
    self.WritePchTargets(precompiled_header.GetPchBuildCommands())
    # Linkable inputs (e.g. prebuilt objects/libraries) go straight onto the
    # link line.
    extra_link_deps += filter(Linkable, sources)
    self.WriteLn()
def WritePchTargets(self, pch_commands):
    """Writes make rules to compile prefix headers.

    |pch_commands| is a list of (gch, lang_flag, lang, input) tuples as
    produced by MacPrefixHeader.GetPchBuildCommands().
    """
    if not pch_commands:
        return
    # Per-language flag tables are loop-invariant; build them once instead of
    # rebuilding both dict literals on every iteration.
    extra_flags_by_lang = {
        'c': '$(CFLAGS_C_$(BUILDTYPE))',
        'cc': '$(CFLAGS_CC_$(BUILDTYPE))',
        'm': '$(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))',
        'mm': '$(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))',
    }
    var_name_by_lang = {
        'c': 'GYP_PCH_CFLAGS',
        'cc': 'GYP_PCH_CXXFLAGS',
        'm': 'GYP_PCH_OBJCFLAGS',
        'mm': 'GYP_PCH_OBJCXXFLAGS',
    }
    # 'input_path' renamed from 'input' to avoid shadowing the builtin.
    for (gch, lang_flag, lang, input_path) in pch_commands:
        extra_flags = extra_flags_by_lang[lang]
        var_name = var_name_by_lang[lang]
        # Target-local flag variable for this gch, then the build rule itself.
        self.WriteLn((('%s: %s := %s ' % (gch, var_name, lang_flag)) + '$(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) ') + extra_flags)
        self.WriteLn(('%s: %s FORCE_DO_CMD' % (gch, input_path)))
        self.WriteLn((' DCTB @$(call do_cmd,pch_%s,1)' % lang))
        self.WriteLn('')
        assert (' ' not in gch), ('Spaces in gch filenames not supported (%s)' % gch)
        self.WriteLn(('all_deps += %s' % gch))
        self.WriteLn('')
def ComputeOutputBasename(self, spec):
    """Return the 'output basename' of a gyp spec.

    E.g., the loadable module 'foobar' in directory 'baz' will produce
    'libfoobar.so'.
    """
    assert (not self.is_mac_bundle)
    if ((self.flavor == 'mac') and (self.type in ('static_library', 'executable', 'shared_library', 'loadable_module'))):
        return self.xcode_settings.GetExecutablePath()
    target = spec['target_name']
    target_prefix = ''
    target_ext = ''
    if (self.type == 'static_library'):
        # Idiomatic startswith() instead of target[:3] == 'lib'; avoids
        # emitting 'liblibfoo.a'.
        if target.startswith('lib'):
            target = target[3:]
        target_prefix = 'lib'
        target_ext = '.a'
    elif (self.type in ('loadable_module', 'shared_library')):
        if target.startswith('lib'):
            target = target[3:]
        target_prefix = 'lib'
        target_ext = '.so'
    elif (self.type == 'none'):
        target = ('%s.stamp' % target)
    elif (self.type != 'executable'):
        print ('ERROR: What output file should be generated?', 'type', self.type, 'target', target)
    # Explicit product_* settings override the defaults computed above.
    target_prefix = spec.get('product_prefix', target_prefix)
    target = spec.get('product_name', target)
    product_ext = spec.get('product_extension')
    if product_ext:
        target_ext = ('.' + product_ext)
    return ((target_prefix + target) + target_ext)
def ComputeOutput(self, spec):
    """Return the 'output' (full output path) of a gyp spec.

    E.g., the loadable module 'foobar' in directory 'baz' will produce
    '$(obj)/baz/libfoobar.so'.
    """
    assert not self.is_mac_bundle
    # Default location: the per-toolset object directory for this path.
    out_dir = os.path.join('$(obj).' + self.toolset, self.path)
    if self.type == 'executable' or self._InstallImmediately():
        out_dir = '$(builddir)'
    # An explicit 'product_dir' overrides either default.
    out_dir = spec.get('product_dir', out_dir)
    return os.path.join(out_dir, self.ComputeOutputBasename(spec))
def ComputeMacBundleOutput(self, spec):
    """Return the 'output' (full output path) to a bundle output
    directory."""
    assert self.is_mac_bundle
    wrapper_name = self.xcode_settings.GetWrapperName()
    return os.path.join(generator_default_variables['PRODUCT_DIR'], wrapper_name)
def ComputeMacBundleBinaryOutput(self, spec):
    """Return the 'output' (full output path) to the binary in a bundle.

    NOTE(review): unlike ComputeMacBundleOutput, this does not assert
    self.is_mac_bundle — confirm whether that is intentional.
    """
    binary_path = self.xcode_settings.GetExecutablePath()
    return os.path.join(generator_default_variables['PRODUCT_DIR'], binary_path)
def ComputeDeps(self, spec):
    """Compute the dependencies of a gyp spec.

    Returns a tuple (deps, link_deps), where each is a list of filenames
    that will need to be put in front of make for either building (deps) or
    linking (link_deps).
    """
    deps = []
    link_deps = []
    if 'dependencies' in spec:
        dependencies = spec['dependencies']
        deps.extend(target_outputs[dep] for dep in dependencies if target_outputs[dep])
        link_deps.extend(target_link_deps[dep] for dep in dependencies if dep in target_link_deps)
        # Anything we link against must also be built first.
        deps.extend(link_deps)
    return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps))
def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, extra_outputs, part_of_all):
    """Write Makefile code to produce the final target of the gyp spec.

    spec, configs: input from gyp.
    deps, link_deps: dependency lists; see ComputeDeps()
    extra_outputs: any extra outputs that our target should depend on
    part_of_all: flag indicating this target is part of 'all'
    """
    self.WriteLn('### Rules for final target.')
    if extra_outputs:
        self.WriteDependencyOnExtraOutputs(self.output_binary, extra_outputs)
        self.WriteMakeRule(extra_outputs, deps, comment='Preserve order dependency of special output on deps.', order_only=True)
    # Per-config link flags (and, on mac, implicit postbuilds).
    target_postbuilds = {}
    if (self.type != 'none'):
        for configname in sorted(configs.keys()):
            config = configs[configname]
            if (self.flavor == 'mac'):
                ldflags = self.xcode_settings.GetLdflags(configname, generator_default_variables['PRODUCT_DIR'], (lambda p: Sourceify(self.Absolutify(p))))
                gyp_to_build = gyp.common.InvertRelativePath(self.path)
                target_postbuild = self.xcode_settings.AddImplicitPostbuilds(configname, QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build, self.output))), QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build, self.output_binary))))
                if target_postbuild:
                    target_postbuilds[configname] = target_postbuild
            else:
                ldflags = config.get('ldflags', [])
                # Targets linking against shared libraries need an rpath so
                # the binary finds them at run/link time.
                if any(((dep.endswith('.so') or ('.so.' in dep)) for dep in deps)):
                    ldflags.append(('-Wl,-rpath=\\$$ORIGIN/lib.%s/' % self.toolset))
                    ldflags.append(('-Wl,-rpath-link=\\$(builddir)/lib.%s/' % self.toolset))
            library_dirs = config.get('library_dirs', [])
            ldflags += [('-L%s' % library_dir) for library_dir in library_dirs]
            self.WriteList(ldflags, ('LDFLAGS_%s' % configname))
            if (self.flavor == 'mac'):
                self.WriteList(self.xcode_settings.GetLibtoolflags(configname), ('LIBTOOLFLAGS_%s' % configname))
        libraries = spec.get('libraries')
        if libraries:
            libraries = gyp.common.uniquer(libraries)
            if (self.flavor == 'mac'):
                libraries = self.xcode_settings.AdjustLibraries(libraries)
        self.WriteList(libraries, 'LIBS')
        # Target-local variables consumed by the link rules.
        self.WriteLn(('%s: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))' % QuoteSpaces(self.output_binary)))
        self.WriteLn(('%s: LIBS := $(LIBS)' % QuoteSpaces(self.output_binary)))
        if (self.flavor == 'mac'):
            self.WriteLn(('%s: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))' % QuoteSpaces(self.output_binary)))
    # Collect explicit + implicit postbuild commands (mac only).
    postbuilds = []
    if (self.flavor == 'mac'):
        if target_postbuilds:
            postbuilds.append('$(TARGET_POSTBUILDS_$(BUILDTYPE))')
        postbuilds.extend(gyp.xcode_emulation.GetSpecPostbuildCommands(spec))
    if postbuilds:
        self.WriteSortedXcodeEnv(self.output, self.GetSortedXcodePostbuildEnv())
        for configname in target_postbuilds:
            self.WriteLn(('%s: TARGET_POSTBUILDS_%s := %s' % (QuoteSpaces(self.output), configname, gyp.common.EncodePOSIXShellList(target_postbuilds[configname]))))
        # Postbuilds run in the gyp file's directory.
        postbuilds.insert(0, gyp.common.EncodePOSIXShellList(['cd', self.path]))
        # Escape everything except make-variable references (which start
        # with '$' and must survive unescaped).
        for i in xrange(len(postbuilds)):
            if (not postbuilds[i].startswith('$')):
                postbuilds[i] = EscapeShellArgument(postbuilds[i])
        self.WriteLn(('%s: builddir := $(abs_builddir)' % QuoteSpaces(self.output)))
        self.WriteLn(('%s: POSTBUILDS := %s' % (QuoteSpaces(self.output), ' '.join(postbuilds))))
    if self.is_mac_bundle:
        # A bundle directory depends on its dependencies (binary, resources,
        # Info.plist); postbuilds are run on the bundle, not the binary.
        self.WriteDependencyOnExtraOutputs(self.output, extra_outputs)
        self.WriteList(map(QuoteSpaces, bundle_deps), 'BUNDLE_DEPS')
        self.WriteLn(('%s: $(BUNDLE_DEPS)' % QuoteSpaces(self.output)))
        if (self.type in ('shared_library', 'loadable_module')):
            self.WriteLn((' DCTB @$(call do_cmd,mac_package_framework,,,%s)' % self.xcode_settings.GetFrameworkVersion()))
        if postbuilds:
            self.WriteLn(' DCTB @$(call do_postbuilds)')
            # Consumed here so the assertion below doesn't fire for bundles.
            postbuilds = []
            self.WriteLn(' DCTB @true # No-op, used by tests')
        self.WriteLn((' DCTB @touch -c %s' % QuoteSpaces(self.output)))
    if postbuilds:
        assert (not self.is_mac_bundle), ("Postbuilds for bundles should be done on the bundle, not the binary (target '%s')" % self.target)
        assert ('product_dir' not in spec), 'Postbuilds do not work with custom product_dir'
    # Emit the actual build rule for the binary, by target type.
    if (self.type == 'executable'):
        self.WriteLn(('%s: LD_INPUTS := %s' % (QuoteSpaces(self.output_binary), ' '.join(map(QuoteSpaces, link_deps)))))
        if ((self.toolset == 'host') and (self.flavor == 'android')):
            self.WriteDoCmd([self.output_binary], link_deps, 'link_host', part_of_all, postbuilds=postbuilds)
        else:
            self.WriteDoCmd([self.output_binary], link_deps, 'link', part_of_all, postbuilds=postbuilds)
    elif (self.type == 'static_library'):
        for link_dep in link_deps:
            assert (' ' not in link_dep), ('Spaces in alink input filenames not supported (%s)' % link_dep)
        if ((self.flavor not in ('mac', 'openbsd', 'netbsd', 'win')) and (not self.is_standalone_static_library)):
            self.WriteDoCmd([self.output_binary], link_deps, 'alink_thin', part_of_all, postbuilds=postbuilds)
        else:
            self.WriteDoCmd([self.output_binary], link_deps, 'alink', part_of_all, postbuilds=postbuilds)
    elif (self.type == 'shared_library'):
        self.WriteLn(('%s: LD_INPUTS := %s' % (QuoteSpaces(self.output_binary), ' '.join(map(QuoteSpaces, link_deps)))))
        self.WriteDoCmd([self.output_binary], link_deps, 'solink', part_of_all, postbuilds=postbuilds)
    elif (self.type == 'loadable_module'):
        for link_dep in link_deps:
            assert (' ' not in link_dep), ('Spaces in module input filenames not supported (%s)' % link_dep)
        if ((self.toolset == 'host') and (self.flavor == 'android')):
            self.WriteDoCmd([self.output_binary], link_deps, 'solink_module_host', part_of_all, postbuilds=postbuilds)
        else:
            self.WriteDoCmd([self.output_binary], link_deps, 'solink_module', part_of_all, postbuilds=postbuilds)
    elif (self.type == 'none'):
        # 'none' targets just produce a stamp file.
        self.WriteDoCmd([self.output_binary], deps, 'touch', part_of_all, postbuilds=postbuilds)
    else:
        # NOTE(review): 'target' is not defined in this scope — this branch
        # would raise NameError if hit; likely meant self.target. Confirm.
        print 'WARNING: no output for', self.type, target
    # Phony aliases: short target name, and membership in 'all'.
    if ((self.output and (self.output != self.target)) and (self.type not in self._INSTALLABLE_TARGETS)):
        self.WriteMakeRule([self.target], [self.output], comment='Add target alias', phony=True)
        if part_of_all:
            self.WriteMakeRule(['all'], [self.target], comment='Add target alias to "all" target.', phony=True)
    # Installable targets get copied to the install path (if distinct) and
    # aliased from there.
    if ((self.type in self._INSTALLABLE_TARGETS) or self.is_standalone_static_library):
        if (self.type == 'shared_library'):
            file_desc = 'shared library'
        elif (self.type == 'static_library'):
            file_desc = 'static library'
        else:
            file_desc = 'executable'
        install_path = self._InstallableTargetInstallPath()
        installable_deps = [self.output]
        if ((self.flavor == 'mac') and (not ('product_dir' in spec)) and (self.toolset == 'target')):
            assert (install_path == self.output), ('%s != %s' % (install_path, self.output))
        self.WriteMakeRule([self.target], [install_path], comment='Add target alias', phony=True)
        if (install_path != self.output):
            assert (not self.is_mac_bundle)
            self.WriteDoCmd([install_path], [self.output], 'copy', comment=('Copy this to the %s output path.' % file_desc), part_of_all=part_of_all)
            installable_deps.append(install_path)
        if ((self.output != self.alias) and (self.alias != self.target)):
            self.WriteMakeRule([self.alias], installable_deps, comment=('Short alias for building this %s.' % file_desc), phony=True)
        if part_of_all:
            self.WriteMakeRule(['all'], [install_path], comment=('Add %s to "all" target.' % file_desc), phony=True)
def WriteList(self, value_list, variable=None, prefix='', quoter=QuoteIfNecessary):
    """Write a variable definition that is a list of values.

    E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
        foo = blaha blahb
    but in a pretty-printed style.
    """
    if value_list:
        quoted_items = [quoter(prefix + item) for item in value_list]
        values = ' \\\n DCTB ' + ' \\\n DCTB '.join(quoted_items)
    else:
        values = ''
    self.fp.write('%s :=%s\n\n' % (variable, values))
def WriteDoCmd(self, outputs, inputs, command, part_of_all, comment=None, postbuilds=False):
    """Emit a rule driven by the do_cmd helper.

    do_cmd makes the outputs depend on the command line itself and honours
    the ``V=`` verbosity flag.  All outputs are also appended to all_deps so
    the top-level Makefile can regenerate on command changes.
    """
    if postbuilds:
        # do_cmd's third argument flags a postbuild; commas would split it.
        assert ',' not in command
        flag = ',,1'
    else:
        flag = ''
    action = '$(call do_cmd,%s%s)' % (command, flag)
    self.WriteMakeRule(outputs, inputs, actions=[action], comment=comment, command=command, force=True)
    quoted_outputs = [QuoteSpaces(o, SPACE_REPLACEMENT) for o in outputs]
    self.WriteLn('all_deps += %s' % ' '.join(quoted_outputs))
def WriteMakeRule(self, outputs, inputs, actions=None, comment=None, order_only=False, force=False, phony=False, command=None):
    """Write a Makefile rule, with some extra tricks.

    outputs: list of rule outputs (make itself does not support multiple
        outputs directly; see the intermediate-target trick below)
    inputs: list of rule inputs
    actions: shell commands to run for the rule
    comment: comment emitted above the rule
    order_only: if true, the dependency is order-only (after a ``|``)
    force: if true, add FORCE_DO_CMD as a dep so the rule always re-runs
    phony: if true, the outputs are just names, not real files
    command: optional command name used to build an unambiguous
        intermediate-target label
    """
    outputs = map(QuoteSpaces, outputs)
    inputs = map(QuoteSpaces, inputs)
    if comment:
        self.WriteLn(('# ' + comment))
    if phony:
        self.WriteLn(('.PHONY: ' + ' '.join(outputs)))
    if actions:
        # TOOLSET must be visible to the recipe lines of this rule.
        self.WriteLn(('%s: TOOLSET := $(TOOLSET)' % outputs[0]))
    force_append = (' FORCE_DO_CMD' if force else '')
    if order_only:
        # Order-only rule: the outputs are made after the inputs, but are
        # not rebuilt just because an input is newer.
        self.WriteLn(('%s: | %s%s' % (' '.join(outputs), ' '.join(inputs), force_append)))
    elif (len(outputs) == 1):
        # Plain single-output rule.
        self.WriteLn(('%s: %s%s' % (outputs[0], ' '.join(inputs), force_append)))
    else:
        # Multiple outputs: make would run the recipe once per output.
        # Route them all through a single .INTERMEDIATE target so the
        # recipe runs exactly once.
        intermediate = ('%s.intermediate' % (command if command else self.target))
        self.WriteLn(('%s: %s' % (' '.join(outputs), intermediate)))
        self.WriteLn((' DCTB %s' % '@:'))
        self.WriteLn(('%s: %s' % ('.INTERMEDIATE', intermediate)))
        self.WriteLn(('%s: %s%s' % (intermediate, ' '.join(inputs), force_append)))
        # Touch the intermediate file so it is strictly newer than its deps.
        actions.insert(0, '$(call do_cmd,touch)')
    if actions:
        for action in actions:
            self.WriteLn((' DCTB %s' % action))
    self.WriteLn()
def WriteAndroidNdkModuleRule(self, module_name, all_sources, link_deps):
    """Write a set of LOCAL_XXX definitions for the Android NDK.

    These definitions are consumed by the NDK build system and are inert
    for non-Android builds.

    module_name: NDK module name; must be unique across all modules.
    all_sources: source files (filtered through Compilable below).
    link_deps: link dependencies, ordered from dependencies to dependents.
    """
    # Only linkable target types map onto NDK modules.
    if (self.type not in ('executable', 'shared_library', 'static_library')):
        return
    self.WriteLn('# Variable definitions for Android applications')
    self.WriteLn('include $(CLEAR_VARS)')
    self.WriteLn(('LOCAL_MODULE := ' + module_name))
    self.WriteLn('LOCAL_CFLAGS := $(CFLAGS_$(BUILDTYPE)) $(DEFS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(INCS_$(BUILDTYPE))')
    self.WriteLn('LOCAL_CPPFLAGS := $(CFLAGS_CC_$(BUILDTYPE))')
    self.WriteLn('LOCAL_C_INCLUDES :=')
    self.WriteLn('LOCAL_LDLIBS := $(LDFLAGS_$(BUILDTYPE)) $(LIBS)')
    # The NDK accepts a single C++ extension per module; pick the most
    # frequent one among the sources (ties favour '.cpp').
    cpp_ext = {'.cc': 0, '.cpp': 0, '.cxx': 0}
    default_cpp_ext = '.cpp'
    for filename in all_sources:
        ext = os.path.splitext(filename)[1]
        if (ext in cpp_ext):
            cpp_ext[ext] += 1
            if (cpp_ext[ext] > cpp_ext[default_cpp_ext]):
                default_cpp_ext = ext
    self.WriteLn(('LOCAL_CPP_EXTENSION := ' + default_cpp_ext))
    self.WriteList(map(self.Absolutify, filter(Compilable, all_sources)), 'LOCAL_SRC_FILES')
    # Strip the platform lib prefix/suffix from dep paths to recover NDK
    # module names.
    def DepsToModules(deps, prefix, suffix):
        modules = []
        for filepath in deps:
            filename = os.path.basename(filepath)
            if (filename.startswith(prefix) and filename.endswith(suffix)):
                modules.append(filename[len(prefix):(- len(suffix))])
        return modules
    # Shared-lib suffix comes from the Linux flavor's defaults, since the
    # NDK build targets Linux-style shared objects.
    params = {'flavor': 'linux'}
    default_variables = {}
    CalculateVariables(default_variables, params)
    self.WriteList(DepsToModules(link_deps, generator_default_variables['SHARED_LIB_PREFIX'], default_variables['SHARED_LIB_SUFFIX']), 'LOCAL_SHARED_LIBRARIES')
    self.WriteList(DepsToModules(link_deps, generator_default_variables['STATIC_LIB_PREFIX'], generator_default_variables['STATIC_LIB_SUFFIX']), 'LOCAL_STATIC_LIBRARIES')
    if (self.type == 'executable'):
        self.WriteLn('include $(BUILD_EXECUTABLE)')
    elif (self.type == 'shared_library'):
        self.WriteLn('include $(BUILD_SHARED_LIBRARY)')
    elif (self.type == 'static_library'):
        self.WriteLn('include $(BUILD_STATIC_LIBRARY)')
    self.WriteLn()
def Objectify(self, path):
    """Map *path* into the per-toolset object output directory form."""
    if '$(' in path:
        # Rewrite an embedded $(obj)/ component for the current toolset.
        path = path.replace('$(obj)/', '$(obj).%s/$(TARGET)/' % self.toolset)
    if '$(obj)' not in path:
        # Bare relative path: drop it under the toolset object directory.
        path = '$(obj).%s/$(TARGET)/%s' % (self.toolset, path)
    return path
def Pchify(self, path, lang):
    """Map a prefix-header path into the per-toolset, per-language pch dir."""
    path = self.Absolutify(path)
    if '$(' in path:
        # Variable-relative path: rewrite any $(obj)/ component in place.
        return path.replace('$(obj)/', '$(obj).%s/$(TARGET)/pch-%s' % (self.toolset, lang))
    return '$(obj).%s/$(TARGET)/pch-%s/%s' % (self.toolset, lang, path)
def Absolutify(self, path):
    """Turn a subdirectory-relative path into a base-relative one.

    Paths containing make variables are passed through untouched, apart
    from stripping any trailing slash.
    """
    if '$(' in path:
        return path.rstrip('/')
    return os.path.normpath(os.path.join(self.path, path))
def _InstallableTargetInstallPath(self):
    """Return the final output location for an installable target.

    Installable targets land directly in builddir under their alias name.
    """
    return '$(builddir)/' + self.alias
def __init__(self, path, name=None, entries=None, guid=None, items=None):
    """Initialize a solution folder entry.

    path: full path to the folder.
    name: display name; defaults to the path's basename.
    entries: nested Folder/Project entries (may be None).
    guid: optional pre-assigned GUID.
    items: solution items held directly by this folder (may be None).
    """
    self.name = name if name else os.path.basename(path)
    self.path = path
    self.guid = guid
    self.entries = sorted(list(entries or []))
    self.items = list(items or [])
    self.entry_type_guid = ENTRY_TYPE_GUIDS['folder']
def __init__(self, path, name=None, dependencies=None, guid=None, spec=None, build_file=None, config_platform_overrides=None, fixpath_prefix=None):
    """Initialize a solution project entry.

    path: absolute path to the project file.
    name: project name; defaults to the file's stem.
    dependencies: other Project objects this one depends on.
    guid: optional pre-assigned GUID.
    spec: build spec dictionary.
    build_file: the .gyp file this vcproj derives from.
    config_platform_overrides: per-configuration platform overrides.
    fixpath_prefix: prefix used to adjust _fixpath behavior.
    """
    self.path = path
    self.guid = guid
    self.spec = spec
    self.build_file = build_file
    self.name = name or os.path.splitext(os.path.basename(path))[0]
    self.dependencies = list(dependencies or [])
    self.entry_type_guid = ENTRY_TYPE_GUIDS['project']
    self.config_platform_overrides = config_platform_overrides if config_platform_overrides else {}
    self.fixpath_prefix = fixpath_prefix
    self.msbuild_toolset = None
def __init__(self, path, version, entries=None, variants=None, websiteProperties=True):
    """Initialize the solution and immediately write it to disk.

    path: path of the .sln file.
    version: format version object used for the header.
    entries: Folder/Project entries (may be None).
    variants: build variant strings; a Win32 Debug/Release pair by default.
    websiteProperties: whether to emit the WebsiteProperties section.
    """
    self.path = path
    self.websiteProperties = websiteProperties
    self.version = version
    self.entries = list(entries or [])
    self.variants = variants[:] if variants else ['Debug|Win32', 'Release|Win32']
    # TODO(rspangler): deprecate this behavior - writing in the constructor.
    self.Write()
def Write(self, writer=gyp.common.WriteOnDiff):
    """Write the solution file to disk.

    Raises:
        IndexError: an entry appears multiple times.
    """
    # Flatten the folder tree into a de-duplicated, sorted set of entries.
    all_entries = set()
    entries_to_check = self.entries[:]
    while entries_to_check:
        e = entries_to_check.pop(0)
        if (e in all_entries):
            continue
        all_entries.add(e)
        if isinstance(e, MSVSFolder):
            entries_to_check += e.entries
    all_entries = sorted(all_entries)
    # Open file and print header.
    f = writer(self.path)
    f.write(('Microsoft Visual Studio Solution File, Format Version %s\r\n' % self.version.SolutionVersion()))
    f.write(('# %s\r\n' % self.version.Description()))
    # Project entries.
    sln_root = os.path.split(self.path)[0]
    for e in all_entries:
        relative_path = gyp.common.RelativePath(e.path, sln_root)
        # msbuild does not accept an empty folder_name; use '.' instead.
        folder_name = (relative_path.replace('/', '\\') or '.')
        f.write(('Project("%s") = "%s", "%s", "%s"\r\n' % (e.entry_type_guid, e.name, folder_name, e.get_guid())))
        if self.websiteProperties:
            f.write(' DCTB ProjectSection(WebsiteProperties) = preProject\r\n DCTB DCTB Debug.AspNetCompiler.Debug = "True"\r\n DCTB DCTB Release.AspNetCompiler.Debug = "False"\r\n DCTB EndProjectSection\r\n')
        if isinstance(e, MSVSFolder):
            if e.items:
                f.write(' DCTB ProjectSection(SolutionItems) = preProject\r\n')
                for i in e.items:
                    f.write((' DCTB DCTB %s = %s\r\n' % (i, i)))
                f.write(' DCTB EndProjectSection\r\n')
        if isinstance(e, MSVSProject):
            if e.dependencies:
                f.write(' DCTB ProjectSection(ProjectDependencies) = postProject\r\n')
                for d in e.dependencies:
                    f.write((' DCTB DCTB %s = %s\r\n' % (d.get_guid(), d.get_guid())))
                f.write(' DCTB EndProjectSection\r\n')
        f.write('EndProject\r\n')
    # Global section.
    f.write('Global\r\n')
    # Configurations (variants).
    f.write(' DCTB GlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n')
    for v in self.variants:
        f.write((' DCTB DCTB %s = %s\r\n' % (v, v)))
    f.write(' DCTB EndGlobalSection\r\n')
    # Sort config GUIDs for easier diffing of solution changes.
    config_guids = []
    config_guids_overrides = {}
    for e in all_entries:
        if isinstance(e, MSVSProject):
            config_guids.append(e.get_guid())
            config_guids_overrides[e.get_guid()] = e.config_platform_overrides
    config_guids.sort()
    f.write(' DCTB GlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n')
    for g in config_guids:
        for v in self.variants:
            # Pick the overridden platform for this config, if any.
            nv = config_guids_overrides[g].get(v, v)
            f.write((' DCTB DCTB %s.%s.ActiveCfg = %s\r\n' % (g, v, nv)))
            f.write((' DCTB DCTB %s.%s.Build.0 = %s\r\n' % (g, v, nv)))
    f.write(' DCTB EndGlobalSection\r\n')
    # Dependencies are entered by the projects themselves.
    f.write(' DCTB GlobalSection(SolutionProperties) = preSolution\r\n')
    f.write(' DCTB DCTB HideSolutionNode = FALSE\r\n')
    f.write(' DCTB EndGlobalSection\r\n')
    # Folder mappings: nest every folder's children under their folder GUID.
    if any([e.entries for e in all_entries if isinstance(e, MSVSFolder)]):
        f.write(' DCTB GlobalSection(NestedProjects) = preSolution\r\n')
        for e in all_entries:
            if (not isinstance(e, MSVSFolder)):
                continue
            for subentry in e.entries:
                f.write((' DCTB DCTB %s = %s\r\n' % (subentry.get_guid(), e.get_guid())))
        f.write(' DCTB EndGlobalSection\r\n')
    f.write('EndGlobal\r\n')
    f.close()
def __init__(self, server, spider, key):
    """Initialize a per-spider Redis queue.

    server: a Redis connection.
    spider: the spider instance this queue serves.
    key: key template, e.g. "%(spider)s:queue"; the spider name is
        interpolated so each spider has its own queue key.
    """
    self.server = server
    self.spider = spider
    self.key = (key % {'spider': spider.name})
def _encode_request(self, request):
    """Serialize *request* for Redis storage via pickle (highest protocol)."""
    as_dict = request_to_dict(request, self.spider)
    return pickle.dumps(as_dict, protocol=-1)
def _decode_request(self, encoded_request):
    """Rebuild a Request previously serialized by _encode_request."""
    as_dict = pickle.loads(encoded_request)
    return request_from_dict(as_dict, self.spider)
def __len__(self):
    """Number of pending requests; abstract, implemented per backend."""
    raise NotImplementedError
def push(self, request):
    """Enqueue *request*; abstract, implemented per backend."""
    raise NotImplementedError
def pop(self):
    """Dequeue and return a request; abstract, implemented per backend."""
    raise NotImplementedError
def clear(self):
    """Drop the queue/stack by deleting its backing Redis key."""
    self.server.delete(self.key)
def __len__(self):
    """Number of pending requests (Redis LLEN on the queue key)."""
    return self.server.llen(self.key)
def push(self, request):
    """Enqueue *request* at the head of the Redis list (FIFO with rpop)."""
    self.server.lpush(self.key, self._encode_request(request))
def pop(self):
    """Take the oldest request from the tail of the list, or None if empty."""
    raw = self.server.rpop(self.key)
    return self._decode_request(raw) if raw else None
def __len__(self):
    """Number of pending requests (Redis ZCARD on the sorted-set key)."""
    return self.server.zcard(self.key)
def push(self, request):
    """Insert *request* scored by -priority so higher priority pops first."""
    payload = self._encode_request(request)
    self.server.zadd(self.key, **{payload: -request.priority})
def pop(self):
    """Atomically fetch-and-remove the best-scored request via a pipeline."""
    pipeline = self.server.pipeline()
    pipeline.multi()
    # zrange + zremrangebyrank execute together so no other consumer can
    # grab the same element between read and delete.
    pipeline.zrange(self.key, 0, 0).zremrangebyrank(self.key, 0, 0)
    found, _removed = pipeline.execute()
    return self._decode_request(found[0]) if found else None
def __len__(self):
    """Number of pending requests on the stack (Redis LLEN)."""
    return self.server.llen(self.key)
def push(self, request):
    """Push *request* onto the head of the list (LIFO with lpop)."""
    self.server.lpush(self.key, self._encode_request(request))
def pop(self):
    """Pop the newest request from the head of the list, or None if empty."""
    raw = self.server.lpop(self.key)
    return self._decode_request(raw) if raw else None
def __init__(self, server, persist, queue_key, queue_cls, dupefilter_key):
    """Initialize the scheduler; no Redis I/O happens here.

    server: Redis connection instance.
    persist: if true, the queue is kept across crawls.
    queue_key: key template for the request queue.
    queue_cls: queue class to instantiate in open().
    dupefilter_key: key template for the duplicate filter.
    """
    self.server = server
    self.persist = persist
    self.queue_key = queue_key
    self.queue_cls = queue_cls
    self.dupefilter_key = dupefilter_key
def open(self, spider):
    """Wire up the queue and dupefilter when Scrapy opens *spider*."""
    self.spider = spider
    self.queue = self.queue_cls(self.server, spider, self.queue_key)
    self.df = RFPDupeFilter(self.server, (self.dupefilter_key % {'spider': spider.name}))
    # A non-empty queue means we are resuming a previous crawl.
    if len(self.queue):
        spider.log('Resuming crawl (%d requests scheduled)' % len(self.queue))
def __init__(self, server, key):
    """Initialize the duplication filter.

    server: Redis connection instance.
    key: Redis key (a set) where request fingerprints are stored.
    """
    self.server = server
    self.key = key
def request_seen(self, request):
    """Return True if *request*'s fingerprint was seen; record it otherwise."""
    fingerprint = request_fingerprint(request)
    if self.server.sismember(self.key, fingerprint):
        return True
    # First sighting: remember it for the next call.
    self.server.sadd(self.key, fingerprint)
    return False
def close(self, reason):
    """Called by Scrapy's scheduler on shutdown; drops the stored data."""
    self.clear()
def clear(self):
    """Delete the Redis key holding all recorded fingerprints."""
    self.server.delete(self.key)
def stat_file(self, key, info):
    """Return stats for the GridFS file stored under *key*.

    Returns a dict with 'last_modified' (GridFS upload date) and
    'checksum' (server-side md5).

    Fix: the original called ``self.fs.get(key)`` twice — two GridFS
    round-trips that could even observe two different revisions of the
    file. Fetch the grid-out object once and read both attributes from it.
    """
    gridfile = self.fs.get(key)
    return {'last_modified': gridfile.upload_date, 'checksum': gridfile.md5}
def process_item(self, item, spider):
    """Kick off media downloads for *item*, deferring completion handling.

    Unlike the stock MediaPipeline, the result of item_completed is fed to
    another_process_item so follow-up Requests can be processed too.
    """
    info = self.spiderinfo[spider]
    media_requests = arg_to_iter(self.get_media_requests(item, info))
    pending = [self._process_request(req, info) for req in media_requests]
    bundle = DeferredList(pending, consumeErrors=1)
    bundle.addCallback(self.item_completed, item, info)
    return bundle.addCallback(self.another_process_item, item, info)
def another_process_item(self, result, item, info):
    """Handle the value produced by item_completed.

    An Item is passed through; a Request triggers another download round
    whose completion re-enters this method.
    """
    # NOTE(review): validating with assert means this is stripped under
    # python -O; it also makes the final `else` branch unreachable, since
    # any non-Item/non-Request result fails the assert first.
    assert isinstance(result, (Item, Request)), ("WoaiduBookFile pipeline' item_completed must return Item or Request, got %s" % type(result))
    if isinstance(result, Item):
        return result
    elif isinstance(result, Request):
        # Schedule the follow-up request(s) and recurse on completion.
        dlist = [self._process_request(r, info) for r in arg_to_iter(result)]
        dfd = DeferredList(dlist, consumeErrors=1)
        dfd.addCallback(self.item_completed, item, info)
        return dfd.addCallback(self.another_process_item, item, info)
    else:
        raise NofilesDrop
def get_media_requests(self, item, info):
    """Pick one download URL per book and issue a single Request for it.

    The remaining candidate URLs are stashed in self.item_download keyed
    by the item's original_url, so a failed download can fall back to them.
    """
    if item.get('book_download'):
        downloadfile_urls = [i['url'] for i in item.get('book_download') if i['url']]
        # NOTE(review): chain(*...) flattens one level — this assumes each
        # i['url'] is itself a list of URLs; if it were a plain string this
        # would iterate its characters. TODO confirm against the spider.
        downloadfile_urls = list(set(itertools.chain(*downloadfile_urls)))
        first_download_file = list_first_item(downloadfile_urls)
        # Keep the untried URLs as fallbacks for this item.
        self.item_download[item['original_url']] = downloadfile_urls[1:]
        if first_download_file:
            return Request(first_download_file)
def media_downloaded(self, response, request, info):
    """Validate and persist a completed book-file download.

    Returns a dict with url, book_file_id and checksum on success; raises
    BookFileException for any of the failure conditions below.
    """
    referer = request.headers.get('Referer')
    # HTTP-level failure.
    if (response.status != 200):
        log.msg(format='%(medianame)s (code: %(status)s): Error downloading %(medianame)s from %(request)s referred in <%(referer)s>', level=log.WARNING, spider=info.spider, medianame=self.MEDIA_NAME, status=response.status, request=request, referer=referer)
        raise BookFileException(request.url, ('%s: download-error' % (request.url,)))
    # Empty body.
    if (not response.body):
        log.msg(format='%(medianame)s (empty-content): Empty %(medianame)s from %(request)s referred in <%(referer)s>: no-content', level=log.WARNING, spider=info.spider, medianame=self.MEDIA_NAME, request=request, referer=referer)
        raise BookFileException(request.url, ('%s: empty-content' % (request.url,)))
    status = ('cached' if ('cached' in response.flags) else 'downloaded')
    log.msg(format='%(medianame)s (%(status)s): Downloaded %(medianame)s from %(request)s referred in <%(referer)s>', level=log.DEBUG, spider=info.spider, medianame=self.MEDIA_NAME, status=status, request=request, referer=referer)
    # NOTE(review): despite its name, is_valid_content_type returns True
    # for an *unacceptable* Content-Type (see its definition).
    if self.is_valid_content_type(response):
        raise BookFileException(request.url, ('%s: invalid-content_type' % (request.url,)))
    filename = self.get_file_name(request, response)
    if (not filename):
        raise BookFileException(request.url, ('%s: noaccess-filename' % (request.url,)))
    self.inc_stats(info.spider, status)
    try:
        key = self.file_key(request.url)
        (book_file_id, checksum) = self.store.persist_file(key, response.body, info, filename)
    except BookFileException as exc:
        # Log the storage failure, then re-raise for the media pipeline.
        whyfmt = '%(medianame)s (error): Error processing %(medianame)s from %(request)s referred in <%(referer)s>: %(errormsg)s'
        log.msg(format=whyfmt, level=log.WARNING, spider=info.spider, medianame=self.MEDIA_NAME, request=request, referer=referer, errormsg=str(exc))
        raise
    return {'url': request.url, 'book_file_id': book_file_id, 'checksum': checksum}
def file_key(self, url):
    """Build a storage key from the URL's host plus the URL's SHA1 digest."""
    digest = hashlib.sha1(url).hexdigest()
    return '%s_%s' % (urlparse(url).netloc, digest)
def is_valid_content_type(self, response):
    """Return True when the Content-Type is NOT an accepted book type.

    NOTE(review): the name reads inverted — a truthy return means the
    response is *invalid*; callers raise when this returns True.
    """
    declared = response.headers.get('Content-Type', '')
    return declared not in self.BOOK_FILE_CONTENT_TYPE
def process_item(self, item, spider):
    """Kick off media downloads for *item*, deferring completion handling.

    The result of item_completed is chained into another_process_item so
    that follow-up Requests returned there get processed as well.
    """
    info = self.spiderinfo[spider]
    media_requests = arg_to_iter(self.get_media_requests(item, info))
    pending = [self._process_request(req, info) for req in media_requests]
    bundle = DeferredList(pending, consumeErrors=1)
    bundle.addCallback(self.item_completed, item, info)
    return bundle.addCallback(self.another_process_item, item, info)
def another_process_item(self, result, item, info):
    """Handle the value produced by item_completed.

    An Item is passed through; a Request triggers another download round
    whose completion re-enters this method.
    """
    # NOTE(review): assert-based validation is stripped under python -O and
    # makes the final `else` unreachable (non-Item/Request fails the assert
    # before reaching it).
    assert isinstance(result, (Item, Request)), ("WoaiduBookFile pipeline' item_completed must return Item or Request, got %s" % type(result))
    if isinstance(result, Item):
        return result
    elif isinstance(result, Request):
        # Schedule the follow-up request(s) and recurse on completion.
        dlist = [self._process_request(r, info) for r in arg_to_iter(result)]
        dfd = DeferredList(dlist, consumeErrors=1)
        dfd.addCallback(self.item_completed, item, info)
        return dfd.addCallback(self.another_process_item, item, info)
    else:
        raise NofilesDrop
def get_media_requests(self, item, info):
    """Pick one download URL per book and issue a single Request for it.

    The untried candidate URLs are stored in self.item_download keyed by
    the item's original_url, so a failed download can fall back to them.
    """
    if item.get('book_download'):
        downloadfile_urls = [i['url'] for i in item.get('book_download') if i['url']]
        # NOTE(review): chain(*...) flattens one level — this assumes each
        # i['url'] is itself a list of URLs; a plain string here would be
        # iterated character by character. TODO confirm against the spider.
        downloadfile_urls = list(set(itertools.chain(*downloadfile_urls)))
        first_download_file = list_first_item(downloadfile_urls)
        # Keep the remaining URLs as fallbacks for this item.
        self.item_download[item['original_url']] = downloadfile_urls[1:]
        if first_download_file:
            return Request(first_download_file)
def is_valid_content_type(self, response):
    """Return True when the Content-Type is NOT an accepted book type.

    NOTE(review): naming is inverted — a truthy return means *invalid*;
    callers raise on a truthy result.
    """
    declared = response.headers.get('Content-Type', '')
    return declared not in self.BOOK_FILE_CONTENT_TYPE
def __init__(self):
    """Open the MongoDB connection for this pipeline.

    PyMongo's built-in connection pooling makes a shared MongoClient safe
    to use from multi-threaded code; the only async framework PyMongo
    fully supports is Gevent.
    """
    self.style = color.color_style()
    try:
        client = MongoClient(self.MONGODB_SERVER, self.MONGODB_PORT)
        self.db = client[self.MONGODB_DB]
    except Exception as e:
        # NOTE(review): broad catch — a failed connection only prints and
        # leaves self.db unset, so later attribute access will fail.
        print self.style.ERROR(('ERROR(SingleMongodbPipeline): %s' % (str(e),)))
        traceback.print_exc()
def __init__(self):
    """Open the MongoDB connection for the sharded pipeline.

    PyMongo's built-in connection pooling makes a shared MongoClient safe
    to use from multi-threaded code; the only async framework PyMongo
    fully supports is Gevent.
    """
    self.style = color.color_style()
    try:
        client = MongoClient(self.MONGODB_SERVER, self.MONGODB_PORT)
        self.db = client[self.MONGODB_DB]
    except Exception as e:
        # NOTE(review): broad catch — a failed connection only prints and
        # leaves self.db unset, so later attribute access will fail.
        print self.style.ERROR(('ERROR(ShardMongodbPipeline): %s' % (str(e),)))
        traceback.print_exc()
def stat_file(self, key, info):
    """Return cache stats for the file stored under *key* on disk.

    The key maps to a directory that must contain exactly one stored file;
    anything else is treated as a corrupt cache entry, removed, and
    reported as a miss ({}).

    Returns {'last_modified': mtime, 'checksum': md5} or {} on a miss.

    Fix: the bare ``except:`` around getmtime (which also swallowed
    KeyboardInterrupt/SystemExit) is narrowed to OSError, the only
    expected failure from a filesystem stat.
    """
    keydir = os.path.join(self.basedir, *key.split('/'))
    filenames = os.listdir(keydir)
    if (len(filenames) != 1):
        # Zero or several files: corrupt entry — drop it and miss.
        shutil.rmtree(keydir, True)
        return {}
    filename = list_first_item(filenames)
    absolute_path = self._get_filesystem_path(key)
    try:
        last_modified = os.path.getmtime(absolute_path)
    except OSError:
        # Entry vanished or is unreadable: report a cache miss.
        return {}
    with open(os.path.join(absolute_path, filename), 'rb') as file_content:
        checksum = md5sum(file_content)
    return {'last_modified': last_modified, 'checksum': checksum}
def media_downloaded(self, response, request, info):
    """Validate and persist a completed file download.

    Returns a dict with url, storage path and checksum on success; raises
    FileException for any of the failure conditions below.
    """
    referer = request.headers.get('Referer')
    # HTTP-level failure.
    if (response.status != 200):
        log.msg(format='%(medianame)s (code: %(status)s): Error downloading %(medianame)s from %(request)s referred in <%(referer)s>', level=log.WARNING, spider=info.spider, medianame=self.MEDIA_NAME, status=response.status, request=request, referer=referer)
        raise FileException(request.url, ('%s: download-error' % (request.url,)))
    # Empty body.
    if (not response.body):
        log.msg(format='%(medianame)s (empty-content): Empty %(medianame)s from %(request)s referred in <%(referer)s>: no-content', level=log.WARNING, spider=info.spider, medianame=self.MEDIA_NAME, request=request, referer=referer)
        raise FileException(request.url, ('%s: empty-content' % (request.url,)))
    status = ('cached' if ('cached' in response.flags) else 'downloaded')
    log.msg(format='%(medianame)s (%(status)s): Downloaded %(medianame)s from %(request)s referred in <%(referer)s>', level=log.DEBUG, spider=info.spider, medianame=self.MEDIA_NAME, status=status, request=request, referer=referer)
    # NOTE(review): despite its name, is_valid_content_type returns truthy
    # for an *unacceptable* Content-Type (see its definition).
    if self.is_valid_content_type(response):
        raise FileException(request.url, ('%s: invalid-content_type' % (request.url,)))
    filename = self.get_file_name(request, response)
    if (not filename):
        raise FileException(request.url, ('%s: noaccess-filename' % (request.url,)))
    self.inc_stats(info.spider, status)
    try:
        key = self.file_key(request.url)
        checksum = self.store.persist_file(key, response.body, info, filename)
    except FileException as exc:
        # Log the storage failure, then re-raise for the media pipeline.
        whyfmt = '%(medianame)s (error): Error processing %(medianame)s from %(request)s referred in <%(referer)s>: %(errormsg)s'
        log.msg(format=whyfmt, level=log.WARNING, spider=info.spider, medianame=self.MEDIA_NAME, request=request, referer=referer, errormsg=str(exc))
        raise
    return {'url': request.url, 'path': key, 'checksum': checksum}
def is_valid_content_type(self, response):
    """Always return True — this variant performs no Content-Type check.

    NOTE(review): media_downloaded treats a truthy return as an *invalid*
    content type and raises; confirm this unconditional True is intended
    for this pipeline.
    """
    return True
def file_key(self, url):
    """Build a storage path "<netloc>/<sha1(url)>" for the given URL."""
    digest = hashlib.sha1(url).hexdigest()
    return '%s/%s' % (urlparse(url).netloc, digest)
def get_file_name(self, request, response):
    """Recover the server-supplied filename for a download.

    Two sources are examined, in order: the Content-Disposition header,
    then the last URL path segment (only if its extension is recognised).
    Returns '' when no usable name is found.
    """
    content_dispo = response.headers.get('Content-Disposition', '')
    filename = ''
    if content_dispo:
        # e.g. 'attachment; filename="foo.pdf"'
        for i in content_dispo.split(';'):
            if ('filename' in i):
                filename = i.split('filename=')[1].strip(' \n\'"')
                break
    if filename:
        # Header filenames are raw bytes (py2 str); decode per-domain —
        # known UTF-8 senders vs. the GBK default for Chinese sites.
        if (urlparse(request.url).netloc in self.ATTACHMENT_FILENAME_UTF8_DOMAIN):
            filename = filename.decode('utf-8')
        else:
            filename = filename.decode('gbk')
    else:
        # Fall back to the URL's last path segment.
        guessname = request.url.split('/')[(-1)]
        if (os.path.splitext(guessname)[1].lower() in self.FILE_EXTENTION):
            # NOTE(review): GBK-domain names are re-encoded to UTF-8 here,
            # while header-derived names above stay unicode — confirm the
            # caller copes with both types.
            if (urlparse(request.url).netloc in self.URL_GBK_DOMAIN):
                filename = urllib.unquote(guessname).decode('gbk').encode('utf-8')
            else:
                filename = urllib.unquote(guessname)
    return filename
def process_request(self, request, spider):
    """Redirect matching requests through the Google cache.

    When the request's host is in this spider's cached-domain set, a new
    Request for the Google-cache URL is returned (tagged via meta so later
    stages know), replacing the original. Scrapy keeps the original
    callback on the replaced request.
    """
    cached_domains = self.cache[spider]
    if cached_domains:
        if urlparse(request.url).netloc in cached_domains:
            request = request.replace(url=self.google_cache + request.url)
            request.meta['google_cache'] = True
            return request
def parse(self, method, payload):
    """Parse the payload into (result, cursors-or-None); abstract."""
    raise NotImplementedError
def parse_error(self, method, payload):
    """Extract an error message from the payload; abstract.

    Raising here lets the caller fall back to its default error message.
    """
    raise NotImplementedError
@classmethod
def parse(cls, api, json):
    """Build a model instance from a JSON object; abstract."""
    raise NotImplementedError
@classmethod
def parse_list(cls, api, json_list):
    """Parse every JSON object in *json_list* into a ResultSet of models."""
    results = ResultSet()
    for payload in json_list:
        results.append(cls.parse(api, payload))
    return results
def apply_auth(self, url, method, headers, parameters):
    """Attach authentication headers to a request; abstract."""
    raise NotImplementedError
def get_username(self):
    """Return the authenticated user's username; abstract."""
    raise NotImplementedError
def get_authorization_url(self, signin_with_twitter=False):
    """Fetch a request token and return the URL to redirect the user to.

    signin_with_twitter: use the 'authenticate' endpoint (Sign in with
        Twitter flow) instead of the plain 'authorize' endpoint.

    Raises WeibopError wrapping any underlying failure.
    """
    try:
        # Obtain and remember the unauthorized request token.
        self.request_token = self._get_request_token()
        if signin_with_twitter:
            url = self._get_oauth_url('authenticate')
        else:
            url = self._get_oauth_url('authorize')
        request = oauth.OAuthRequest.from_token_and_callback(token=self.request_token, http_url=url, callback=self.callback)
        return request.to_url()
    except Exception as e:
        # NOTE(review): broad catch collapses all failures (network,
        # parsing, ...) into WeibopError, discarding the traceback.
        raise WeibopError(e)
def get_access_token(self, verifier=None):
    """Exchange the authorized request token for an access token.

    verifier: the user-supplied OAuth verifier (PIN) from the provider.

    Stores the token on self.access_token and returns it; raises
    WeibopError wrapping any underlying failure.
    """
    try:
        url = self._get_oauth_url('access_token')
        # Build and sign the exchange request with the request token.
        request = oauth.OAuthRequest.from_consumer_and_token(self._consumer, token=self.request_token, http_url=url, verifier=str(verifier))
        request.sign_request(self._sigmethod, self._consumer, self.request_token)
        resp = urlopen(Request(url, headers=request.to_header()))
        self.access_token = oauth.OAuthToken.from_string(resp.read())
        # NOTE(review): prints credentials to stdout — debugging leftover?
        print ('Access token key: ' + str(self.access_token.key))
        print ('Access token secret: ' + str(self.access_token.secret))
        return self.access_token
    except Exception as e:
        # NOTE(review): broad catch collapses all failures into WeibopError.
        raise WeibopError(e)
def from_string(s):
    """Build an OAuthToken from "oauth_token_secret=xxx&oauth_token=xxx"."""
    params = cgi.parse_qs(s, keep_blank_values=False)
    token = OAuthToken(params['oauth_token'][0], params['oauth_token_secret'][0])
    if 'oauth_callback_confirmed' in params:
        token.callback_confirmed = params['oauth_callback_confirmed'][0]
    return token
def get_nonoauth_parameters(self):
    """Return the parameters whose names do not contain 'oauth_'."""
    return dict((k, v) for (k, v) in self.parameters.iteritems() if k.find('oauth_') < 0)
def to_header(self, realm=''):
    """Serialize the oauth_* parameters as an HTTP Authorization header."""
    header = 'OAuth realm="%s"' % realm
    if self.parameters:
        for (key, value) in self.parameters.iteritems():
            # Only protocol parameters belong in the header.
            if key[:6] == 'oauth_':
                header += ', %s="%s"' % (key, escape(str(value)))
    return {'Authorization': header}
def to_postdata(self):
    """Serialize all parameters as form-encoded POST data."""
    pairs = ['%s=%s' % (escape(str(k)), escape(str(v))) for (k, v) in self.parameters.iteritems()]
    return '&'.join(pairs)
def to_url(self):
    """Serialize as a GET URL: normalized base URL plus encoded params."""
    return '%s?%s' % (self.get_normalized_http_url(), self.to_postdata())
def get_normalized_parameters(self):
    """Return the sorted, escaped 'k=v' join used as the signature base.

    NOTE(review): `params` aliases self.parameters, so deleting
    oauth_signature mutates the request's own parameter dict as a side
    effect — confirm callers depend on this before changing it.
    """
    params = self.parameters
    try:
        # The signature itself must never be part of the signed string.
        del params['oauth_signature']
    except:
        # Bare except tolerated: only a KeyError is expected when the
        # request has not been signed yet.
        pass
    key_values = [(escape(_utf8_str(k)), escape(_utf8_str(v))) for (k, v) in params.items()]
    # Lexicographic sort of escaped pairs, per the OAuth 1.0 spec.
    key_values.sort()
    return '&'.join([('%s=%s' % (k, v)) for (k, v) in key_values])
def get_normalized_http_method(self):
    """Return the HTTP method uppercased, as the OAuth base string needs."""
    return self.http_method.upper()
def get_normalized_http_url(self):
    """Rebuild the URL as scheme://host/path, dropping default ports,
    query string and fragment."""
    scheme, netloc, path = urlparse.urlparse(self.http_url)[:3]
    if scheme == 'http' and netloc.endswith(':80'):
        netloc = netloc[:-3]
    elif scheme == 'https' and netloc.endswith(':443'):
        netloc = netloc[:-4]
    return '%s://%s%s' % (scheme, netloc, path)
def sign_request(self, signature_method, consumer, token):
    """Record the signature method, then set oauth_signature from it."""
    self.set_parameter('oauth_signature_method', signature_method.get_name())
    signature = self.build_signature(signature_method, consumer, token)
    self.set_parameter('oauth_signature', signature)
def build_signature(self, signature_method, consumer, token):
    """Delegate signature computation to the signature method object."""
    return signature_method.build_signature(self, consumer, token)
def from_request(http_method, http_url, headers=None, parameters=None, query_string=None):
    """Build an OAuthRequest by merging every available parameter source.

    Precedence (later update() calls win): explicit *parameters*, then the
    Authorization header, then *query_string*, then the URL's own query.
    Returns None when no parameters are found anywhere.
    """
    if parameters is None:
        parameters = {}
    # 1) Authorization header, if it is an OAuth one.
    if headers and 'Authorization' in headers:
        auth_header = headers['Authorization']
        if auth_header[:6] == 'OAuth ':
            auth_header = auth_header[6:]
            try:
                parameters.update(OAuthRequest._split_header(auth_header))
            except:
                raise OAuthError('Unable to parse OAuth parameters from Authorization header.')
    # 2) Explicit query string (e.g. a POST body).
    if query_string:
        parameters.update(OAuthRequest._split_url_string(query_string))
    # 3) Query component of the URL itself.
    param_str = urlparse.urlparse(http_url)[4]
    parameters.update(OAuthRequest._split_url_string(param_str))
    if parameters:
        return OAuthRequest(http_method, http_url, parameters)
    return None
def _split_header(header):
    """Parse an OAuth Authorization header's parameters into a dict."""
    params = {}
    for chunk in header.split(','):
        # realm is not an OAuth protocol parameter; skip it.
        if chunk.find('realm') > -1:
            continue
        chunk = chunk.strip()
        pieces = chunk.split('=', 1)
        params[pieces[0]] = urllib.unquote(pieces[1].strip('"'))
    return params
def _split_url_string(param_str):
    """Parse a query string into {name: unquoted first value}."""
    params = cgi.parse_qs(param_str, keep_blank_values=False)
    for (name, values) in params.iteritems():
        # parse_qs returns value lists; keep only the first, unquoted.
        params[name] = urllib.unquote(values[0])
    return params
def fetch_request_token(self, oauth_request):
    """Process a request_token request; return the request token.

    Control flow is exception-driven: looking up an existing token first
    and only minting a new one when that lookup raises OAuthError.
    """
    try:
        # get the request token for authorization
        token = self._get_token(oauth_request, 'request')
    except OAuthError:
        # No token yet: this is a genuine new request_token request.
        version = self._get_version(oauth_request)
        consumer = self._get_consumer(oauth_request)
        try:
            callback = self.get_callback(oauth_request)
        except OAuthError:
            callback = None
        # Signature is checked without a token at this stage.
        self._check_signature(oauth_request, consumer, None)
        token = self.data_store.fetch_request_token(consumer, callback)
    return token
def fetch_access_token(self, oauth_request):
    """Exchange an authorized request token for an access token."""
    version = self._get_version(oauth_request)
    consumer = self._get_consumer(oauth_request)
    # The verifier is optional (absent in the pre-1.0a flow).
    try:
        verifier = self._get_verifier(oauth_request)
    except OAuthError:
        verifier = None
    request_token = self._get_token(oauth_request, 'request')
    self._check_signature(oauth_request, consumer, request_token)
    return self.data_store.fetch_access_token(consumer, request_token, verifier)
def verify_request(self, oauth_request):
    """Verify an API call; return (consumer, token, non-oauth params)."""
    version = self._get_version(oauth_request)
    consumer = self._get_consumer(oauth_request)
    access_token = self._get_token(oauth_request, 'access')
    self._check_signature(oauth_request, consumer, access_token)
    extra_params = oauth_request.get_nonoauth_parameters()
    return (consumer, access_token, extra_params)
def authorize_token(self, token, user):
    """Mark *token* as authorized by *user* via the data store."""
    return self.data_store.authorize_request_token(token, user)