Dataset columns (viewer summary):
  _id               string, 2-7 characters
  title             string, 1-88 characters
  partition         string, 3 classes
  text              string, 75-19.8k characters
  language          string, 1 class
  meta_information  dict
q4300
ModuleGraph.findModuleOfName
train
def findModuleOfName(self, dotted_name, level, filename, extrapath=None):
    """Given a fully qualified name, find what module contains it."""
    if dotted_name.endswith('.*'):
        return dotted_name[:-2]
    name = dotted_name
    # extrapath is None only in a couple of test cases; in real life it's
    # always present
    if level and level > 1 and extrapath:
        # strip trailing path bits for each extra level to account for
        # relative imports
        # from . import X has level == 1 and nothing is stripped
        #   (the level > 1 check accounts for this case)
        # from .. import X has level == 2 and one trailing path component
        #   must go
        # from ... import X has level == 3 and two trailing path components
        #   must go
        extrapath = extrapath.split(os.path.sep)
        level -= 1
        extrapath = extrapath[0:-level]
        extrapath = os.path.sep.join(extrapath)
    while name:
        candidate = self.isModule(name, extrapath)
        if candidate:
            return candidate
        candidate = self.isPackage(name, extrapath)
        if candidate:
            return candidate
        name = name[:name.rfind('.')]
    self.warn(dotted_name, '%s: could not find %s', filename, dotted_name)
    return dotted_name
python
{ "resource": "" }
q4301
ModuleGraph.isModule
train
def isModule(self, dotted_name, extrapath=None): """Is ``dotted_name`` the name of a module?""" try: return self._module_cache[(dotted_name, extrapath)] except KeyError: pass if dotted_name in sys.modules or dotted_name in self.builtin_modules: return dotted_name filename = dotted_name.replace('.', os.path.sep) if extrapath: for ext in self._exts: candidate = os.path.join(extrapath, filename) + ext if os.path.exists(candidate): modname = self.filenameToModname(candidate) self._module_cache[(dotted_name, extrapath)] = modname return modname try: return self._module_cache[(dotted_name, None)] except KeyError: pass for dir in self.path: if os.path.isfile(dir): if dir.endswith('.egg-info'): # distribute creates a setuptools-blah-blah.egg-info # that ends up in sys.path continue try: zf = zipfile.ZipFile(dir) except zipfile.BadZipfile: self.warn(dir, "%s: not a directory or zip file", dir) continue names = zf.namelist() for ext in self._exts: candidate = filename + ext if candidate in names: modname = filename.replace(os.path.sep, '.') self._module_cache[(dotted_name, extrapath)] = modname self._module_cache[(dotted_name, None)] = modname return modname else: for ext in self._exts: candidate = os.path.join(dir, filename) + ext if os.path.exists(candidate): modname = self.filenameToModname(candidate) self._module_cache[(dotted_name, extrapath)] = modname self._module_cache[(dotted_name, None)] = modname return modname return None
python
{ "resource": "" }
q4302
ModuleGraph.isPackage
train
def isPackage(self, dotted_name, extrapath=None):
    """Is ``dotted_name`` the name of a package?"""
    candidate = self.isModule(dotted_name + '.__init__', extrapath)
    if candidate:
        candidate = candidate[:-len(".__init__")]
    return candidate
python
{ "resource": "" }
q4303
ModuleGraph.packageOf
train
def packageOf(self, dotted_name, packagelevel=None):
    """Determine the package that contains ``dotted_name``."""
    if '.' not in dotted_name:
        return dotted_name
    if not self.isPackage(dotted_name):
        dotted_name = '.'.join(dotted_name.split('.')[:-1])
    if packagelevel:
        dotted_name = '.'.join(dotted_name.split('.')[:packagelevel])
    return dotted_name
python
{ "resource": "" }
q4304
ModuleGraph.listModules
train
def listModules(self):
    """Return an alphabetical list of all modules."""
    modules = list(self.modules.items())
    modules.sort()
    return [module for name, module in modules]
python
{ "resource": "" }
q4305
ModuleGraph.packageGraph
train
def packageGraph(self, packagelevel=None):
    """Convert a module graph to a package graph."""
    packages = {}
    for module in self.listModules():
        package_name = self.packageOf(module.modname, packagelevel)
        if package_name not in packages:
            dirname = os.path.dirname(module.filename)
            packages[package_name] = Module(package_name, dirname)
        package = packages[package_name]
        for name in module.imports:
            package_name = self.packageOf(name, packagelevel)
            if package_name != package.modname:  # no loops
                package.imports.add(package_name)
    graph = ModuleGraph()
    graph.modules = packages
    return graph
python
{ "resource": "" }
q4306
ModuleGraph.collapseCycles
train
def collapseCycles(self): """Create a graph with cycles collapsed. Collapse modules participating in a cycle to a single node. """ # This algorithm determines Strongly Connected Components. Look it up. # It is adapted to suit our data structures. # Phase 0: prepare the graph imports = {} for u in self.modules: imports[u] = set() for v in self.modules[u].imports: if v in self.modules: # skip external dependencies imports[u].add(v) # Phase 1: order the vertices visited = {} for u in self.modules: visited[u] = False order = [] def visit1(u): visited[u] = True for v in imports[u]: if not visited[v]: visit1(v) order.append(u) for u in self.modules: if not visited[u]: visit1(u) order.reverse() # Phase 2: compute the inverse graph revimports = {} for u in self.modules: revimports[u] = set() for u in self.modules: for v in imports[u]: revimports[v].add(u) # Phase 3: determine the strongly connected components components = {} component_of = {} for u in self.modules: visited[u] = False def visit2(u): visited[u] = True component.append(u) for v in revimports[u]: if not visited[v]: visit2(v) for u in order: if not visited[u]: component = [] visit2(u) component.sort() node = ModuleCycle(component) components[node.modname] = node for modname in component: component_of[modname] = node # Phase 4: construct the condensed graph for node in components.values(): for modname in node.modnames: for impname in imports[modname]: other = component_of[impname].modname if other != node.modname: node.imports.add(other) graph = ModuleGraph() graph.modules = components return graph
python
{ "resource": "" }
q4307
ModuleGraph.printImportedNames
train
def printImportedNames(self):
    """Produce a report of imported names."""
    for module in self.listModules():
        print("%s:" % module.modname)
        print("  %s" % "\n  ".join(imp.name for imp in module.imported_names))
python
{ "resource": "" }
q4308
ModuleGraph.printImports
train
def printImports(self):
    """Produce a report of dependencies."""
    for module in self.listModules():
        print("%s:" % module.label)
        if self.external_dependencies:
            imports = list(module.imports)
        else:
            imports = [modname for modname in module.imports
                       if modname in self.modules]
        imports.sort()
        print("  %s" % "\n  ".join(imports))
python
{ "resource": "" }
q4309
ModuleGraph.printUnusedImports
train
def printUnusedImports(self):
    """Produce a report of unused imports."""
    for module in self.listModules():
        names = [(unused.lineno, unused.name)
                 for unused in module.unused_names]
        names.sort()
        for lineno, name in names:
            if not self.all_unused:
                line = linecache.getline(module.filename, lineno)
                if '#' in line:
                    # assume there's a comment explaining why it's not used
                    continue
            print("%s:%s: %s not used" % (module.filename, lineno, name))
python
{ "resource": "" }
q4310
ModuleGraph.printDot
train
def printDot(self): """Produce a dependency graph in dot format.""" print("digraph ModuleDependencies {") print(" node[shape=box];") allNames = set() nameDict = {} for n, module in enumerate(self.listModules()): module._dot_name = "mod%d" % n nameDict[module.modname] = module._dot_name print(" %s[label=\"%s\"];" % (module._dot_name, quote(module.label))) allNames |= module.imports print(" node[style=dotted];") if self.external_dependencies: myNames = set(self.modules) extNames = list(allNames - myNames) extNames.sort() for n, name in enumerate(extNames): nameDict[name] = id = "extmod%d" % n print(" %s[label=\"%s\"];" % (id, name)) for modname, module in sorted(self.modules.items()): for other in sorted(module.imports): if other in nameDict: print(" %s -> %s;" % (nameDict[module.modname], nameDict[other])) print("}")
python
{ "resource": "" }
q4311
quote
train
def quote(text):
    """encode html entities"""
    text = unicode(text)
    return text.translate({
        ord('&'): u'&amp;',
        ord('<'): u'&lt;',
        ord('"'): u'&quot;',
        ord('>'): u'&gt;',
        ord('@'): u'&#64;',
        0xa0: u'&nbsp;'})
python
{ "resource": "" }
q4312
_create_tags
train
def _create_tags(ctx): "create all classes and put them in ctx" for (tag, info) in _TAGS.items(): class_name = tag.title() quote_, compact, self_closing, docs = info def __init__(self, *childs, **attrs): TagBase.__init__(self, childs, attrs) cls = type(class_name, (TagBase,), { "__doc__": docs, "__init__": __init__ }) cls.QUOTE = quote_ cls.COMPACT = compact cls.SELF_CLOSING = self_closing ctx[class_name] = cls
python
{ "resource": "" }
q4313
tag_from_element
train
def tag_from_element(el):
    """
    Convert an Element into a Tag.

    ``el`` is an instance of ``Element``. Returns an instance of the
    corresponding subclass of ``TagBase``.
    """
    tag = el.tag
    namespace = None
    if tag.startswith('{'):
        # Strip namespace of the form "{namespace}tag"
        namespace, tag = tag[1:].split('}')
    try:
        cls = globals()[tag.title()]
        if not issubclass(cls, TagBase):
            raise KeyError()
    except KeyError:
        raise ValueError("TagBase doesn't have a subclass for '%s'." % tag)
    children = [tag_from_element(c) for c in el]
    tag = cls(*children, **el.attrib)
    tag.text = el.text
    tag.tail = el.tail
    if namespace:
        tag.attrib['xmlns'] = namespace
    return tag
python
{ "resource": "" }
q4314
html_to_tags
train
def html_to_tags(code):
    """
    Convert HTML code to tags.

    ``code`` is a string containing HTML code. The return value is a list
    of corresponding instances of ``TagBase``.
    """
    code = ('<div>' + code + '</div>').encode('utf8')
    el = ET.fromstring(code)
    return [tag_from_element(c) for c in el]
python
{ "resource": "" }
q4315
HTMLTranslator._init_math_handler
train
def _init_math_handler(self): """ Parse math configuration and set up math handler. """ fields = self.settings.math_output.split(None, 1) name = fields[0].lower() option = fields[1] if len(fields) > 1 else None if name == 'html': option = self.settings.math_css or option self.math_handler = HTMLMathHandler(css_filename=option) elif name == 'mathml': if option: raise ValueError(('Math handler "%s" does not support ' + 'option "%s".') % (name, option)) self.math_handler = MathMLMathHandler() elif name == 'mathjax': # The MathJax handler can be configured via different ways: # # - By passing an additional JS url to "--math-output" # (to stay backwards-compatible with docutils) # # - By using "--mathjax-opts" (to stay backwards compatible # with the previous html5css3 mathjax postprocessor) # # - By using "--mathjax-url" and "--mathjax-config" (the # preferred way) js_url = option config = None if self.settings.mathjax_opts: parts = self.settings.mathjax_opts.split(',') options = dict(part.split('=', 1) for part in parts) js_url = options.get('url', js_url) config = options.get('config', config) js_url = self.settings.mathjax_url or js_url config = self.settings.mathjax_config or config self.math_handler = MathJaxMathHandler(js_url=js_url, config_filename=config) elif name == 'latex': if option: raise ValueError(('Math handler "%s" does not support ' + 'option "%s".') % (name, option)) self.math_handler = LaTeXMathHandler() else: raise ValueError('Unknown math handler "%s".' % name)
python
{ "resource": "" }
q4316
HTMLTranslator.append_default_stylesheets
train
def append_default_stylesheets(self):
    """
    Appends the default styles defined on the translator settings.
    """
    for style in utils.get_stylesheet_list(self.settings):
        self.css(style)
python
{ "resource": "" }
q4317
repr_args
train
def repr_args(args):
    """formats a list of function arguments prettily but as working code

    (kwargs are tuples (argname, argvalue)
    """
    res = []
    for x in args:
        if isinstance(x, tuple) and len(x) == 2:
            key, value = x
            # todo: exclude this key if value is its default
            res += ["%s=%s" % (key, repr_arg(value))]
        else:
            res += [repr_arg(x)]
    return ', '.join(res)
python
{ "resource": "" }
q4318
repr_arg
train
def repr_arg(d):
    """formats a function argument prettily but as working code

    unicode encodable as ascii is formatted as str"""
    if isinstance(d, dict):
        # if d can be expressed in key=value syntax:
        return "{%s}" % ", ".join(
            "%s: %s" % (repr_arg(k), repr_arg(v)) for k, v in d.items())
    if isinstance(d, list):
        return "[%s]" % ", ".join(repr_arg(elem) for elem in d)
    if isinstance(d, unicode):
        try:
            return repr(d.encode("ascii"))
        except UnicodeEncodeError:
            return repr(d)
    return repr(d)
python
{ "resource": "" }
q4319
str_args
train
def str_args(args):
    """formats a list of function arguments prettily not as code

    (kwargs are tuples (argname, argvalue)
    """
    res = []
    for x in args:
        if isinstance(x, tuple) and len(x) == 2:
            key, value = x
            if value and str_arg(value):
                res += ["%s=%s" % (key, str_arg(value))]
        else:
            res += [str_arg(x)]
    return ', '.join(res)
python
{ "resource": "" }
q4320
str_arg
train
def str_arg(d): """formats a function argument prettily not as code dicts are expressed in {key=value} syntax strings are formatted using str in quotes not repr""" if not d: return None if isinstance(d, dict): if len(d) == 2 and d.get('type') == 'text' and 'value' in d: return str_arg(d['value']) if len(d) == 2 and d.get('type') == 'text' and 'subkey' in d: return ".%s" % d['subkey'] if d.get('type') == 'module': return None return "{%s}" % str_args(d.items()) if isinstance(d, list): if len(d) == 1: return str_arg(d[0]) return "[%s]" % ", ".join(str_arg(elem) for elem in d) if isinstance(d, unicode): return '"%s"' % d return repr(d)
python
{ "resource": "" }
q4321
asyncPipeHash
train
def asyncPipeHash(context=None, _INPUT=None, conf=None, **kwargs): """A string module that asynchronously hashes the given text. Loopable. Parameters ---------- context : pipe2py.Context object _INPUT : twisted Deferred iterable of items or strings Returns ------- _OUTPUT : twisted.internet.defer.Deferred generator of hashed strings """ splits = yield asyncGetSplits(_INPUT, conf, **cdicts(opts, kwargs)) parsed = yield asyncDispatch(splits, *get_async_dispatch_funcs()) _OUTPUT = yield asyncStarMap(partial(maybeDeferred, parse_result), parsed) returnValue(iter(_OUTPUT))
python
{ "resource": "" }
q4322
pipe_tail
train
def pipe_tail(context=None, _INPUT=None, conf=None, **kwargs):
    """Returns a specified number of items from the bottom of a feed.

    Parameters
    ----------
    context : pipe2py.Context object
    _INPUT : pipe2py.modules pipe like object (iterable of items)
    kwargs -- terminal, if the truncation value is wired in
    conf : count -- length of the truncated feed, if specified literally

    Yields
    ------
    _OUTPUT : items
    """
    conf = DotDict(conf)
    limit = conf.get('count', func=int, **kwargs)

    for item in deque(_INPUT, limit):
        yield item
python
{ "resource": "" }
q4323
get_graph_component
train
def get_graph_component(graph): """ Identify strongly connected components in a graph using Tarjan's algorithm. graph should be a dictionary mapping node names to lists of successor nodes. """ components = map(partial(_visit, graph=graph), graph) node_component = dict(_gen_node_component(components)) graph_component = {component: [] for component in components} graph_component.update( dict(_gen_graph_component(graph, node_component, _gen_graph_value))) return graph_component
python
{ "resource": "" }
q4324
pipe_xpathfetchpage
train
def pipe_xpathfetchpage(context=None, _INPUT=None, conf=None, **kwargs): """A source that fetches the content of a given website as DOM nodes or a string. Loopable. context : pipe2py.Context object _INPUT : pipeforever pipe or an iterable of items or fields conf : dict URL -- url object contain the URL to download xpath -- xpath to extract html5 -- use html5 parser? useAsString -- emit items as string? TODOS: - don't retrieve pages larger than 1.5MB - don't retrieve if page is not indexable. Yields ------ _OUTPUT : items """ conf = DotDict(conf) urls = utils.listize(conf['URL']) for item in _INPUT: for item_url in urls: url = utils.get_value(DotDict(item_url), DotDict(item), **kwargs) url = utils.get_abspath(url) f = urlopen(url) # TODO: it seems that Yahoo! converts relative links to # absolute. This needs to be done on the content but seems to # be a non-trival task python? content = unicode(f.read(), 'utf-8') if context and context.verbose: print '............Content .................' print content print '...............EOF...................' xpath = conf.get('xpath', **kwargs) html5 = conf.get('html5', **kwargs) == 'true' use_as_string = conf.get('useAsString', **kwargs) == 'true' tree = html5parser.parse(f) if html5 else html.parse(f) root = tree.getroot() items = root.xpath(xpath) if context and context.verbose: print 'XPathFetchPage: found count items:', len(items) for etree in items: i = utils.etree_to_dict(etree) if context and context.verbose: print '--------------item data --------------------' print i print '--------------EOF item data ----------------' if use_as_string: yield {'content': unicode(i)} else: yield i if item.get('forever'): # _INPUT is pipeforever and not a loop, # so we just yield our item once break
python
{ "resource": "" }
q4325
extract_dependencies
train
def extract_dependencies(pipe_def=None, pipe_generator=None):
    """Extract modules used by a pipe"""
    if pipe_def:
        pydeps = gen_dependencies(pipe_def)
    elif pipe_generator:
        pydeps = pipe_generator(Context(describe_dependencies=True))
    else:
        raise Exception('Must supply at least one kwarg!')

    return sorted(set(pydeps))
python
{ "resource": "" }
q4326
extract_input
train
def extract_input(pipe_def=None, pipe_generator=None):
    """Extract inputs required by a pipe"""
    if pipe_def:
        pyinput = gen_input(pipe_def)
    elif pipe_generator:
        pyinput = pipe_generator(Context(describe_input=True))
    else:
        raise Exception('Must supply at least one kwarg!')

    return sorted(list(pyinput))
python
{ "resource": "" }
q4327
pythonise
train
def pythonise(id, encoding='ascii'):
    """Return a Python-friendly id"""
    replace = {'-': '_', ':': '_', '/': '_'}
    func = lambda id, pair: id.replace(pair[0], pair[1])
    id = reduce(func, replace.iteritems(), id)
    id = '_%s' % id if id[0] in string.digits else id
    return id.encode(encoding)
python
{ "resource": "" }
q4328
etree_to_dict
train
def etree_to_dict(element):
    """Convert an eTree xml into dict imitating how Yahoo Pipes does it.

    todo: further investigate white space and multivalue handling
    """
    i = dict(element.items())
    content = element.text.strip() if element.text else None
    i.update({'content': content}) if content else None

    if len(element.getchildren()):
        for child in element.iterchildren():
            tag = child.tag.split('}', 1)[-1]
            new = etree_to_dict(child)
            content = _make_content(i, tag, new)
            i.update({tag: content}) if content else None

            tag = 'content'
            new = child.tail.strip() if child.tail else None
            content = _make_content(i, tag, new)
            i.update({tag: content}) if content else None
    elif content and not set(i).difference(['content']):
        # element is leaf node and doesn't have attributes
        i = content

    return i
python
{ "resource": "" }
q4329
broadcast
train
def broadcast(_INPUT, *funcs, **kwargs): """copies an iterable and delivers the items to multiple functions /--> foo2bar(_INPUT) --> \ / \ _INPUT ---> foo2baz(_INPUT) ---> _OUTPUT \ / \--> foo2qux(_INPUT) --> / One way to construct such a flow in code would be:: _INPUT = repeat('foo', 3) foo2bar = lambda word: word.replace('foo', 'bar') foo2baz = lambda word: word.replace('foo', 'baz') foo2qux = lambda word: word.replace('foo', 'quz') _OUTPUT = broadcast(_INPUT, foo2bar, foo2baz, foo2qux) _OUTPUT == repeat(('bar', 'baz', 'qux'), 3) """ map_func = kwargs.get('map_func', _map_func) apply_func = kwargs.get('apply_func', _apply_func) splits = izip(*tee(_INPUT, len(funcs))) return map_func(partial(apply_func, funcs), splits)
python
{ "resource": "" }
q4330
url_quote
train
def url_quote(url):
    """Ensure url is valid"""
    try:
        return quote(url, safe=URL_SAFE)
    except KeyError:
        return quote(encode(url), safe=URL_SAFE)
python
{ "resource": "" }
q4331
asyncPipeItembuilder
train
def asyncPipeItembuilder(context=None, _INPUT=None, conf=None, **kwargs): """A source that asynchronously builds an item. Loopable. Parameters ---------- context : pipe2py.Context object _INPUT : asyncPipe like object (twisted Deferred iterable of items) conf : { 'attrs': [ {'key': {'value': 'title'}, 'value': {'value': 'new title'}}, {'key': {'value': 'desc.content'}, 'value': {'value': 'new desc'}} ] } Returns ------ _OUTPUT : twisted.internet.defer.Deferred generator of items """ pkwargs = cdicts(opts, kwargs) asyncFuncs = yield asyncGetSplits(None, conf['attrs'], **pkwargs) _input = yield _INPUT finite = utils.finitize(_input) inputs = imap(DotDict, finite) pieces = yield asyncImap(asyncFuncs[0], inputs) results = imap(utils.parse_params, pieces) _OUTPUT = imap(DotDict, results) returnValue(_OUTPUT)
python
{ "resource": "" }
q4332
pipe_itembuilder
train
def pipe_itembuilder(context=None, _INPUT=None, conf=None, **kwargs): """A source that builds an item. Loopable. Parameters ---------- context : pipe2py.Context object _INPUT : pipeforever pipe or an iterable of items conf : { 'attrs': [ {'key': {'value': <'title'>}, 'value': {'value': <'chair'>}}, {'key': {'value': <'color'>}, 'value': {'value': <'red'>}} ] } Returns ------ _OUTPUT : generator of items """ funcs = get_splits(None, conf['attrs'], **cdicts(opts, kwargs)) finite = utils.finitize(_INPUT) inputs = imap(DotDict, finite) pieces = imap(funcs[0], inputs) results = imap(utils.parse_params, pieces) _OUTPUT = imap(DotDict, results) return _OUTPUT
python
{ "resource": "" }
q4333
asyncPipeLoop
train
def asyncPipeLoop(context=None, _INPUT=None, conf=None, embed=None, **kwargs): """An operator that asynchronously loops over the input and performs the embedded submodule. Not loopable. Parameters ---------- context : pipe2py.Context object _INPUT : asyncPipe like object (twisted Deferred iterable of items) embed : the submodule, i.e., asyncPipe*(context, _INPUT, conf) Most modules, with the exception of User inputs and Operators can be sub-modules. conf : { 'assign_part': {'value': <all or first>}, 'assign_to': {'value': <assigned field name>}, 'emit_part': {'value': <all or first>}, 'mode': {'value': <assign or EMIT>}, 'with': {'value': <looped field name or blank>}, 'embed': {'value': {'conf': <module conf>}} } Returns ------- _OUTPUT : twisted.internet.defer.Deferred generator of items """ cust_func = get_cust_func(context, conf, embed, parse_embed, **kwargs) opts.update({'cust_func': cust_func}) splits = yield asyncGetSplits(_INPUT, conf, **cdicts(opts, kwargs)) gathered = yield asyncStarMap(asyncParseResult, splits) _OUTPUT = utils.multiplex(gathered) returnValue(_OUTPUT)
python
{ "resource": "" }
q4334
pipe_loop
train
def pipe_loop(context=None, _INPUT=None, conf=None, embed=None, **kwargs): """An operator that loops over the input and performs the embedded submodule. Not loopable. Parameters ---------- context : pipe2py.Context object _INPUT : pipe2py.modules pipe like object (iterable of items) embed : the submodule, i.e., pipe_*(context, _INPUT, conf) Most modules, with the exception of User inputs and Operators can be sub-modules. conf : { 'assign_part': {'value': <all or first>}, 'assign_to': {'value': <assigned field name>}, 'emit_part': {'value': <all or first>}, 'mode': {'value': <assign or EMIT>}, 'with': {'value': <looped field name or blank>}, 'embed': {'value': {'conf': <module conf>}} } Returns ------- _OUTPUT : generator of items """ cust_func = get_cust_func(context, conf, embed, parse_embed, **kwargs) opts.update({'cust_func': cust_func}) splits = get_splits(_INPUT, conf, **cdicts(opts, kwargs)) gathered = starmap(parse_result, splits) _OUTPUT = utils.multiplex(gathered) return _OUTPUT
python
{ "resource": "" }
q4335
pipe_fetchpage
train
def pipe_fetchpage(context=None, _INPUT=None, conf=None, **kwargs): """A source that fetches the content of a given web site as a string. Loopable. context : pipe2py.Context object _INPUT : pipeforever asyncPipe or an iterable of items or fields conf : dict URL -- url object contain the URL to download from -- string from where to start the input to -- string to limit the input token -- if present, split the input on this token to generate items Description: http://pipes.yahoo.com/pipes/docs?doc=sources#FetchPage TODOS: - don't retrieve pages larger than 200k - don't retrieve if page is not indexable. - item delimiter removes the closing tag if using a HTML tag (not documented but happens) - items should be cleaned, i.e. stripped of HTML tags Yields ------ _OUTPUT : items """ conf = DotDict(conf) split_token = conf.get('token', **kwargs) urls = utils.listize(conf['URL']) for item in _INPUT: for item_url in urls: url = utils.get_value(DotDict(item_url), DotDict(item), **kwargs) url = utils.get_abspath(url) if not url: continue f = urlopen(url) # TODO: it seems that Yahoo! converts relative links to # absolute. This needs to be done on the content but seems to # be a non-trival task python? content = unicode(f.read(), 'utf-8') if context and context.verbose: print '............Content .................' print content print '...............EOF...................' parsed = _parse_content(content, conf, **kwargs) items = parsed.split(split_token) if split_token else [parsed] if context and context.verbose: print "FetchPage: found count items:", len(items) for i in items: if context and context.verbose: print "--------------item data --------------------" print i print "--------------EOF item data ----------------" yield {"content": i} if item.get('forever'): # _INPUT is pipeforever and not a loop, # so we just yield our item once break
python
{ "resource": "" }
q4336
pipe_fetchdata
train
def pipe_fetchdata(context=None, _INPUT=None, conf=None, **kwargs): """A source that fetches and parses an XML or JSON file. Loopable. Parameters ---------- context : pipe2py.Context object _INPUT : pipeforever pipe or an iterable of items or fields conf : { 'URL': {'value': <url>}, 'path': {'value': <dot separated path to data list>} } Yields ------ _OUTPUT : items Examples -------- >>> from os import path as p >>> from pipe2py.modules.pipeforever import pipe_forever >>> parent = p.dirname(p.dirname(__file__)) >>> abspath = p.abspath(p.join(parent, 'data', 'gigs.json')) >>> path = 'value.items' >>> url = "file://%s" % abspath >>> conf = {'URL': {'value': url}, 'path': {'value': path}} >>> pipe_fetchdata(_INPUT=pipe_forever(), conf=conf).next().keys()[:5] [u'y:repeatcount', u'description', u'pubDate', u'title', u'y:published'] >>> abspath = p.abspath(p.join(parent, 'data', 'places.xml')) >>> path = 'appointment' >>> url = "file://%s" % abspath >>> conf = {'URL': {'value': url}, 'path': {'value': path}} >>> sorted(pipe_fetchdata(_INPUT=pipe_forever(), conf=conf).next().keys()) ['alarmTime', 'begin', 'duration', 'places', 'subject', 'uid'] >>> conf = {'URL': {'value': url}, 'path': {'value': ''}} >>> sorted(pipe_fetchdata(_INPUT=pipe_forever(), conf=conf).next().keys()) ['appointment', 'reminder'] """ # todo: iCal and KML funcs = get_splits(None, conf, **cdicts(opts, kwargs)) parsed = get_parsed(_INPUT, funcs[0]) results = starmap(parse_result, parsed) items = imap(utils.gen_items, results) _OUTPUT = utils.multiplex(items) return _OUTPUT
python
{ "resource": "" }
q4337
asyncPipeFetch
train
def asyncPipeFetch(context=None, _INPUT=None, conf=None, **kwargs): """A source that asynchronously fetches and parses one or more feeds to return the feed entries. Loopable. Parameters ---------- context : pipe2py.Context object _INPUT : asyncPipe like object (twisted Deferred iterable of items) conf : { 'URL': [ {'type': 'url', 'value': <url1>}, {'type': 'url', 'value': <url2>}, {'type': 'url', 'value': <url3>}, ] } Returns ------- _OUTPUT : twisted.internet.defer.Deferred generator of items """ splits = yield asyncGetSplits(_INPUT, conf['URL'], **cdicts(opts, kwargs)) items = yield asyncStarMap(asyncParseResult, splits) _OUTPUT = utils.multiplex(items) returnValue(_OUTPUT)
python
{ "resource": "" }
q4338
pipe_fetch
train
def pipe_fetch(context=None, _INPUT=None, conf=None, **kwargs): """A source that fetches and parses one or more feeds to return the entries. Loopable. Parameters ---------- context : pipe2py.Context object _INPUT : pipeforever pipe or an iterable of items or fields conf : { 'URL': [ {'type': 'url', 'value': <url1>}, {'type': 'url', 'value': <url2>}, {'type': 'url', 'value': <url3>}, ] } Returns ------- _OUTPUT : generator of items """ splits = get_splits(_INPUT, conf['URL'], **cdicts(opts, kwargs)) items = starmap(parse_result, splits) _OUTPUT = utils.multiplex(items) return _OUTPUT
python
{ "resource": "" }
q4339
pipe_filter
train
def pipe_filter(context=None, _INPUT=None, conf=None, **kwargs): """An operator that filters for source items matching the given rules. Not loopable. Parameters ---------- context : pipe2py.Context object _INPUT : pipe2py.modules pipe like object (iterable of items) conf : { 'MODE': {'value': <'permit' or 'block'>}, 'COMBINE': {'value': <'and' or 'or'>} 'RULE': [ { 'field': {'value': 'search field'}, 'op': {'value': 'one of SWITCH above'}, 'value': {'value': 'search term'} } ] } kwargs : other inputs, e.g., to feed terminals for rule values Returns ------- _OUTPUT : generator of filtered items Examples -------- >>> import os.path as p >>> from pipe2py.modules.pipeforever import pipe_forever >>> from pipe2py.modules.pipefetchdata import pipe_fetchdata >>> parent = p.dirname(p.dirname(__file__)) >>> file_name = p.abspath(p.join(parent, 'data', 'gigs.json')) >>> path = 'value.items' >>> url = 'file://%s' % file_name >>> conf = {'URL': {'value': url}, 'path': {'value': path}} >>> input = pipe_fetchdata(_INPUT=pipe_forever(), conf=conf) >>> mode = {'value': 'permit'} >>> combine = {'value': 'and'} >>> rule = [{'field': {'value': 'title'}, 'op': {'value': 'contains'}, \ 'value': {'value': 'web'}}] >>> conf = {'MODE': mode, 'COMBINE': combine, 'RULE': rule} >>> pipe_filter(_INPUT=input, conf=conf).next()['title'] u'E-Commerce Website Developer | Elance Job' >>> rule = [{'field': {'value': 'title'}, 'op': {'value': 'contains'}, \ 'value': {'value': 'kjhlked'}}] >>> conf = {'MODE': mode, 'COMBINE': combine, 'RULE': rule} >>> list(pipe_filter(_INPUT=input, conf=conf)) [] """ conf = DotDict(conf) test = kwargs.pop('pass_if', None) permit = conf.get('MODE', **kwargs) == 'permit' combine = conf.get('COMBINE', **kwargs) if not combine in {'and', 'or'}: raise Exception( "Invalid combine: %s. (Expected 'and' or 'or')" % combine) rule_defs = map(DotDict, utils.listize(conf['RULE'])) get_pass = partial(utils.get_pass, test=test) get_value = partial(utils.get_value, **kwargs) parse_conf = partial(utils.parse_conf, parse_func=get_value, **kwargs) get_rules = lambda i: imap(parse_conf, rule_defs, repeat(i)) funcs = [COMBINE_BOOLEAN[combine], utils.passthrough, utils.passthrough] inputs = imap(DotDict, _INPUT) splits = utils.broadcast(inputs, get_rules, utils.passthrough, get_pass) outputs = starmap(partial(parse_rules, **kwargs), splits) parsed = utils.dispatch(outputs, *funcs) gathered = starmap(partial(parse_result, permit=permit), parsed) _OUTPUT = ifilter(None, gathered) return _OUTPUT
python
{ "resource": "" }
q4340
pipe_split
train
def pipe_split(context, _INPUT, conf, splits, **kwargs):
    """An operator that splits a source into identical copies. Not loopable.

    Parameters
    ----------
    context : pipe2py.Context object
    _INPUT : pipe2py.modules pipe like object (iterable of items)
    conf : dict
    splits : number of copies

    Yields
    ------
    _OUTPUT, _OUTPUT2... : copies of all source items
    """
    return Split(context, _INPUT, conf, splits, **kwargs)
python
{ "resource": "" }
q4341
pipe_datebuilder
train
def pipe_datebuilder(context=None, _INPUT=None, conf=None, **kwargs): """A date module that converts a text string into a datetime value. Useful as terminal data. Loopable. Parameters ---------- context : pipe2py.Context object _INPUT : pipeforever pipe or an iterable of items conf : {'DATE': {'type': 'datetime', 'value': '12/2/2014'}} Yields ------ _OUTPUT : date timetuples """ conf = DotDict(conf) for item in _INPUT: _input = DotDict(item) date = utils.get_value(conf['DATE'], _input, **kwargs).lower() if date.endswith(' day') or date.endswith(' days'): count = int(date.split(' ')[0]) new_date = dt.today() + timedelta(days=count) elif date.endswith(' year') or date.endswith(' years'): count = int(date.split(' ')[0]) new_date = dt.today().replace(year=dt.today().year + count) else: new_date = SWITCH.get(date) if not new_date: new_date = utils.get_date(date) if not new_date: raise Exception('Unrecognized date string: %s' % date) yield new_date.timetuple()
python
{ "resource": "" }
q4342
asyncImap
train
def asyncImap(asyncCallable, *iterables):
    """itertools.imap for deferred callables
    """
    deferreds = imap(asyncCallable, *iterables)
    return gatherResults(deferreds, consumeErrors=True)
python
{ "resource": "" }
q4343
asyncStarCmap
train
def asyncStarCmap(asyncCallable, iterable):
    """itertools.starmap for deferred callables using cooperative
    multitasking
    """
    results = []
    yield coopStar(asyncCallable, results.append, iterable)
    returnValue(results)
python
{ "resource": "" }
q4344
asyncStarPmap
train
def asyncStarPmap(asyncCallable, iterable):
    """itertools.starmap for deferred callables using parallel cooperative
    multitasking
    """
    results = []
    yield asyncStarParallel(asyncCallable, results.append, iterable)
    returnValue(results)
python
{ "resource": "" }
q4345
asyncStarMap
train
def asyncStarMap(asyncCallable, iterable):
    """itertools.starmap for deferred callables
    """
    deferreds = starmap(asyncCallable, iterable)
    return gatherResults(deferreds, consumeErrors=True)
python
{ "resource": "" }
q4346
pipe_rssitembuilder
train
def pipe_rssitembuilder(context=None, _INPUT=None, conf=None, **kwargs): """A source that builds an rss item. Loopable. Parameters ---------- context : pipe2py.Context object _INPUT : pipeforever asyncPipe or an iterable of items or fields conf : { 'mediaContentType': {'type': 'text', 'value': ''}, 'mediaContentHeight': {'type': 'text', 'value': ''}, 'mediaContentWidth': {'type': 'text', 'value': ''}, 'mediaContentURL': {'type': 'text', 'value': 'url'}, 'mediaThumbHeight': {'type': 'text', 'value': ''}, 'mediaThumbWidth': {'type': 'text', 'value': ''}, 'mediaThumbURL': {'type': 'text', 'value': 'url'}, 'description': {'type': 'text', 'value': 'description'}, 'pubdate': {'type': 'text', 'value': 'pubdate'}, 'author': {'type': 'text', 'value': 'author'}, 'title': {'type': 'text', 'value': 'title'}, 'link': {'type': 'text', 'value': 'url'}, 'guid': {'type': 'text', 'value': 'guid'}, } Yields ------ _OUTPUT : items """ get_value = partial(utils.get_value, **kwargs) pkwargs = utils.combine_dicts({'parse_func': get_value}, kwargs) parse_conf = partial(utils.parse_conf, DotDict(conf), **pkwargs) get_RSS = lambda key, value: (RSS.get(key, key), value) get_YAHOO = lambda key, value: (YAHOO.get(key), value) make_dict = lambda func, conf: dict(starmap(func, conf.iteritems())) clean_dict = lambda d: dict(i for i in d.items() if all(i)) funcs = [partial(make_dict, get_RSS), partial(make_dict, get_YAHOO)] finite = utils.finitize(_INPUT) inputs = imap(DotDict, finite) confs = imap(parse_conf, inputs) splits = utils.broadcast(confs, *funcs) combined = starmap(utils.combine_dicts, splits) result = imap(clean_dict, combined) _OUTPUT = imap(DotDict, result) return _OUTPUT
python
{ "resource": "" }
q4347
asyncPipeStrconcat
train
def asyncPipeStrconcat(context=None, _INPUT=None, conf=None, **kwargs): """A string module that asynchronously builds a string. Loopable. No direct input. Parameters ---------- context : pipe2py.Context object _INPUT : asyncPipe like object (twisted Deferred iterable of items) conf : { 'part': [ {'value': <'<img src="'>}, {'subkey': <'img.src'>}, {'value': <'">'>} ] } Returns ------- _OUTPUT : twisted.internet.defer.Deferred generator of joined strings """ splits = yield asyncGetSplits(_INPUT, conf['part'], **cdicts(opts, kwargs)) _OUTPUT = yield asyncStarMap(partial(maybeDeferred, parse_result), splits) returnValue(iter(_OUTPUT))
python
{ "resource": "" }
q4348
pipe_strconcat
train
def pipe_strconcat(context=None, _INPUT=None, conf=None, **kwargs): """A string module that builds a string. Loopable. Parameters ---------- context : pipe2py.Context object _INPUT : pipeforever pipe or an iterable of items conf : { 'part': [ {'value': '<img src="'}, {'subkey': 'img.src'}, {'value': '">'} ] } Returns ------- _OUTPUT : generator of joined strings """ splits = get_splits(_INPUT, conf['part'], **cdicts(opts, kwargs)) _OUTPUT = starmap(parse_result, splits) return _OUTPUT
python
{ "resource": "" }
q4349
asyncPipeUniq
train
def asyncPipeUniq(context=None, _INPUT=None, conf=None, **kwargs): """An operator that asynchronously filters out non unique items according to the specified field. Not loopable. Parameters ---------- context : pipe2py.Context object _INPUT : twisted Deferred iterable of items conf : {'field': {'type': 'text', 'value': <field to be unique>}} returns ------- _OUTPUT : twisted.internet.defer.Deferred generator of unique items """ _input = yield _INPUT asyncFuncs = yield asyncGetSplits(None, conf, **cdicts(opts, kwargs)) pieces = yield asyncFuncs[0]() _pass = yield asyncFuncs[2]() _OUTPUT = _input if _pass else unique_items(_input, pieces.field) returnValue(_OUTPUT)
python
{ "resource": "" }
q4350
pipe_uniq
train
def pipe_uniq(context=None, _INPUT=None, conf=None, **kwargs):
    """An operator that filters out non unique items according to the
    specified field. Not loopable.

    Parameters
    ----------
    context : pipe2py.Context object
    _INPUT : pipe2py.modules pipe like object (iterable of items)
    kwargs -- other inputs, e.g. to feed terminals for rule values
    conf : {'field': {'type': 'text', 'value': <field to be unique>}}

    Returns
    -------
    _OUTPUT : generator of unique items
    """
    funcs = get_splits(None, conf, **cdicts(opts, kwargs))
    pieces, _pass = funcs[0](), funcs[2]()
    _OUTPUT = _INPUT if _pass else unique_items(_INPUT, pieces.field)
    return _OUTPUT
python
{ "resource": "" }
q4351
asyncPipeUnion
train
def asyncPipeUnion(context=None, _INPUT=None, conf=None, **kwargs): """An operator that asynchronously merges multiple source together. Not loopable. Parameters ---------- context : pipe2py.Context object _INPUT : asyncPipe like object (twisted Deferred iterable of items) conf : unused Keyword arguments ----------------- _OTHER1 : asyncPipe like object _OTHER2 : etc. Returns ------- _OUTPUT : twisted.internet.defer.Deferred generator of items """ _input = yield _INPUT _OUTPUT = get_output(_input, **kwargs) returnValue(_OUTPUT)
python
{ "resource": "" }
q4352
pipe_union
train
def pipe_union(context=None, _INPUT=None, conf=None, **kwargs): """An operator that merges multiple source together. Not loopable. Parameters ---------- context : pipe2py.Context object _INPUT : pipe2py.modules pipe like object (iterable of items) conf : unused Keyword arguments ----------------- _OTHER1 : pipe2py.modules pipe like object _OTHER2 : etc. Returns ------- _OUTPUT : generator of items """ _OUTPUT = get_output(_INPUT, **kwargs) return _OUTPUT
python
{ "resource": "" }
q4353
pipe_sort
train
def pipe_sort(context=None, _INPUT=None, conf=None, **kwargs): """An operator that sorts the input source according to the specified key. Not loopable. Not lazy. Parameters ---------- context : pipe2py.Context object _INPUT : pipe2py.modules pipe like object (iterable of items) kwargs -- other inputs, e.g. to feed terminals for rule values conf : { 'KEY': [ { 'field': {'type': 'text', 'value': 'title'}, 'dir': {'type': 'text', 'value': 'DESC'} } ] } Returns ------- _OUTPUT : generator of sorted items """ test = kwargs.pop('pass_if', None) _pass = utils.get_pass(test=test) key_defs = imap(DotDict, utils.listize(conf['KEY'])) get_value = partial(utils.get_value, **kwargs) parse_conf = partial(utils.parse_conf, parse_func=get_value, **kwargs) keys = imap(parse_conf, key_defs) order = ('%s%s' % ('-' if k.dir == 'DESC' else '', k.field) for k in keys) comparers = map(get_comparer, order) cmp_func = partial(multikeysort, comparers=comparers) _OUTPUT = _INPUT if _pass else iter(sorted(_INPUT, cmp=cmp_func)) return _OUTPUT
python
{ "resource": "" }
q4354
pipe_createrss
train
def pipe_createrss(context=None, _INPUT=None, conf=None, **kwargs):
    """An operator that converts a source into an RSS stream. Not loopable.
    """
    conf = DotDict(conf)

    for item in _INPUT:
        item = DotDict(item)

        yield {
            value: item.get(conf.get(key, **kwargs))
            for key, value in RSS_FIELDS.items()}
python
{ "resource": "" }
q4355
pipe_fetchsitefeed
train
def pipe_fetchsitefeed(context=None, _INPUT=None, conf=None, **kwargs): """A source that fetches and parses the first feed found on one or more sites. Loopable. Parameters ---------- context : pipe2py.Context object _INPUT : pipeforever pipe or an iterable of items or fields conf : URL -- url Yields ------ _OUTPUT : items """ conf = DotDict(conf) urls = utils.listize(conf['URL']) for item in _INPUT: for item_url in urls: url = utils.get_value(DotDict(item_url), DotDict(item), **kwargs) url = utils.get_abspath(url) if context and context.verbose: print "pipe_fetchsitefeed loading:", url for link in autorss.getRSSLink(url.encode('utf-8')): parsed = speedparser.parse(urlopen(link).read()) for entry in utils.gen_entries(parsed): yield entry if item.get('forever'): # _INPUT is pipeforever and not a loop, # so we just yield our item once break
python
{ "resource": "" }
q4356
asyncPipeStrreplace
train
def asyncPipeStrreplace(context=None, _INPUT=None, conf=None, **kwargs): """A string module that asynchronously replaces text. Loopable. Parameters ---------- context : pipe2py.Context object _INPUT : twisted Deferred iterable of items or strings conf : { 'RULE': [ { 'param': {'value': <match type: 1=first, 2=last, 3=every>}, 'find': {'value': <text to find>}, 'replace': {'value': <replacement>} } ] } Returns ------- _OUTPUT : twisted.internet.defer.Deferred generator of replaced strings """ splits = yield asyncGetSplits(_INPUT, conf['RULE'], **kwargs) parsed = yield asyncDispatch(splits, *get_async_dispatch_funcs()) _OUTPUT = yield asyncStarMap(asyncParseResult, parsed) returnValue(iter(_OUTPUT))
python
{ "resource": "" }
q4357
pipe_strreplace
train
def pipe_strreplace(context=None, _INPUT=None, conf=None, **kwargs): """A string module that replaces text. Loopable. Parameters ---------- context : pipe2py.Context object _INPUT : iterable of items or strings conf : { 'RULE': [ { 'param': {'value': <match type: 1=first, 2=last, 3=every>}, 'find': {'value': <text to find>}, 'replace': {'value': <replacement>} } ] } Returns ------- _OUTPUT : generator of replaced strings """ splits = get_splits(_INPUT, conf['RULE'], **kwargs) parsed = utils.dispatch(splits, *get_dispatch_funcs()) _OUTPUT = starmap(parse_result, parsed) return _OUTPUT
python
{ "resource": "" }
q4358
asyncPipeUniq
train
def asyncPipeUniq(context=None, _INPUT=None, conf=None, **kwargs): """An operator that asynchronously returns a specified number of items from the top of a feed. Not loopable. Parameters ---------- context : pipe2py.Context object _INPUT : twisted Deferred iterable of items conf : { 'start': {'type': 'number', value': <starting location>} 'count': {'type': 'number', value': <desired feed length>} } returns ------- _OUTPUT : twisted.internet.defer.Deferred generator of unique items """ _input = yield _INPUT asyncFuncs = yield asyncGetSplits(None, conf, **cdicts(opts, kwargs)) pieces = yield asyncFuncs[0]() _pass = yield asyncFuncs[2]() if _pass: _OUTPUT = _input else: start = int(pieces.start) stop = start + int(pieces.count) _OUTPUT = islice(_input, start, stop) returnValue(_OUTPUT)
python
{ "resource": "" }
q4359
pipe_truncate
train
def pipe_truncate(context=None, _INPUT=None, conf=None, **kwargs):
    """An operator that returns a specified number of items from the top of a
    feed. Not loopable.

    Parameters
    ----------
    context : pipe2py.Context object
    _INPUT : pipe2py.modules pipe like object (iterable of items)
    kwargs -- terminal, if the truncation value is wired in
    conf : {
        'start': {'type': 'number', value': <starting location>}
        'count': {'type': 'number', value': <desired feed length>}
    }

    Returns
    -------
    _OUTPUT : generator of items
    """
    funcs = get_splits(None, conf, **cdicts(opts, kwargs))
    pieces, _pass = funcs[0](), funcs[2]()

    if _pass:
        _OUTPUT = _INPUT
    else:
        try:
            start = int(pieces.start)
        except AttributeError:
            start = 0

        stop = start + int(pieces.count)
        _OUTPUT = islice(_INPUT, start, stop)

    return _OUTPUT
python
{ "resource": "" }
q4360
asyncPipeStringtokenizer
train
def asyncPipeStringtokenizer(context=None, _INPUT=None, conf=None, **kwargs): """A string module that asynchronously splits a string into tokens delimited by separators. Loopable. Parameters ---------- context : pipe2py.Context object _INPUT : twisted Deferred iterable of items or strings conf : { 'to-str': {'value': <delimiter>}, 'dedupe': {'type': 'bool', value': <1>}, 'sort': {'type': 'bool', value': <1>} } Returns ------- _OUTPUT : twisted.internet.defer.Deferred generator of items """ conf['delimiter'] = conf.pop('to-str', dict.get(conf, 'delimiter')) splits = yield asyncGetSplits(_INPUT, conf, **cdicts(opts, kwargs)) parsed = yield asyncDispatch(splits, *get_async_dispatch_funcs()) items = yield asyncStarMap(partial(maybeDeferred, parse_result), parsed) _OUTPUT = utils.multiplex(items) returnValue(_OUTPUT)
python
{ "resource": "" }
q4361
asyncPipeExchangerate
train
def asyncPipeExchangerate(context=None, _INPUT=None, conf=None, **kwargs): """A string module that asynchronously retrieves the current exchange rate for a given currency pair. Loopable. Parameters ---------- context : pipe2py.Context object _INPUT : twisted Deferred iterable of items or strings (base currency) conf : { 'quote': {'value': <'USD'>}, 'default': {'value': <'USD'>}, 'offline': {'type': 'bool', 'value': '0'}, } Returns ------- _OUTPUT : twisted.internet.defer.Deferred generator of hashed strings """ offline = conf.get('offline', {}).get('value') # TODO add async rate data fetching rate_data = get_offline_rate_data() if offline else get_rate_data() rates = parse_request(rate_data) splits = yield asyncGetSplits(_INPUT, conf, **cdicts(opts, kwargs)) parsed = yield asyncDispatch(splits, *get_async_dispatch_funcs()) _OUTPUT = starmap(partial(parse_result, rates=rates), parsed) returnValue(iter(_OUTPUT))
python
{ "resource": "" }
q4362
pipe_exchangerate
train
def pipe_exchangerate(context=None, _INPUT=None, conf=None, **kwargs): """A string module that retrieves the current exchange rate for a given currency pair. Loopable. Parameters ---------- context : pipe2py.Context object _INPUT : iterable of items or strings (base currency) conf : { 'quote': {'value': <'USD'>}, 'default': {'value': <'USD'>}, 'offline': {'type': 'bool', 'value': '0'}, } Returns ------- _OUTPUT : generator of hashed strings """ offline = conf.get('offline', {}).get('value') rate_data = get_offline_rate_data(err=False) if offline else get_rate_data() rates = parse_request(rate_data) splits = get_splits(_INPUT, conf, **cdicts(opts, kwargs)) parsed = utils.dispatch(splits, *get_dispatch_funcs()) _OUTPUT = starmap(partial(parse_result, rates=rates), parsed) return _OUTPUT
python
{ "resource": "" }
q4363
pipe_strtransform
train
def pipe_strtransform(context=None, _INPUT=None, conf=None, **kwargs):
    """A string module that applies the configured string transformation
    (e.g. 'swapcase') to each item. Loopable.

    Parameters
    ----------
    context : pipe2py.Context object
    _INPUT : iterable of items or strings
    conf : {'transformation': {value': <'swapcase'>}}

    Returns
    -------
    _OUTPUT : generator of transformed strings
    """
    splits = get_splits(_INPUT, conf, **cdicts(opts, kwargs))
    parsed = utils.dispatch(splits, *get_dispatch_funcs())
    _OUTPUT = starmap(parse_result, parsed)
    return _OUTPUT
python
{ "resource": "" }
q4364
pipe_privateinput
train
def pipe_privateinput(context=None, _INPUT=None, conf=None, **kwargs): """An input that prompts the user for some text and yields it forever. Not loopable. Parameters ---------- context : pipe2py.Context object _INPUT : unused conf : { 'name': {'value': 'parameter name'}, 'prompt': {'value': 'User prompt'}, 'default': {'value': 'default value'}, 'debug': {'value': 'debug value'} } Yields ------ _OUTPUT : text """ value = utils.get_input(context, conf) while True: yield value
python
{ "resource": "" }
q4365
pipe_dateformat
train
def pipe_dateformat(context=None, _INPUT=None, conf=None, **kwargs): """Formats a datetime value. Loopable. Parameters ---------- context : pipe2py.Context object _INPUT : pipedatebuilder pipe like object (iterable of date timetuples) conf : { 'format': {'value': <'%B %d, %Y'>}, 'timezone': {'value': <'EST'>} } Yields ------ _OUTPUT : formatted dates """ conf = DotDict(conf) loop_with = kwargs.pop('with', None) date_format = conf.get('format', **kwargs) # timezone = conf.get('timezone', **kwargs) for item in _INPUT: _with = item.get(loop_with, **kwargs) if loop_with else item try: # todo: check that all PHP formats are covered by Python date_string = time.strftime(date_format, _with) except TypeError as e: if context and context.verbose: print 'Error formatting date: %s' % item print e continue else: yield date_string
python
{ "resource": "" }
q4366
pipe_subelement
train
def pipe_subelement(context=None, _INPUT=None, conf=None, **kwargs): """An operator extracts select sub-elements from a feed. Not loopable. Parameters ---------- context : pipe2py.Context object _INPUT : pipe2py.modules pipe like object (iterable of items) conf : {'path': {'value': <element path>}} Yields ------ _OUTPUT : items """ path = DotDict(conf).get('path', **kwargs) for item in _INPUT: element = DotDict(item).get(path, **kwargs) for i in utils.gen_items(element): yield {'content': i} if item.get('forever'): # _INPUT is pipeforever and not a loop, # so we just yield our item once break
python
{ "resource": "" }
q4367
pipe_feedautodiscovery
train
def pipe_feedautodiscovery(context=None, _INPUT=None, conf=None, **kwargs): """A source that searches for and returns feed links found in a page. Loopable. Parameters ---------- context : pipe2py.Context object _INPUT : pipeforever pipe or an iterable of items or fields conf : URL -- url Yields ------ _OUTPUT : items """ conf = DotDict(conf) urls = utils.listize(conf['URL']) for item in _INPUT: for item_url in urls: url = utils.get_value(DotDict(item_url), DotDict(item), **kwargs) url = utils.get_abspath(url) if context and context.verbose: print "pipe_feedautodiscovery loading:", url for entry in autorss.getRSSLink(url.encode('utf-8')): yield {'link': entry} # todo: add rel, type, title if item.get('forever'): # _INPUT is pipeforever and not a loop, # so we just yield our item once break
python
{ "resource": "" }
q4368
pipe_urlinput
train
def pipe_urlinput(context=None, _INPUT=None, conf=None, **kwargs): """An input that prompts the user for a url and yields it forever. Not loopable. Parameters ---------- context : pipe2py.Context object _INPUT : unused conf : { 'name': {'value': 'parameter name'}, 'prompt': {'value': 'User prompt'}, 'default': {'value': 'default value'}, 'debug': {'value': 'debug value'} } Yields ------ _OUTPUT : url """ value = utils.get_input(context, conf) value = utils.url_quote(value) while True: yield value
python
{ "resource": "" }
q4369
pipe_yql
train
def pipe_yql(context=None, _INPUT=None, conf=None, **kwargs): """A source that issues YQL queries. Loopable. Parameters ---------- context : pipe2py.Context object _INPUT : pipeforever pipe or an iterable of items or fields conf : yqlquery -- YQL query # todo: handle envURL Yields ------ _OUTPUT : query results """ # todo: get from a config/env file url = "http://query.yahooapis.com/v1/public/yql" conf = DotDict(conf) query = conf['yqlquery'] for item in _INPUT: item = DotDict(item) yql = utils.get_value(query, item, **kwargs) # note: we use the default format of xml since json loses some # structure # todo: diagnostics=true e.g. if context.test # todo: consider paging for large result sets r = requests.get(url, params={'q': yql}, stream=True) # Parse the response tree = parse(r.raw) if context and context.verbose: print "pipe_yql loading xml:", yql root = tree.getroot() # note: query also has row count results = root.find('results') # Convert xml into generation of dicts for element in results.getchildren(): yield utils.etree_to_dict(element) if item.get('forever'): # _INPUT is pipeforever and not a loop, # so we just yield our item once break
python
{ "resource": "" }
q4370
pipe_numberinput
train
def pipe_numberinput(context=None, _INPUT=None, conf=None, **kwargs): """An input that prompts the user for a number and yields it forever. Not loopable. Parameters ---------- context : pipe2py.Context object _INPUT : not used conf : { 'name': {'value': 'parameter name'}, 'prompt': {'value': 'User prompt'}, 'default': {'value': 'default value'}, 'debug': {'value': 'debug value'} } Yields ------ _OUTPUT : text """ value = utils.get_input(context, conf) try: value = int(value) except: value = 0 while True: yield value
python
{ "resource": "" }
q4371
pipe_urlbuilder
train
def pipe_urlbuilder(context=None, _INPUT=None, conf=None, **kwargs): """A url module that builds a url. Loopable. Parameters ---------- context : pipe2py.Context object _INPUT : pipeforever pipe or an iterable of items or fields conf : { 'PARAM': [ {'key': {'value': <'order'>}, 'value': {'value': <'desc'>}}, {'key': {'value': <'page'>}, 'value': {'value': <'2'>}} ] 'PATH': {'type': 'text', 'value': <''>}, 'BASE': {'type': 'text', 'value': <'http://site.com/feed.xml'>}, } Yields ------ _OUTPUT : url """ pkwargs = cdicts(opts, kwargs) get_params = get_funcs(conf.get('PARAM', []), **kwargs)[0] get_paths = get_funcs(conf.get('PATH', []), **pkwargs)[0] get_base = get_funcs(conf['BASE'], listize=False, **pkwargs)[0] parse_params = utils.parse_params splits = get_splits(_INPUT, funcs=[get_params, get_paths, get_base]) parsed = utils.dispatch(splits, *get_dispatch_funcs('pass', parse_params)) _OUTPUT = starmap(parse_result, parsed) return _OUTPUT
python
{ "resource": "" }
q4372
pipe_csv
train
def pipe_csv(context=None, _INPUT=None, conf=None, **kwargs): """A source that fetches and parses a csv file to yield items. Loopable. Parameters ---------- context : pipe2py.Context object _INPUT : pipeforever pipe or an iterable of items or fields conf : URL -- url skip -- number of header rows to skip col_mode -- column name source: row=header row(s), custom=defined in col_name col_name -- list of custom column names col_row_start -- first column header row col_row_end -- last column header row separator -- column separator Yields ------ _OUTPUT : items Note: Current restrictions: separator must be 1 character assumes every row has exactly the expected number of fields, as defined in the header """ conf = DotDict(conf) conf_sep = conf['separator'] conf_mode = conf['col_mode'] col_name = conf['col_name'] for item in _INPUT: item = DotDict(item) url = utils.get_value(conf['URL'], item, **kwargs) url = utils.get_abspath(url) separator = utils.get_value(conf_sep, item, encode=True, **kwargs) skip = int(utils.get_value(conf['skip'], item, **kwargs)) col_mode = utils.get_value(conf_mode, item, **kwargs) f = urlopen(url) if context and context.verbose: print "pipe_csv loading:", url for i in xrange(skip): f.next() reader = csv.UnicodeReader(f, delimiter=separator) fieldnames = [] if col_mode == 'custom': fieldnames = [DotDict(x).get() for x in col_name] else: fieldnames = _gen_fieldnames(conf, reader, item, **kwargs) for rows in reader: yield dict(zip(fieldnames, rows)) f.close() if item.get('forever'): # _INPUT is pipeforever and not a loop, # so we just yield our item once break
python
{ "resource": "" }
q4373
asyncPipeRename
train
def asyncPipeRename(context=None, _INPUT=None, conf=None, **kwargs): """An operator that asynchronously renames or copies fields in the input source. Not loopable. Parameters ---------- context : pipe2py.Context object _INPUT : asyncPipe like object (twisted Deferred iterable of items) conf : { 'RULE': [ { 'op': {'value': 'rename or copy'}, 'field': {'value': 'old field'}, 'newval': {'value': 'new field'} } ] } kwargs : other inputs, e.g., to feed terminals for rule values Returns ------- _OUTPUT : twisted.internet.defer.Deferred generator of items """ splits = yield asyncGetSplits(_INPUT, conf['RULE'], **cdicts(opts, kwargs)) _OUTPUT = yield maybeDeferred(parse_results, splits, **kwargs) returnValue(_OUTPUT)
python
{ "resource": "" }
q4374
pipe_rename
train
def pipe_rename(context=None, _INPUT=None, conf=None, **kwargs): """An operator that renames or copies fields in the input source. Not loopable. Parameters ---------- context : pipe2py.Context object _INPUT : pipe2py.modules pipe like object (iterable of items) conf : { 'RULE': [ { 'op': {'value': 'rename or copy'}, 'field': {'value': 'old field'}, 'newval': {'value': 'new field'} } ] } kwargs : other inputs, e.g., to feed terminals for rule values Returns ------- _OUTPUT : generator of items """ splits = get_splits(_INPUT, conf['RULE'], **cdicts(opts, kwargs)) _OUTPUT = parse_results(splits, **kwargs) return _OUTPUT
python
{ "resource": "" }
q4375
pipe_reverse
train
def pipe_reverse(context=None, _INPUT=None, conf=None, **kwargs):
    """An operator that reverses the order of source items. Not loopable.
    Not lazy.

    Parameters
    ----------
    context : pipe2py.Context object
    _INPUT : pipe2py.modules pipe like object (iterable of items)
    conf : unused

    Yields
    ------
    _OUTPUT : items
    """
    for item in reversed(list(_INPUT)):
        yield item
python
{ "resource": "" }
q4376
pipe_count
train
def pipe_count(context=None, _INPUT=None, conf=None, **kwargs):
    """An operator that counts the number of _INPUT items and yields it
    forever. Not loopable.

    Parameters
    ----------
    context : pipe2py.Context object
    _INPUT : pipe2py.modules pipe like object (iterable of items)
    conf : not used

    Yields
    ------
    _OUTPUT : number of items in the feed

    Examples
    --------
    >>> generator = (x for x in xrange(5))
    >>> count = pipe_count(_INPUT=generator)
    >>> count  #doctest: +ELLIPSIS
    <generator object pipe_count at 0x...>
    >>> count.next()
    5
    """
    count = len(list(_INPUT))

    # todo: check all operators (not placeable in loops)
    while True:
        yield count
python
{ "resource": "" }
q4377
asyncPipeSubstr
train
def asyncPipeSubstr(context=None, _INPUT=None, conf=None, **kwargs):
    """A string module that asynchronously returns a substring. Loopable.

    Parameters
    ----------
    context : pipe2py.Context object
    _INPUT : twisted Deferred iterable of items or strings
    conf : {
        'from': {'type': 'number', 'value': <starting position>},
        'length': {'type': 'number', 'value': <count of characters to return>}
    }

    Returns
    -------
    _OUTPUT : twisted.internet.defer.Deferred generator of substrings
    """
    conf['start'] = conf.pop('from', dict.get(conf, 'start'))
    splits = yield asyncGetSplits(_INPUT, conf, **cdicts(opts, kwargs))
    parsed = yield asyncDispatch(splits, *get_async_dispatch_funcs())
    _OUTPUT = yield asyncStarMap(partial(maybeDeferred, parse_result), parsed)
    returnValue(iter(_OUTPUT))
python
{ "resource": "" }
q4378
pipe_substr
train
def pipe_substr(context=None, _INPUT=None, conf=None, **kwargs):
    """A string module that returns a substring. Loopable.

    Parameters
    ----------
    context : pipe2py.Context object
    _INPUT : iterable of items or strings
    conf : {
        'from': {'type': 'number', 'value': <starting position>},
        'length': {'type': 'number', 'value': <count of characters to return>}
    }

    Returns
    -------
    _OUTPUT : generator of substrings
    """
    conf['start'] = conf.pop('from', dict.get(conf, 'start'))
    splits = get_splits(_INPUT, conf, **cdicts(opts, kwargs))
    parsed = utils.dispatch(splits, *get_dispatch_funcs())
    _OUTPUT = starmap(parse_result, parsed)
    return _OUTPUT
python
{ "resource": "" }
q4379
serialize_number
train
def serialize_number(x, fmt=SER_BINARY, outlen=None):
    """ Serializes `x' to a string of length `outlen' in format `fmt' """
    ret = b''
    if fmt == SER_BINARY:
        while x:
            x, r = divmod(x, 256)
            ret = six.int2byte(int(r)) + ret
        if outlen is not None:
            assert len(ret) <= outlen
            ret = ret.rjust(outlen, b'\0')
        return ret
    assert fmt == SER_COMPACT
    while x:
        x, r = divmod(x, len(COMPACT_DIGITS))
        ret = COMPACT_DIGITS[r:r + 1] + ret
    if outlen is not None:
        assert len(ret) <= outlen
        ret = ret.rjust(outlen, COMPACT_DIGITS[0:1])
    return ret
python
{ "resource": "" }
q4380
deserialize_number
train
def deserialize_number(s, fmt=SER_BINARY):
    """ Deserializes a number from a string `s' in format `fmt' """
    ret = gmpy.mpz(0)
    if fmt == SER_BINARY:
        if isinstance(s, six.text_type):
            raise ValueError(
                "Encode `s` to a bytestring yourself to" +
                " prevent problems with different default encodings")
        for c in s:
            ret *= 256
            ret += byte2int(c)
        return ret
    assert fmt == SER_COMPACT
    if isinstance(s, six.text_type):
        s = s.encode('ascii')
    for c in s:
        ret *= len(COMPACT_DIGITS)
        ret += R_COMPACT_DIGITS[c]
    return ret
python
{ "resource": "" }
q4381
mod_issquare
train
def mod_issquare(a, p):
    """ Returns whether `a' is a square modulo p """
    if not a:
        return True
    p1 = p // 2
    p2 = pow(a, p1, p)
    return p2 == 1
python
{ "resource": "" }
q4382
mod_root
train
def mod_root(a, p):
    """ Return a root of `a' modulo p """
    if a == 0:
        return 0
    if not mod_issquare(a, p):
        raise ValueError
    n = 2
    while mod_issquare(n, p):
        n += 1
    q = p - 1
    r = 0
    while not q.getbit(r):
        r += 1
    q = q >> r
    y = pow(n, q, p)
    h = q >> 1
    b = pow(a, h, p)
    x = (a * b) % p
    b = (b * x) % p
    while b != 1:
        h = (b * b) % p
        m = 1
        while h != 1:
            h = (h * h) % p
            m += 1
        h = gmpy.mpz(0)
        h = h.setbit(r - m - 1)
        t = pow(y, h, p)
        y = (t * t) % p
        r = m
        x = (x * t) % p
        b = (b * y) % p
    return x
python
{ "resource": "" }
q4383
encrypt
train
def encrypt(s, pk, pk_format=SER_COMPACT, mac_bytes=10, curve=None):
    """ Encrypts `s' for public key `pk' """
    curve = (Curve.by_pk_len(len(pk)) if curve is None
             else Curve.by_name(curve))
    p = curve.pubkey_from_string(pk, pk_format)
    return p.encrypt(s, mac_bytes)
python
{ "resource": "" }
q4384
decrypt
train
def decrypt(s, passphrase, curve='secp160r1', mac_bytes=10):
    """ Decrypts `s' with passphrase `passphrase' """
    curve = Curve.by_name(curve)
    privkey = curve.passphrase_to_privkey(passphrase)
    return privkey.decrypt(s, mac_bytes)
python
{ "resource": "" }
q4385
encrypt_file
train
def encrypt_file(in_path_or_file, out_path_or_file, pk, pk_format=SER_COMPACT,
                 mac_bytes=10, chunk_size=4096, curve=None):
    """ Encrypts `in_file' to `out_file' for pubkey `pk' """
    close_in, close_out = False, False
    in_file, out_file = in_path_or_file, out_path_or_file
    try:
        if stringlike(in_path_or_file):
            in_file = open(in_path_or_file, 'rb')
            close_in = True
        if stringlike(out_path_or_file):
            out_file = open(out_path_or_file, 'wb')
            close_out = True
        _encrypt_file(in_file, out_file, pk, pk_format, mac_bytes, chunk_size,
                      curve)
    finally:
        if close_out:
            out_file.close()
        if close_in:
            in_file.close()
python
{ "resource": "" }
q4386
decrypt_file
train
def decrypt_file(in_path_or_file, out_path_or_file, passphrase,
                 curve='secp160r1', mac_bytes=10, chunk_size=4096):
    """ Decrypts `in_file' to `out_file' with passphrase `passphrase' """
    close_in, close_out = False, False
    in_file, out_file = in_path_or_file, out_path_or_file
    try:
        if stringlike(in_path_or_file):
            in_file = open(in_path_or_file, 'rb')
            close_in = True
        if stringlike(out_path_or_file):
            out_file = open(out_path_or_file, 'wb')
            close_out = True
        _decrypt_file(in_file, out_file, passphrase, curve, mac_bytes,
                      chunk_size)
    finally:
        if close_out:
            out_file.close()
        if close_in:
            in_file.close()
python
{ "resource": "" }
q4387
verify
train
def verify(s, sig, pk, sig_format=SER_COMPACT, pk_format=SER_COMPACT,
           curve=None):
    """ Verifies that `sig' is a signature of pubkey `pk' for the
        message `s'. """
    if isinstance(s, six.text_type):
        raise ValueError("Encode `s` to a bytestring yourself to" +
                         " prevent problems with different default encodings")
    curve = (Curve.by_pk_len(len(pk)) if curve is None
             else Curve.by_name(curve))
    p = curve.pubkey_from_string(pk, pk_format)
    return p.verify(hashlib.sha512(s).digest(), sig, sig_format)
python
{ "resource": "" }
q4388
sign
train
def sign(s, passphrase, sig_format=SER_COMPACT, curve='secp160r1'):
    """ Signs `s' with passphrase `passphrase' """
    if isinstance(s, six.text_type):
        raise ValueError("Encode `s` to a bytestring yourself to" +
                         " prevent problems with different default encodings")
    curve = Curve.by_name(curve)
    privkey = curve.passphrase_to_privkey(passphrase)
    return privkey.sign(hashlib.sha512(s).digest(), sig_format)
python
{ "resource": "" }
q4389
PubKey.verify
train
def verify(self, h, sig, sig_fmt=SER_BINARY):
    """ Verifies that `sig' is a signature for a message with
        SHA-512 hash `h'. """
    s = deserialize_number(sig, sig_fmt)
    return self.p._ECDSA_verify(h, s)
python
{ "resource": "" }
q4390
PubKey.encrypt_to
train
def encrypt_to(self, f, mac_bytes=10):
    """ Returns a file like object `ef'.  Anything written to `ef'
        will be encrypted for this pubkey and written to `f'. """
    ctx = EncryptionContext(f, self.p, mac_bytes)
    yield ctx
    ctx.finish()
python
{ "resource": "" }
q4391
PubKey.encrypt
train
def encrypt(self, s, mac_bytes=10):
    """ Encrypt `s' for this pubkey. """
    if isinstance(s, six.text_type):
        raise ValueError(
            "Encode `s` to a bytestring yourself to" +
            " prevent problems with different default encodings")
    out = BytesIO()
    with self.encrypt_to(out, mac_bytes) as f:
        f.write(s)
    return out.getvalue()
python
{ "resource": "" }
q4392
PrivKey.decrypt_from
train
def decrypt_from(self, f, mac_bytes=10):
    """ Decrypts a message from f. """
    ctx = DecryptionContext(self.curve, f, self, mac_bytes)
    yield ctx
    ctx.read()
python
{ "resource": "" }
q4393
PrivKey.sign
train
def sign(self, h, sig_format=SER_BINARY):
    """ Signs the message with SHA-512 hash `h' with this private key. """
    outlen = (self.curve.sig_len_compact if sig_format == SER_COMPACT
              else self.curve.sig_len_bin)
    sig = self._ECDSA_sign(h)
    return serialize_number(sig, sig_format, outlen)
python
{ "resource": "" }
q4394
Curve.hash_to_exponent
train
def hash_to_exponent(self, h):
    """ Converts a 32 byte hash to an exponent """
    ctr = Crypto.Util.Counter.new(128, initial_value=0)
    cipher = Crypto.Cipher.AES.new(h, Crypto.Cipher.AES.MODE_CTR, counter=ctr)
    buf = cipher.encrypt(b'\0' * self.order_len_bin)
    return self._buf_to_exponent(buf)
python
{ "resource": "" }
q4395
run
train
def run():  # pragma: no cover
    """Defines how to start the CLI for the DomainTools API"""
    out_file, out_format, arguments = parse()
    user, key = arguments.pop('user', None), arguments.pop('key', None)
    if not user or not key:
        sys.stderr.write('Credentials are required to perform API calls.\n')
        sys.exit(1)

    api = API(user, key, https=arguments.pop('https'),
              verify_ssl=arguments.pop('verify_ssl'),
              rate_limit=arguments.pop('rate_limit'))
    response = getattr(api, arguments.pop('api_call'))(**arguments)
    output = str(getattr(response, out_format) if out_format != 'list' else
                 response.as_list())
    out_file.write(output if output.endswith('\n') else output + '\n')
python
{ "resource": "" }
q4396
permission_required
train
def permission_required(perm, *lookup_variables, **kwargs):
    """
    Decorator for views that checks whether a user has a particular permission
    enabled, redirecting to the log-in page if necessary.
    """
    login_url = kwargs.pop('login_url', settings.LOGIN_URL)
    redirect_field_name = kwargs.pop('redirect_field_name', REDIRECT_FIELD_NAME)
    redirect_to_login = kwargs.pop('redirect_to_login', True)

    def decorate(view_func):
        def decorated(request, *args, **kwargs):
            if request.user.is_authenticated():
                params = []
                for lookup_variable in lookup_variables:
                    if isinstance(lookup_variable, string_types):
                        value = kwargs.get(lookup_variable, None)
                        if value is None:
                            continue
                        params.append(value)
                    elif isinstance(lookup_variable, (tuple, list)):
                        model, lookup, varname = lookup_variable
                        value = kwargs.get(varname, None)
                        if value is None:
                            continue
                        if isinstance(model, string_types):
                            model_class = apps.get_model(*model.split("."))
                        else:
                            model_class = model
                        if model_class is None:
                            raise ValueError(
                                "The given argument '%s' is not a valid model." % model)
                        if (inspect.isclass(model_class) and
                                not issubclass(model_class, Model)):
                            raise ValueError(
                                'The argument %s needs to be a model.' % model)
                        obj = get_object_or_404(model_class, **{lookup: value})
                        params.append(obj)
                check = get_check(request.user, perm)
                granted = False
                if check is not None:
                    granted = check(*params)
                if granted or request.user.has_perm(perm):
                    return view_func(request, *args, **kwargs)
            if redirect_to_login:
                path = urlquote(request.get_full_path())
                tup = login_url, redirect_field_name, path
                return HttpResponseRedirect('%s?%s=%s' % tup)
            return permission_denied(request)
        return wraps(view_func)(decorated)
    return decorate
python
{ "resource": "" }
q4397
get_permissions
train
def get_permissions(parser, token):
    """
    Retrieves all permissions associated with the given obj and user
    and assigns the result to a context variable.

    Syntax::

        {% get_permissions obj %}
        {% for perm in permissions %}
            {{ perm }}
        {% endfor %}

        {% get_permissions obj as "my_permissions" %}
        {% get_permissions obj for request.user as "my_permissions" %}

    """
    return PermissionsForObjectNode.handle_token(parser, token, approved=True,
                                                 name='"permissions"')
python
{ "resource": "" }
q4398
get_permission_requests
train
def get_permission_requests(parser, token):
    """
    Retrieves all permissions requests associated with the given obj and user
    and assigns the result to a context variable.

    Syntax::

        {% get_permission_requests obj %}
        {% for perm in permissions %}
            {{ perm }}
        {% endfor %}

        {% get_permission_requests obj as "my_permissions" %}
        {% get_permission_requests obj for request.user as "my_permissions" %}

    """
    return PermissionsForObjectNode.handle_token(parser, token, approved=False,
                                                 name='"permission_requests"')
python
{ "resource": "" }
q4399
get_permission
train
def get_permission(parser, token):
    """
    Performs a permission check with the given signature, user and objects
    and assigns the result to a context variable.

    Syntax::

        {% get_permission PERMISSION_LABEL.CHECK_NAME for USER and *OBJS [as VARNAME] %}

        {% get_permission "poll_permission.change_poll" for request.user and poll as "is_allowed" %}
        {% get_permission "poll_permission.change_poll" for request.user and poll,second_poll as "is_allowed" %}

        {% if is_allowed %}
            I've got ze power to change ze pollllllzzz. Muahahaa.
        {% else %}
            Meh. No power for meeeee.
        {% endif %}

    """
    return PermissionForObjectNode.handle_token(parser, token, approved=True,
                                                name='"permission"')
python
{ "resource": "" }