prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>q2.py<|end_file_name|><|fim▁begin|># Experiment with boundary for collections L = [0, 1, 2, 3] print('-------- Part A --------') # Index beyond, generates a IndexError exception try: L[4] # part (a) of question except IndexError as err: print('IndexError Exception', err) print('-------- Part B --------') # Slice out of bounds sliced = L[-10:10] print(sliced) print('slicing out of bounds results in a new list equal in value to original') print('if slices includes indices of original)') print('-------- Part C --------') # part(c), reverse slicing sliced = L[3:1] print(sliced) # this is not same effect as out of bound slicing, # results in a empty list above as start is greater than the end with a positive stride # to actually reserve the values in a new list need to specify a negative stride sliced = L[3:1:-1]<|fim▁hole|><|fim▁end|>
print(sliced)
<|file_name|>example_02.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python #/* # * example_02.c: sample gnome-print code # * # * This program is free software; you can redistribute it and/or # * modify it under the terms of the GNU Library General Public License # * as published by the Free Software Foundation; either version 2 of # * the License, or (at your option) any later version. # * # * This program is distributed in the hope that it will be useful, # * but WITHOUT ANY WARRANTY; without even the implied warranty of # * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # * GNU Library General Public License for more details. # * # * You should have received a copy of the GNU Library General Public # * License along with this program; if not, write to the Free Software # * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. # * # * Authors: # * Chema Celorio <[email protected]> # Python conversion: # Gustavo J. A. M. Carneiro <[email protected]> # * # * Copyright (C) 2002 Ximian Inc. 
and authors # * # */ #/* # * See README # */ import pygtk; pygtk.require("2.0") import gnomeprint import gtk.gdk import struct NUMBER_OF_PIXELS=256 def my_print_image_from_pixbuf(gpc, pixbuf): raw_image = pixbuf.get_pixels() has_alpha = pixbuf.get_has_alpha() rowstride = pixbuf.get_rowstride() height = pixbuf.get_height() width = pixbuf.get_width() if has_alpha: gpc.rgbaimage(raw_image, width, height, rowstride) else: gpc.rgbimage(raw_image, width, height, rowstride) def my_print_image_from_disk(gpc): # Load the image into a pixbuf pixbuf = gtk.gdk.pixbuf_new_from_file("sample-image.png") # Save the graphic context, scale, print the image and restore gpc.gsave() gpc.scale(144, 144) my_print_image_from_pixbuf(gpc, pixbuf) gpc.grestore() def my_print_image_from_memory(gpc): pixels = NUMBER_OF_PIXELS; # Create the image in memory color_image = [] for y in xrange(pixels): for x in xrange(pixels): color_image.append(struct.pack("BBB", (x + y) >> 1, (x + (pixels - 1 - y)) >> 1, ((pixels - 1 - x) + y) >> 1)) # All images in postscript are printed on a 1 x 1 square, since we # want an image which has a size of 2" by 2" inches, we have to scale # the CTM (Current Transformation Matrix). Save the graphic state and # restore it after we are done so that our scaling does not affect the # drawing calls that follow. gpc.gsave() gpc.scale(144, 144) gpc.rgbimage("".join(color_image), pixels, pixels, pixels * 3) gpc.grestore() def my_draw(gpc): gpc.beginpage("1") gpc.translate(200, 100) my_print_image_from_memory(gpc) gpc.translate(0, 150) my_print_image_from_disk(gpc)<|fim▁hole|>def my_print(): job = gnomeprint.Job(gnomeprint.config_default()) gpc = job.get_context() my_draw(gpc) job.close() job.print_() my_print() print "Done..."<|fim▁end|>
gpc.showpage()
<|file_name|>preferences.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>use crate::commands::Result; use crate::models::application::{Application, Preferences}; use crate::util; pub fn edit(app: &mut Application) -> Result { let preference_buffer = Preferences::edit()?; util::add_buffer(preference_buffer, app) } pub fn reload(app: &mut Application) -> Result { app.preferences.borrow_mut().reload() }<|fim▁end|>
<|file_name|>fileview.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- mode: python; encoding: utf-8 -*- # """This plugin renders the filesystem in a tree and a table.""" import cgi import os import random import socket from django import http from M2Crypto import X509 from grr.gui import renderers from grr.gui.plugins import fileview_widgets from grr.gui.plugins import forms from grr.gui.plugins import semantic from grr.lib import aff4 from grr.lib import config_lib from grr.lib import flow from grr.lib import rdfvalue from grr.lib import utils from grr.lib.flows.general import export class BufferReferenceRenderer(semantic.RDFProtoRenderer): """Render the buffer reference.""" classname = "BufferReference" name = "Buffer Reference" def Hexify(self, _, data): """Render a hexdump of the data.""" results = [] idx = 0 while idx < len(data): raw = "" result = "" for _ in range(16): ord_value = ord(data[idx]) result += "%02X " % ord_value if ord_value > 32 and ord_value < 127: raw += cgi.escape(data[idx]) else: raw += "." idx += 1 if idx >= len(data): break results.append(result + " " * (16 * 3 - len(result)) + raw) return "<pre>%s</pre>" % "\n".join(results) translator = dict(data=Hexify) class StatModeRenderer(semantic.RDFValueRenderer): """Renders stat mode fields.""" classname = "StatMode" layout_template = renderers.Template(""" <abbr title="Mode {{this.oct}}">{{this.mode_string|escape}}</abbr>""") def Layout(self, request, response): self.oct = oct(int(self.proxy)) self.mode_string = unicode(self.proxy) return super(StatModeRenderer, self).Layout(request, response) class StatEntryRenderer(semantic.RDFProtoRenderer): """Nicely format the StatEntry rdfvalue.""" classname = "StatEntry" name = "Stat Entry" def TranslateRegistryData(self, request, registry_data): if registry_data.HasField("data"): ret = repr(registry_data.GetValue()) else: ret = utils.SmartStr(registry_data.GetValue()) # This is not escaped by the template! 
return renderers.EscapingRenderer(ret).RawHTML(request) translator = dict(registry_data=TranslateRegistryData) class GrrMessageRenderer(semantic.RDFProtoRenderer): """Nicely format the GrrMessage rdfvalue.""" classname = "GrrMessage" name = "GrrMessage" def RenderPayload(self, request, unused_value): rdf_object = self.proxy.payload return semantic.FindRendererForObject(rdf_object).RawHTML(request) translator = dict(args=RenderPayload) class VolumeRenderer(semantic.RDFProtoRenderer): """Make the disk volume values human readable.""" classname = "Volume" name = "Disk Volume" def Layout(self, request, response): """Render the protobuf as a table.""" self.result = [] for descriptor, value in self.proxy.ListSetFields(): name = descriptor.name friendly_name = descriptor.friendly_name or name if name == "total_allocation_units" and value is not None: value_str = "{0} ({1:.2f} GB)".format( value, self.proxy.AUToGBytes(value)) self.result.append((friendly_name, descriptor.description, value_str)) elif name == "actual_available_allocation_units" and value is not None: value_str = "{0} ({1:.2f} GB, {2:.0f}% free)".format( value, self.proxy.AUToGBytes(value), self.proxy.FreeSpacePercent()) self.result.append((friendly_name, descriptor.description, value_str)) else: renderer = semantic.FindRendererForObject(value) self.result.append((friendly_name, descriptor.description, renderer.RawHTML(request))) return super(semantic.RDFProtoRenderer, self).Layout(request, response) class CollectionRenderer(StatEntryRenderer): """Nicely format a Collection.""" classname = "CollectionList" name = "Collection Listing" layout_template = renderers.Template(""" <table class='proto_table'> <thead> <tr><th>Mode</th><th>Name</th><th>Size</th><th>Modified</th></tr> </thead> <tbody> {% for row in this.result %} <tr> {% for value in row %} <td class="proto_value"> {{value|safe}} </td> {% endfor %} </tr> {% endfor %} </tbody> </table> """) def Layout(self, request, response): """Render collections as a 
table.""" self.result = [] fields = "st_mode pathspec st_size st_mtime".split() items = self.proxy.items for item in items: row = [] for name in fields: value = getattr(item, name) try: value = self.translator[name](self, request, value) # Regardless of what the error is, we need to escape the value. except StandardError: # pylint: disable=broad-except value = self.FormatFromTemplate(self.translator_error_template, value=value) row.append(value) self.result.append(row) return renderers.TemplateRenderer.Layout(self, request, response) class GrepResultRenderer(semantic.RDFProtoRenderer): """Nicely format grep results.""" classname = "GrepResultList" name = "Grep Result Listing" layout_template = renderers.Template(""" <table class='proto_table'> <thead> <tr><th>Offset</th><th>Data</th></tr> </thead> <tbody> {% for row in this.results %} <tr> {% for value in row %} <td class="proto_value"> {{value|escape}} </td> {% endfor %} </tr> {% endfor %} </tbody> </table> """) def Layout(self, request, response): self.results = [] for row in self.proxy: self.results.append([row.offset, repr(row)]) return renderers.TemplateRenderer.Layout(self, request, response) class UsersRenderer(semantic.RDFValueArrayRenderer): classname = "Users" name = "Users" class NetworkAddressRenderer(semantic.RDFValueRenderer): classname = "NetworkAddress" name = "Network Address" layout_template = renderers.Template("{{result|escape}}") def Layout(self, request, response): _ = request, response return self.RenderFromTemplate(self.layout_template, response, result=self.proxy.human_readable_address) class InterfaceRenderer(semantic.RDFProtoRenderer): """Render a machine's interfaces.""" classname = "Interface" name = "Interface Record" def TranslateIp4Addresses(self, _, value): return " ".join([socket.inet_ntop(socket.AF_INET, x) for x in value]) def TranslateMacAddress(self, _, value): return value.human_readable_address def TranslateIp6Addresses(self, _, value): return " 
".join([socket.inet_ntop(socket.AF_INET6, x) for x in value]) translator = dict(ip4_addresses=TranslateIp4Addresses, ip6_addresses=TranslateIp6Addresses, mac_address=TranslateMacAddress) class StringListRenderer(renderers.TemplateRenderer): """Renders a list of strings as a proto table.""" layout_template = renderers.Template(""" <table class='proto_table'> <tbody> {% for string in this.strings %} <tr><td> {{string|escape}} </td></tr> {% endfor %} </tbody> </table> """) def __init__(self, strings, **kwargs): self.strings = strings super(StringListRenderer, self).__init__(**kwargs) class ConnectionsRenderer(semantic.RDFValueArrayRenderer): """Renders connection listings.""" classname = "Connections" name = "Connection Listing" # The contents of result are safe since they were already escaped in # connection_template. layout_template = renderers.Template(""" <table class='proto_table'> <tbody> {% for connection in result %} <tr> {{connection|safe}} </tr> {% endfor %} </tbody> </table> """) connection_template = renderers.Template( """ <td>{{type|escape}}</td> <td>{{local_address|escape}}</td> <td>{{remote_address|escape}}</td> <td>{{state|escape}}</td> <td>{{pid|escape}}</td> """) types = { (2, 1): "tcp", (10, 1): "tcp6",<|fim▁hole|> (30, 1): "tcp6", (2, 2): "udp", (10, 2): "udp6", (23, 2): "udp6", (30, 2): "udp6", } def Layout(self, request, response): """Render the connection as a table.""" _ = request result = [] for conn in self.proxy: try: conn_type = self.types[(conn.family, conn.type)] except KeyError: conn_type = "(%d,%d)" % (conn.family, conn.type) local_address = "%s:%d" % (conn.local_address.ip, conn.local_address.port) if conn.remote_address.ip: remote_address = "%s:%d" % (conn.remote_address.ip, conn.remote_address.port) else: if ":" in conn.local_address.ip: remote_address = ":::*" else: remote_address = "0.0.0.0:*" result.append(self.FormatFromTemplate(self.connection_template, type=conn_type, local_address=local_address, remote_address=remote_address, 
state=utils.SmartStr(conn.state), pid=conn.pid)) return self.RenderFromTemplate(self.layout_template, response, result=sorted(result)) class NetworkConnections(ConnectionsRenderer): """Handle repeated NetworkConnection fields in protobufs.""" classname = "NetworkConnection" class ProcessRenderer(semantic.RDFValueArrayRenderer): """Renders process listings.""" classname = "Processes" name = "Process Listing" def RenderFiles(self, request, file_list): return StringListRenderer(sorted(file_list)).RawHTML(request) translator = dict(open_files=RenderFiles) class FilesystemRenderer(semantic.RDFValueArrayRenderer): classname = "FileSystem" name = "FileSystems" class CertificateRenderer(semantic.RDFValueRenderer): """Render X509 Certs properly.""" classname = "RDFX509Cert" name = "X509 Certificate" # Implement hide/show behaviour for certificates as they tend to be long and # uninteresting. layout_template = renderers.Template(""" <div class='certificate_viewer' id='certificate_viewer_{{unique|escape}}'> <ins class='fg-button ui-icon ui-icon-minus'/> Click to show details. 
<div class='contents'> <pre> {{ this.cert|escape }} </pre> </div> </div> """) def Layout(self, request, response): # Present the certificate as text self.cert = X509.load_cert_string(str(self.proxy)).as_text() response = super(CertificateRenderer, self).RenderAjax(request, response) return self.CallJavascript(response, "CertificateRenderer.Layout") class BlobArrayRenderer(semantic.RDFValueRenderer): """Render a blob array.""" classname = "BlobArray" name = "Array" layout_template = renderers.Template(""" {% for i in first %} {{i|escape}} {% endfor %} {% for i in array %} , {{i|escape}} {% endfor %} """) def Layout(self, _, response): array = [] for i in self.proxy: for field in ["integer", "string", "data", "boolean"]: if i.HasField(field): array.append(getattr(i, field)) break return self.RenderFromTemplate(self.layout_template, response, first=array[0:1], array=array[1:]) class AgeSelector(semantic.RDFValueRenderer): """Allows the user to select a different version for viewing objects.""" layout_template = renderers.Template(""" <img src=static/images/window-duplicate.png class='grr-icon version-selector'> <span age='{{this.int}}'><nobr>{{this.proxy|escape}}</nobr></span> """) def Layout(self, request, response): self.int = int(self.proxy or 0) return super(AgeSelector, self).Layout(request, response) class AgeRenderer(AgeSelector): classname = "RDFDatetime" layout_template = renderers.Template(""" <span age='{{this.int}}'><nobr>{{this.proxy|escape}}</nobr></span> """) class AbstractFileTable(renderers.TableRenderer): """A table that displays the content of a directory. Listening Javascript Events: - tree_select(aff4_path) - A selection event on the tree informing us of the tree path. We re-layout the entire table on this event to show the directory listing of aff4_path. Generated Javascript Events: - file_select(aff4_path, age) - The full AFF4 path for the file in the directory which is selected. Age is the latest age we wish to see. Internal State: - client_id. 
""" layout_template = (renderers.TableRenderer.layout_template + """ <div id="version_selector_dialog_{{unique|escape}}" class="version-selector-dialog modal wide-modal high-modal"></div> """) toolbar = None # Toolbar class to render above table. content_cache = None post_parameters = ["aff4_path"] root_path = "/" # Paths will all be under this path. # This can restrict the view to only certain types of objects. It should be a # list of types to show. visible_types = None def __init__(self, **kwargs): super(AbstractFileTable, self).__init__(**kwargs) if AbstractFileTable.content_cache is None: AbstractFileTable.content_cache = utils.TimeBasedCache() def RenderAjax(self, request, response): response = super(AbstractFileTable, self).RenderAjax(request, response) return self.CallJavascript(response, "AbstractFileTable.RenderAjax") def Layout(self, request, response): """Populate the table state with the request.""" # Draw the toolbar first if self.toolbar: tb_cls = renderers.Renderer.classes[self.toolbar] tb_cls().Layout(request, response) response = super(AbstractFileTable, self).Layout(request, response) return self.CallJavascript(response, "AbstractFileTable.Layout", renderer=self.__class__.__name__, client_id=self.state.get("client_id", "")) def BuildTable(self, start_row, end_row, request): """Populate the table.""" # Default sort direction sort = request.REQ.get("sort", "Name:asc") try: reverse_sort = sort.split(":")[1] == "desc" except IndexError: reverse_sort = False filter_term = request.REQ.get("filter") aff4_path = request.REQ.get("aff4_path", self.root_path) urn = rdfvalue.RDFURN(aff4_path) filter_string = None if filter_term: column, regex = filter_term.split(":", 1) escaped_regex = utils.EscapeRegex(aff4_path + "/") # The start anchor refers only to this directory. 
if regex.startswith("^"): escaped_regex += utils.EscapeRegex(regex[1:]) else: escaped_regex += ".*" + utils.EscapeRegex(regex) filter_string = "subject matches '%s'" % escaped_regex # For now we just list the directory try: key = utils.SmartUnicode(urn) if filter_string: key += ":" + filter_string # Open the directory as a directory. directory_node = aff4.FACTORY.Open(urn, token=request.token).Upgrade( "VFSDirectory") if not directory_node: raise IOError() key += str(directory_node.Get(directory_node.Schema.LAST)) key += ":" + str(request.token) try: children = self.content_cache.Get(key) except KeyError: # Only show the direct children. children = sorted(directory_node.Query(filter_string=filter_string, limit=100000)) # Filter the children according to types. if self.visible_types: children = [x for x in children if x.__class__.__name__ in self.visible_types] self.content_cache.Put(key, children) try: self.message = "Directory Listing '%s' was taken on %s" % ( aff4_path, directory_node.Get(directory_node.Schema.TYPE.age)) except AttributeError: pass except IOError: children = [] children.sort(reverse=reverse_sort) row_index = start_row # Make sure the table knows how large it is for paging. self.size = len(children) self.columns[1].base_path = urn for fd in children[start_row:end_row]: # We use the timestamp on the TYPE as a proxy for the last update time # of this object - its only an estimate. fd_type = fd.Get(fd.Schema.TYPE) if fd_type: self.AddCell(row_index, "Age", rdfvalue.RDFDatetime(fd_type.age)) self.AddCell(row_index, "Name", fd.urn) # Add the fd to all the columns for column in self.columns: # This sets AttributeColumns directly from their fd. 
if isinstance(column, semantic.AttributeColumn): column.AddRowFromFd(row_index, fd) if "Container" in fd.behaviours: self.AddCell(row_index, "Icon", dict(icon="directory", description="Directory")) else: self.AddCell(row_index, "Icon", dict(icon="file", description="File Like Object")) row_index += 1 if row_index > end_row: return class FileTable(AbstractFileTable): """A table that displays the content of a directory. Listening Javascript Events: - tree_select(aff4_path) - A selection event on the tree informing us of the tree path. We re-layout the entire table on this event to show the directory listing of aff4_path. Generated Javascript Events: - file_select(aff4_path, age) - The full AFF4 path for the file in the directory which is selected. Age is the latest age we wish to see. Internal State: - client_id. """ root_path = None # The root will be dynamically set to the client path. toolbar = "Toolbar" context_help_url = "user_manual.html#_listing_the_virtual_filesystem" def __init__(self, **kwargs): super(FileTable, self).__init__(**kwargs) self.AddColumn(semantic.RDFValueColumn( "Icon", renderer=semantic.IconRenderer, width="40px")) self.AddColumn(semantic.RDFValueColumn( "Name", renderer=semantic.SubjectRenderer, sortable=True, width="20%")) self.AddColumn(semantic.AttributeColumn("type", width="10%")) self.AddColumn(semantic.AttributeColumn("size", width="10%")) self.AddColumn(semantic.AttributeColumn("stat.st_size", width="15%")) self.AddColumn(semantic.AttributeColumn("stat.st_mtime", width="15%")) self.AddColumn(semantic.AttributeColumn("stat.st_ctime", width="15%")) self.AddColumn(semantic.RDFValueColumn( "Age", renderer=AgeSelector, width="15%")) def Layout(self, request, response): """Populate the table state with the request.""" self.state["client_id"] = client_id = request.REQ.get("client_id") self.root_path = client_id return super(FileTable, self).Layout(request, response) def BuildTable(self, start_row, end_row, request): client_id = 
request.REQ.get("client_id") self.root_path = client_id return super(FileTable, self).BuildTable(start_row, end_row, request) class FileSystemTree(renderers.TreeRenderer): """A FileSystem navigation Tree. Generated Javascript Events: - tree_select(aff4_path) - The full aff4 path for the branch which the user selected. Internal State: - client_id: The client this tree is showing. - aff4_root: The aff4 node which forms the root of this tree. """ # Flows are special children which confuse users when seen, so we remove them # from the tree. Note that they are still visible in the table. hidden_branches = ["/flows"] def Layout(self, request, response): self.state["client_id"] = client_id = request.REQ.get("client_id") self.state["aff4_root"] = request.REQ.get("aff4_root", client_id) response = super(FileSystemTree, self).Layout(request, response) return self.CallJavascript(response, "FileSystemTree.Layout") def RenderBranch(self, path, request): """Renders tree leafs for filesystem path.""" client_id = request.REQ["client_id"] aff4_root = rdfvalue.RDFURN(request.REQ.get("aff4_root", client_id)) # Path is relative to the aff4 root specified. urn = aff4_root.Add(path) try: # Open the client directory = aff4.FACTORY.Open(urn, token=request.token).Upgrade( "VFSDirectory") children = [ch for ch in directory.OpenChildren(limit=100000) if "Container" in ch.behaviours] try: self.message = "Directory %s Last retrieved %s" % ( urn, directory.Get(directory.Schema.TYPE).age) except AttributeError: pass for child in sorted(children): self.AddElement(child.urn.RelativeName(urn)) except IOError as e: self.message = "Error fetching %s: %s" % (urn, e) class RecursiveRefreshDialog(renderers.ConfirmationDialogRenderer): """Dialog that allows user to recursively update directories.""" post_parameters = ["aff4_path"] header = "Recursive Refresh" proceed_button_title = "Refresh!" 
content_template = renderers.Template(""" {{this.recursive_refresh_form|safe}} """) ajax_template = renderers.Template(""" <p class="text-info">Refresh started successfully!</p> """) def Layout(self, request, response): args = rdfvalue.RecursiveListDirectoryArgs() self.recursive_refresh_form = forms.SemanticProtoFormRenderer( args, supressions=["pathspec"]).RawHTML(request) return super(RecursiveRefreshDialog, self).Layout(request, response) def RenderAjax(self, request, response): aff4_path = rdfvalue.RDFURN(request.REQ.get("aff4_path")) args = forms.SemanticProtoFormRenderer( rdfvalue.RecursiveListDirectoryArgs()).ParseArgs(request) fd = aff4.FACTORY.Open(aff4_path, aff4_type="AFF4Volume", token=request.token) args.pathspec = fd.real_pathspec flow.GRRFlow.StartFlow(client_id=aff4_path.Split()[0], flow_name="RecursiveListDirectory", args=args, notify_to_user=True, token=request.token) return self.RenderFromTemplate(self.ajax_template, response) class Toolbar(renderers.TemplateRenderer): """A navigation enhancing toolbar. Listening Javascript Events: - AttributeUpdated(aff4_path, attribute): This event is fired then the aff4_path has updated. If the content of this event have changed, we emit the tree_select and file_select events to force the table to redraw. Generated Javascript Events: - file_select(aff4_path), tree_select(aff4_path) are fired when the buttons are clicked. Internal State: - aff4_path: The path we are viewing now in the table. """ layout_template = renderers.Template(""" <div class="navbar navbar-default"> <div class="navbar-inner"> <div class="navbar-form pull-right"> <button class="btn btn-default" id='refresh_{{unique|escape}}' name="Refresh" title='Refresh this directory listing.'> <img src='/static/images/stock_refresh.png' class="toolbar_icon" /> </button> <button class="btn btn-default" id='recursive_refresh_{{unique|escape}}' title='Refresh this directory listing.' 
style='position: relative' name="RecursiveRefresh" data-toggle="modal" data-target="#recursive_refresh_dialog_{{unique|escape}}"> <img src='/static/images/stock_refresh.png' class="toolbar_icon" /> <span style='position: absolute; left: 23px; top: 5px; font-weight: bold; font-size: 18px; -webkit-text-stroke: 1px #000; color: #fff'>R</span> </button> <button class="btn btn-default" id='rweowned' title='Is this machine pwned?'> <img src='/static/images/stock_dialog_question.png' class="toolbar_icon" /> </button> </div> <ul class="breadcrumb"> {% for path, fullpath, fullpath_id, i, last in this.paths %} <li {% if forloop.last %}class="active"{% endif %}> {% if forloop.last %} {{path|escape}} {% else %} <a id="path_{{i|escape}}">{{path|escape}}</a> {% endif %} </li> {% endfor %} <div class="clearfix"></div> </ul> </div> </div> <div id="refresh_action" class="hide"></div> <div id="rweowned_dialog" class="modal"></div> <div id="recursive_refresh_dialog_{{unique|escape}}" class="modal" tabindex="-1" role="dialog" aria-hidden="true"> </div> """) def Layout(self, request, response): """Render the toolbar.""" self.state["client_id"] = client_id = request.REQ.get("client_id") self.state["aff4_path"] = aff4_path = request.REQ.get( "aff4_path", client_id) client_urn = rdfvalue.ClientURN(client_id) self.paths = [("/", client_urn, "_", 0)] for path in rdfvalue.RDFURN(aff4_path).Split()[1:]: previous = self.paths[-1] fullpath = previous[1].Add(path) self.paths.append((path, fullpath, renderers.DeriveIDFromPath( fullpath.RelativeName(client_urn)), previous[3] + 1)) response = super(Toolbar, self).Layout(request, response) return self.CallJavascript(response, "Toolbar.Layout", aff4_path=utils.SmartUnicode(aff4_path), paths=self.paths) class UpdateAttribute(renderers.TemplateRenderer): """Reloads a directory listing from client. The renderer will launch the flow in the layout method, and then call its render method every few seconds to check if the flow is complete. 
Post Parameters: - aff4_path: The aff4 path to update the attribute for. - aff4_type: If provided, the aff4 object will be upgraded to this type before updating. - attribute: The attribute name to update. Generated Javascript Events: - AttributeUpdated(aff4_path, attribute) - When the flow is complete we emit this event. """ # Number of ms to wait poll_time = 1000 def ParseRequest(self, request): """Parses parameters from the request.""" self.aff4_path = request.REQ.get("aff4_path") self.flow_urn = request.REQ.get("flow_urn") # Refresh the contains attribute self.attribute_to_refresh = request.REQ.get("attribute", "CONTAINS") def Layout(self, request, response): """Render the toolbar.""" self.ParseRequest(request) try: client_id = rdfvalue.RDFURN(self.aff4_path).Split(2)[0] update_flow_urn = flow.GRRFlow.StartFlow( client_id=client_id, flow_name="UpdateVFSFile", token=request.token, vfs_file_urn=rdfvalue.RDFURN(self.aff4_path), attribute=self.attribute_to_refresh) update_flow = aff4.FACTORY.Open( update_flow_urn, aff4_type="UpdateVFSFile", token=request.token) self.flow_urn = str(update_flow.state.get_file_flow_urn) except IOError as e: raise IOError("Sorry. This path cannot be refreshed due to %s" % e) if self.flow_urn: response = super(UpdateAttribute, self).Layout(request, response) return self.CallJavascript(response, "UpdateAttribute.Layout", aff4_path=self.aff4_path, flow_urn=self.flow_urn, attribute_to_refresh=self.attribute_to_refresh, poll_time=self.poll_time) def RenderAjax(self, request, response): """Continue polling as long as the flow is in flight.""" super(UpdateAttribute, self).RenderAjax(request, response) self.ParseRequest(request) # Check if the flow is still in flight. try: flow_obj = aff4.FACTORY.Open(self.flow_urn, token=request.token) complete = not flow_obj.GetRunner().IsRunning() except IOError: # Something went wrong, stop polling. 
complete = True if complete: return renderers.JsonResponse("1") class AFF4ReaderMixin(object): """A helper which reads a buffer from an AFF4 object. This is meant to be mixed in with the HexView and TextView renderers. """ def ReadBuffer(self, request, offset, length): """Renders the HexTable.""" # Allow derived classes to just set the urn directly self.aff4_path = request.REQ.get("aff4_path") self.age = request.REQ.get("age") if not self.aff4_path: return try: fd = aff4.FACTORY.Open(self.aff4_path, token=request.token, age=rdfvalue.RDFDatetime(self.age)) self.total_size = int(fd.Get(fd.Schema.SIZE)) except (IOError, TypeError, AttributeError): self.total_size = 0 return "" fd.Seek(offset) return fd.Read(length) class FileHexViewer(AFF4ReaderMixin, fileview_widgets.HexView): """A HexView renderer.""" class FileTextViewer(AFF4ReaderMixin, fileview_widgets.TextView): """A TextView renderer.""" class VirtualFileSystemView(renderers.Splitter): """This is the main view to browse files.""" behaviours = frozenset(["Host"]) order = 10 description = "Browse Virtual Filesystem" left_renderer = "FileSystemTree" top_right_renderer = "FileTable" bottom_right_renderer = "AFF4ObjectRenderer" class DownloadView(renderers.TemplateRenderer): """Renders a download page.""" # We allow a longer execution time here to be able to download large files. max_execution_time = 60 * 15 layout_template = renderers.Template(""" <h3>{{ this.path|escape }}</h3> <div id="{{ unique|escape }}_action" class="hide"></div> {% if this.hash %} Hash was {{ this.hash|escape }}. 
{% endif %} {% if this.file_exists %} As downloaded on {{ this.age|escape }}.<br> <p> <button id="{{ unique|escape }}_2" class="btn btn-default"> Download ({{this.size|escape}} bytes) </button> </p> <p>or download using command line export tool:</p> <pre> {{ this.export_command_str|escape }} </pre> <hr/> {% endif %} <button id="{{ unique|escape }}" class="btn btn-default"> Get a new Version </button> </div> """) error_template = renderers.Template(""" <div class="alert alert-danger alert-block"> <h4>Error!</h4> {{this.path|escape}} does not appear to be a file object. <p><em>{{this.error_message|escape}}</em></p> </div> """) bad_extensions = [".bat", ".cmd", ".exe", ".com", ".pif", ".py", ".pl", ".scr", ".vbs"] def Layout(self, request, response): """Present a download form.""" self.age = rdfvalue.RDFDatetime(request.REQ.get("age")) client_id = request.REQ.get("client_id") aff4_path = request.REQ.get("aff4_path", client_id) try: fd = aff4.FACTORY.Open(aff4_path, token=request.token, age=self.age) self.path = fd.urn self.hash = fd.Get(fd.Schema.HASH, None) self.size = fd.Get(fd.Schema.SIZE) # If data is available to read - we present the download button. self.file_exists = False try: if fd.Read(1): self.file_exists = True except (IOError, AttributeError): pass self.export_command_str = u" ".join([ config_lib.CONFIG["AdminUI.export_command"], "--username", utils.ShellQuote(request.token.username), "--reason", utils.ShellQuote(request.token.reason), "file", "--path", utils.ShellQuote(aff4_path), "--output", "."]) response = super(DownloadView, self).Layout(request, response) return self.CallJavascript(response, "DownloadView.Layout", aff4_path=aff4_path, client_id=client_id, age_int=int(self.age), file_exists=self.file_exists, renderer=self.__class__.__name__, reason=request.token.reason) except (AttributeError, IOError) as e: # Render the error template instead. 
self.error_message = e.message return renderers.TemplateRenderer.Layout(self, request, response, self.error_template) def Download(self, request, _): """Stream the file into the browser.""" # Open the client client_id = request.REQ.get("client_id") self.aff4_path = request.REQ.get("aff4_path", client_id) self.age = rdfvalue.RDFDatetime(request.REQ.get("age")) or aff4.NEWEST_TIME self.token = request.token # If set, we don't append .noexec to dangerous extensions. safe_extension = bool(request.REQ.get("safe_extension", 0)) if self.aff4_path: def Generator(): fd = aff4.FACTORY.Open(self.aff4_path, token=request.token, age=self.age) while True: data = fd.Read(1000000) if not data: break yield data filename = os.path.basename(utils.SmartStr(self.aff4_path)) if not safe_extension: for ext in self.bad_extensions: if filename.lower().endswith(ext): filename += ".noexec" response = http.HttpResponse(content=Generator(), content_type="binary/octet-stream") # This must be a string. response["Content-Disposition"] = ("attachment; filename=%s" % filename) return response class UploadView(renderers.TemplateRenderer): """Renders an upload page.""" post_parameters = ["tree_path"] upload_handler = "UploadHandler" layout_template = renderers.Template(""" {% if grr.state.tree_path %} <h3>Upload to {{ grr.state.tree_path|escape }}</h3> {% endif %} <form id="{{unique|escape}}_form" enctype="multipart/form-data"> <input class="btn btn-default btn-file" id="{{ unique|escape }}_file" type="file" name="uploadFile" /> </form> <button class="btn btn-default" id="{{ unique|escape }}_upload_button"> Upload </button> <br/><br/> <div id="{{ unique|escape }}_upload_results"/> <div id="{{ unique|escape }}_upload_progress"/> """) def Layout(self, request, response): response = super(UploadView, self).Layout(request, response) return self.CallJavascript(response, "UploadView.Layout", upload_handler=self.upload_handler, upload_state=self.state) class UploadHandler(renderers.TemplateRenderer): 
"""Handles an uploaded file.""" # We allow a longer execution time here to be able to upload large files. max_execution_time = 60 * 2 storage_path = "aff4:/config" error_template = renderers.Template(""" Error: {{this.error|escape}}. """) success_template = renderers.Template(""" Success: File uploaded to {{this.dest_path|escape}}. """) def RenderAjax(self, request, response): """Store the file on the server.""" super(UploadHandler, self).RenderAjax(request, response) try: self.uploaded_file = request.FILES.items()[0][1] self.dest_path, aff4_type = self.GetFilePath(request) self.ValidateFile() dest_file = aff4.FACTORY.Create(self.dest_path, aff4_type=aff4_type, token=request.token) for chunk in self.uploaded_file.chunks(): dest_file.Write(chunk) dest_file.Close() return super(UploadHandler, self).Layout(request, response, self.success_template) except (IOError, IndexError) as e: self.error = e return super(UploadHandler, self).Layout(request, response, self.error_template) def GetFilePath(self, unused_request): """Get the path to write the file to and aff4 type as a tuple.""" path = rdfvalue.RDFURN(self.storage_path).Add(self.uploaded_file.name) return path, "VFSFile" def ValidateFile(self): """Check if a file matches what we expected to be uploaded. Raises: IOError: On validation failure. """ if self.uploaded_file.size < 100: raise IOError("File is too small.") class AFF4Stats(renderers.TemplateRenderer): """Show stats about the currently selected AFF4 object. Post Parameters: - aff4_path: The aff4 path to update the attribute for. - age: The version of the AFF4 object to display. """ # This renderer applies to this AFF4 type name = "Stats" css_class = "" historical_renderer = "HistoricalView" # If specified, only these attributes will be shown. 
attributes_to_show = None layout_template = renderers.Template(""" <div class="container-fluid"> <div class="row horizontally-padded"> <div id="{{unique|escape}}" class="{{this.css_class}}"> <h3>{{ this.path|escape }} @ {{this.age|escape}}</h3> <table id='{{ unique|escape }}' class="table table-condensed table-bordered table-fullwidth fixed-columns"> <colgroup> <col style="width: 20ex" /> <col style="width: 100%" /> <col style="width: 20ex" /> </colgroup> <thead> <tr> <th class="ui-state-default">Attribute</th> <th class="ui-state-default">Value</th> <th class="ui-state-default">Age</th> </tr> </thead> <tbody> {% for name, attributes in this.classes %} <tr> <td colspan=3 class="grr_aff4_type_header"><b>{{ name|escape }}</b></td> </tr> {% for attribute, description, value, age, multi in attributes %} <tr> <td class='attribute_opener' attribute="{{attribute|escape}}"> {% if multi %} <ins class='fg-button ui-icon ui-icon-plus'/> {% endif %} <b title='{{ description|escape }}'>{{ attribute|escape }}</b> </td> <td> <div class="default_view">{{ value|safe }}</div> <div id="content_{{unique|escape}}_{{attribute|escape}}" class="historical_view"></div> </td> <td><div class='non-breaking'>{{ age|escape }}</div></td> </tr> {% endfor %} {% endfor %} </tbody> </table> </div> </div> </div> """) def Layout(self, request, response, client_id=None, aff4_path=None, age=None): """Introspect the Schema for each object.""" # Allow derived classes to just set the client_id/aff4_path/age directly self.client_id = client_id or request.REQ.get("client_id") self.aff4_path = aff4_path or request.REQ.get("aff4_path") self.age = request.REQ.get("age") if self.age is None: self.age = rdfvalue.RDFDatetime().Now() else: self.age = rdfvalue.RDFDatetime(self.age) if not self.aff4_path: return try: self.fd = aff4.FACTORY.Open(self.aff4_path, token=request.token, age=age or self.age) self.classes = self.RenderAFF4Attributes(self.fd, request) self.state["path"] = self.path = 
utils.SmartStr(self.fd.urn) except IOError: self.path = "Unable to open %s" % self.urn self.classes = [] response = super(AFF4Stats, self).Layout(request, response) return self.CallJavascript(response, "AFF4Stats.Layout", historical_renderer=self.historical_renderer, historical_renderer_state=self.state) def RenderAFF4Attributes(self, fd, request=None): """Returns attributes rendered by class.""" classes = [] attribute_names = set() for flow_cls in fd.__class__.__mro__: if not hasattr(flow_cls, "SchemaCls"): continue schema = flow_cls.SchemaCls attributes = [] for name, attribute in sorted(schema.__dict__.items()): if not isinstance(attribute, aff4.Attribute): continue # If we already showed this attribute we move on if attribute.predicate in attribute_names: continue values = list(fd.GetValuesForAttribute(attribute)) multi = len(values) > 1 if values: attribute_names.add(attribute.predicate) value_renderer = semantic.FindRendererForObject(values[0]) if self.attributes_to_show and name not in self.attributes_to_show: continue attributes.append((name, attribute.description, # This is assumed to be in safe RawHTML and not # escaped. value_renderer.RawHTML(request), rdfvalue.RDFDatetime(values[0].age), multi)) if attributes: classes.append((flow_cls.__name__, attributes)) return classes class HostInformation(AFF4Stats): """View information about the host.""" description = "Host Information" behaviours = frozenset(["Host"]) order = 0 css_class = "TableBody" def Layout(self, request, response, client_id=None): client_id = client_id or request.REQ.get("client_id") urn = rdfvalue.ClientURN(client_id) # This verifies we have auth for deep client paths. If this raises, we # force the auth screen. 
aff4.FACTORY.Open(rdfvalue.RDFURN(urn).Add("CheckAuth"), token=request.token, mode="r") return super(HostInformation, self).Layout(request, response, client_id=client_id, aff4_path=urn) class AFF4ObjectRenderer(renderers.TemplateRenderer): """This renderer delegates to the correct subrenderer based on the request. Listening Javascript Events: - file_select(aff4_path, age) - A selection event on the file table informing us of a new aff4 file to show. We redraw the entire bottom right side using a new renderer. """ layout_template = renderers.Template(""" <div id="{{unique|escape}}"></div> """) # When a message appears on this queue we choose a new renderer. event_queue = "file_select" def Layout(self, request, response): """Produces a layout as returned by the subrenderer.""" # This is the standard renderer for now. subrenderer = FileViewTabs client_id = request.REQ.get("client_id") aff4_path = request.REQ.get("aff4_path", client_id) if not aff4_path: raise RuntimeError("No valid aff4 path or client id provided") fd = aff4.FACTORY.Open(aff4_path, token=request.token) fd_type = fd.Get(fd.Schema.TYPE) if fd_type: for cls in self.classes.values(): if getattr(cls, "aff4_type", None) == fd_type: subrenderer = cls subrenderer(fd).Layout(request, response) response = super(AFF4ObjectRenderer, self).Layout(request, response) return self.CallJavascript(response, "AFF4ObjectRenderer.Layout", event_queue=self.event_queue, renderer=self.__class__.__name__) class FileViewTabs(renderers.TabLayout): """Show a tabset to inspect the selected file. Internal State: - aff4_path - The AFF4 object we are currently showing. - age: The version of the AFF4 object to display. 
""" FILE_TAB_NAMES = ["Stats", "Download", "TextView", "HexView"] FILE_DELEGATED_RENDERERS = ["AFF4Stats", "DownloadView", "FileTextViewer", "FileHexViewer"] COLLECTION_TAB_NAMES = ["Stats", "Results", "Export"] COLLECTION_DELEGATED_RENDERERS = ["AFF4Stats", "RDFValueCollectionRenderer", "CollectionExportView"] fd = None def __init__(self, fd=None, **kwargs): self.fd = fd super(FileViewTabs, self).__init__(**kwargs) def DisableTabs(self): self.disabled = [tab_renderer for tab_renderer in self.delegated_renderers if tab_renderer != "AFF4Stats"] def Layout(self, request, response): """Check if the file is a readable and disable the tabs.""" client_id = request.REQ.get("client_id") self.aff4_path = request.REQ.get("aff4_path", client_id) self.age = request.REQ.get("age", rdfvalue.RDFDatetime().Now()) self.state = dict(aff4_path=self.aff4_path, age=int(self.age)) # By default we assume that we're dealing with a regular file, # so we show tabs for files. self.names = self.FILE_TAB_NAMES self.delegated_renderers = self.FILE_DELEGATED_RENDERERS try: if self.fd is not None: self.fd = aff4.FACTORY.Open(self.aff4_path, token=request.token) # If file is actually a collection, then show collections-related tabs. if isinstance(self.fd, aff4.RDFValueCollection): self.names = self.COLLECTION_TAB_NAMES self.delegated_renderers = self.COLLECTION_DELEGATED_RENDERERS # If collection doesn't have StatEntries or FileFinderResults, disable # the Export tab. if not CollectionExportView.IsCollectionExportable(self.fd, token=request.token): self.disabled = ["CollectionExportView"] if isinstance(self.fd, aff4.RekallResponseCollection): # Make a copy so this change is not permanent. 
self.delegated_renderers = self.delegated_renderers[:] self.delegated_renderers[1] = "RekallResponseCollectionRenderer" else: if not hasattr(self.fd, "Read"): self.DisableTabs() except IOError: self.DisableTabs() return super(FileViewTabs, self).Layout(request, response) class CollectionExportView(renderers.TemplateRenderer): """Displays export command to be used to export collection.""" layout_template = renderers.Template(""" <p>To download all the files referenced in the collection, you can use this command:</p> <pre> {{ this.export_command_str|escape }} </pre> <p><em>NOTE: You can optionally add <tt>--dump_client_info</tt> flag to dump client info in YAML format.</em></p> """) @staticmethod def IsCollectionExportable(collection_urn_or_obj, token=None): if isinstance(collection_urn_or_obj, aff4.RDFValueCollection): collection = collection_urn_or_obj else: collection = aff4.FACTORY.Create( collection_urn_or_obj, "RDFValueCollection", mode="r", token=token) if not collection: return False try: export.CollectionItemToAff4Path(collection[0]) except export.ItemNotExportableError: return False return True def Layout(self, request, response, aff4_path=None): aff4_path = aff4_path or request.REQ.get("aff4_path") self.export_command_str = " ".join([ config_lib.CONFIG["AdminUI.export_command"], "--username", utils.ShellQuote(request.token.username), "--reason", utils.ShellQuote(request.token.reason), "collection_files", "--path", utils.ShellQuote(aff4_path), "--output", "."]) return super(CollectionExportView, self).Layout(request, response) class RWeOwned(renderers.TemplateRenderer): """A magic 8 ball reply to the question - Are we Owned?""" layout_template = renderers.Template(""" <div class="modal-dialog"> <div class="modal-content"> <div class="modal-header"> <button type="button" class="close" data-dismiss="modal" aria-hidden="true"> x </button> <h3>Are we owned?</h3> </div> <div class="modal-body"> <p class="text-info"> {{this.choice|escape}} </div> </div> </div> 
""") def Layout(self, request, response): """Render a magic 8 ball easter-egg.""" options = u"""It is certain You were eaten by a Grue! 中国 got you!! All your bases are belong to us! Maybe it was the Russians? It is decidedly so Without a doubt Yes - definitely You may rely on it As I see it, yes Most likely Outlook good Signs point to yes Yes Reply hazy, try again Ask again later Better not tell you now Cannot predict now Concentrate and ask again Don't count on it My reply is no My sources say no Outlook not so good Very doubtful""".splitlines() self.choice = options[random.randint(0, len(options) - 1)] return super(RWeOwned, self).Layout(request, response) class HistoricalView(renderers.TableRenderer): """Show historical view for an attribute.""" def __init__(self, **kwargs): super(HistoricalView, self).__init__(**kwargs) self.AddColumn(semantic.RDFValueColumn("Age")) def Layout(self, request, response): """Add the columns to the table.""" self.AddColumn(semantic.RDFValueColumn(request.REQ.get("attribute"))) return super(HistoricalView, self).Layout(request, response) def BuildTable(self, start_row, end_row, request): """Populate the table with attribute values.""" attribute_name = request.REQ.get("attribute") if attribute_name is None: return urn = request.REQ.get("urn") client_id = request.REQ.get("client_id") path = request.REQ.get("path") self.AddColumn(semantic.RDFValueColumn(attribute_name)) fd = aff4.FACTORY.Open(urn or path or client_id, token=request.token, age=aff4.ALL_TIMES) self.BuildTableFromAttribute(attribute_name, fd, start_row, end_row) def BuildTableFromAttribute(self, attribute_name, fd, start_row, end_row): """Build the table for the attribute.""" attribute = getattr(fd.Schema, attribute_name) additional_rows = False i = 0 for i, value in enumerate(fd.GetValuesForAttribute(attribute)): if i > end_row: additional_rows = True break if i < start_row: continue self.AddCell(i, "Age", rdfvalue.RDFDatetime(value.age)) self.AddCell(i, attribute_name, 
value) self.size = i + 1 return additional_rows class VersionSelectorDialog(renderers.TableRenderer): """Renders the version available for this object.""" layout_template = renderers.Template(""" <div class="modal-dialog"> <div class="modal-content"> <div class="modal-header"> <button type="button" class="close" data-dismiss="modal" aria-hidden="true"> x </button> <h4>Versions of {{this.state.aff4_path}}</h4> </div> <div class="modal-body"> <div class="padded"> """) + renderers.TableRenderer.layout_template + """ </div> </div> <div class="modal-footer"> <button class="btn btn-default" data-dismiss="modal" name="Ok" aria-hidden="true">Ok</button> </div> </div> </div> """ def __init__(self, **kwargs): super(VersionSelectorDialog, self).__init__(**kwargs) self.AddColumn(semantic.RDFValueColumn("Age")) self.AddColumn(semantic.RDFValueColumn("Type")) def Layout(self, request, response): """Populates the table state with the request.""" self.state["aff4_path"] = request.REQ.get("aff4_path") response = super(VersionSelectorDialog, self).Layout(request, response) return self.CallJavascript(response, "VersionSelectorDialog.Layout", aff4_path=self.state["aff4_path"]) def BuildTable(self, start_row, end_row, request): """Populates the table with attribute values.""" aff4_path = request.REQ.get("aff4_path") if aff4_path is None: return fd = aff4.FACTORY.Open(aff4_path, age=aff4.ALL_TIMES, token=request.token) i = 0 for i, type_attribute in enumerate( fd.GetValuesForAttribute(fd.Schema.TYPE)): if i < start_row or i > end_row: continue self.AddCell(i, "Age", rdfvalue.RDFDatetime(type_attribute.age)) self.AddCell(i, "Type", type_attribute)<|fim▁end|>
(23, 1): "tcp6",
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># coding: utf-8 # Copyright (c) Pymatgen Development Team.<|fim▁hole|> """ This package contains various command line wrappers to programs used in pymatgen that do not have Python equivalents. """<|fim▁end|>
# Distributed under the terms of the MIT License.
<|file_name|>VelocityTrim.js<|end_file_name|><|fim▁begin|>import React from 'react' import PropTypes from 'prop-types' import VelocityTrimControls from './VelocityTrimControls' import Instrument from '../../images/Instrument' import styles from '../../styles/velocityTrim' import { trimShape } from '../../reducers/velocityTrim' const handleKeyDown = (event, item, bank, userChangedTrimEnd) => { let delta = 0 event.nativeEvent.preventDefault() switch (event.key) { case 'ArrowUp': delta = 1 break case 'ArrowDown': delta = -1 break case 'PageUp': delta = 5 break case 'PageDown': delta = -5 break case 'Enter': delta = 100 break case 'Escape': delta = -100 break default: break } if (delta !== 0) { delta += item.trim if (delta < 0) delta = 0 if (delta > 100) delta = 100 userChangedTrimEnd(item.note, delta, bank) } } const VelocityTrim = (props) => { const { item, bank, selected, playNote, selectTrim, userChangedTrimEnd } = props const { note, trim, group, name } = item return ( <section tabIndex={note} onKeyDown={e => handleKeyDown(e, item, bank, userChangedTrimEnd)} onMouseUp={() => (selected ? null : selectTrim(note))} className={selected ? styles.selected : ''} role="presentation" > <div className={styles.header} onMouseUp={() => playNote(note, Math.round(127 * (trim / 100)), bank)} role="button" tabIndex={note} > <div>{note}</div> <div>{group}</div> <div>{Instrument(group)}</div> </div> <div className={styles.noteName} title={name} > {name} </div> <VelocityTrimControls {...props} /><|fim▁hole|>VelocityTrim.propTypes = { item: trimShape.isRequired, selected: PropTypes.bool.isRequired, playNote: PropTypes.func.isRequired, selectTrim: PropTypes.func.isRequired, userChangedTrimEnd: PropTypes.func.isRequired, bank: PropTypes.number.isRequired, } export default VelocityTrim<|fim▁end|>
</section> ) }
<|file_name|>test_context.py<|end_file_name|><|fim▁begin|># Copyright 2012 Managed I.T. # # Author: Kiall Mac Innes <[email protected]> # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from unittest import mock import testtools from designate import context from designate import exceptions from designate import policy import designate.tests class TestDesignateContext(designate.tests.TestCase): def test_deepcopy(self): orig = context.DesignateContext( user_id='12345', project_id='54321' ) copy = orig.deepcopy() self.assertEqual(orig.to_dict(), copy.to_dict()) def test_tsigkey_id_override(self): orig = context.DesignateContext( tsigkey_id='12345', project_id='54321' ) copy = orig.to_dict() self.assertEqual('TSIG:12345 54321 - - -', copy['user_identity']) def test_elevated(self): ctxt = context.DesignateContext( user_id='12345', project_id='54321' ) admin_ctxt = ctxt.elevated() self.assertFalse(ctxt.is_admin) self.assertTrue(admin_ctxt.is_admin) self.assertEqual(0, len(ctxt.roles)) <|fim▁hole|> def test_elevated_with_show_deleted(self): ctxt = context.DesignateContext( user_id='12345', project_id='54321' ) admin_ctxt = ctxt.elevated(show_deleted=True) self.assertTrue(admin_ctxt.show_deleted) def test_all_tenants(self): ctxt = context.DesignateContext( user_id='12345', project_id='54321' ) admin_ctxt = ctxt.elevated() admin_ctxt.all_tenants = True self.assertFalse(ctxt.is_admin) self.assertTrue(admin_ctxt.is_admin) self.assertTrue(admin_ctxt.all_tenants) def 
test_all_tenants_policy_failure(self): ctxt = context.DesignateContext( user_id='12345', project_id='54321' ) with testtools.ExpectedException(exceptions.Forbidden): ctxt.all_tenants = True def test_edit_managed_records(self): ctxt = context.DesignateContext( user_id='12345', project_id='54321' ) admin_ctxt = ctxt.elevated() admin_ctxt.edit_managed_records = True self.assertFalse(ctxt.is_admin) self.assertTrue(admin_ctxt.is_admin) self.assertTrue(admin_ctxt.edit_managed_records) def test_edit_managed_records_failure(self): ctxt = context.DesignateContext( user_id='12345', project_id='54321' ) with testtools.ExpectedException(exceptions.Forbidden): ctxt.edit_managed_records = True @mock.patch.object(policy, 'check') def test_sudo(self, mock_policy_check): ctxt = context.DesignateContext( user_id='12345', project_id='old_project' ) ctxt.sudo('new_project') self.assertTrue(mock_policy_check.called) self.assertEqual('new_project', ctxt.project_id) self.assertEqual('old_project', ctxt.original_project_id)<|fim▁end|>
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>from __future__ import print_function import linecache import sys import numpy from six import iteritems from theano import config from theano.compat import OrderedDict, PY3 def simple_extract_stack(f=None, limit=None, skips=[]): """This is traceback.extract_stack from python 2.7 with this change: - Comment the update of the cache. - Skip internal stack trace level. The update of the cache call os.stat to verify is the cache is up to date. This take too much time on cluster. limit - The number of stack level we want to return. If None, mean all what we can. skips - partial path of stack level we don't want to keep and count. When we find one level that isn't skipped, we stop skipping. """ if f is None: try: raise ZeroDivisionError except ZeroDivisionError: f = sys.exc_info()[2].tb_frame.f_back if limit is None: if hasattr(sys, 'tracebacklimit'): limit = sys.tracebacklimit trace = [] n = 0 while f is not None and (limit is None or n < limit): lineno = f.f_lineno co = f.f_code filename = co.co_filename name = co.co_name # linecache.checkcache(filename) line = linecache.getline(filename, lineno, f.f_globals) if line: line = line.strip() else: line = None f = f.f_back # Just skip inner level if len(trace) == 0: rm = False for p in skips: # Julian: I added the 'tests' exception together with # Arnaud. Otherwise, we'd lose the stack trace during # in our test cases (e.g. in test_opt.py). We're not # sure this is the right way to do it though. if p in filename and 'tests' not in filename: rm = True break if rm: continue trace.append((filename, lineno, name, line)) n = n + 1 trace.reverse() return trace def add_tag_trace(thing, user_line=None): """ Add tag.trace to an node or variable. The argument is returned after being affected (inplace). Parameters ---------- thing The object where we add .tag.trace. user_line The max number of user line to keep. 
Notes ----- We alse use config.traceback.limit for the maximum number of stack level we look. """ if user_line is None: user_line = config.traceback.limit if user_line == -1: user_line = None skips = ["theano/tensor/", "theano\\tensor\\", "theano/compile/", "theano\\compile\\", "theano/gof/", "theano\\gof\\", "theano/scalar/basic.py", "theano\\scalar\\basic.py", "theano/sandbox/", "theano\\sandbox\\", "theano/scan_module/", "theano\\scan_module\\", "theano/sparse/", "theano\\sparse\\", "theano/typed_list/", "theano\\typed_list\\"] tr = simple_extract_stack(limit=user_line, skips=skips) # Different python version use different sementic for # limit. python 2.7 include the call to extrack_stack. The -1 get # rid of it. if tr: thing.tag.trace = [tr] else: thing.tag.trace = tr return thing def hashtype(self): t = type(self) return hash(t.__name__) ^ hash(t.__module__) # Object to mark that a parameter is undefined (useful in cases where # None is a valid value with defined semantics) undef = object() class MethodNotDefined(Exception): """ To be raised by functions defined as part of an interface. When the user sees such an error, it is because an important interface function has been left out of an implementation class. 
""" class object2(object): __slots__ = [] if 0: def __hash__(self): # this fixes silent-error-prone new-style class behavior if hasattr(self, '__eq__') or hasattr(self, '__cmp__'): raise TypeError("unhashable object: %s" % self) return id(self) def __ne__(self, other): return not self == other class scratchpad: def clear(self): self.__dict__.clear() def __update__(self, other): self.__dict__.update(other.__dict__) return self <|fim▁hole|> def __str__(self): return "scratchpad" + str(self.__dict__) def __repr__(self): return "scratchpad" + str(self.__dict__) def info(self): print("<theano.gof.utils.scratchpad instance at %i>" % id(self)) for k, v in iteritems(self.__dict__): print(" %s: %s" % (k, v)) class D: def __init__(self, **d): self.__dict__.update(d) def memoize(f): """ Cache the return value for each tuple of arguments (which must be hashable). """ cache = {} def rval(*args, **kwargs): kwtup = tuple(kwargs.items()) key = (args, kwtup) if key not in cache: val = f(*args, **kwargs) cache[key] = val else: val = cache[key] return val return rval def deprecated(filename, msg=''): """ Decorator which will print a warning message on the first call. Use it like this:: @deprecated('myfile', 'do something different...') def fn_name(...) ... And it will print:: WARNING myfile.fn_name deprecated. do something different... """ def _deprecated(f): printme = [True] def g(*args, **kwargs): if printme[0]: print('WARNING: %s.%s deprecated. %s' % (filename, f.__name__, msg)) printme[0] = False return f(*args, **kwargs) return g return _deprecated def uniq(seq): """ Do not use set, this must always return the same value at the same index. If we just exchange other values, but keep the same pattern of duplication, we must keep the same order. 
""" # TODO: consider building a set out of seq so that the if condition # is constant time -JB return [x for i, x in enumerate(seq) if seq.index(x) == i] def difference(seq1, seq2): """ Returns all elements in seq1 which are not in seq2: i.e ``seq1\seq2``. """ try: # try to use O(const * len(seq1)) algo if len(seq2) < 4: # I'm guessing this threshold -JB raise Exception('not worth it') set2 = set(seq2) return [x for x in seq1 if x not in set2] except Exception: # maybe a seq2 element is not hashable # maybe seq2 is too short # -> use O(len(seq1) * len(seq2)) algo return [x for x in seq1 if x not in seq2] def to_return_values(values): if len(values) == 1: return values[0] else: return values def from_return_values(values): if isinstance(values, (list, tuple)): return values else: return [values] def toposort(prereqs_d): """ Sorts prereqs_d.keys() topologically. prereqs_d[x] contains all the elements that must come before x in the ordering. """ # all1 = set(prereqs_d.keys()) # all2 = set() # for x, y in iteritems(prereqs_d): # all2.update(y) # print all1.difference(all2) seq = [] done = set() postreqs_d = {} for x, prereqs in iteritems(prereqs_d): for prereq in prereqs: postreqs_d.setdefault(prereq, set()).add(x) next = set([k for k in prereqs_d if not prereqs_d[k]]) while next: bases = next next = set() for x in bases: done.add(x) seq.append(x) for x in bases: for postreq in postreqs_d.get(x, []): if not prereqs_d[postreq].difference(done): next.add(postreq) if len(prereqs_d) != len(seq): raise Exception("Cannot sort topologically: there might be cycles, " "prereqs_d does not have a key for each element or " "some orderings contain invalid elements.") return seq class Keyword: def __init__(self, name, nonzero=True): self.name = name self.nonzero = nonzero def __nonzero__(self): # Python 2.x return self.__bool__() def __bool__(self): # Python 3.x return self.nonzero def __str__(self): return "<%s>" % self.name def __repr__(self): return "<%s>" % self.name ABORT = 
Keyword("ABORT", False) RETRY = Keyword("RETRY", False) FAILURE = Keyword("FAILURE", False) simple_types = (int, float, str, bool, None.__class__, Keyword) ANY_TYPE = Keyword("ANY_TYPE") FALL_THROUGH = Keyword("FALL_THROUGH") def comm_guard(type1, type2): def wrap(f): old_f = f.__globals__[f.__name__] def new_f(arg1, arg2, *rest): if ((type1 is ANY_TYPE or isinstance(arg1, type1)) and (type2 is ANY_TYPE or isinstance(arg2, type2))): pass elif ((type1 is ANY_TYPE or isinstance(arg2, type1)) and (type2 is ANY_TYPE or isinstance(arg1, type2))): arg1, arg2 = arg2, arg1 else: return old_f(arg1, arg2, *rest) variable = f(arg1, arg2, *rest) if variable is FALL_THROUGH: return old_f(arg1, arg2, *rest) else: return variable new_f.__name__ = f.__name__ def typename(type): if isinstance(type, Keyword): return str(type) elif isinstance(type, (tuple, list)): return "(" + ", ".join([x.__name__ for x in type]) + ")" else: return type.__name__ new_f.__doc__ = (str(old_f.__doc__) + "\n" + ", ".join([typename(type) for type in (type1, type2)]) + "\n" + str(f.__doc__ or "")) return new_f return wrap def type_guard(type1): def wrap(f): old_f = f.__globals__[f.__name__] def new_f(arg1, *rest): if (type1 is ANY_TYPE or isinstance(arg1, type1)): variable = f(arg1, *rest) if variable is FALL_THROUGH: return old_f(arg1, *rest) else: return variable else: return old_f(arg1, *rest) new_f.__name__ = f.__name__ def typename(type): if isinstance(type, Keyword): return str(type) elif isinstance(type, (tuple, list)): return "(" + ", ".join([x.__name__ for x in type]) + ")" else: return type.__name__ new_f.__doc__ = (str(old_f.__doc__) + "\n" + ", ".join([typename(type) for type in (type1,)]) + "\n" + str(f.__doc__ or "")) return new_f return wrap def flatten(a): """ Recursively flatten tuple, list and set in a list. 
""" if isinstance(a, (tuple, list, set)): l = [] for item in a: l.extend(flatten(item)) return l else: return [a] def unique(x): return len(set(x)) == len(x) def hist(coll): counts = {} for elem in coll: counts[elem] = counts.get(elem, 0) + 1 return counts def give_variables_names(variables): """ Gives unique names to an iterable of variables. Modifies input. This function is idempotent. """ names = [var.name for var in variables] h = hist(names) def bad_var(var): return not var.name or h[var.name] > 1 for i, var in enumerate(filter(bad_var, variables)): var.name = (var.name or "") + "_%d" % i if not unique([str(v) for v in variables]): raise ValueError("Not all variables have unique names. Maybe you've " "named some of the variables identically") return variables def remove(predicate, coll): """ Return those items of collection for which predicate(item) is true. Examples -------- >>> def even(x): ... return x % 2 == 0 >>> remove(even, [1, 2, 3, 4]) [1, 3] """ return [x for x in coll if not predicate(x)] if PY3: import hashlib def hash_from_code(msg): # hashlib.md5() requires an object that supports buffer interface, # but Python 3 (unicode) strings don't. if isinstance(msg, str): msg = msg.encode() # Python 3 does not like module names that start with # a digit. return 'm' + hashlib.md5(msg).hexdigest() else: import hashlib def hash_from_code(msg): try: return hashlib.md5(msg).hexdigest() except TypeError: assert isinstance(msg, numpy.ndarray) return hashlib.md5(numpy.getbuffer(msg)).hexdigest() def hash_from_file(file_path): """ Return the MD5 hash of a file. """ return hash_from_code(open(file_path, 'rb').read()) def hash_from_dict(d): """ Work around the fact that dict are not hashable in python. This request that all object have a sorted order that depend only on the key of the object. We support only integer/float/string keys. Also, we transform values that are list into tuple as list are not hashable. 
Notes ----- Special case for OrderedDict, it use the order of the dict, so the key don't need to be sortable. """ if isinstance(d, OrderedDict): items = list(iteritems(d)) else: items = list(d.items()) items.sort() first_part = [k for k, v in items] second_part = [] for k, v in items: assert isinstance(k, (str, int, float)) if isinstance(v, (tuple, list)): second_part += [tuple(v)] else: second_part += [v] tuple_items = tuple(first_part + second_part + [d.__class__]) return hash(tuple_items)<|fim▁end|>
<|file_name|>ExternalSessionExpires.java<|end_file_name|><|fim▁begin|>/******************************************************************************* * Copyright 2019 Tremolo Security, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *******************************************************************************/ package com.tremolosecurity.proxy; /** * * @author mlb * Provides an interface to extend a session termination from an external source instead of the built in session variables */ public interface ExternalSessionExpires { /** * * @return The expiration date/time in standard java form (milliseconds since epoch) */ public long getExpires();<|fim▁hole|>}<|fim▁end|>
<|file_name|>OfeliaDebianVMConfigurator.py<|end_file_name|><|fim▁begin|>import shutil import os import jinja2 import string import subprocess import re from xen.provisioning.HdManager import HdManager from settings.settingsLoader import OXA_XEN_SERVER_KERNEL,OXA_XEN_SERVER_INITRD,OXA_DEBIAN_INTERFACES_FILE_LOCATION,OXA_DEBIAN_UDEV_FILE_LOCATION, OXA_DEBIAN_HOSTNAME_FILE_LOCATION, OXA_DEBIAN_SECURITY_ACCESS_FILE_LOCATION from utils.Logger import Logger class OfeliaDebianVMConfigurator: logger = Logger.getLogger() ''' Private methods ''' @staticmethod def __configureInterfacesFile(vm,iFile): #Loopback iFile.write("auto lo\niface lo inet loopback\n\n") #Interfaces for inter in vm.xen_configuration.interfaces.interface : if inter.ismgmt: #is a mgmt interface interfaceString = "auto "+inter.name+"\n"+\ "iface "+inter.name+" inet static\n"+\ "\taddress "+inter.ip +"\n"+\ "\tnetmask "+inter.mask+"\n" if inter.gw != None and inter.gw != "": interfaceString +="\tgateway "+inter.gw+"\n" if inter.dns1 != None and inter.dns1 != "": interfaceString+="\tdns-nameservers "+inter.dns1 if inter.dns2 != None and inter.dns2 != "": interfaceString+=" "+inter.dns2 interfaceString +="\n\n" iFile.write(interfaceString) else: #is a data interface iFile.write("auto "+inter.name+"\n\n") @staticmethod def __configureUdevFile(vm,uFile): for inter in vm.xen_configuration.interfaces.interface: uFile.write('SUBSYSTEM=="net", ACTION=="add", DRIVERS=="?*", ATTR{address}=="'+inter.mac+'", ATTR{dev_id}=="0x0", ATTR{type}=="1", KERNEL=="eth*", NAME="'+inter.name+'"\n') @staticmethod def __configureHostname(vm,hFile): hFile.write(vm.name) @staticmethod def __createParavirtualizationFileHdConfigFile(vm,env): template_name = "paraVirtualizedFileHd.pt" template = env.get_template(template_name) #Set vars&render output = template.render( kernelImg=OXA_XEN_SERVER_KERNEL, initrdImg=OXA_XEN_SERVER_INITRD, hdFilePath=HdManager.getHdPath(vm), swapFilePath=HdManager.getSwapPath(vm), vm=vm) #write file cfile = 
open(HdManager.getConfigFilePath(vm),'w') cfile.write(output) cfile.close() ''' Public methods ''' @staticmethod def getIdentifier(): return OfeliaDebianVMConfigurator.__name__ @staticmethod<|fim▁hole|> try: #Backup current files shutil.copy(path+OXA_DEBIAN_INTERFACES_FILE_LOCATION,path+OXA_DEBIAN_INTERFACES_FILE_LOCATION+".bak") shutil.copy(path+OXA_DEBIAN_UDEV_FILE_LOCATION,path+OXA_DEBIAN_UDEV_FILE_LOCATION+".bak") except Exception as e: pass with open(path+OXA_DEBIAN_INTERFACES_FILE_LOCATION,'w') as openif: OfeliaDebianVMConfigurator.__configureInterfacesFile(vm,openif) with open(path+OXA_DEBIAN_UDEV_FILE_LOCATION,'w') as openudev: OfeliaDebianVMConfigurator.__configureUdevFile(vm,openudev) except Exception as e: OfeliaDebianVMConfigurator.logger.error(str(e)) raise Exception("Could not configure interfaces or Udev file") @staticmethod def _configureLDAPSettings(vm,path): try: file = open(path+OXA_DEBIAN_SECURITY_ACCESS_FILE_LOCATION, "r") text = file.read() file.close() file = open(path+OXA_DEBIAN_SECURITY_ACCESS_FILE_LOCATION, "w") #Scape spaces and tabs projectName = string.replace(vm.project_name,' ','_') projectName = string.replace(projectName,'\t','__') file.write(text.replace("__projectId","@proj_"+vm.project_id+"_"+projectName)) file.close() except Exception as e: OfeliaDebianVMConfigurator.logger.error("Could not configure LDAP file!! - "+str(e)) @staticmethod def _configureHostName(vm,path): try: with open(path+OXA_DEBIAN_HOSTNAME_FILE_LOCATION,'w') as openhost: OfeliaDebianVMConfigurator.__configureHostname(vm, openhost) except Exception as e: OfeliaDebianVMConfigurator.logger.error("Could not configure hostname;skipping.. 
- "+str(e)) @staticmethod def _configureSSHServer(vm,path): try: OfeliaDebianVMConfigurator.logger.debug("Regenerating SSH keys...\n Deleting old keys...") subprocess.check_call("rm -f "+path+"/etc/ssh/ssh_host_*", shell=True, stdout=None) #subprocess.check_call("chroot "+path+" dpkg-reconfigure openssh-server ", shell=True, stdout=None) OfeliaDebianVMConfigurator.logger.debug("Creating SSH1 key; this may take some time...") subprocess.check_call("ssh-keygen -q -f "+path+"/etc/ssh/ssh_host_key -N '' -t rsa1", shell=True, stdout=None) OfeliaDebianVMConfigurator.logger.debug("Creating SSH2 RSA key; this may take some time...") subprocess.check_call("ssh-keygen -q -f "+path+"/etc/ssh/ssh_host_rsa_key -N '' -t rsa", shell=True, stdout=None) OfeliaDebianVMConfigurator.logger.debug("Creating SSH2 DSA key; this may take some time...") subprocess.check_call("ssh-keygen -q -f "+path+"/etc/ssh/ssh_host_dsa_key -N '' -t dsa", shell=True, stdout=None) except Exception as e: OfeliaDebianVMConfigurator.logger.error("Fatal error; could not regenerate SSH keys. 
Aborting to prevent VM to be unreachable..."+str(e)) raise e #Public methods @staticmethod def createVmConfigurationFile(vm): #get env template_dirs = [] template_dirs.append(os.path.join(os.path.dirname(__file__), 'templates/')) env = jinja2.Environment(loader=jinja2.FileSystemLoader(template_dirs)) if vm.xen_configuration.hd_setup_type == "file-image" and vm.xen_configuration.virtualization_setup_type == "paravirtualization" : OfeliaDebianVMConfigurator.__createParavirtualizationFileHdConfigFile(vm,env) else: raise Exception("type of file or type of virtualization not supported for the creation of xen vm configuration file") @staticmethod def configureVmDisk(vm, path): if not path or not re.match(r'[\s]*\/\w+\/\w+\/.*', path,re.IGNORECASE): #For security, should never happen anyway raise Exception("Incorrect vm path") #Configure networking OfeliaDebianVMConfigurator._configureNetworking(vm,path) OfeliaDebianVMConfigurator.logger.info("Network configured successfully...") #Configure LDAP settings OfeliaDebianVMConfigurator._configureLDAPSettings(vm,path) OfeliaDebianVMConfigurator.logger.info("Authentication configured successfully...") #Configure Hostname OfeliaDebianVMConfigurator._configureHostName(vm,path) OfeliaDebianVMConfigurator.logger.info("Hostname configured successfully...") #Regenerate SSH keys OfeliaDebianVMConfigurator._configureSSHServer(vm,path) OfeliaDebianVMConfigurator.logger.info("SSH have been keys regenerated...")<|fim▁end|>
def _configureNetworking(vm,path): #Configure interfaces and udev settings try:
<|file_name|>prometheus.rs<|end_file_name|><|fim▁begin|>// Copyright (C) 2013-2020 Blockstack PBC, a public benefit corporation // Copyright (C) 2020 Stacks Open Internet Foundation // // This program is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program. If not, see <http://www.gnu.org/licenses/>. use prometheus::{ Gauge, Histogram, HistogramTimer, HistogramVec, IntCounter, IntCounterVec, IntGauge, }; lazy_static! { pub static ref RPC_CALL_COUNTER: IntCounter = register_int_counter!(opts!( "stacks_node_rpc_requests_total", "Total number of RPC requests made.", labels! 
{"handler" => "all",} )).unwrap(); pub static ref RPC_CALL_LATENCIES_HISTOGRAM: HistogramVec = register_histogram_vec!(histogram_opts!( "stacks_node_rpc_call_latencies_histogram", "Time (seconds) measuring RPC calls latency" // Will use DEFAULT_BUCKETS = [0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1.0, 2.5, 5.0, 10.0] by default ), &["path"]).unwrap(); pub static ref STX_BLOCKS_RECEIVED_COUNTER: IntCounter = register_int_counter!(opts!( "stacks_node_stx_blocks_received_total", "Total number of Stacks blocks received" )).unwrap(); pub static ref STX_MICRO_BLOCKS_RECEIVED_COUNTER: IntCounter = register_int_counter!(opts!( "stacks_node_stx_micro_blocks_received_total", "Total number of Stacks micro blocks received" )).unwrap(); pub static ref STX_BLOCKS_SERVED_COUNTER: IntCounter = register_int_counter!(opts!( "stacks_node_stx_blocks_served_total", "Total number of Stacks blocks served" )).unwrap(); pub static ref STX_MICRO_BLOCKS_SERVED_COUNTER: IntCounter = register_int_counter!(opts!( "stacks_node_stx_micro_blocks_served_total", "Total number of Stacks micro blocks served" )).unwrap(); pub static ref STX_CONFIRMED_MICRO_BLOCKS_SERVED_COUNTER: IntCounter = register_int_counter!(opts!( "stacks_node_stx_confirmed_micro_blocks_served_total", "Total number of Stacks blocks served" )).unwrap(); pub static ref TXS_RECEIVED_COUNTER: IntCounter = register_int_counter!(opts!(<|fim▁hole|> "stacks_node_transactions_received_total", "Total number of transactions received and relayed" )).unwrap(); pub static ref BTC_BLOCKS_RECEIVED_COUNTER: IntCounter = register_int_counter!(opts!( "stacks_node_btc_blocks_received_total", "Total number of blocks processed from the burnchain" )).unwrap(); pub static ref BTC_OPS_SENT_COUNTER: IntCounter = register_int_counter!(opts!( "stacks_node_btc_ops_sent_total", "Total number of ops (key registrations, block commits, user burn supports) submitted to the burnchain" )).unwrap(); pub static ref STX_BLOCKS_PROCESSED_COUNTER: IntCounter = 
register_int_counter!(opts!( "stacks_node_stx_blocks_processed_total", "Total number of stacks blocks processed" )).unwrap(); pub static ref STX_BLOCKS_MINED_COUNTER: IntCounter = register_int_counter!(opts!( "stacks_node_stx_blocks_mined_total", "Total number of stacks blocks mined by node" )).unwrap(); pub static ref WARNING_EMITTED_COUNTER: IntCounter = register_int_counter!(opts!( "stacks_node_warning_emitted_total", "Total number of warning logs emitted by node" )).unwrap(); pub static ref ERRORS_EMITTED_COUNTER: IntCounter = register_int_counter!(opts!( "stacks_node_errors_emitted_total", "Total number of error logs emitted by node" )).unwrap(); pub static ref ACTIVE_MINERS_COUNT_GAUGE: IntGauge = register_int_gauge!(opts!( "stacks_node_active_miners_total", "Total number of active miners" )).unwrap(); pub static ref STACKS_TIP_HEIGHT_GAUGE: IntGauge = register_int_gauge!(opts!( "stacks_node_stacks_tip_height", "Stacks chain tip height" )).unwrap(); pub static ref BURNCHAIN_HEIGHT_GAUGE: IntGauge = register_int_gauge!(opts!( "stacks_node_burn_block_height", "Burnchain tip height" )).unwrap(); pub static ref INBOUND_NEIGHBORS_GAUGE: IntGauge = register_int_gauge!(opts!( "stacks_node_neighbors_inbound", "Total count of current known inbound neighbors" )).unwrap(); pub static ref OUTBOUND_NEIGHBORS_GAUGE: IntGauge = register_int_gauge!(opts!( "stacks_node_neighbors_outbound", "Total count of current known outbound neighbors" )).unwrap(); pub static ref INBOUND_BANDWIDTH_GAUGE: IntGauge = register_int_gauge!(opts!( "stacks_node_bandwidth_inbound", "Total inbound bandwidth total in bytes" )).unwrap(); pub static ref OUTBOUND_BANDWIDTH_GAUGE: IntGauge = register_int_gauge!(opts!( "stacks_node_bandwidth_outbound", "Total outbound bandwidth total in bytes" )).unwrap(); pub static ref INBOUND_RPC_BANDWIDTH_GAUGE: IntGauge = register_int_gauge!(opts!( "stacks_node_rpc_bandwidth_inbound", "Total RPC inbound bandwidth in bytes" )).unwrap(); pub static ref 
OUTBOUND_RPC_BANDWIDTH_GAUGE: IntGauge = register_int_gauge!(opts!( "stacks_node_rpc_bandwidth_outbound", "Total RPC outbound bandwidth in bytes" )).unwrap(); pub static ref MSG_COUNTER_VEC: IntCounterVec = register_int_counter_vec!( "stacks_node_message_count", "Stacks message count by type of message", &["name"] ).unwrap(); pub static ref STX_MEMPOOL_GC: IntCounter = register_int_counter!(opts!( "stacks_node_mempool_gc_count", "Total count of all mempool garbage collections" )).unwrap(); pub static ref CONTRACT_CALLS_PROCESSED_COUNT: IntCounter = register_int_counter!(opts!( "stacks_contract_calls_processed", "Total count of processed contract calls" )).unwrap(); pub static ref MEMPOOL_OUTSTANDING_TXS: IntGauge = register_int_gauge!(opts!( "stacks_node_mempool_outstanding_txs", "Number of still-unprocessed transactions received by this node since it started", labels! {"handler" => "all",} )).unwrap(); pub static ref MEMPOOL_TX_CONFIRM_TIME: Histogram = register_histogram!(histogram_opts!( "stacks_node_mempool_tx_confirm_times", "Time (seconds) between when a tx was received by this node's mempool and when a tx was first processed in a block", vec![300.0, 600.0, 900.0, 1200.0, 1500.0, 1800.0, 2100.0, 2400.0, 2700.0, 3000.0, 3600.0, 4200.0, 4800.0, 6000.0], labels! 
{"handler".to_string() => "all".to_string(),} )).unwrap(); pub static ref COMPUTED_RELATIVE_MINER_SCORE: Gauge = register_gauge!(opts!( "stacks_node_computed_relative_miner_score", "Percentage of the u256 range that this miner is assigned in a particular round of sortition" )).unwrap(); pub static ref COMPUTED_MINER_COMMITMENT_HIGH: IntGauge = register_int_gauge!(opts!( "stacks_node_computed_miner_commitment_high", "High 64 bits of a miner's effective commitment (min of the miner's previous commitment and their median commitment)" )).unwrap(); pub static ref COMPUTED_MINER_COMMITMENT_LOW: IntGauge = register_int_gauge!(opts!( "stacks_node_computed_miner_commitment_low", "Low 64 bits of a miner's effective commitment (min of the miner's previous commitment and their median commitment)" )).unwrap(); pub static ref MINER_CURRENT_MEDIAN_COMMITMENT_HIGH: IntGauge = register_int_gauge!(opts!( "stacks_node_miner_current_median_commitment_high", "High 64 bits of a miner's median commitment over the mining commitment window." )).unwrap(); pub static ref MINER_CURRENT_MEDIAN_COMMITMENT_LOW: IntGauge = register_int_gauge!(opts!( "stacks_node_miner_current_median_commitment_low", "Low 64 bits of a miner's median commitment over the mining commitment window." )).unwrap(); } pub fn new_rpc_call_timer(path: &str) -> HistogramTimer { let histogram = RPC_CALL_LATENCIES_HISTOGRAM.with_label_values(&[path]); histogram.start_timer() }<|fim▁end|>
<|file_name|>item_knowbase.py<|end_file_name|><|fim▁begin|># Copyright 2017 Predict & Truly Systems All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from .glpi import GlpiService from .glpi_item import GlpiItem class KnowBase(GlpiItem): """ Object of KB """ def __init__(self, attributes={}): """ Construct an KB Item. """ GlpiItem.__init__(self, {}) defaults = { "knowbaseitemcategories_id": 0, "users_id": 2, "is_faq": 0, "view": 1 }<|fim▁hole|> class GlpiKnowBase(GlpiService): """ Client for GLPI Knowledge Base item """ def __init__(self, url, app_token, username, password): """ Construct an instance for Ticket item """ uri = '/Knowbaseitem' GlpiService.__init__(self, url, app_token, uri, username=username, password=password)<|fim▁end|>
self.set_attributes(attributes=attributes) self.set_attributes(attributes=defaults)
<|file_name|>voteActionType.ts<|end_file_name|><|fim▁begin|>export enum VoteActionType { <|fim▁hole|> ADD_VOTE_LIST = 'ADD_VOTE_LIST', CLEAR_ALL_DATA_VOTE = 'CLEAR_ALL_DATA_VOTE' }<|fim▁end|>
ADD_VOTE = 'ADD_VOTE', DELETE_VOTE = 'DELETE_VOTE',
<|file_name|>test_ins.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # This file is part of Androguard. # # Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr> # All rights reserved. # # Androguard is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Androguard is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License # along with Androguard. If not, see <http://www.gnu.org/licenses/>. import sys, re PATH_INSTALL = "./" sys.path.append(PATH_INSTALL) from androguard.core.androgen import AndroguardS from androguard.core.analysis import analysis TESTS_CASES = [ #'examples/android/TC/bin/classes.dex', 'examples/android/TestsAndroguard/bin/classes.dex', ] VALUES = { 'examples/android/TestsAndroguard/bin/classes.dex' : { "Ltests/androguard/TestInvoke; <init> ()V" : { 0x0 : ("invoke-direct" , [['v',1] , ['meth@', 4, 'Ljava/lang/Object;', '()', 'V', '<init>']]), 0xa : ("invoke-virtual", [['v',1], ['v',0] , ['meth@', 49, 'Ltests/androguard/TestInvoke;', '(I)', 'I', 'TestInvoke1']]), }, "Ltests/androguard/TestInvoke; TestInvoke1 (I)I" : { 0x4 : ("invoke-virtual", [['v',1] , ['v',2] , ['v',0] , ['meth@', 50,'Ltests/androguard/TestInvoke;' ,'(I I)', 'I', 'TestInvoke2']]), }, "Ltests/androguard/TestInvoke; TestInvoke2 (I I)I" : { 0x4 : ("invoke-virtual", [['v',1] , ['v',2] , ['v',3] , ['v',0] , ['meth@', 51, 'Ltests/androguard/TestInvoke;', '(I I I)', 'I', 'TestInvoke3']]), }, "Ltests/androguard/TestInvoke; TestInvoke3 (I I I)I" : { 0x4 : ("invoke-virtual", [['v', 1], ['v', 2], ['v', 3], ['v', 4], ['v', 0], ['meth@', 52, 
'Ltests/androguard/TestInvoke;', '(I I I I)', 'I', 'TestInvoke4']]), }, "Ltests/androguard/TestInvoke; TestInvoke4 (I I I I)I" : { 0xe : ("invoke-virtual/range", [['v', 0], ['v', 1], ['v', 2], ['v', 3], ['v', 4], ['v', 5], ['meth@', 53, 'Ltests/androguard/TestInvoke;', '(I I I I I)', 'I', 'TestInvoke5']]), }, "Ltests/androguard/TestInvoke; TestInvoke5 (I I I I I)I" : { 0x10 : ("invoke-virtual/range", [['v', 0], ['v', 1], ['v', 2], ['v', 3], ['v', 4], ['v', 5], ['v', 6], ['meth@', 54, 'Ltests/androguard/TestInvoke;', '(I I I I I I)', 'I', 'TestInvoke6']]), }, "Ltests/androguard/TestInvoke; TestInvoke6 (I I I I I I)I" : { 0x12 : ("invoke-virtual/range", [['v', 0], ['v', 1], ['v', 2], ['v', 3], ['v', 4], ['v', 5], ['v', 6], ['v', 7], ['meth@', 55, 'Ltests/androguard/TestInvoke;', '(I I I I I I I)', 'I', 'TestInvoke7']]), }, "Ltests/androguard/TestInvoke; TestInvoke7 (I I I I I I I)I" : { 0x16 : ("invoke-virtual/range", [['v', 0], ['v', 1], ['v', 2], ['v', 3], ['v', 4], ['v', 5], ['v', 6], ['v', 7], ['v', 8], ['meth@', 56, 'Ltests/androguard/TestInvoke;', '(I I I I I I I I)', 'I', 'TestInvoke8']]), },<|fim▁hole|> 0x0 : ("mul-int", [['v', 0], ['v', 2], ['v', 3]]), 0x4 : ("mul-int/2addr", [['v', 0], ['v', 4]]), 0x10 : ("return", [['v', 0]]), } }, } def test(got, expected): if got == expected: prefix = ' OK ' else: prefix = ' X ' print('\t%s got: %s expected: %s' % (prefix, repr(got), repr(expected))) def getVal(i): op = i.get_operands() if isinstance(op, int): return [ op ] elif i.get_name() == "lookupswitch": x = [] x.append( i.get_operands().default ) for idx in range(0, i.get_operands().npairs): off = getattr(i.get_operands(), "offset%d" % idx) x.append( off ) return x return [-1] def check(a, values): for method in a.get_methods(): key = method.get_class_name() + " " + method.get_name() + " " + method.get_descriptor() if key not in values: continue print("CHECKING ...", method.get_class_name(), method.get_name(), method.get_descriptor()) code = method.get_code() bc = 
code.get_bc() idx = 0 for i in bc.get(): # print "\t", "%x(%d)" % (idx, idx), i.get_name(), i.get_operands() if idx in values[key]: elem = values[key][idx] val1 = i.get_name() + "%s" % i.get_operands() val2 = elem[0] + "%s" % elem[1] test(val1, val2) del values[key][idx] idx += i.get_length() for i in TESTS_CASES: a = AndroguardS( i ) check( a, VALUES[i] ) x = analysis.VMAnalysis( a.get_vm() ) print(x)<|fim▁end|>
"Ltests/androguard/TestInvoke; TestInvoke8 (I I I I I I I I)I" : {
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// This file is part of rgtk. // // rgtk is free software: you can redistribute it and/or modify // it under the terms of the GNU Lesser General Public License as published by // the Free Software Foundation, either version 3 of the License, or<|fim▁hole|>// rgtk is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public License // along with rgtk. If not, see <http://www.gnu.org/licenses/>. pub use self::item::Item; pub use self::rectangle::Rectangle; pub use self::matrix::Matrix; pub use self::glyph_string::GlyphString; mod item; mod rectangle; mod matrix; mod glyph_string;<|fim▁end|>
// (at your option) any later version. //
<|file_name|>MissFailoverRoute.cpp<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ #include "MissFailoverRoute.h" #include <folly/dynamic.h> #include "mcrouter/lib/config/RouteHandleFactory.h" #include "mcrouter/routes/McRouteHandleBuilder.h" #include "mcrouter/routes/McrouterRouteHandle.h" namespace facebook { namespace memcache { namespace mcrouter { <|fim▁hole|> if (targets.empty()) { return makeNullRoute(); } if (targets.size() == 1) { return std::move(targets[0]); } return makeMcrouterRouteHandle<MissFailoverRoute>(std::move(targets)); } McrouterRouteHandlePtr makeMissFailoverRoute( RouteHandleFactory<McrouterRouteHandleIf>& factory, const folly::dynamic& json) { std::vector<McrouterRouteHandlePtr> children; if (json.isObject()) { if (auto jchildren = json.get_ptr("children")) { children = factory.createList(*jchildren); } } else { children = factory.createList(json); } return makeMissFailoverRoute(std::move(children)); } }}} // facebook::memcache::mcrouter<|fim▁end|>
McrouterRouteHandlePtr makeNullRoute(); McrouterRouteHandlePtr makeMissFailoverRoute( std::vector<McrouterRouteHandlePtr> targets) {
<|file_name|>entrypoint.go<|end_file_name|><|fim▁begin|>package tracing import ( "context" "net/http" "github.com/containous/alice" "github.com/containous/traefik/middlewares" "github.com/containous/traefik/tracing" "github.com/opentracing/opentracing-go" "github.com/opentracing/opentracing-go/ext" ) const ( entryPointTypeName = "TracingEntryPoint" ) // NewEntryPoint creates a new middleware that the incoming request. func NewEntryPoint(ctx context.Context, t *tracing.Tracing, entryPointName string, next http.Handler) http.Handler { middlewares.GetLogger(ctx, "tracing", entryPointTypeName).Debug("Creating middleware") return &entryPointMiddleware{ entryPoint: entryPointName, Tracing: t, next: next, } } type entryPointMiddleware struct { *tracing.Tracing entryPoint string next http.Handler } func (e *entryPointMiddleware) ServeHTTP(rw http.ResponseWriter, req *http.Request) { spanCtx, _ := e.Extract(opentracing.HTTPHeaders, tracing.HTTPHeadersCarrier(req.Header)) span, req, finish := e.StartSpanf(req, ext.SpanKindRPCServerEnum, "EntryPoint", []string{e.entryPoint, req.Host}, " ", ext.RPCServerOption(spanCtx)) defer finish() ext.Component.Set(span, e.ServiceName) tracing.LogRequest(span, req) req = req.WithContext(tracing.WithTracing(req.Context(), e.Tracing)) recorder := newStatusCodeRecoder(rw, http.StatusOK)<|fim▁hole|> // WrapEntryPointHandler Wraps tracing to alice.Constructor. func WrapEntryPointHandler(ctx context.Context, tracer *tracing.Tracing, entryPointName string) alice.Constructor { return func(next http.Handler) (http.Handler, error) { return NewEntryPoint(ctx, tracer, entryPointName, next), nil } }<|fim▁end|>
e.next.ServeHTTP(recorder, req) tracing.LogResponseCode(span, recorder.Status()) }
<|file_name|>TestSetEvent.java<|end_file_name|><|fim▁begin|>/* * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. */ /*<|fim▁hole|> * TestSetEvent.java * Copyright (C) 2002 Mark Hall * */ package weka.gui.beans; import java.util.EventObject; import weka.core.Instances; /** * Event encapsulating a test set * * @author <a href="mailto:[email protected]">Mark Hall</a> * @version $Revision: 1.2 $ */ public class TestSetEvent extends EventObject { /** * The test set instances */ protected Instances m_testSet; private boolean m_structureOnly; /** * what number is this test set (ie fold 2 of 10 folds) */ protected int m_setNumber; /** * Maximum number of sets (ie 10 in a 10 fold) */ protected int m_maxSetNumber; public TestSetEvent(Object source, Instances testSet) { super(source); m_testSet = testSet; if (m_testSet != null && m_testSet.numInstances() == 0) { m_structureOnly = true; } } /** * Get the test set instances * * @return an <code>Instances</code> value */ public Instances getTestSet() { return m_testSet; } /** * Get the test set number (eg. 
fold 2 of a 10 fold split) * * @return an <code>int</code> value */ public int getSetNumber() { return m_setNumber; } /** * Get the maximum set number * * @return an <code>int</code> value */ public int getMaxSetNumber() { return m_maxSetNumber; } /** * Returns true if the encapsulated instances * contain just header information * * @return true if only header information is * available in this DataSetEvent */ public boolean isStructureOnly() { return m_structureOnly; } }<|fim▁end|>
<|file_name|>wiredata.py<|end_file_name|><|fim▁begin|># Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license # Copyright (C) 2011,2017 Nominum, Inc. # # Permission to use, copy, modify, and distribute this software and its # documentation for any purpose with or without fee is hereby granted, # provided that the above copyright notice and this permission notice # appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT # OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. """DNS Wire Data Helper""" import dns.exception from ._compat import binary_type, string_types, PY2 # Figure out what constant python passes for an unspecified slice bound. # It's supposed to be sys.maxint, yet on 64-bit windows sys.maxint is 2^31 - 1 # but Python uses 2^63 - 1 as the constant. Rather than making pointless # extra comparisons, duplicating code, or weakening WireData, we just figure # out what constant Python will use. 
class _SliceUnspecifiedBound(binary_type): def __getitem__(self, key): return key.stop if PY2: def __getslice__(self, i, j): # pylint: disable=getslice-method return self.__getitem__(slice(i, j)) _unspecified_bound = _SliceUnspecifiedBound()[1:] class WireData(binary_type): # WireData is a binary type with stricter slicing def __getitem__(self, key): try: if isinstance(key, slice): # make sure we are not going outside of valid ranges, # do stricter control of boundaries than python does # by default start = key.start stop = key.stop if PY2: if stop == _unspecified_bound: # handle the case where the right bound is unspecified stop = len(self)<|fim▁hole|> # to make sure they're valid if start != stop: super(WireData, self).__getitem__(start) super(WireData, self).__getitem__(stop - 1) else: for index in (start, stop): if index is None: continue elif abs(index) > len(self): raise dns.exception.FormError return WireData(super(WireData, self).__getitem__( slice(start, stop))) return bytearray(self.unwrap())[key] except IndexError: raise dns.exception.FormError if PY2: def __getslice__(self, i, j): # pylint: disable=getslice-method return self.__getitem__(slice(i, j)) def __iter__(self): i = 0 while 1: try: yield self[i] i += 1 except dns.exception.FormError: raise StopIteration def unwrap(self): return binary_type(self) def maybe_wrap(wire): if isinstance(wire, WireData): return wire elif isinstance(wire, binary_type): return WireData(wire) elif isinstance(wire, string_types): return WireData(wire.encode()) raise ValueError("unhandled type %s" % type(wire))<|fim▁end|>
if start < 0 or stop < 0: raise dns.exception.FormError # If it's not an empty slice, access left and right bounds
<|file_name|>toLower.ts<|end_file_name|><|fim▁begin|>import { Injectable, Pipe } from '@angular/core'; /* Generated class for the ToLower pipe. See https://angular.io/docs/ts/latest/guide/pipes.html for more info on Angular 2 Pipes. */ @Pipe({ name: 'tolower' }) @Injectable() export class ToLower { /* Takes a value and makes it lowercase. */ transform(value: string, args: any[]) { value = value + ''; // make sure it's a string<|fim▁hole|> } }<|fim▁end|>
return value.toLowerCase();
<|file_name|>text_edit.rs<|end_file_name|><|fim▁begin|>use crate::{TextUnit, TextRange, TextBuf, Text, tu}; use std::cmp::Ordering; #[derive(Clone, Debug)] pub struct TextEdit { pub ops: Vec<TextEditOp>, } #[derive(Clone, Debug)] pub enum TextEditOp { Copy(TextRange), // TODO: check for disjoint ranges Insert(TextBuf), } impl TextEdit { pub fn apply(&self, text: Text) -> TextBuf { let mut result = String::new(); for s in self.ops.iter() { match *s { TextEditOp::Copy(range) => result += &text.slice(range).to_cow(), TextEditOp::Insert(ref i) => result += &i.as_text().to_cow(), } } result.into() } } pub struct TextEditBuilder { segments: Vec<TextEditOp>, last_offset: TextUnit, text_len: TextUnit, } impl TextEditBuilder { pub fn new(text: Text) -> TextEditBuilder { TextEditBuilder { segments: Vec::new(), last_offset: tu(0), text_len: text.len() } } pub fn build(mut self) -> TextEdit { let len = self.text_len; self.advance_to(len); TextEdit { ops: self.segments } } pub fn insert<T: Into<TextBuf>>(&mut self, offset: TextUnit, text: T) { self.advance_to(offset); self.insert_(text.into()); } pub fn delete(&mut self, range: TextRange) { self.advance_to(range.start()); self.delete_len(range.len()); } pub fn replace<T: Into<TextBuf>>(&mut self, range: TextRange, text: T) { self.advance_to(range.start()); self.insert_(text.into()); self.delete_len(range.len()); } fn advance_to(&mut self, offset: TextUnit) { match self.last_offset.cmp(&offset) { Ordering::Less => self.copy_up_to(offset), Ordering::Equal => (),<|fim▁hole|> fn copy_up_to(&mut self, offset: TextUnit) { let len = offset - self.last_offset; self.copy_len(len) } fn copy_len(&mut self, len: TextUnit) { let range = TextRange::from_len(self.last_offset, len); self.segments.push(TextEditOp::Copy(range)); self.last_offset += len } fn insert_(&mut self, text: TextBuf) { self.segments.push(TextEditOp::Insert(text)) } fn delete_len(&mut self, len: TextUnit) { self.last_offset += len } } #[cfg(test)] mod tests { use super::*; 
#[test] fn test_edits() { let text: TextBuf = "Hello, World!".into(); let edit = { let mut e = TextEditBuilder::new(text.as_text()); e.replace(TextRange::from_len(tu(0), tu(5)), "Goodbye"); e.insert(tu(7), "cruel "); e.delete(TextRange::from_len(tu(12), tu(1))); e.build() }; let new_text = edit.apply(text.as_text()); assert_eq!(new_text, "Goodbye, cruel World"); } }<|fim▁end|>
Ordering::Greater => panic!("Invalid edit"), } }
<|file_name|>modules.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ Created on Tue Mar 14 02:17:11 2017 <|fim▁hole|>@author: guida """ import json import requests def get_url(url): response = requests.get(url) content = response.content.decode("utf8") return content #Json parser def get_json_from_url(url): content = get_url(url) js = json.loads(content) return js<|fim▁end|>
<|file_name|>uint_impl.rs<|end_file_name|><|fim▁begin|>use parse::uint::*; use RoundingMode; impl ToU32 for bool { /// Parse [`bool`](https://doc.rust-lang.org/std/primitive.bool.html) to /// [`u32`](https://doc.rust-lang.org/std/primitive.u32.html) /// (see more: [`bool_to_u32_res`](../../parse/uint/fn.bool_to_u32_res.html)) /// /// # Examples /// /// ``` /// use rustils::parse::uint::ToU32; /// /// assert_eq!(true.to_u32_res(), Ok(1_u32)); /// assert_eq!(false.to_u32_res(), Ok(0_u32)); /// ``` fn to_u32_res(self) -> ParseResultU32 { bool_to_u32_res(self) } /// Parse [`bool`](https://doc.rust-lang.org/std/primitive.bool.html) to /// [`u32`](https://doc.rust-lang.org/std/primitive.u32.html) /// (see more: [`bool_to_u32`](../../parse/uint/fn.bool_to_u32.html)) /// /// # Examples /// /// ``` /// use rustils::parse::uint::ToU32; /// /// assert_eq!(true.to_u32(), 1_u32); /// assert_eq!(false.to_u32(), 0_u32); /// ``` fn to_u32(self) -> u32 { bool_to_u32(self) } } impl ToU32 for i8 { fn to_u32_res(self) -> ParseResultU32 { i8_to_u32_res(self) } fn to_u32(self) -> u32 { i8_to_u32(self) } } impl ToU32 for i16 { fn to_u32_res(self) -> ParseResultU32 { i16_to_u32_res(self) } fn to_u32(self) -> u32 { i16_to_u32(self) } } impl ToU32 for i32 { fn to_u32_res(self) -> ParseResultU32 { i32_to_u32_res(self) } fn to_u32(self) -> u32 { i32_to_u32(self) } } impl ToU32 for f32 { fn to_u32_res(self) -> ParseResultU32 { f32_to_u32_res(self) } fn to_u32(self) -> u32 { f32_to_u32(self) } } impl ToU32RM for f32 { fn to_u32_rm_res(self, rm: RoundingMode) -> ParseResultU32 { f32_to_u32_rm_res(self, rm) } fn to_u32_rm(self, rm: RoundingMode) -> u32 { f32_to_u32_rm(self, rm) } } impl ToU32 for i64 { fn to_u32_res(self) -> ParseResultU32 { i64_to_u32_res(self) } fn to_u32(self) -> u32 { i64_to_u32(self) } } impl ToU32 for u64 { fn to_u32_res(self) -> ParseResultU32 { u64_to_u32_res(self) }<|fim▁hole|> fn to_u32(self) -> u32 { u64_to_u32(self) } } impl ToU32 for f64 { fn to_u32_res(self) -> 
ParseResultU32 { f64_to_u32_res(self) } fn to_u32(self) -> u32 { f64_to_u32(self) } } impl ToU32RM for f64 { fn to_u32_rm_res(self, rm: RoundingMode) -> ParseResultU32 { f64_to_u32_rm_res(self, rm) } fn to_u32_rm(self, rm: RoundingMode) -> u32 { f64_to_u32_rm(self, rm) } } impl ToU32 for isize { fn to_u32_res(self) -> ParseResultU32 { isize_to_u32_res(self) } fn to_u32(self) -> u32 { isize_to_u32(self) } } impl ToU32 for usize { fn to_u32_res(self) -> ParseResultU32 { usize_to_u32_res(self) } fn to_u32(self) -> u32 { usize_to_u32(self) } } impl ToU32 for String { fn to_u32_res(self) -> ParseResultU32 { string_to_u32_res(self) } fn to_u32(self) -> u32 { string_to_u32(self) } } impl ToU32 for &'static str { fn to_u32_res(self) -> ParseResultU32 { str_to_u32_res(self) } fn to_u32(self) -> u32 { str_to_u32(self) } }<|fim▁end|>
<|file_name|>run_parallel_Heat_NumPy.py<|end_file_name|><|fim▁begin|>from mpi4py import MPI from pySDC.helpers.stats_helper import filter_stats, sort_stats from pySDC.implementations.controller_classes.controller_MPI import controller_MPI from pySDC.implementations.collocation_classes.gauss_radau_right import CollGaussRadau_Right from pySDC.implementations.problem_classes.HeatEquation_1D_FD import heat1d from pySDC.implementations.sweeper_classes.generic_LU import generic_implicit from pySDC.implementations.transfer_classes.TransferMesh import mesh_to_mesh def set_parameters_ml(): """ Helper routine to set parameters for the following multi-level runs Returns: dict: dictionary containing the simulation parameters dict: dictionary containing the controller parameters float: starting time float: end time """ # initialize level parameters level_params = dict() level_params['restol'] = 5E-10 level_params['dt'] = 0.125 # initialize sweeper parameters sweeper_params = dict() sweeper_params['collocation_class'] = CollGaussRadau_Right sweeper_params['QI'] = 'LU' sweeper_params['num_nodes'] = [3] # initialize problem parameters problem_params = dict() problem_params['nu'] = 0.1 # diffusion coefficient problem_params['freq'] = 2 # frequency for the test value problem_params['nvars'] = [63, 31] # number of degrees of freedom for each level # initialize step parameters step_params = dict() step_params['maxiter'] = 50 step_params['errtol'] = 1E-05 # initialize space transfer parameters space_transfer_params = dict() space_transfer_params['rorder'] = 2 space_transfer_params['iorder'] = 6 # initialize controller parameters controller_params = dict() controller_params['logger_level'] = 30 controller_params['all_to_done'] = True # can ask the controller to keep iterating all steps until the end controller_params['use_iteration_estimator'] = False # activate iteration estimator # fill description dictionary for easy step instantiation description = dict() 
description['problem_class'] = heat1d # pass problem class description['problem_params'] = problem_params # pass problem parameters<|fim▁hole|> description['step_params'] = step_params # pass step parameters description['space_transfer_class'] = mesh_to_mesh # pass spatial transfer class description['space_transfer_params'] = space_transfer_params # pass paramters for spatial transfer # set time parameters t0 = 0.0 Tend = 1.0 return description, controller_params, t0, Tend if __name__ == "__main__": """ A simple test program to do MPI-parallel PFASST runs """ # set MPI communicator comm = MPI.COMM_WORLD # get parameters from Part A description, controller_params, t0, Tend = set_parameters_ml() # instantiate controllers controller = controller_MPI(controller_params=controller_params, description=description, comm=comm) # get initial values on finest level P = controller.S.levels[0].prob uinit = P.u_exact(t0) # call main functions to get things done... uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend) # filter statistics by type (number of iterations) filtered_stats = filter_stats(stats, type='niter') # convert filtered statistics to list of iterations count, sorted by process iter_counts = sort_stats(filtered_stats, sortby='time') # combine statistics into list of statistics iter_counts_list = comm.gather(iter_counts, root=0) rank = comm.Get_rank() size = comm.Get_size() if rank == 0: out = 'Working with %2i processes...' % size print(out) # compute exact solutions and compare with both results uex = P.u_exact(Tend) err = abs(uex - uend) out = 'Error vs. exact solution: %12.8e' % err print(out) # build one list of statistics instead of list of lists, the sort by time iter_counts_gather = [item for sublist in iter_counts_list for item in sublist] iter_counts = sorted(iter_counts_gather, key=lambda tup: tup[0]) # compute and print statistics for item in iter_counts: out = 'Number of iterations for time %4.2f: %1i ' % (item[0], item[1]) print(out)<|fim▁end|>
description['sweeper_class'] = generic_implicit # pass sweeper description['sweeper_params'] = sweeper_params # pass sweeper parameters description['level_params'] = level_params # pass level parameters
<|file_name|>template_expansion.go<|end_file_name|><|fim▁begin|><|fim▁hole|>package internalversion import ( "github.com/openshift/origin/pkg/template/api" "k8s.io/apimachinery/pkg/api/errors" ) // TemplateListerExpansion allows custom methods to be added to // TemplateLister. type TemplateListerExpansion interface { GetByUID(uid string) (*api.Template, error) } // TemplateNamespaceListerExpansion allows custom methods to be added to // TemplateNamespaceLister. type TemplateNamespaceListerExpansion interface{} func (s templateLister) GetByUID(uid string) (*api.Template, error) { templates, err := s.indexer.ByIndex(api.TemplateUIDIndex, uid) if err != nil { return nil, err } if len(templates) == 0 { return nil, errors.NewNotFound(api.Resource("template"), uid) } return templates[0].(*api.Template), nil }<|fim▁end|>
<|file_name|>sync.rs<|end_file_name|><|fim▁begin|>//! `Await` implementations for _some_ `std::sync`` types use Await; use std::sync::{Mutex, MutexGuard, LockResult}; // Await `Mutex` impl<'a, T: ?Sized> Await<LockResult<MutexGuard<'a, T>>> for &'a Mutex<T> { fn await(self) -> LockResult<MutexGuard<'a, T>> { self.lock() } } use std::sync::{Barrier, BarrierWaitResult}; // Await `Mutex` impl<'a> Await<BarrierWaitResult> for &'a Barrier {<|fim▁hole|> fn await(self) -> BarrierWaitResult { self.wait() } } use std::sync::mpsc::{Receiver, RecvError}; // Await `Receiver` impl<'a, T> Await<Result<T, RecvError>> for &'a Receiver<T> { fn await(self) -> Result<T, RecvError> { self.recv() } }<|fim▁end|>
<|file_name|>dac.py<|end_file_name|><|fim▁begin|># j4cDAC test code # # Copyright 2011 Jacob Potter # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, version 3. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import socket import time import struct def pack_point(x, y, r, g, b, i = -1, u1 = 0, u2 = 0, flags = 0): """Pack some color values into a struct dac_point. Values must be specified for x, y, r, g, and b. If a value is not passed in for the other fields, i will default to max(r, g, b); the rest default to zero. """ if i < 0: i = max(r, g, b) return struct.pack("<HhhHHHHHH", flags, x, y, r, g, b, i, u1, u2) class ProtocolError(Exception): """Exception used when a protocol error is detected.""" pass class Status(object): """Represents a status response from the DAC.""" def __init__(self, data): """Initialize from a chunk of data.""" self.protocol_version, self.le_state, self.playback_state, \ self.source, self.le_flags, self.playback_flags, \ self.source_flags, self.fullness, self.point_rate, \ self.point_count = \ struct.unpack("<BBBBHHHHII", data) def dump(self, prefix = " - "): """Dump to a string.""" lines = [ "Light engine: state %d, flags 0x%x" % (self.le_state, self.le_flags), "Playback: state %d, flags 0x%x" % (self.playback_state, self.playback_flags), "Buffer: %d points" % (self.fullness, ), "Playback: %d kpps, %d points played" % (self.point_rate, self.point_count), "Source: %d, flags 0x%x" % (self.source, self.source_flags) ] for l in lines: print prefix + l class BroadcastPacket(object): 
"""Represents a broadcast packet from the DAC.""" def __init__(self, st): """Initialize from a chunk of data.""" self.mac = st[:6] self.hw_rev, self.sw_rev, self.buffer_capacity, \ self.max_point_rate = struct.unpack("<HHHI", st[6:16]) self.status = Status(st[16:36]) def dump(self, prefix = " - "): """Dump to a string.""" lines = [ "MAC: " + ":".join( "%02x" % (ord(o), ) for o in self.mac), "HW %d, SW %d" % (self.hw_rev, self.sw_rev), "Capabilities: max %d points, %d kpps" % (self.buffer_capacity, self.max_point_rate) ] for l in lines: print prefix + l #self.status.dump(prefix) class DAC(object): """A connection to a DAC.""" def read(self, l): """Read exactly length bytes from the connection.""" while l > len(self.buf): self.buf += self.conn.recv(4096) obuf = self.buf self.buf = obuf[l:] return obuf[:l] def readresp(self, cmd): """Read a response from the DAC.""" data = self.read(22) response = data[0] cmdR = data[1] status = Status(data[2:]) # status.dump() if cmdR != cmd: raise ProtocolError("expected resp for %r, got %r" % (cmd, cmdR)) if response != "a": raise ProtocolError("expected ACK, got %r" % (response, )) self.last_status = status return status <|fim▁hole|> """Connect to the DAC over TCP.""" conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM) conn.connect((host, port)) #print "Connected to %s:%s" % (host, port) self.conn = conn self.buf = "" # Read the "hello" message first_status = self.readresp("?") #first_status.dump() def begin(self, lwm, rate): cmd = struct.pack("<cHI", "b", lwm, rate) self.conn.sendall(cmd) return self.readresp("b") def update(self, lwm, rate): cmd = struct.pack("<cHI", "u", lwm, rate) self.conn.sendall(cmd) return self.readresp("u") def encode_point(self, point): try: return pack_point(*point) except Exception as e: ##print "Exception" #print point raise e def write(self, points): epoints = map(self.encode_point, points) cmd = struct.pack("<cH", "d", len(epoints)) self.conn.sendall(cmd + "".join(epoints)) return 
self.readresp("d") def prepare(self): self.conn.sendall("p") return self.readresp("p") def stop(self): self.conn.sendall("s") return self.readresp("s") def estop(self): self.conn.sendall("\xFF") return self.readresp("\xFF") def clear_estop(self): self.conn.sendall("c") return self.readresp("c") def ping(self): self.conn.sendall("?") return self.readresp("?") def play_stream(self, stream): # First, prepare the stream if self.last_status.playback_state == 2: raise Exception("already playing?!") elif self.last_status.playback_state == 0: self.prepare() started = 0 while True: # How much room? cap = 1799 - self.last_status.fullness points = stream.read(cap) if cap < 100: time.sleep(0.005) cap += 150 # print "Writing %d points" % (cap, ) t0 = time.time() self.write(points) t1 = time.time() # print "Took %f" % (t1 - t0, ) if not started: self.begin(0, 30000) started = 1 def find_dac(): """Listen for broadcast packets.""" s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s.bind(("0.0.0.0", 7654)) while True: data, addr = s.recvfrom(1024) bp = BroadcastPacket(data) #print "Packet from %s: " % (addr, ) #bp.dump() def find_first_dac(): s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s.bind(("0.0.0.0", 7654)) data, addr = s.recvfrom(1024) bp = BroadcastPacket(data) #print "Packet from %s: " % (addr, ) return addr[0]<|fim▁end|>
def __init__(self, host, port = 7765):
<|file_name|>templates.test.js<|end_file_name|><|fim▁begin|>/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ const _ = require("underscore") const path = require("path") const jade = require("jade") const fs = require("fs") const moment = require("moment") const Curation = require("../../../../../models/curation.coffee") const Article = require("../../../../../models/article.coffee") const render = function (templateName) { const filename = path.resolve( __dirname, `../../../components/venice_2017/templates/${templateName}.jade` ) return jade.compile(fs.readFileSync(filename), { filename }) } describe("Venice index", () => it("uses social metadata", function () { const curation = new Curation({ sections: [ { social_description: "Social Description", social_title: "Social Title", social_image: "files.artsy.net/img/social_image.jpg", seo_description: "Seo Description", }, ], sub_articles: [], }) const html = render("index")({ videoIndex: 0, curation, isSubscribed: false, sub_articles: [], videoGuide: new Article(), crop(url) { return url }, resize(url) { return url }, moment, sd: {}, markdown() {}, asset() {}, }) html.should.containEql( '<meta property="og:image" content="files.artsy.net/img/social_image.jpg">' ) html.should.containEql('<meta property="og:title" content="Social Title">') html.should.containEql( '<meta property="og:description" content="Social Description">' ) return html.should.containEql( '<meta name="description" content="Seo Description">' ) })) describe("Venice video_completed", () => it("passes section url to social mixin", function () { const html = render("video_completed")({ section: { social_title: "Social Title", slug: "ep-1", }, sd: { APP_URL: "http://localhost:5000" }, }) html.should.containEql( "https://www.facebook.com/sharer/sharer.php?u=http%3A%2F%2Flocalhost%3A5000%2Fvenice-biennale%2Fep-1" ) 
return html.should.containEql("Social Title") })) describe("Venice video_description", () => it("passes section url to social mixin", function () { const html = render("video_description")({ section: { social_title: "Social Title", slug: "ep-1", published: true, }, sd: { APP_URL: "http://localhost:5000" }, markdown() {}, }) html.should.containEql( "https://www.facebook.com/sharer/sharer.php?u=http%3A%2F%2Flocalhost%3A5000%2Fvenice-biennale%2Fep-1"<|fim▁hole|> ) return html.should.containEql("Social Title") }))<|fim▁end|>
<|file_name|>fixedcouponbond_project.py<|end_file_name|><|fim▁begin|>from datetime import date from openpyxl import load_workbook if __name__ == '__main__': wb = load_workbook('FixedCouponBond.xlsx') ws = wb.active # Take the input parameters today = ws['C2'].value.date() # OIS Data ois_startdate = today ois_maturities = [] ois_mktquotes = [] for cell in list(ws.iter_rows('B15:C44')): ois_maturities.append(cell[0].value) ois_mktquotes.append(cell[1].value) # Credit Curve Data ndps = [] ndpdates = [] for cell in list(ws.iter_rows('B6:C11')): ndpdates.append(cell[0].value.date()) ndps.append(cell[1].value) # Bond data nominals = [] start_dates = [] end_dates = [] cpn_frequency = [] coupons = [] recovery_rates = [] for cell in list(ws.iter_rows('E5:J19')): nominals.append(cell[0].value) start_dates.append(cell[1].value.date()) end_dates.append(cell[2].value.date()) cpn_frequency.append(cell[3].value) coupons.append(cell[4].value) recovery_rates.append(cell[5].value) # YOUR CODE HERE .... # In the coupon calculation use 30e360 convention to compute the accrual period (i.e. tau) # The result of your code must be a variables of type list named # output_npv. The length of this list has to be the equal to the number of bonds # i.e len(nominals) for example # END OF YOUR CODE # Write results # A variable named output_results of type list, with the same length of output_dates, is expected. # In case this is not present, a message is written if 'output_npv' not in locals(): output_npv = ["Not Successful" for x in range(len(nominals))] out_list = list(ws.iter_rows('K5:K19')) for i in range(len(output_npv)):<|fim▁hole|> # A new file with the results is created wb.save("FixedCouponBond_output.xlsx")<|fim▁end|>
out_list[i][0].value = output_npv[i]
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import functools from time import strftime import tensorflow as tf # lazy_property: no need for if $ not None logic def lazy_property(function): attribute = '_cache_' + function.__name__ @property @functools.wraps(function) def decorator(self): if not hasattr(self, attribute): setattr(self, attribute, function(self)) return getattr(self, attribute) return decorator def timestamp() -> str: return strftime('%Y%m%d-%H%M%S') # from https://gist.github.com/danijar/8663d3bbfd586bffecf6a0094cd116f2: def doublewrap(function): """ A decorator decorator, allowing to use the decorator to be used without parentheses if not arguments are provided. All arguments must be optional. """ @functools.wraps(function) def decorator(*args, **kwargs): if len(args) == 1 and len(kwargs) == 0 and callable(args[0]): return function(args[0]) else: return lambda wrapee: function(wrapee, *args, **kwargs) return decorator @doublewrap def define_scope(function, scope=None, *args, **kwargs): """ A decorator for functions that define TensorFlow operations. The wrapped function will only be executed once. Subsequent calls to it will directly return the result so that operations are added to the graph only once. The operations added by the function live within a tf.variable_scope(). If this decorator is used with arguments, they will be forwarded to the variable scope. The scope name defaults to the name of the wrapped function. """ attribute = '_cache_' + function.__name__ name = scope or function.__name__ @property @functools.wraps(function) def decorator(self): if not hasattr(self, attribute): with tf.variable_scope(name, *args, **kwargs): setattr(self, attribute, function(self)) return getattr(self, attribute) return decorator def unzip(iterable):<|fim▁hole|> return zip(*iterable) def single(list): first = list[0] assert (len(list) == 1) return first<|fim▁end|>
<|file_name|>0009_auto_20150821_0243.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('core', '0008_auto_20150819_0050'),<|fim▁hole|> ] operations = [ migrations.AlterUniqueTogether( name='test', unique_together=set([('owner', 'name')]), ), ]<|fim▁end|>
<|file_name|>Rule_PREVTAG.java<|end_file_name|><|fim▁begin|>/* * Copyright (c) 1995-2012, The University of Sheffield. See the file * COPYRIGHT.txt in the software or at http://gate.ac.uk/gate/COPYRIGHT.txt * * This file is part of GATE (see http://gate.ac.uk/), and is free * software, licenced under the GNU Library General Public License, * Version 2, June 1991 (in the distribution as file licence.html, * and also available at http://gate.ac.uk/gate/licence.html).<|fim▁hole|> * * $Id: Rule_PREVTAG.java 15333 2012-02-07 13:18:33Z ian_roberts $ */ package hepple.postag.rules; import hepple.postag.*; /** * Title: HepTag * Description: Mark Hepple's POS tagger * Copyright: Copyright (c) 2001 * Company: University of Sheffield * @author Mark Hepple * @version 1.0 */ public class Rule_PREVTAG extends Rule { public Rule_PREVTAG() { } public boolean checkContext(POSTagger tagger) { return (tagger.lexBuff[2][0].equals(context[0])); } }<|fim▁end|>
* * HepTag was originally written by Mark Hepple, this version contains * modifications by Valentin Tablan and Niraj Aswani.
<|file_name|>general_script_L1_PACS.py<|end_file_name|><|fim▁begin|>### This script fetches level-1 PACS imaging data, using a list generated by the ### archive (in the CSV format), attaches sky coordinates and masks to them ### (by calling the convertL1ToScanam task) and save them to disk in the correct ### format for later use by Scanamorphos. ### See important instructions below. ####################################################### ### This script is part of the Scanamorphos package. ### HCSS is free software: you can redistribute it and/or modify ### it under the terms of the GNU Lesser General Public License as ### published by the Free Software Foundation, either version 3 of ### the License, or (at your option) any later version. ####################################################### ## Import classes and definitions: import os from herschel.pacs.spg.phot import ConvertL1ToScanamTask ####################################################### ## local settings: dir_root = "/pcdisk/stark/aribas/Desktop/modeling_TDs/remaps_Cha/PACS/scanamorphos/"<|fim▁hole|> ### number of observations: n_obs = 2 ####################################################### ## Do a multiple target search in the archive and use the "save all results as CSV" option. ## --> ascii table 'results.csv' where lines can be edited ## (suppress unwanted observations and correct target names) ## Create the directories contained in the dir_out variables (l. 57) ## before running this script. 
####################################################### ## observations: table_obs = asciiTableReader(file=dir_root+'results_fast.csv', tableType='CSV', skipRows=1) list_obsids = table_obs[0].data list_names = table_obs[1].data for i_obs in range(n_obs): ## num_obsid = list_obsids[i_obs] source = list_names[i_obs] source = str.lower(str(source)) dir_out = path+source+"_processed_obsids" # create directory if it does not exist if not(os.path.exists(dir_out)): os.system('mkdir '+dir_out) ## print "" print "Downloading obsid " + `num_obsid` obs = getObservation(num_obsid, useHsa=True, instrument="PACS", verbose=True) ### frames = obs.level1.refs["HPPAVGR"].product.refs[0].product convertL1ToScanam(frames, cancelGlitch=1, assignRaDec=1, outDir=dir_out) ### frames = obs.level1.refs["HPPAVGB"].product.refs[0].product convertL1ToScanam(frames, cancelGlitch=1, assignRaDec=1, outDir=dir_out) ### END OF SCRIPT #######################################################<|fim▁end|>
path = dir_root +"L1/"
<|file_name|>JobExecutionState.java<|end_file_name|><|fim▁begin|>package com.swfarm.biz.chain.bo; import java.io.Serializable; import java.sql.Timestamp; import java.util.Random; import com.swfarm.pub.framework.FormNumberCache; public class JobExecutionState implements Serializable { private Long id; private String jobName; private String jobInstanceName; private String saleChannel; private String accountNumber; private Timestamp executionTime = new Timestamp(System.currentTimeMillis()); public Long getId() { return id; } public void setId(Long id) { this.id = id; } public String getJobName() { return jobName; } public void setJobName(String jobName) { this.jobName = jobName; } <|fim▁hole|> public String getJobInstanceName() { return jobInstanceName; } public void setJobInstanceName(String jobInstanceName) { this.jobInstanceName = jobInstanceName; } public String getSaleChannel() { return saleChannel; } public void setSaleChannel(String saleChannel) { this.saleChannel = saleChannel; } public String getAccountNumber() { return accountNumber; } public void setAccountNumber(String accountNumber) { this.accountNumber = accountNumber; } public Timestamp getExecutionTime() { return executionTime; } public void setExecutionTime(Timestamp executionTime) { this.executionTime = executionTime; } public String generateJobInstanceName() { StringBuffer jobInstanceNameBuffer = new StringBuffer(); jobInstanceNameBuffer.append(this.jobName); jobInstanceNameBuffer.append(System.currentTimeMillis()); Random random = new Random(); int i1 = FormNumberCache.getRandomInteger(1, 9, random); int i2 = FormNumberCache.getRandomInteger(1, 9, random); int i3 = FormNumberCache.getRandomInteger(1, 9, random); int i4 = FormNumberCache.getRandomInteger(1, 9, random); jobInstanceNameBuffer.append(i1); jobInstanceNameBuffer.append(i2); jobInstanceNameBuffer.append(i3); jobInstanceNameBuffer.append(i4); return jobInstanceNameBuffer.toString(); } public static void main(String[] args) { } }<|fim▁end|>
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. extern crate foo; fn main() { foo::foo();<|fim▁hole|><|fim▁end|>
}
<|file_name|>os.bot.hpp<|end_file_name|><|fim▁begin|>/** Copyright (c) 2017, Philip Deegan. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Philip Deegan nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ #ifndef _KUL_OS_WIN_OS_BOT_HPP_ #define _KUL_OS_WIN_OS_BOT_HPP_ void kul::Dir::rm() const { if (is()) { for (const auto &a : files()) a.rm(); for (const auto &a : dirs()) a.rm(); _rmdir(path().c_str()); } } std::string kul::Dir::ESC(std::string s) { ESC_REPLACE(s, "\\", "\\\\"); return s; } std::string kul::Dir::LOCL(std::string s) { kul::String::REPLACE_ALL(s, "/", "\\"); return s; } bool kul::Dir::is() const { if (path().empty()) return false; DWORD ftyp = GetFileAttributesA(path().c_str()); return (ftyp != INVALID_FILE_ATTRIBUTES && ftyp & FILE_ATTRIBUTE_DIRECTORY); } bool kul::Dir::mk() const { if (path().empty()) return false;<|fim▁hole|>} bool kul::Dir::root() const { return is() && real().size() == 3; } bool kul::File::is() const { return !name().empty() && (bool)std::ifstream(_d.join(_n).c_str()); } bool kul::File::rm() const { if (is()) { _unlink(_d.join(_n).c_str()); return true; } return false; } bool kul::File::mk() const { FILE *pFile; fopen_s(&pFile, full().c_str(), "w"); if (pFile != NULL) { fclose(pFile); } return pFile != NULL; } uint64_t kul::File::size() const { uint64_t r = 0; WIN32_FIND_DATA ffd; HANDLE h = FindFirstFile(TEXT(mini().c_str()), &ffd); if (h) { ULARGE_INTEGER ul; ul.HighPart = ffd.nFileSizeHigh; ul.LowPart = ffd.nFileSizeLow; r = ul.QuadPart; FindClose(h); } return r; } namespace kul { namespace os { inline uint16_t exec(std::string const &cmd, bool q = false) { if (q) { return system(std::string(cmd + " > nul").c_str()); } return system(cmd.c_str()); } inline std::string EOL() { #if (_MSC_VER >= 1800) return "\n"; #else return "\r\n"; #endif // _MSC_VER } } // namespace os namespace user { inline kul::Dir home() { const std::string h(env::GET("HOME")); if (h.size()) return kul::Dir(h); return kul::Dir(std::string(env::GET("HOMEDRIVE")) + std::string(env::GET("HOMEPATH"))); } inline kul::Dir home(std::string const &app) { return kul::Dir(home().join(app)); } } // namespace user namespace env { inline bool CWD(kul::Dir const &d) 
{ return _chdir(d.path().c_str()) != -1; } } // namespace env } // namespace kul #ifndef _KUL_COMPILED_LIB_ #include "kul/os/win/src/os/dir/dirs.ipp" #include "kul/os/win/src/os/dir/files.ipp" #include "kul/os/win/src/os/dir/real.ipp" #endif //_KUL_COMPILED_LIB_ #endif /* _KUL_OS_WIN_OS_BOT_HPP_ */<|fim▁end|>
kul::Dir const &prnt(parent()); if (_p != prnt.path() && !prnt.is()) parent().mk(); return CreateDirectory(locl().c_str(), NULL);
<|file_name|>validated.rs<|end_file_name|><|fim▁begin|>//! Module for holding Validated logic //!<|fim▁hole|>//! an `HList` of all your results. //! //! # Examples //! //! ``` //! #[macro_use] //! extern crate frunk; //! //! # fn main() { //! use frunk::Validated; //! use frunk::prelude::*; // for .into_validated() //! //! #[derive(PartialEq, Eq, Debug)] //! struct Person { //! age: i32, //! name: String, //! } //! //! fn get_name() -> Result<String, String> { //! Result::Ok("James".to_owned()) //! } //! //! fn get_age() -> Result<i32, String> { //! Result::Ok(32) //! } //! //! let v: Validated<HList!(String, i32), String> = get_name().into_validated() + get_age(); //! let person = v.into_result() //! .map(|hlist_pat!(name, age)| { //! Person { //! name: name, //! age: age, //! } //! }); //! //! assert_eq!(person.unwrap(), //! Person { //! name: "James".to_owned(), //! age: 32, //! }); //! # } //! ``` use super::hlist::*; use std::ops::Add; /// A Validated is either an Ok holding an HList or an Err, holding a vector /// of collected errors. #[derive(PartialEq, Debug, Eq, Clone, PartialOrd, Ord, Hash)] pub enum Validated<T, E> where T: HList, { Ok(T), Err(Vec<E>), } impl<T, E> Validated<T, E> where T: HList, { /// Returns true if this validation is Ok, false otherwise /// /// # Examples /// /// ``` /// use frunk::Validated; /// use frunk::prelude::*; /// /// let r1: Result<String, String> = Result::Ok(String::from("hello")); /// let v = r1.into_validated(); /// assert!(v.is_ok()); /// ``` pub fn is_ok(&self) -> bool { matches!(*self, Validated::Ok(_)) } /// Returns true if this validation is Err, false otherwise /// /// # Examples /// /// ``` /// use frunk::prelude::*; /// /// let r1: Result<String, i32> = Result::Err(32); /// let v = r1.into_validated(); /// assert!(v.is_err()); /// ``` pub fn is_err(&self) -> bool { !self.is_ok() } /// Turns this Validated into a Result. 
/// /// If this Validated is Ok, it will become a Result::Ok, holding an HList of all the accumulated /// results. Otherwise, it will become a Result::Err with a list of all accumulated errors. /// /// # Examples /// /// ``` /// #[macro_use] extern crate frunk; /// /// use frunk::Validated; /// use frunk::prelude::*; /// /// # fn main() { /// #[derive(PartialEq, Eq, Debug)] /// struct Person { /// age: i32, /// name: String, /// } /// /// fn get_name() -> Result<String, String> { /// Result::Ok("James".to_owned()) /// } /// /// fn get_age() -> Result<i32, String> { /// Result::Ok(32) /// } /// /// let v = get_name().into_validated() + get_age(); /// let person = v.into_result() /// .map(|hlist_pat!(name, age)| { /// Person { /// name: name, /// age: age, /// } /// }); /// /// assert_eq!(person.unwrap(), /// Person { /// name: "James".to_owned(), /// age: 32, /// }); /// # } pub fn into_result(self) -> Result<T, Vec<E>> { match self { Validated::Ok(h) => Result::Ok(h), Validated::Err(errors) => Result::Err(errors), } } } /// Trait for "lifting" a given type into a Validated pub trait IntoValidated<T, E> { /// Consumes the current Result into a Validated so that we can begin chaining /// /// # Examples /// /// ``` /// use frunk::prelude::*; // IntoValidated is in the prelude /// /// let r1: Result<String, i32> = Result::Err(32); /// let v = r1.into_validated(); /// assert!(v.is_err()); /// ``` fn into_validated(self) -> Validated<HCons<T, HNil>, E>; } impl<T, E> IntoValidated<T, E> for Result<T, E> { fn into_validated(self) -> Validated<HCons<T, HNil>, E> { match self { Result::Err(e) => Validated::Err(vec![e]), Result::Ok(v) => Validated::Ok(HCons { head: v, tail: HNil, }), } } } /// Implements Add for the current Validated with a Result, returning a new Validated. 
/// /// # Examples /// /// ``` /// # #[macro_use] extern crate frunk; /// # fn main() { /// use frunk::Validated; /// use frunk::prelude::*; /// /// let r1: Result<String, String> = Result::Ok(String::from("hello")); /// let r2: Result<i32, String> = Result::Ok(1); /// let v = r1.into_validated() + r2; /// assert_eq!(v, Validated::Ok(hlist!(String::from("hello"), 1))) /// # } /// ``` /// impl<T, E, T2> Add<Result<T2, E>> for Validated<T, E> where T: HList + Add<HCons<T2, HNil>>, <T as Add<HCons<T2, HNil>>>::Output: HList, { type Output = Validated<<T as Add<HCons<T2, HNil>>>::Output, E>; fn add(self, other: Result<T2, E>) -> Self::Output { let other_as_validated = other.into_validated(); self + other_as_validated } } /// Implements Add for the current Validated with another Validated, returning a new Validated. /// /// # Examples /// /// ``` /// # #[macro_use] extern crate frunk; /// # fn main() { /// use frunk::Validated; /// use frunk::prelude::*; /// /// let r1: Result<String, String> = Result::Ok(String::from("hello")); /// let r2: Result<i32, String> = Result::Ok(1); /// let v1 = r1.into_validated(); /// let v2 = r2.into_validated(); /// let v3 = v1 + v2; /// assert_eq!(v3, Validated::Ok(hlist!(String::from("hello"), 1))) /// # } /// ``` impl<T, E, T2> Add<Validated<T2, E>> for Validated<T, E> where T: HList + Add<T2>, T2: HList, <T as Add<T2>>::Output: HList, { type Output = Validated<<T as Add<T2>>::Output, E>; fn add(self, other: Validated<T2, E>) -> Self::Output { match (self, other) { (Validated::Err(mut errs), Validated::Err(errs2)) => { errs.extend(errs2); Validated::Err(errs) } (Validated::Err(errs), _) => Validated::Err(errs), (_, Validated::Err(errs)) => Validated::Err(errs), (Validated::Ok(h1), Validated::Ok(h2)) => Validated::Ok(h1 + h2), } } } #[cfg(test)] mod tests { use super::*; #[test] fn test_adding_ok_results() { let r1: Result<String, String> = Result::Ok(String::from("hello")); let r2: Result<i32, String> = Result::Ok(1); let v = 
r1.into_validated() + r2; assert_eq!(v, Validated::Ok(hlist!(String::from("hello"), 1))) } #[test] fn test_adding_validated_oks() { let r1: Result<String, String> = Result::Ok(String::from("hello")); let r2: Result<i32, String> = Result::Ok(1); let r3: Result<i32, String> = Result::Ok(3); let v1 = r1.into_validated(); let v2 = r2.into_validated(); let v3 = r3.into_validated(); let comb = v1 + v2 + v3; assert_eq!(comb, Validated::Ok(hlist!(String::from("hello"), 1, 3))) } #[test] fn test_adding_err_results() { let r1: Result<i16, String> = Result::Ok(1); let r2: Result<i16, String> = Result::Err(String::from("NO!")); let v1 = r1.into_validated() + r2; assert!(v1.is_err()); assert_eq!(v1, Validated::Err(vec!["NO!".to_owned()])) } #[derive(PartialEq, Eq, Debug)] struct Person { age: i32, name: String, email: String, } #[derive(PartialEq, Eq, Debug)] pub enum YahNah { Yah, Nah, } /// Our Errors #[derive(PartialEq, Eq, Debug)] pub enum Nope { NameNope, AgeNope, EmailNope, } fn get_name(yah_nah: YahNah) -> Result<String, Nope> { match yah_nah { YahNah::Yah => Result::Ok("James".to_owned()), _ => Result::Err(Nope::NameNope), } } fn get_age(yah_nah: YahNah) -> Result<i32, Nope> { match yah_nah { YahNah::Yah => Result::Ok(32), _ => Result::Err(Nope::AgeNope), } } fn get_email(yah_nah: YahNah) -> Result<String, Nope> { match yah_nah { YahNah::Yah => Result::Ok("[email protected]".to_owned()), _ => Result::Err(Nope::EmailNope), } } #[test] fn test_to_result_ok() { let v = get_name(YahNah::Yah).into_validated() + get_age(YahNah::Yah) + get_email(YahNah::Yah); let person = v.into_result().map(|hlist_pat!(name, age, email)| Person { name: name, age: age, email: email, }); assert_eq!( person.unwrap(), Person { name: "James".to_owned(), age: 32, email: "[email protected]".to_owned(), } ); } #[test] fn test_to_result_all_faulty() { let v = get_name(YahNah::Nah).into_validated() + get_age(YahNah::Nah) + get_email(YahNah::Nah); let person = v.into_result().map(|_| unimplemented!()); 
assert_eq!( person.unwrap_err(), vec![Nope::NameNope, Nope::AgeNope, Nope::EmailNope] ); } #[test] fn test_to_result_one_faulty() { let v = get_name(YahNah::Nah).into_validated() + get_age(YahNah::Yah) + get_email(YahNah::Nah); let person = v.into_result().map(|_| unimplemented!()); assert_eq!(person.unwrap_err(), vec![Nope::NameNope, Nope::EmailNope]); } }<|fim▁end|>
//! `Validated` is a way of running a bunch of operations that can go wrong (for example, //! functions returning `Result<T, E>`) and, in the case of one or more things going wrong, //! having all the errors returned to you all at once. In the case that everything went well, you get
<|file_name|>dialogs.hpp<|end_file_name|><|fim▁begin|>class Global_var_WeaponGUI { idd=-1; movingenable=false; class controls { class Global_var_WeaponGUI_Frame: RscFrame { idc = -1; x = 0.365937 * safezoneW + safezoneX; y = 0.379 * safezoneH + safezoneY; w = 0.170156 * safezoneW; h = 0.143 * safezoneH; }; class Global_var_WeaponGUI_Background: Box { idc = -1; x = 0.365937 * safezoneW + safezoneX; y = 0.379 * safezoneH + safezoneY; w = 0.170156 * safezoneW; h = 0.143 * safezoneH; };<|fim▁hole|> x = 0.365937 * safezoneW + safezoneX; y = 0.39 * safezoneH + safezoneY; w = 0.0773437 * safezoneW; h = 0.022 * safezoneH; }; class Global_var_WeaponGUI_Combo: RscCombo { idc = 2100; x = 0.371094 * safezoneW + safezoneX; y = 0.445 * safezoneH + safezoneY; w = 0.159844 * safezoneW; h = 0.022 * safezoneH; }; class Global_var_WeaponGUI_Button_OK: RscButton { idc = 2101; text = "OK"; //--- ToDo: Localize; x = 0.371094 * safezoneW + safezoneX; y = 0.489 * safezoneH + safezoneY; w = 0.0773437 * safezoneW; h = 0.022 * safezoneH; action = "(lbText [2100, lbCurSel 2100]) execVM ""Ammo\Dialog.sqf"";closeDialog 1;"; }; class Global_var_WeaponGUI_Button_Cancel: RscButton { idc = 2102; text = "Abbruch"; //--- ToDo: Localize; x = 0.453594 * safezoneW + safezoneX; y = 0.489 * safezoneH + safezoneY; w = 0.0773437 * safezoneW; h = 0.022 * safezoneH; action = "closeDialog 2;"; }; }; };<|fim▁end|>
class Global_var_WeaponGUI_Text: RscText { idc = -1; text = "Waffenauswahl"; //--- ToDo: Localize;
<|file_name|>job_delete.spec.js<|end_file_name|><|fim▁begin|>var expect = require('expect.js'); var Promise = require('bluebird'); var ZugZug = require('../lib/zugzug'); var Job = require('../lib/job'); describe('job.delete():Promise(self)', function() { var zz, job; beforeEach(function() { zz = new ZugZug(); job = new Job(zz, 'default'); }); afterEach(function(done) { Promise.promisify(zz._client.flushall, zz._client)() .then(zz.quit.bind(zz)) .done(function() {done();}); }); it('returns a promise', function(done) { job.save() .then(function() { var p = job.delete(); expect(p).to.be.a(Promise); return p.thenReturn(); }) .done(done); }); it('resolves to the instance', function(done) { job.save() .then(function() { return job.delete(); }) .then(function(res) { expect(res).to.equal(job); }) .done(done); }); it('deletes the job from the database', function(done) { var id; job.save() .then(function() { expect(job.id).to.equal('1'); id = job.id; return job.delete(); }) .then(function() { return zz.getJob(id); }) .then(function(res) { expect(res).to.equal(null); }) .done(done); }); it('resets the job\'s state', function(done) { job.save() .then(function() { return job.delete(); }) .then(function() { expect(job.id).to.be(undefined); expect(job.created).to.be(undefined); expect(job.updated).to.be(undefined); expect(job.state).to.be(undefined); }) .done(done); }); it('deletes the job\'s log', function(done) { var id; job.save() .then(function() { expect(job.id).to.equal('1'); id = job.id; return job.delete(); }) .then(function() { var m = zz._client.multi() .llen('zugzug:job:' + id + ':log'); return Promise.promisify(m.exec, m)(); }) .spread(function(newlen) { expect(newlen).to.equal(0); }) .done(done);<|fim▁hole|><|fim▁end|>
}); });
<|file_name|>hello_world.py<|end_file_name|><|fim▁begin|><|fim▁hole|>import keras<|fim▁end|>
<|file_name|>B_Frame.py<|end_file_name|><|fim▁begin|>from DemoFramework import DemoFramework from LUIVerticalLayout import LUIVerticalLayout from LUIFrame import LUIFrame from LUILabel import LUILabel from LUIButton import LUIButton from LUIObject import LUIObject import random f = DemoFramework() f.prepare_demo("LUIFrame") # Constructor f.add_constructor_parameter("width", "200") f.add_constructor_parameter("height", "200") f.add_constructor_parameter("innerPadding", "5") f.add_constructor_parameter("scrollable", "False") f.add_constructor_parameter("style", "UIFrame.Raised") <|fim▁hole|># Events f.construct_sourcecode("LUIFrame") # Construct a new frame frame = LUIFrame(parent=f.get_widget_node()) layout = LUIVerticalLayout(parent=frame, spacing=5) layout.add(LUILabel(text="This is some frame ..", color=(0.2, 0.6, 1.0, 1.0), font_size=20)) layout.add(LUILabel(text="It can contain arbitrary elements.")) layout.add(LUILabel(text="For example this button:")) layout.add(LUIButton(text="Fancy button")) # frame.fit_to_children() f.set_actions({ "Resize to 300x160": lambda: frame.set_size(300, 160), "Fit to children": lambda: frame.clear_size(), }) run()<|fim▁end|>
# Functions
<|file_name|>unified-builder.ts<|end_file_name|><|fim▁begin|>import { Cache } from '../cache/cache'; import { BoundedCache } from '../cache/bounded'; import { KeyType } from '../cache/key-type'; import { BoundlessCache } from '../cache/boundless'; import { Weigher } from '../cache/weigher'; import { DefaultLoadingCache } from '../cache/loading'; import { ExpirationCache } from '../cache/expiration'; import { MetricsCache } from '../cache/metrics/index'; import { RemovalListener } from '../cache/removal-listener'; import { Loader } from '../cache/loading/loader'; import { MaxAgeDecider } from '../cache/expiration/max-age-decider'; import { LoadingCache } from '../cache/loading/loading-cache'; import { Expirable } from '../cache/expiration/expirable'; export interface CacheBuilder<K extends KeyType, V> { /** * Set a listener that will be called every time something is removed * from the cache. */ withRemovalListener(listener: RemovalListener<K, V>): this; /** * Set the maximum number of items to keep in the cache before evicting * something. */ maxSize(size: number): this; /** * Set a function to use to determine the size of a cached object. */ withWeigher(weigher: Weigher<K, V>): this; /** * Change to a cache where get can also resolve values if provided with * a function as the second argument. */ loading(): LoadingCacheBuilder<K, V>; /** * Change to a loading cache, where the get-method will return instances * of Promise and automatically load unknown values. */ withLoader(loader: Loader<K, V>): LoadingCacheBuilder<K, V>; /** * Set that the cache should expire items some time after they have been * written to the cache. */ expireAfterWrite(time: number | MaxAgeDecider<K, V>): this; /** * Set that the cache should expire items some time after they have been * read from the cache. */ expireAfterRead(time: number | MaxAgeDecider<K, V>): this; /** * Activate tracking of metrics for this cache. */ metrics(): this; /** * Build the cache. 
*/ build(): Cache<K, V>; } export interface LoadingCacheBuilder<K extends KeyType, V> extends CacheBuilder<K, V> { /** * Build the cache. */ build(): LoadingCache<K, V>; } /** * Builder for cache instances.<|fim▁hole|> private optRemovalListener?: RemovalListener<K, V>; private optMaxSize?: number; private optWeigher?: Weigher<K, V>; private optMaxWriteAge?: MaxAgeDecider<K, V>; private optMaxNoReadAge?: MaxAgeDecider<K, V>; private optMetrics: boolean = false; /** * Set a listener that will be called every time something is removed * from the cache. */ public withRemovalListener(listener: RemovalListener<K, V>) { this.optRemovalListener = listener; return this; } /** * Set the maximum number of items to keep in the cache before evicting * something. */ public maxSize(size: number) { this.optMaxSize = size; return this; } /** * Set a function to use to determine the size of a cached object. */ public withWeigher(weigher: Weigher<K, V>) { if(typeof weigher !== 'function') { throw new Error('Weigher should be a function that takes a key and value and returns a number'); } this.optWeigher = weigher; return this; } /** * Change to a cache where get can also resolve values if provided with * a function as the second argument. */ public loading(): LoadingCacheBuilder<K, V> { return new LoadingCacheBuilderImpl(this, null); } /** * Change to a loading cache, where the get-method will return instances * of Promise and automatically load unknown values. */ public withLoader(loader: Loader<K, V>): LoadingCacheBuilder<K, V> { if(typeof loader !== 'function') { throw new Error('Loader should be a function that takes a key and returns a value or a promise that resolves to a value'); } return new LoadingCacheBuilderImpl(this, loader); } /** * Set that the cache should expire items after some time. 
*/ public expireAfterWrite(time: number | MaxAgeDecider<K, V>) { let evaluator; if(typeof time === 'function') { evaluator = time; } else if(typeof time === 'number') { evaluator = () => time; } else { throw new Error('expireAfterWrite needs either a maximum age as a number or a function that returns a number'); } this.optMaxWriteAge = evaluator; return this; } /** * Set that the cache should expire items some time after they have been read. */ public expireAfterRead(time: number | MaxAgeDecider<K, V>): this { let evaluator; if(typeof time === 'function') { evaluator = time; } else if(typeof time === 'number') { evaluator = () => time; } else { throw new Error('expireAfterRead needs either a maximum age as a number or a function that returns a number'); } this.optMaxNoReadAge = evaluator; return this; } /** * Activate tracking of metrics for this cache. */ public metrics(): this { this.optMetrics = true; return this; } /** * Build and return the cache. */ public build() { let cache: Cache<K, V>; if(typeof this.optMaxWriteAge !== 'undefined' || typeof this.optMaxNoReadAge !== 'undefined') { /* * Requested expiration - wrap the base cache a bit as it needs * custom types, a custom weigher if used and removal listeners * are added on the expiration cache instead. 
*/ let parentCache: Cache<K, Expirable<V>>; if(this.optMaxSize) { parentCache = new BoundedCache({ maxSize: this.optMaxSize, weigher: createExpirableWeigher(this.optWeigher) }); } else { parentCache = new BoundlessCache({}); } cache = new ExpirationCache({ maxNoReadAge: this.optMaxNoReadAge, maxWriteAge: this.optMaxWriteAge, removalListener: this.optRemovalListener, parent: parentCache }); } else { if(this.optMaxSize) { cache = new BoundedCache({ maxSize: this.optMaxSize, weigher: this.optWeigher, removalListener: this.optRemovalListener }); } else { cache = new BoundlessCache({ removalListener: this.optRemovalListener }); } } if(this.optMetrics) { // Collect metrics if requested cache = new MetricsCache({ parent: cache }); } return cache; } } class LoadingCacheBuilderImpl<K extends KeyType, V> implements LoadingCacheBuilder<K, V> { private parent: CacheBuilder<K, V>; private loader: Loader<K, V> | null; constructor(parent: CacheBuilder<K, V>, loader: Loader<K, V> | null) { this.parent = parent; this.loader = loader; } public withRemovalListener(listener: RemovalListener<K, V>): this { this.parent.withRemovalListener(listener); return this; } public maxSize(size: number): this { this.parent.maxSize(size); return this; } public withWeigher(weigher: Weigher<K, V>): this { this.parent.withWeigher(weigher); return this; } public loading(): LoadingCacheBuilder<K, V> { throw new Error('Already building a loading cache'); } public withLoader(loader: Loader<K, V>): LoadingCacheBuilder<K, V> { throw new Error('Already building a loading cache'); } public expireAfterWrite(time: number | MaxAgeDecider<K, V>): this { this.parent.expireAfterWrite(time); return this; } public expireAfterRead(time: number | MaxAgeDecider<K, V>): this { this.parent.expireAfterRead(time); return this; } public metrics(): this { this.parent.metrics(); return this; } public build(): LoadingCache<K, V> { return new DefaultLoadingCache({ loader: this.loader, parent: this.parent.build() }); } } function 
createExpirableWeigher<K extends KeyType, V>(w: Weigher<K, V> | undefined): Weigher<K, Expirable<V>> | null { if(! w) return null; return (key, node) => w(key, node.value as V); }<|fim▁end|>
*/ export class CacheBuilderImpl<K extends KeyType, V> implements CacheBuilder<K, V> {
<|file_name|>adjlist.hpp<|end_file_name|><|fim▁begin|>/* COPYRIGHT (c) 2014 Umut Acar, Arthur Chargueraud, and Michael * Rainey * All rights reserved. * * \file adjlist.hpp * \brief Adjacency-list graph format * */ #ifndef _PASL_GRAPH_ADJLIST_H_ #define _PASL_GRAPH_ADJLIST_H_ #include "../../graph/include/graph.hpp" /***********************************************************************/ namespace pasl { namespace graph { /*---------------------------------------------------------------------*/ /* Symmetric vertex */ template <class Vertex_id_bag> class symmetric_vertex { public: typedef Vertex_id_bag vtxid_bag_type; typedef typename vtxid_bag_type::value_type vtxid_type; symmetric_vertex() { } symmetric_vertex(vtxid_bag_type neighbors) : neighbors(neighbors) { } vtxid_bag_type neighbors; vtxid_type get_in_neighbor(vtxid_type j) const { return neighbors[j]; } vtxid_type get_out_neighbor(vtxid_type j) const { return neighbors[j]; } vtxid_type* get_in_neighbors() const {<|fim▁hole|> return neighbors.data(); } void set_in_neighbor(vtxid_type j, vtxid_type nbr) { neighbors[j] = nbr; } void set_out_neighbor(vtxid_type j, vtxid_type nbr) { neighbors[j] = nbr; } vtxid_type get_in_degree() const { return vtxid_type(neighbors.size()); } vtxid_type get_out_degree() const { return vtxid_type(neighbors.size()); } void set_in_degree(vtxid_type j) { neighbors.alloc(j); } // todo: use neighbors.resize() void set_out_degree(vtxid_type j) { neighbors.alloc(j); } void swap_in_neighbors(vtxid_bag_type& other) { neighbors.swap(other); } void swap_out_neighbors(vtxid_bag_type& other) { neighbors.swap(other); } void check(vtxid_type nb_vertices) const { #ifndef NDEBUG for (vtxid_type i = 0; i < neighbors.size(); i++) check_vertex(neighbors[i], nb_vertices); #endif } }; /*---------------------------------------------------------------------*/ /* Asymmetric vertex */ template <class Vertex_id_bag> class asymmetric_vertex { public: typedef Vertex_id_bag vtxid_bag_type; typedef typename 
vtxid_bag_type::value_type vtxid_type; vtxid_bag_type in_neighbors; vtxid_bag_type out_neighbors; vtxid_type get_in_neighbor(vtxid_type j) const { return in_neighbors[j]; } vtxid_type get_out_neighbor(vtxid_type j) const { return out_neighbors[j]; } vtxid_type* get_in_neighbors() const { return in_neighbors.data(); } vtxid_type* get_out_neighbors() const { return out_neighbors.data(); } void set_in_neighbor(vtxid_type j, vtxid_type nbr) { in_neighbors[j] = nbr; } void set_out_neighbor(vtxid_type j, vtxid_type nbr) { out_neighbors[j] = nbr; } vtxid_type get_in_degree() const { return vtxid_type(in_neighbors.size()); } vtxid_type get_out_degree() const { return vtxid_type(out_neighbors.size()); } void set_in_degree(vtxid_type j) { in_neighbors.alloc(j); } void set_out_degree(vtxid_type j) { out_neighbors.alloc(j); } void swap_in_neighbors(vtxid_bag_type& other) { in_neighbors.swap(other); } void swap_out_neighbors(vtxid_bag_type& other) { out_neighbors.swap(other); } void check(vtxid_type nb_vertices) const { for (vtxid_type i = 0; i < in_neighbors.size(); i++) check_vertex(in_neighbors[i], nb_vertices); for (vtxid_type i = 0; i < out_neighbors.size(); i++) check_vertex(out_neighbors[i], nb_vertices); } }; /*---------------------------------------------------------------------*/ /* Adjacency-list format */ template <class Adjlist_seq> class adjlist { public: typedef Adjlist_seq adjlist_seq_type; typedef typename adjlist_seq_type::value_type vertex_type; typedef typename vertex_type::vtxid_bag_type::value_type vtxid_type; typedef typename adjlist_seq_type::alias_type adjlist_seq_alias_type; typedef adjlist<adjlist_seq_alias_type> alias_type; edgeid_type nb_edges; adjlist_seq_type adjlists; adjlist() : nb_edges(0) { } adjlist(edgeid_type nb_edges) : nb_edges(nb_edges) { } vtxid_type get_nb_vertices() const { return vtxid_type(adjlists.size()); } void check() const { #ifndef NDEBUG for (vtxid_type i = 0; i < adjlists.size(); i++) adjlists[i].check(get_nb_vertices()); 
size_t m = 0; for (vtxid_type i = 0; i < adjlists.size(); i++) m += adjlists[i].get_in_degree(); assert(m == nb_edges); m = 0; for (vtxid_type i = 0; i < adjlists.size(); i++) m += adjlists[i].get_out_degree(); assert(m == nb_edges); #endif } }; /*---------------------------------------------------------------------*/ /* Equality operators */ template <class Vertex_id_bag> bool operator==(const symmetric_vertex<Vertex_id_bag>& v1, const symmetric_vertex<Vertex_id_bag>& v2) { using vtxid_type = typename symmetric_vertex<Vertex_id_bag>::vtxid_type; if (v1.get_out_degree() != v2.get_out_degree()) return false; for (vtxid_type i = 0; i < v1.get_out_degree(); i++) if (v1.get_out_neighbor(i) != v2.get_out_neighbor(i)) return false; return true; } template <class Vertex_id_bag> bool operator!=(const symmetric_vertex<Vertex_id_bag>& v1, const symmetric_vertex<Vertex_id_bag>& v2) { return ! (v1 == v2); } template <class Adjlist_seq> bool operator==(const adjlist<Adjlist_seq>& g1, const adjlist<Adjlist_seq>& g2) { using vtxid_type = typename adjlist<Adjlist_seq>::vtxid_type; if (g1.get_nb_vertices() != g2.get_nb_vertices()) return false; if (g1.nb_edges != g2.nb_edges) return false; for (vtxid_type i = 0; i < g1.get_nb_vertices(); i++) if (g1.adjlists[i] != g2.adjlists[i]) return false; return true; } template <class Adjlist_seq> bool operator!=(const adjlist<Adjlist_seq>& g1, const adjlist<Adjlist_seq>& g2) { return ! 
(g1 == g2); } /*---------------------------------------------------------------------*/ /* Flat adjacency-list format */ template <class Vertex_id, bool Is_alias = false> class flat_adjlist_seq { public: typedef flat_adjlist_seq<Vertex_id> self_type; typedef Vertex_id vtxid_type; typedef size_t size_type; typedef data::pointer_seq<vtxid_type> vertex_seq_type; typedef symmetric_vertex<vertex_seq_type> value_type; typedef flat_adjlist_seq<vtxid_type, true> alias_type; char* underlying_array; vtxid_type* offsets; vtxid_type nb_offsets; vtxid_type* edges; flat_adjlist_seq() : underlying_array(NULL), offsets(NULL), nb_offsets(0), edges(NULL) { } flat_adjlist_seq(const flat_adjlist_seq& other) { if (Is_alias) { underlying_array = other.underlying_array; offsets = other.offsets; nb_offsets = other.nb_offsets; edges = other.edges; } else { util::atomic::die("todo"); } } //! \todo instead of using Is_alias, pass either ptr_seq or array_seq as underlying_array ~flat_adjlist_seq() { if (! Is_alias) clear(); } void get_alias(alias_type& alias) const { alias.underlying_array = NULL; alias.offsets = offsets; alias.nb_offsets = nb_offsets; alias.edges = edges; } alias_type get_alias() const { alias_type alias; alias.underlying_array = NULL; alias.offsets = offsets; alias.nb_offsets = nb_offsets; alias.edges = edges; return alias; } void clear() { if (underlying_array != NULL) data::myfree(underlying_array); offsets = NULL; edges = NULL; } vtxid_type degree(vtxid_type v) const { assert(v >= 0); assert(v < size()); return offsets[v + 1] - offsets[v]; } value_type operator[](vtxid_type ix) const { assert(ix >= 0); assert(ix < size()); return value_type(vertex_seq_type(&edges[offsets[ix]], degree(ix))); } vtxid_type size() const { return nb_offsets - 1; } void swap(self_type& other) { std::swap(underlying_array, other.underlying_array); std::swap(offsets, other.offsets); std::swap(nb_offsets, other.nb_offsets); std::swap(edges, other.edges); } void alloc(size_type) { 
util::atomic::die("unsupported"); } void init(char* bytes, vtxid_type nb_vertices, edgeid_type nb_edges) { nb_offsets = nb_vertices + 1; underlying_array = bytes; offsets = (vtxid_type*)bytes; edges = &offsets[nb_offsets]; } value_type* data() { util::atomic::die("unsupported"); return NULL; } }; template <class Vertex_id, bool Is_alias = false> using flat_adjlist = adjlist<flat_adjlist_seq<Vertex_id, Is_alias>>; template <class Vertex_id> using flat_adjlist_alias = flat_adjlist<Vertex_id, true>; } // end namespace } // end namespace /***********************************************************************/ #endif /*! _PASL_GRAPH_ADJLIST_H_ */<|fim▁end|>
return neighbors.data(); } vtxid_type* get_out_neighbors() const {
<|file_name|>EditorToolbarAction.java<|end_file_name|><|fim▁begin|>package de.uni.bremen.stummk.psp.calculation; import java.lang.reflect.InvocationTargetException; import java.util.List; import org.eclipse.core.resources.IFile; import org.eclipse.core.resources.IProject; import org.eclipse.core.resources.ResourcesPlugin; import org.eclipse.core.runtime.CoreException; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.jface.action.Action; import org.eclipse.jface.dialogs.ProgressMonitorDialog; import org.eclipse.jface.operation.IRunnableWithProgress; import org.eclipse.swt.widgets.Shell; import org.eclipse.ui.PlatformUI; import org.eclipse.ui.actions.ActionFactory.IWorkbenchAction; import de.uni.bremen.stummk.psp.control.BarChart; import de.uni.bremen.stummk.psp.control.LineChart; import de.uni.bremen.stummk.psp.control.PieChart; import de.uni.bremen.stummk.psp.data.PSPProject; import de.uni.bremen.stummk.psp.data.ScheduleEntry; import de.uni.bremen.stummk.psp.utility.CheckOperation; import de.uni.bremen.stummk.psp.utility.Constants; import de.uni.bremen.stummk.psp.utility.DataIO; import de.uni.bremen.stummk.psp.utility.FileHash; /** * Class represents an action of the toolbar in the editor * * @author Konstantin * */ public class EditorToolbarAction extends Action implements IWorkbenchAction { private EditorToolbarController etc; /** * Constructor * * @param id the Id of the Action * @param editorToolbarController the {@link EditorToolbarController} of the * {@link EditorToolbarAction} */ public EditorToolbarAction(String id, EditorToolbarController editorToolbarController) { setId(id); this.etc = editorToolbarController; } @Override public void run() { handleAction(getId()); } private void handleAction(String id) { // execute action depending on id switch (id) { case Constants.COMMAND_SYNC: exportData(); break; case Constants.COMMAND_PLAN_ACTUAL_DIAGRAM: new BarChart(etc.getProjectPlanSummary(), "Plan vs. 
Actual Values - " + etc.getProjectPlanSummary().getProject().getProjectName()); break; case Constants.COMMAND_TIME_IN_PHASE_PERCENTAGE: new PieChart(etc.getProjectPlanSummary(), Constants.KEY_TIME_IN_PHASE_IDX, "Distribution of time in phase - " + etc.getProjectPlanSummary().getProject().getProjectName()); break; case Constants.COMMAND_DEFECT_INJECTED_PERCENTAGE: new PieChart(etc.getProjectPlanSummary(), Constants.KEY_DEFECTS_INJECTED_IDX, "Distribution of injected defects - " + etc.getProjectPlanSummary().getProject().getProjectName()); break; case Constants.COMMAND_DEFECT_REMOVED_PERCENTAGE: new PieChart(etc.getProjectPlanSummary(), Constants.KEY_DEFECTS_REMOVED_IDX, "Distribution of removed defects - " + etc.getProjectPlanSummary().getProject().getProjectName()); break; case Constants.COMMAND_TIME_TRACKING: List<ScheduleEntry> entries = Manager.getInstance().getSchedulePlanning(etc.getProjectPlanSummary().getProject().getProjectName()); new LineChart("Time Progress in Project - " + etc.getProjectPlanSummary().getProject().getProjectName(), Constants.CHART_TIME, entries); break; case Constants.COMMAND_EARNED_VALUE_TRACKING:<|fim▁hole|> new LineChart("Earned Value Tracking in Project - " + etc.getProjectPlanSummary().getProject().getProjectName(), Constants.CHART_VALUE, e); break; } } private void exportData() { // exports data to psp-file and create hash try { Shell activeShell = PlatformUI.getWorkbench().getActiveWorkbenchWindow().getShell(); IRunnableWithProgress op = new IRunnableWithProgress() { @Override public void run(IProgressMonitor monitor) throws InvocationTargetException, InterruptedException { try { monitor.beginTask("Export data to psp.csv file", 2); PSPProject psp = Manager.getInstance().loadBackupProject(etc.getProjectPlanSummary().getProject().getProjectName()); if (psp != null && psp.getSummary() != null) { DataIO.saveToFile(etc.getProjectPlanSummary().getProject().getProjectName(), psp, null); } monitor.worked(1); if (psp != null && 
psp.getSummary() != null) { IProject[] projects = ResourcesPlugin.getWorkspace().getRoot().getProjects(); for (IProject project : projects) { if (project.getName().equals(etc.getProjectPlanSummary().getProject().getProjectName())) { IFile file = CheckOperation.getProjectFile(project); String hash = FileHash.hash(file); try { file.setPersistentProperty(Constants.PROPERTY_HASH, hash); } catch (CoreException e) { e.printStackTrace(); } } } } monitor.worked(1); } finally { monitor.done(); } } }; new ProgressMonitorDialog(activeShell).run(true, true, op); } catch (InvocationTargetException | InterruptedException e) { e.printStackTrace(); } } @Override public void dispose() {} }<|fim▁end|>
List<ScheduleEntry> e = Manager.getInstance().getSchedulePlanning(etc.getProjectPlanSummary().getProject().getProjectName());
<|file_name|>objectClassificationDataExportGui.py<|end_file_name|><|fim▁begin|>############################################################################### # ilastik: interactive learning and segmentation toolkit # # Copyright (C) 2011-2014, the ilastik developers # <[email protected]> # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # In addition, as a special exception, the copyright holders of # ilastik give you permission to combine ilastik with applets, # workflows and plugins which are not covered under the GNU # General Public License. # # See the LICENSE file for details. License information is also available # on the ilastik web site at: # http://ilastik.org/license.html ############################################################################### from PyQt4.QtCore import Qt from PyQt4.QtGui import QColor from volumina.api import LazyflowSource, ColortableLayer, AlphaModulatedLayer from ilastik.applets.dataExport.dataExportGui import DataExportGui, DataExportLayerViewerGui from lazyflow.operators import OpMultiArraySlicer2 from ilastik.utility.exportingOperator import ExportingGui class ObjectClassificationDataExportGui( DataExportGui, ExportingGui ): """ A subclass of the generic data export gui that creates custom layer viewers. 
""" def __init__(self, *args, **kwargs): super(ObjectClassificationDataExportGui, self).__init__(*args, **kwargs) self._exporting_operator = None def set_exporting_operator(self, op): self._exporting_operator = op def get_exporting_operator(self, lane=0): return self._exporting_operator.getLane(lane) def createLayerViewer(self, opLane): return ObjectClassificationResultsViewer(self.parentApplet, opLane) def get_export_dialog_title(self): return "Export Object Information" @property def gui_applet(self): return self.parentApplet def get_raw_shape(self): return self.get_exporting_operator().RawImages.meta.shape def get_feature_names(self): return self.get_exporting_operator().ComputedFeatureNames([]).wait() def _initAppletDrawerUic(self): super(ObjectClassificationDataExportGui, self)._initAppletDrawerUic() from PyQt4.QtGui import QGroupBox, QPushButton, QVBoxLayout group = QGroupBox("Export Object Feature Table", self.drawer) group.setLayout(QVBoxLayout()) self.drawer.layout().addWidget(group) btn = QPushButton("Configure and export", group) btn.clicked.connect(self.show_export_dialog) group.layout().addWidget(btn) def _createDefault16ColorColorTable(): colors = [] # Transparent for the zero label colors.append(QColor(0,0,0,0)) # ilastik v0.5 colors colors.append( QColor( Qt.red ) ) colors.append( QColor( Qt.green ) ) colors.append( QColor( Qt.yellow ) ) colors.append( QColor( Qt.blue ) ) colors.append( QColor( Qt.magenta ) ) colors.append( QColor( Qt.darkYellow ) ) colors.append( QColor( Qt.lightGray ) ) # Additional colors colors.append( QColor(255, 105, 180) ) #hot pink colors.append( QColor(102, 205, 170) ) #dark aquamarine colors.append( QColor(165, 42, 42) ) #brown colors.append( QColor(0, 0, 128) ) #navy colors.append( QColor(255, 165, 0) ) #orange colors.append( QColor(173, 255, 47) ) #green-yellow<|fim▁hole|># colors.append( QColor(192, 192, 192) ) #silver # colors.append( QColor(69, 69, 69) ) # dark grey # colors.append( QColor( Qt.cyan ) ) assert 
len(colors) == 16 return [c.rgba() for c in colors] class ObjectClassificationResultsViewer(DataExportLayerViewerGui): _colorTable16 = _createDefault16ColorColorTable() def setupLayers(self): layers = [] opLane = self.topLevelOperatorView selection_names = opLane.SelectionNames.value selection = selection_names[ opLane.InputSelection.value ] # This code depends on a specific order for the export slots. # If those change, update this function! assert selection in ['Object Predictions', 'Object Probabilities', 'Pixel Probabilities'] if selection == "Object Predictions": fromDiskSlot = self.topLevelOperatorView.ImageOnDisk if fromDiskSlot.ready(): exportLayer = ColortableLayer( LazyflowSource(fromDiskSlot), colorTable=self._colorTable16 ) exportLayer.name = "Prediction - Exported" exportLayer.visible = True layers.append(exportLayer) previewSlot = self.topLevelOperatorView.ImageToExport if previewSlot.ready(): previewLayer = ColortableLayer( LazyflowSource(previewSlot), colorTable=self._colorTable16 ) previewLayer.name = "Prediction - Preview" previewLayer.visible = False layers.append(previewLayer) elif selection == "Object Probabilities": exportedLayers = self._initPredictionLayers(opLane.ImageOnDisk) for layer in exportedLayers: layer.visible = True layer.name = layer.name + "- Exported" layers += exportedLayers previewLayers = self._initPredictionLayers(opLane.ImageToExport) for layer in previewLayers: layer.visible = False layer.name = layer.name + "- Preview" layers += previewLayers elif selection == 'Pixel Probabilities': exportedLayers = self._initPredictionLayers(opLane.ImageOnDisk) for layer in exportedLayers: layer.visible = True layer.name = layer.name + "- Exported" layers += exportedLayers previewLayers = self._initPredictionLayers(opLane.ImageToExport) for layer in previewLayers: layer.visible = False layer.name = layer.name + "- Preview" layers += previewLayers else: assert False, "Unknown selection." 
rawSlot = self.topLevelOperatorView.RawData if rawSlot.ready(): rawLayer = self.createStandardLayerFromSlot(rawSlot) rawLayer.name = "Raw Data" rawLayer.opacity = 1.0 layers.append(rawLayer) return layers def _initPredictionLayers(self, predictionSlot): layers = [] opLane = self.topLevelOperatorView # Use a slicer to provide a separate slot for each channel layer opSlicer = OpMultiArraySlicer2( parent=opLane.viewed_operator().parent ) opSlicer.Input.connect( predictionSlot ) opSlicer.AxisFlag.setValue('c') for channel, channelSlot in enumerate(opSlicer.Slices): if channelSlot.ready(): drange = channelSlot.meta.drange or (0.0, 1.0) predictsrc = LazyflowSource(channelSlot) predictLayer = AlphaModulatedLayer( predictsrc, tintColor=QColor.fromRgba(self._colorTable16[channel+1]), # FIXME: This is weird. Why are range and normalize both set to the same thing? range=drange, normalize=drange ) predictLayer.opacity = 1.0 predictLayer.visible = True predictLayer.name = "Probability Channel #{}".format( channel+1 ) layers.append(predictLayer) return layers<|fim▁end|>
colors.append( QColor(128,0, 128) ) #purple colors.append( QColor(240, 230, 140) ) #khaki
<|file_name|>index.js<|end_file_name|><|fim▁begin|>console.log('----加载开始----'); <|fim▁hole|><|fim▁end|>
module.exports = (num) => { return num + 1; } console.log('----加载结束----');
<|file_name|>consensus_tree.py<|end_file_name|><|fim▁begin|>from flask import session, Blueprint from lexos.managers import session_manager from lexos.helpers import constants from lexos.models.consensus_tree_model import BCTModel from lexos.views.base import render<|fim▁hole|> consensus_tree_blueprint = Blueprint("consensus-tree", __name__) @consensus_tree_blueprint.route("/consensus-tree", methods=["GET"]) def consensus_tree() -> str: """Gets the consensus tree page. :return: The consensus tree page. """ # Set the default options if "analyoption" not in session: session["analyoption"] = constants.DEFAULT_ANALYZE_OPTIONS if "bctoption" not in session: session["bctoption"] = constants.DEFAULT_BCT_OPTIONS # Return the consensus tree page return render("consensus-tree.html") @consensus_tree_blueprint.route("/consensus-tree/graph", methods=["POST"]) def graph() -> str: """Gets the consensus tree graph. :return: The consensus tree graph. """ # Cache the options session_manager.cache_bct_option() session_manager.cache_analysis_option() # Return the bootstrap consensus tree return BCTModel().get_bootstrap_consensus_tree_plot_decoded()<|fim▁end|>
<|file_name|>profiles.go<|end_file_name|><|fim▁begin|>package trafficmanager // Copyright (c) Microsoft and contributors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // // See the License for the specific language governing permissions and // limitations under the License. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is regenerated. import ( "context" "github.com/Azure/go-autorest/autorest" "github.com/Azure/go-autorest/autorest/azure" "net/http" ) // ProfilesClient is the client for the Profiles methods of the Trafficmanager service. type ProfilesClient struct { BaseClient } // NewProfilesClient creates an instance of the ProfilesClient client. func NewProfilesClient(subscriptionID string) ProfilesClient { return NewProfilesClientWithBaseURI(DefaultBaseURI, subscriptionID) } // NewProfilesClientWithBaseURI creates an instance of the ProfilesClient client. func NewProfilesClientWithBaseURI(baseURI string, subscriptionID string) ProfilesClient { return ProfilesClient{NewWithBaseURI(baseURI, subscriptionID)} } // CheckTrafficManagerRelativeDNSNameAvailability checks the availability of a Traffic Manager Relative DNS name. // // parameters is the Traffic Manager name parameters supplied to the CheckTrafficManagerNameAvailability operation. 
func (client ProfilesClient) CheckTrafficManagerRelativeDNSNameAvailability(ctx context.Context, parameters CheckTrafficManagerRelativeDNSNameAvailabilityParameters) (result NameAvailability, err error) { req, err := client.CheckTrafficManagerRelativeDNSNameAvailabilityPreparer(ctx, parameters) if err != nil { err = autorest.NewErrorWithError(err, "trafficmanager.ProfilesClient", "CheckTrafficManagerRelativeDNSNameAvailability", nil, "Failure preparing request") return } resp, err := client.CheckTrafficManagerRelativeDNSNameAvailabilitySender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "trafficmanager.ProfilesClient", "CheckTrafficManagerRelativeDNSNameAvailability", resp, "Failure sending request") return } result, err = client.CheckTrafficManagerRelativeDNSNameAvailabilityResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "trafficmanager.ProfilesClient", "CheckTrafficManagerRelativeDNSNameAvailability", resp, "Failure responding to request") } return } // CheckTrafficManagerRelativeDNSNameAvailabilityPreparer prepares the CheckTrafficManagerRelativeDNSNameAvailability request. func (client ProfilesClient) CheckTrafficManagerRelativeDNSNameAvailabilityPreparer(ctx context.Context, parameters CheckTrafficManagerRelativeDNSNameAvailabilityParameters) (*http.Request, error) { const APIVersion = "2017-05-01" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsJSON(), autorest.AsPost(), autorest.WithBaseURL(client.BaseURI), autorest.WithPath("/providers/Microsoft.Network/checkTrafficManagerNameAvailability"), autorest.WithJSON(parameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // CheckTrafficManagerRelativeDNSNameAvailabilitySender sends the CheckTrafficManagerRelativeDNSNameAvailability request. 
The method will close the // http.Response Body if it receives an error. func (client ProfilesClient) CheckTrafficManagerRelativeDNSNameAvailabilitySender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) } // CheckTrafficManagerRelativeDNSNameAvailabilityResponder handles the response to the CheckTrafficManagerRelativeDNSNameAvailability request. The method always // closes the http.Response Body. func (client ProfilesClient) CheckTrafficManagerRelativeDNSNameAvailabilityResponder(resp *http.Response) (result NameAvailability, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // CreateOrUpdate create or update a Traffic Manager profile. // // resourceGroupName is the name of the resource group containing the Traffic Manager profile. profileName is the name // of the Traffic Manager profile. parameters is the Traffic Manager profile parameters supplied to the CreateOrUpdate // operation. 
func (client ProfilesClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, profileName string, parameters Profile) (result Profile, err error) { req, err := client.CreateOrUpdatePreparer(ctx, resourceGroupName, profileName, parameters) if err != nil { err = autorest.NewErrorWithError(err, "trafficmanager.ProfilesClient", "CreateOrUpdate", nil, "Failure preparing request") return } resp, err := client.CreateOrUpdateSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "trafficmanager.ProfilesClient", "CreateOrUpdate", resp, "Failure sending request") return } result, err = client.CreateOrUpdateResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "trafficmanager.ProfilesClient", "CreateOrUpdate", resp, "Failure responding to request") } return } // CreateOrUpdatePreparer prepares the CreateOrUpdate request. func (client ProfilesClient) CreateOrUpdatePreparer(ctx context.Context, resourceGroupName string, profileName string, parameters Profile) (*http.Request, error) { pathParameters := map[string]interface{}{ "profileName": autorest.Encode("path", profileName), "resourceGroupName": autorest.Encode("path", resourceGroupName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), } const APIVersion = "2017-05-01" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsJSON(), autorest.AsPut(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/trafficmanagerprofiles/{profileName}", pathParameters), autorest.WithJSON(parameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the // http.Response Body if it receives an error. 
func (client ProfilesClient) CreateOrUpdateSender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req, azure.DoRetryWithRegistration(client.Client)) } // CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always // closes the http.Response Body. func (client ProfilesClient) CreateOrUpdateResponder(resp *http.Response) (result Profile, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // Delete deletes a Traffic Manager profile. // // resourceGroupName is the name of the resource group containing the Traffic Manager profile to be deleted. // profileName is the name of the Traffic Manager profile to be deleted. func (client ProfilesClient) Delete(ctx context.Context, resourceGroupName string, profileName string) (result DeleteOperationResult, err error) { req, err := client.DeletePreparer(ctx, resourceGroupName, profileName) if err != nil { err = autorest.NewErrorWithError(err, "trafficmanager.ProfilesClient", "Delete", nil, "Failure preparing request") return } resp, err := client.DeleteSender(req) if err != nil {<|fim▁hole|> } result, err = client.DeleteResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "trafficmanager.ProfilesClient", "Delete", resp, "Failure responding to request") } return } // DeletePreparer prepares the Delete request. 
func (client ProfilesClient) DeletePreparer(ctx context.Context, resourceGroupName string, profileName string) (*http.Request, error) { pathParameters := map[string]interface{}{ "profileName": autorest.Encode("path", profileName), "resourceGroupName": autorest.Encode("path", resourceGroupName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), } const APIVersion = "2017-05-01" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsDelete(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/trafficmanagerprofiles/{profileName}", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // DeleteSender sends the Delete request. The method will close the // http.Response Body if it receives an error. func (client ProfilesClient) DeleteSender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req, azure.DoRetryWithRegistration(client.Client)) } // DeleteResponder handles the response to the Delete request. The method always // closes the http.Response Body. func (client ProfilesClient) DeleteResponder(resp *http.Response) (result DeleteOperationResult, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNoContent), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // Get gets a Traffic Manager profile. // // resourceGroupName is the name of the resource group containing the Traffic Manager profile. profileName is the name // of the Traffic Manager profile. 
func (client ProfilesClient) Get(ctx context.Context, resourceGroupName string, profileName string) (result Profile, err error) { req, err := client.GetPreparer(ctx, resourceGroupName, profileName) if err != nil { err = autorest.NewErrorWithError(err, "trafficmanager.ProfilesClient", "Get", nil, "Failure preparing request") return } resp, err := client.GetSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "trafficmanager.ProfilesClient", "Get", resp, "Failure sending request") return } result, err = client.GetResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "trafficmanager.ProfilesClient", "Get", resp, "Failure responding to request") } return } // GetPreparer prepares the Get request. func (client ProfilesClient) GetPreparer(ctx context.Context, resourceGroupName string, profileName string) (*http.Request, error) { pathParameters := map[string]interface{}{ "profileName": autorest.Encode("path", profileName), "resourceGroupName": autorest.Encode("path", resourceGroupName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), } const APIVersion = "2017-05-01" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsGet(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/trafficmanagerprofiles/{profileName}", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // GetSender sends the Get request. The method will close the // http.Response Body if it receives an error. func (client ProfilesClient) GetSender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req, azure.DoRetryWithRegistration(client.Client)) } // GetResponder handles the response to the Get request. 
The method always // closes the http.Response Body. func (client ProfilesClient) GetResponder(resp *http.Response) (result Profile, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // ListByResourceGroup lists all Traffic Manager profiles within a resource group. // // resourceGroupName is the name of the resource group containing the Traffic Manager profiles to be listed. func (client ProfilesClient) ListByResourceGroup(ctx context.Context, resourceGroupName string) (result ProfileListResult, err error) { req, err := client.ListByResourceGroupPreparer(ctx, resourceGroupName) if err != nil { err = autorest.NewErrorWithError(err, "trafficmanager.ProfilesClient", "ListByResourceGroup", nil, "Failure preparing request") return } resp, err := client.ListByResourceGroupSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "trafficmanager.ProfilesClient", "ListByResourceGroup", resp, "Failure sending request") return } result, err = client.ListByResourceGroupResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "trafficmanager.ProfilesClient", "ListByResourceGroup", resp, "Failure responding to request") } return } // ListByResourceGroupPreparer prepares the ListByResourceGroup request. 
func (client ProfilesClient) ListByResourceGroupPreparer(ctx context.Context, resourceGroupName string) (*http.Request, error) { pathParameters := map[string]interface{}{ "resourceGroupName": autorest.Encode("path", resourceGroupName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), } const APIVersion = "2017-05-01" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsGet(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/trafficmanagerprofiles", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // ListByResourceGroupSender sends the ListByResourceGroup request. The method will close the // http.Response Body if it receives an error. func (client ProfilesClient) ListByResourceGroupSender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req, azure.DoRetryWithRegistration(client.Client)) } // ListByResourceGroupResponder handles the response to the ListByResourceGroup request. The method always // closes the http.Response Body. func (client ProfilesClient) ListByResourceGroupResponder(resp *http.Response) (result ProfileListResult, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // ListBySubscription lists all Traffic Manager profiles within a subscription. 
func (client ProfilesClient) ListBySubscription(ctx context.Context) (result ProfileListResult, err error) { req, err := client.ListBySubscriptionPreparer(ctx) if err != nil { err = autorest.NewErrorWithError(err, "trafficmanager.ProfilesClient", "ListBySubscription", nil, "Failure preparing request") return } resp, err := client.ListBySubscriptionSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "trafficmanager.ProfilesClient", "ListBySubscription", resp, "Failure sending request") return } result, err = client.ListBySubscriptionResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "trafficmanager.ProfilesClient", "ListBySubscription", resp, "Failure responding to request") } return } // ListBySubscriptionPreparer prepares the ListBySubscription request. func (client ProfilesClient) ListBySubscriptionPreparer(ctx context.Context) (*http.Request, error) { pathParameters := map[string]interface{}{ "subscriptionId": autorest.Encode("path", client.SubscriptionID), } const APIVersion = "2017-05-01" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsGet(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.Network/trafficmanagerprofiles", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // ListBySubscriptionSender sends the ListBySubscription request. The method will close the // http.Response Body if it receives an error. func (client ProfilesClient) ListBySubscriptionSender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req, azure.DoRetryWithRegistration(client.Client)) } // ListBySubscriptionResponder handles the response to the ListBySubscription request. The method always // closes the http.Response Body. 
func (client ProfilesClient) ListBySubscriptionResponder(resp *http.Response) (result ProfileListResult, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // Update update a Traffic Manager profile. // // resourceGroupName is the name of the resource group containing the Traffic Manager profile. profileName is the name // of the Traffic Manager profile. parameters is the Traffic Manager profile parameters supplied to the Update // operation. func (client ProfilesClient) Update(ctx context.Context, resourceGroupName string, profileName string, parameters Profile) (result Profile, err error) { req, err := client.UpdatePreparer(ctx, resourceGroupName, profileName, parameters) if err != nil { err = autorest.NewErrorWithError(err, "trafficmanager.ProfilesClient", "Update", nil, "Failure preparing request") return } resp, err := client.UpdateSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "trafficmanager.ProfilesClient", "Update", resp, "Failure sending request") return } result, err = client.UpdateResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "trafficmanager.ProfilesClient", "Update", resp, "Failure responding to request") } return } // UpdatePreparer prepares the Update request. 
func (client ProfilesClient) UpdatePreparer(ctx context.Context, resourceGroupName string, profileName string, parameters Profile) (*http.Request, error) { pathParameters := map[string]interface{}{ "profileName": autorest.Encode("path", profileName), "resourceGroupName": autorest.Encode("path", resourceGroupName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), } const APIVersion = "2017-05-01" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsJSON(), autorest.AsPatch(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/trafficmanagerprofiles/{profileName}", pathParameters), autorest.WithJSON(parameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // UpdateSender sends the Update request. The method will close the // http.Response Body if it receives an error. func (client ProfilesClient) UpdateSender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req, azure.DoRetryWithRegistration(client.Client)) } // UpdateResponder handles the response to the Update request. The method always // closes the http.Response Body. func (client ProfilesClient) UpdateResponder(resp *http.Response) (result Profile, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return }<|fim▁end|>
result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "trafficmanager.ProfilesClient", "Delete", resp, "Failure sending request") return
<|file_name|>pagers.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from typing import ( Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, ) from google.cloud.compute_v1.types import compute class ListPager: """A pager for iterating through ``list`` requests. This class thinly wraps an initial :class:`google.cloud.compute_v1.types.PublicDelegatedPrefixList` object, and provides an ``__iter__`` method to iterate through its ``items`` field. If there are more pages, the ``__iter__`` method will make additional<|fim▁hole|> All the usual :class:`google.cloud.compute_v1.types.PublicDelegatedPrefixList` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__( self, method: Callable[..., compute.PublicDelegatedPrefixList], request: compute.ListGlobalPublicDelegatedPrefixesRequest, response: compute.PublicDelegatedPrefixList, *, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. Args: method (Callable): The method that was originally called, and which instantiated this pager. request (google.cloud.compute_v1.types.ListGlobalPublicDelegatedPrefixesRequest): The initial request object. response (google.cloud.compute_v1.types.PublicDelegatedPrefixList): The initial response object. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = compute.ListGlobalPublicDelegatedPrefixesRequest(request) self._response = response self._metadata = metadata def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property def pages(self) -> Iterator[compute.PublicDelegatedPrefixList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response def __iter__(self) -> Iterator[compute.PublicDelegatedPrefix]: for page in self.pages: yield from page.items def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response)<|fim▁end|>
``List`` requests and continue to iterate through the ``items`` field on the corresponding responses.
<|file_name|>IntermolecularDRMSD.cpp<|end_file_name|><|fim▁begin|>/* +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ Copyright (c) 2016,2017 The plumed team (see the PEOPLE file at the root of the distribution for a list of names) See http://www.plumed.org for more information. This file is part of plumed, version 2. plumed is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. plumed is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with plumed. If not, see <http://www.gnu.org/licenses/>. +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ */ #include "DRMSD.h" #include "MetricRegister.h" namespace PLMD { class IntermolecularDRMSD : public DRMSD { private: unsigned nblocks; std::vector<unsigned> blocks; public: explicit IntermolecularDRMSD( const ReferenceConfigurationOptions& ro ); void read( const PDB& pdb ); void setup_targets(); }; PLUMED_REGISTER_METRIC(IntermolecularDRMSD,"INTER-DRMSD") IntermolecularDRMSD::IntermolecularDRMSD( const ReferenceConfigurationOptions& ro ): ReferenceConfiguration( ro ), DRMSD( ro ), nblocks(0) { } void IntermolecularDRMSD::read( const PDB& pdb ) { readAtomsFromPDB( pdb, true ); nblocks = pdb.getNumberOfAtomBlocks(); blocks.resize( nblocks+1 ); if( nblocks==1 ) error("Trying to compute intermolecular rmsd but found no TERs in input PDB"); blocks[0]=0; for(unsigned i=0; i<nblocks; ++i) blocks[i+1]=pdb.getAtomBlockEnds()[i]; readBounds(); setup_targets(); } void IntermolecularDRMSD::setup_targets() { plumed_massert( bounds_were_set, "I am missing a call to 
DRMSD::setBoundsOnDistances"); for(unsigned i=1; i<nblocks; ++i) { for(unsigned j=0; j<i; ++j) { for(unsigned iatom=blocks[i]; iatom<blocks[i+1]; ++iatom) { for(unsigned jatom=blocks[j]; jatom<blocks[j+1]; ++jatom) { double distance = delta( getReferencePosition(iatom), getReferencePosition(jatom) ).modulo(); if(distance < upper && distance > lower ) targets[std::make_pair(iatom,jatom)] = distance; }<|fim▁hole|> } } } }<|fim▁end|>
}
<|file_name|>test_scheduler.py<|end_file_name|><|fim▁begin|>from __future__ import annotations from decimal import Decimal from typing import ( Any, Mapping, Sequence, ) import uuid from pprint import pprint import pytest from ai.backend.common.docker import ImageRef from ai.backend.common.types import ( AccessKey, AgentId, KernelId, ResourceSlot, SessionTypes, ) from ai.backend.manager.scheduler import PendingSession, ExistingSession, AgentContext from ai.backend.manager.scheduler.dispatcher import load_scheduler from ai.backend.manager.scheduler.fifo import FIFOSlotScheduler, LIFOSlotScheduler from ai.backend.manager.scheduler.drf import DRFScheduler from ai.backend.manager.scheduler.mof import MOFScheduler def test_load_intrinsic(): assert isinstance(load_scheduler('fifo', {}), FIFOSlotScheduler) assert isinstance(load_scheduler('lifo', {}), LIFOSlotScheduler) assert isinstance(load_scheduler('drf', {}), DRFScheduler) assert isinstance(load_scheduler('mof', {}), MOFScheduler) example_group_id = uuid.uuid4() example_total_capacity = ResourceSlot({'cpu': '4.0', 'mem': '4096'}) @pytest.fixture def example_agents(): return [ AgentContext( agent_id=AgentId('i-001'), agent_addr='10.0.1.1:6001', scaling_group='sg01', available_slots=ResourceSlot({ 'cpu': Decimal('4.0'), 'mem': Decimal('4096'), 'cuda.shares': Decimal('4.0'), 'rocm.devices': Decimal('2'), }), occupied_slots=ResourceSlot({ 'cpu': Decimal('0'), 'mem': Decimal('0'), 'cuda.shares': Decimal('0'), 'rocm.devices': Decimal('0'), }), ), AgentContext( agent_id=AgentId('i-101'), agent_addr='10.0.2.1:6001', scaling_group='sg02', available_slots=ResourceSlot({ 'cpu': Decimal('3.0'), 'mem': Decimal('2560'), 'cuda.shares': Decimal('1.0'), 'rocm.devices': Decimal('8'), }), occupied_slots=ResourceSlot({ 'cpu': Decimal('0'), 'mem': Decimal('0'), 'cuda.shares': Decimal('0'), 'rocm.devices': Decimal('0'), }), ), ] @pytest.fixture def example_mixed_agents(): return [ AgentContext( agent_id=AgentId('i-gpu'), 
agent_addr='10.0.1.1:6001', scaling_group='sg01', available_slots=ResourceSlot({ 'cpu': Decimal('4.0'), 'mem': Decimal('4096'), 'cuda.shares': Decimal('4.0'), }), occupied_slots=ResourceSlot({ 'cpu': Decimal('0'), 'mem': Decimal('0'), 'cuda.shares': Decimal('0'), }), ), AgentContext( agent_id=AgentId('i-cpu'), agent_addr='10.0.2.1:6001', scaling_group='sg02', available_slots=ResourceSlot({ 'cpu': Decimal('3.0'), 'mem': Decimal('2560'), 'cuda.shares': Decimal('0'), }), occupied_slots=ResourceSlot({ 'cpu': Decimal('0'), 'mem': Decimal('0'), 'cuda.shares': Decimal('0'), }), ), ] @pytest.fixture def example_agents_first_one_assigned(): return [ AgentContext( agent_id=AgentId('i-001'), agent_addr='10.0.1.1:6001', scaling_group='sg01', available_slots=ResourceSlot({ 'cpu': Decimal('2.0'), 'mem': Decimal('2048'), 'cuda.shares': Decimal('2.0'), 'rocm.devices': Decimal('1'), }), occupied_slots=ResourceSlot({ 'cpu': Decimal('2.0'), 'mem': Decimal('2048'), 'cuda.shares': Decimal('2.0'), 'rocm.devices': Decimal('1'), }), ), AgentContext( agent_id=AgentId('i-101'), agent_addr='10.0.2.1:6001', scaling_group='sg02', available_slots=ResourceSlot({ 'cpu': Decimal('3.0'), 'mem': Decimal('2560'), 'cuda.shares': Decimal('1.0'), 'rocm.devices': Decimal('8'), }), occupied_slots=ResourceSlot({ 'cpu': Decimal('0'), 'mem': Decimal('0'), 'cuda.shares': Decimal('0'), 'rocm.devices': Decimal('0'), }), ), ] @pytest.fixture def example_agents_no_valid(): return [ AgentContext( agent_id=AgentId('i-001'), agent_addr='10.0.1.1:6001', scaling_group='sg01', available_slots=ResourceSlot({ 'cpu': Decimal('0'), 'mem': Decimal('0'), 'cuda.shares': Decimal('0'), 'rocm.devices': Decimal('0'), }), occupied_slots=ResourceSlot({ 'cpu': Decimal('4.0'), 'mem': Decimal('4096'), 'cuda.shares': Decimal('4.0'), 'rocm.devices': Decimal('2'), }), ), AgentContext( agent_id=AgentId('i-101'), agent_addr='10.0.2.1:6001', scaling_group='sg02', available_slots=ResourceSlot({ 'cpu': Decimal('0'), 'mem': Decimal('0'), 
'cuda.shares': Decimal('0'), 'rocm.devices': Decimal('0'), }), occupied_slots=ResourceSlot({ 'cpu': Decimal('3.0'), 'mem': Decimal('2560'), 'cuda.shares': Decimal('1.0'), 'rocm.devices': Decimal('8'), }), ), ] pending_kernel_ids: Sequence[KernelId] = [ KernelId(uuid.uuid4()) for _ in range(3) ] existing_kernel_ids: Sequence[KernelId] = [ KernelId(uuid.uuid4()) for _ in range(3) ] _common_dummy_for_pending_session: Mapping[str, Any] = dict( image_ref=ImageRef('lablup/python:3.6-ubunt18.04'), domain_name='default', group_id=example_group_id, resource_policy={}, resource_opts={}, mounts=[], mount_map={}, environ={}, bootstrap_script=None, startup_command=None, internal_data=None, preopen_ports=[], ) _common_dummy_for_existing_session: Mapping[str, Any] = dict( image_ref=ImageRef('lablup/python:3.6-ubunt18.04'), domain_name='default', group_id=example_group_id, ) @pytest.fixture def example_pending_sessions(): # lower indicies are enqueued first. return [ PendingSession( # rocm kernel_id=pending_kernel_ids[0], access_key=AccessKey('user01'), session_name='es01', session_type=SessionTypes.BATCH, scaling_group='sg01', requested_slots=ResourceSlot({ 'cpu': Decimal('2.0'), 'mem': Decimal('1024'), 'cuda.shares': Decimal('0'), 'rocm.devices': Decimal('1'), }), target_sgroup_names=[], **_common_dummy_for_pending_session, ), PendingSession( # cuda kernel_id=pending_kernel_ids[1], access_key=AccessKey('user02'), session_name='es01', session_type=SessionTypes.BATCH, scaling_group='sg01', requested_slots=ResourceSlot({ 'cpu': Decimal('1.0'), 'mem': Decimal('2048'), 'cuda.shares': Decimal('0.5'), 'rocm.devices': Decimal('0'), }), target_sgroup_names=[], **_common_dummy_for_pending_session, ), PendingSession( # cpu-only kernel_id=pending_kernel_ids[2], access_key=AccessKey('user03'), session_name='es01', session_type=SessionTypes.BATCH, scaling_group='sg01', requested_slots=ResourceSlot({ 'cpu': Decimal('1.0'), 'mem': Decimal('1024'), 'cuda.shares': Decimal('0'), 'rocm.devices': 
Decimal('0'), }), target_sgroup_names=[], **_common_dummy_for_pending_session, ), ] @pytest.fixture def example_existing_sessions(): return [ ExistingSession( kernel_id=existing_kernel_ids[0], access_key=AccessKey('user01'), session_name='es01', session_type=SessionTypes.BATCH, occupying_slots=ResourceSlot({ 'cpu': Decimal('3.0'), 'mem': Decimal('1024'), 'cuda.shares': Decimal('0'), 'rocm.devices': Decimal('1'), }), scaling_group='sg01', **_common_dummy_for_existing_session, ), ExistingSession( kernel_id=existing_kernel_ids[1], access_key=AccessKey('user02'), session_name='es01', session_type=SessionTypes.BATCH, occupying_slots=ResourceSlot({ 'cpu': Decimal('1.0'), 'mem': Decimal('2048'), 'cuda.shares': Decimal('0.5'), 'rocm.devices': Decimal('0'), }), scaling_group='sg01', **_common_dummy_for_existing_session, ), ExistingSession( kernel_id=existing_kernel_ids[2], access_key=AccessKey('user03'), session_name='es01', session_type=SessionTypes.BATCH, occupying_slots=ResourceSlot({ 'cpu': Decimal('4.0'), 'mem': Decimal('4096'), 'cuda.shares': Decimal('0'), 'rocm.devices': Decimal('0'), }), scaling_group='sg01', **_common_dummy_for_existing_session, ), ] def _find_and_pop_picked_session(pending_sessions, picked_session_id): for picked_idx, pending_sess in enumerate(pending_sessions): if pending_sess.kernel_id == picked_session_id: break else: # no matching entry for picked session? 
raise RuntimeError('should not reach here') return pending_sessions.pop(picked_idx) def test_fifo_scheduler(example_agents, example_pending_sessions, example_existing_sessions): scheduler = FIFOSlotScheduler({}) picked_session_id = scheduler.pick_session( example_total_capacity, example_pending_sessions, example_existing_sessions) assert picked_session_id == example_pending_sessions[0].kernel_id picked_session = _find_and_pop_picked_session( example_pending_sessions, picked_session_id) agent_id = scheduler.assign_agent(example_agents, picked_session) assert agent_id == AgentId('i-001') def test_lifo_scheduler(example_agents, example_pending_sessions, example_existing_sessions): scheduler = LIFOSlotScheduler({}) picked_session_id = scheduler.pick_session(<|fim▁hole|> assert picked_session_id == example_pending_sessions[2].kernel_id picked_session = _find_and_pop_picked_session( example_pending_sessions, picked_session_id) agent_id = scheduler.assign_agent(example_agents, picked_session) assert agent_id == 'i-001' def test_fifo_scheduler_favor_cpu_for_requests_without_accelerators( example_mixed_agents, example_pending_sessions, ): scheduler = FIFOSlotScheduler({}) for idx in range(3): picked_session_id = scheduler.pick_session( example_total_capacity, example_pending_sessions, []) assert picked_session_id == example_pending_sessions[0].kernel_id picked_session = _find_and_pop_picked_session( example_pending_sessions, picked_session_id) agent_id = scheduler.assign_agent(example_mixed_agents, picked_session) if idx == 0: # example_mixed_agents do not have any agent with ROCM accelerators. assert agent_id is None elif idx == 1: assert agent_id == AgentId('i-gpu') elif idx == 2: # It should favor the CPU-only agent if the requested slots # do not include accelerators. assert agent_id == AgentId('i-cpu') def test_lifo_scheduler_favor_cpu_for_requests_without_accelerators( example_mixed_agents, example_pending_sessions, ): # Check the reverse with the LIFO scheduler. 
# The result must be same. scheduler = LIFOSlotScheduler({}) for idx in range(3): picked_session_id = scheduler.pick_session( example_total_capacity, example_pending_sessions, []) assert picked_session_id == example_pending_sessions[-1].kernel_id picked_session = _find_and_pop_picked_session( example_pending_sessions, picked_session_id) agent_id = scheduler.assign_agent(example_mixed_agents, picked_session) if idx == 2: # example_mixed_agents do not have any agent with ROCM accelerators. assert agent_id is None elif idx == 1: assert agent_id == AgentId('i-gpu') elif idx == 0: # It should favor the CPU-only agent if the requested slots # do not include accelerators. assert agent_id == AgentId('i-cpu') def test_drf_scheduler(example_agents, example_pending_sessions, example_existing_sessions): scheduler = DRFScheduler({}) picked_session_id = scheduler.pick_session( example_total_capacity, example_pending_sessions, example_existing_sessions) pprint(example_pending_sessions) assert picked_session_id == example_pending_sessions[1].kernel_id picked_session = _find_and_pop_picked_session( example_pending_sessions, picked_session_id) agent_id = scheduler.assign_agent(example_agents, picked_session) assert agent_id == 'i-001' def test_mof_scheduler_first_assign(example_agents, example_pending_sessions, example_existing_sessions): scheduler = MOFScheduler({}) picked_session_id = scheduler.pick_session( example_total_capacity, example_pending_sessions, example_existing_sessions) assert picked_session_id == example_pending_sessions[0].kernel_id picked_session = _find_and_pop_picked_session( example_pending_sessions, picked_session_id) agent_id = scheduler.assign_agent(example_agents, picked_session) assert agent_id == 'i-001' def test_mof_scheduler_second_assign(example_agents_first_one_assigned, example_pending_sessions, example_existing_sessions): scheduler = MOFScheduler({}) picked_session_id = scheduler.pick_session( example_total_capacity, example_pending_sessions, 
example_existing_sessions) assert picked_session_id == example_pending_sessions[0].kernel_id picked_session = _find_and_pop_picked_session( example_pending_sessions, picked_session_id) agent_id = scheduler.assign_agent( example_agents_first_one_assigned, picked_session) assert agent_id == 'i-101' def test_mof_scheduler_no_valid_agent(example_agents_no_valid, example_pending_sessions, example_existing_sessions): scheduler = MOFScheduler({}) picked_session_id = scheduler.pick_session( example_total_capacity, example_pending_sessions, example_existing_sessions) assert picked_session_id == example_pending_sessions[0].kernel_id picked_session = _find_and_pop_picked_session( example_pending_sessions, picked_session_id) agent_id = scheduler.assign_agent(example_agents_no_valid, picked_session) assert agent_id is None # TODO: write tests for multiple agents and scaling groups<|fim▁end|>
example_total_capacity, example_pending_sessions, example_existing_sessions)
<|file_name|>index.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>export { Button, ButtonFactory } from './Button';<|fim▁end|>
<|file_name|>example_grid_time.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python<|fim▁hole|>""" Grid time ============= """ from datetime import timedelta import numpy as np from opendrift.readers import reader_global_landmask from opendrift.readers import reader_netCDF_CF_generic from opendrift.models.oceandrift import OceanDrift # Seeding at a grid at regular interval o = OceanDrift(loglevel=20) # Set loglevel to 0 for debug information reader_norkyst = reader_netCDF_CF_generic.Reader(o.test_data_folder() + '16Nov2015_NorKyst_z_surface/norkyst800_subset_16Nov2015.nc') #%% # Landmask reader_landmask = reader_global_landmask.Reader( extent=[4.0, 5.5, 59.9, 61.2]) o.add_reader([reader_landmask, reader_norkyst]) #%% # Seeding some particles lons = np.linspace(4.4, 4.6, 10) lats = np.linspace(60.0, 60.1, 10) lons, lats = np.meshgrid(lons, lats) lons = lons.ravel() lats = lats.ravel() #%% # Seed oil elements on a grid at regular time interval start_time = reader_norkyst.start_time time_step = timedelta(hours=6) num_steps = 10 for i in range(num_steps+1): o.seed_elements(lons, lats, radius=0, number=100, time=start_time + i*time_step) #%% # Running model for 60 hours o.run(steps=60*4, time_step=900, time_step_output=3600) #%% # Print and plot results print(o) o.animation(fast=True) #%% # .. image:: /gallery/animations/example_grid_time_0.gif<|fim▁end|>
<|file_name|>backtrace.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! As always, windows has something very different than unix, we mainly want //! to avoid having to depend too much on libunwind for windows. //! //! If you google around, you'll find a fair bit of references to built-in //! functions to get backtraces on windows. It turns out that most of these are //! in an external library called dbghelp. I was unable to find this library //! via `-ldbghelp`, but it is apparently normal to do the `dlopen` equivalent //! of it. //! //! You'll also find that there's a function called CaptureStackBackTrace //! mentioned frequently (which is also easy to use), but sadly I didn't have a //! copy of that function in my mingw install (maybe it was broken?). Instead, //! this takes the route of using StackWalk64 in order to walk the stack. 
#![allow(dead_code)] #[cfg(stage0)] use prelude::v1::*; use io::prelude::*; use dynamic_lib::DynamicLibrary; use ffi::CStr; use intrinsics; use io; use libc; use mem; use path::Path; use ptr; use str; use sync::StaticMutex; use sys_common::backtrace::*; #[allow(non_snake_case)] extern "system" { fn GetCurrentProcess() -> libc::HANDLE; fn GetCurrentThread() -> libc::HANDLE; fn RtlCaptureContext(ctx: *mut arch::CONTEXT); } type SymFromAddrFn = extern "system" fn(libc::HANDLE, u64, *mut u64, *mut SYMBOL_INFO) -> libc::BOOL; type SymInitializeFn = extern "system" fn(libc::HANDLE, *mut libc::c_void, libc::BOOL) -> libc::BOOL; type SymCleanupFn = extern "system" fn(libc::HANDLE) -> libc::BOOL; type StackWalk64Fn = extern "system" fn(libc::DWORD, libc::HANDLE, libc::HANDLE, *mut STACKFRAME64, *mut arch::CONTEXT, *mut libc::c_void, *mut libc::c_void, *mut libc::c_void, *mut libc::c_void) -> libc::BOOL; const MAX_SYM_NAME: usize = 2000; const IMAGE_FILE_MACHINE_I386: libc::DWORD = 0x014c; const IMAGE_FILE_MACHINE_IA64: libc::DWORD = 0x0200; const IMAGE_FILE_MACHINE_AMD64: libc::DWORD = 0x8664; #[repr(C)] struct SYMBOL_INFO { SizeOfStruct: libc::c_ulong, TypeIndex: libc::c_ulong, Reserved: [u64; 2], Index: libc::c_ulong, Size: libc::c_ulong, ModBase: u64, Flags: libc::c_ulong, Value: u64, Address: u64, Register: libc::c_ulong, Scope: libc::c_ulong, Tag: libc::c_ulong, NameLen: libc::c_ulong, MaxNameLen: libc::c_ulong, // note that windows has this as 1, but it basically just means that // the name is inline at the end of the struct. For us, we just bump // the struct size up to MAX_SYM_NAME. 
Name: [libc::c_char; MAX_SYM_NAME], } #[repr(C)] enum ADDRESS_MODE { AddrMode1616, AddrMode1632, AddrModeReal, AddrModeFlat, } struct ADDRESS64 { Offset: u64, Segment: u16, Mode: ADDRESS_MODE, } pub struct STACKFRAME64 { AddrPC: ADDRESS64, AddrReturn: ADDRESS64, AddrFrame: ADDRESS64, AddrStack: ADDRESS64, AddrBStore: ADDRESS64, FuncTableEntry: *mut libc::c_void, Params: [u64; 4], Far: libc::BOOL, Virtual: libc::BOOL, Reserved: [u64; 3], KdHelp: KDHELP64, } struct KDHELP64 { Thread: u64, ThCallbackStack: libc::DWORD, ThCallbackBStore: libc::DWORD, NextCallback: libc::DWORD, FramePointer: libc::DWORD, KiCallUserMode: u64, KeUserCallbackDispatcher: u64, SystemRangeStart: u64, KiUserExceptionDispatcher: u64, StackBase: u64, StackLimit: u64, Reserved: [u64; 5], } #[cfg(target_arch = "x86")] mod arch { use libc; const MAXIMUM_SUPPORTED_EXTENSION: usize = 512; #[repr(C)] pub struct CONTEXT { ContextFlags: libc::DWORD, Dr0: libc::DWORD, Dr1: libc::DWORD, Dr2: libc::DWORD, Dr3: libc::DWORD, Dr6: libc::DWORD, Dr7: libc::DWORD, FloatSave: FLOATING_SAVE_AREA, SegGs: libc::DWORD, SegFs: libc::DWORD, SegEs: libc::DWORD, SegDs: libc::DWORD, Edi: libc::DWORD, Esi: libc::DWORD, Ebx: libc::DWORD, Edx: libc::DWORD, Ecx: libc::DWORD, Eax: libc::DWORD, Ebp: libc::DWORD, Eip: libc::DWORD, SegCs: libc::DWORD, EFlags: libc::DWORD, Esp: libc::DWORD, SegSs: libc::DWORD, ExtendedRegisters: [u8; MAXIMUM_SUPPORTED_EXTENSION], } #[repr(C)] pub struct FLOATING_SAVE_AREA { ControlWord: libc::DWORD, StatusWord: libc::DWORD, TagWord: libc::DWORD, ErrorOffset: libc::DWORD, ErrorSelector: libc::DWORD, DataOffset: libc::DWORD, DataSelector: libc::DWORD, RegisterArea: [u8; 80], Cr0NpxState: libc::DWORD, } pub fn init_frame(frame: &mut super::STACKFRAME64, ctx: &CONTEXT) -> libc::DWORD { frame.AddrPC.Offset = ctx.Eip as u64; frame.AddrPC.Mode = super::ADDRESS_MODE::AddrModeFlat; frame.AddrStack.Offset = ctx.Esp as u64; frame.AddrStack.Mode = super::ADDRESS_MODE::AddrModeFlat; frame.AddrFrame.Offset = 
ctx.Ebp as u64; frame.AddrFrame.Mode = super::ADDRESS_MODE::AddrModeFlat; super::IMAGE_FILE_MACHINE_I386 } } #[cfg(target_arch = "x86_64")] mod arch { use libc::{c_longlong, c_ulonglong}; use libc::types::os::arch::extra::{WORD, DWORD, DWORDLONG}; use simd; #[repr(C)] pub struct CONTEXT { _align_hack: [simd::u64x2; 0], // FIXME align on 16-byte P1Home: DWORDLONG, P2Home: DWORDLONG, P3Home: DWORDLONG, P4Home: DWORDLONG, P5Home: DWORDLONG, P6Home: DWORDLONG, ContextFlags: DWORD, MxCsr: DWORD, SegCs: WORD, SegDs: WORD, SegEs: WORD, SegFs: WORD, SegGs: WORD, SegSs: WORD, EFlags: DWORD, Dr0: DWORDLONG, Dr1: DWORDLONG, Dr2: DWORDLONG, Dr3: DWORDLONG, Dr6: DWORDLONG, Dr7: DWORDLONG, Rax: DWORDLONG, Rcx: DWORDLONG, Rdx: DWORDLONG, Rbx: DWORDLONG, Rsp: DWORDLONG, Rbp: DWORDLONG, Rsi: DWORDLONG, Rdi: DWORDLONG, R8: DWORDLONG, R9: DWORDLONG, R10: DWORDLONG, R11: DWORDLONG, R12: DWORDLONG, R13: DWORDLONG, R14: DWORDLONG, R15: DWORDLONG, Rip: DWORDLONG, FltSave: FLOATING_SAVE_AREA, VectorRegister: [M128A; 26], VectorControl: DWORDLONG, DebugControl: DWORDLONG, LastBranchToRip: DWORDLONG, LastBranchFromRip: DWORDLONG, LastExceptionToRip: DWORDLONG, LastExceptionFromRip: DWORDLONG, } #[repr(C)] pub struct M128A { _align_hack: [simd::u64x2; 0], // FIXME align on 16-byte Low: c_ulonglong, High: c_longlong } #[repr(C)] pub struct FLOATING_SAVE_AREA { _align_hack: [simd::u64x2; 0], // FIXME align on 16-byte _Dummy: [u8; 512] // FIXME: Fill this out } pub fn init_frame(frame: &mut super::STACKFRAME64, ctx: &CONTEXT) -> DWORD { frame.AddrPC.Offset = ctx.Rip as u64; frame.AddrPC.Mode = super::ADDRESS_MODE::AddrModeFlat; frame.AddrStack.Offset = ctx.Rsp as u64; frame.AddrStack.Mode = super::ADDRESS_MODE::AddrModeFlat; frame.AddrFrame.Offset = ctx.Rbp as u64; frame.AddrFrame.Mode = super::ADDRESS_MODE::AddrModeFlat; super::IMAGE_FILE_MACHINE_AMD64 } } struct Cleanup { handle: libc::HANDLE, SymCleanup: SymCleanupFn, } impl Drop for Cleanup { fn drop(&mut self) { 
(self.SymCleanup)(self.handle); } } pub fn write(w: &mut Write) -> io::Result<()> { // According to windows documentation, all dbghelp functions are // single-threaded. static LOCK: StaticMutex = StaticMutex::new(); let _g = LOCK.lock(); // Open up dbghelp.dll, we don't link to it explicitly because it can't // always be found. Additionally, it's nice having fewer dependencies. let path = Path::new("dbghelp.dll"); let lib = match DynamicLibrary::open(Some(&path)) { Ok(lib) => lib, Err(..) => return Ok(()), }; macro_rules! sym{ ($e:expr, $t:ident) => (unsafe { match lib.symbol($e) { Ok(f) => mem::transmute::<*mut u8, $t>(f), Err(..) => return Ok(()) } }) } // Fetch the symbols necessary from dbghelp.dll let SymFromAddr = sym!("SymFromAddr", SymFromAddrFn); let SymInitialize = sym!("SymInitialize", SymInitializeFn); let SymCleanup = sym!("SymCleanup", SymCleanupFn); let StackWalk64 = sym!("StackWalk64", StackWalk64Fn); <|fim▁hole|> unsafe { RtlCaptureContext(&mut context); } let mut frame: STACKFRAME64 = unsafe { intrinsics::init() }; let image = arch::init_frame(&mut frame, &context); // Initialize this process's symbols let ret = SymInitialize(process, ptr::null_mut(), libc::TRUE); if ret != libc::TRUE { return Ok(()) } let _c = Cleanup { handle: process, SymCleanup: SymCleanup }; // And now that we're done with all the setup, do the stack walking! let mut i = 0; try!(write!(w, "stack backtrace:\n")); while StackWalk64(image, process, thread, &mut frame, &mut context, ptr::null_mut(), ptr::null_mut(), ptr::null_mut(), ptr::null_mut()) == libc::TRUE{ let addr = frame.AddrPC.Offset; if addr == frame.AddrReturn.Offset || addr == 0 || frame.AddrReturn.Offset == 0 { break } i += 1; try!(write!(w, " {:2}: {:#2$x}", i, addr, HEX_WIDTH)); let mut info: SYMBOL_INFO = unsafe { intrinsics::init() }; info.MaxNameLen = MAX_SYM_NAME as libc::c_ulong; // the struct size in C. 
the value is different to // `size_of::<SYMBOL_INFO>() - MAX_SYM_NAME + 1` (== 81) // due to struct alignment. info.SizeOfStruct = 88; let mut displacement = 0u64; let ret = SymFromAddr(process, addr as u64, &mut displacement, &mut info); if ret == libc::TRUE { try!(write!(w, " - ")); let ptr = info.Name.as_ptr() as *const libc::c_char; let bytes = unsafe { CStr::from_ptr(ptr).to_bytes() }; match str::from_utf8(bytes) { Ok(s) => try!(demangle(w, s)), Err(..) => try!(w.write_all(&bytes[..bytes.len()-1])), } } try!(w.write_all(&['\n' as u8])); } Ok(()) }<|fim▁end|>
// Allocate necessary structures for doing the stack walk let process = unsafe { GetCurrentProcess() }; let thread = unsafe { GetCurrentThread() }; let mut context: arch::CONTEXT = unsafe { intrinsics::init() };
<|file_name|>discard_framebuffer.py<|end_file_name|><|fim▁begin|>from OpenGLCffi.GLES1 import params<|fim▁hole|>@params(api='gles1', prms=['target', 'numAttachments', 'attachments']) def glDiscardFramebufferEXT(target, numAttachments, attachments): pass<|fim▁end|>
<|file_name|>test_add_project.py<|end_file_name|><|fim▁begin|>from model.project import Project def test_add_project(app): project=Project(name="students_project", description="about Project") try: ind = app.project.get_project_list().index(project) app.project.delete_named_project(project) except ValueError: pass old_projects = app.project.get_project_list() app.project.create(project) new_projects = app.project.get_project_list()<|fim▁hole|> old_projects.append(project) assert sorted(old_projects,key=Project.id_or_max) == sorted(new_projects,key=Project.id_or_max)<|fim▁end|>
assert len(old_projects) + 1 == len(new_projects)
<|file_name|>package.py<|end_file_name|><|fim▁begin|>############################################################################## # Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. # Created by Todd Gamblin, [email protected], All rights reserved. # LLNL-CODE-647188 # # For details, see https://github.com/spack/spack # Please also see the NOTICE and LICENSE files for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License (as # published by the Free Software Foundation) version 2.1, February 1999. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and<|fim▁hole|># # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * class Scalasca(AutotoolsPackage): """Scalasca is a software tool that supports the performance optimization of parallel programs by measuring and analyzing their runtime behavior. The analysis identifies potential performance bottlenecks - in particular those concerning communication and synchronization - and offers guidance in exploring their causes. 
""" homepage = "http://www.scalasca.org" url = "http://apps.fz-juelich.de/scalasca/releases/scalasca/2.1/dist/scalasca-2.1.tar.gz" version('2.3.1', 'a83ced912b9d2330004cb6b9cefa7585') version('2.2.2', '2bafce988b0522d18072f7771e491ab9') version('2.1', 'bab9c2b021e51e2ba187feec442b96e6') depends_on("mpi") # version 2.3 depends_on('[email protected]:', when='@2.3:') depends_on('otf2@2:', when='@2.3:') # version 2.1+ depends_on('[email protected]', when='@2.1:2.2.999') depends_on('[email protected]', when='@2.1:2.2.999') def url_for_version(self, version): return 'http://apps.fz-juelich.de/scalasca/releases/scalasca/{0}/dist/scalasca-{1}.tar.gz'.format(version.up_to(2), version) def configure_args(self): spec = self.spec config_args = ["--enable-shared"] config_args.append("--with-cube=%s" % spec['cube'].prefix.bin) config_args.append("--with-otf2=%s" % spec['otf2'].prefix.bin) if self.spec['mpi'].name == 'openmpi': config_args.append("--with-mpi=openmpi") elif self.spec.satisfies('^mpich@3:'): config_args.append("--with-mpi=mpich3") return config_args<|fim▁end|>
# conditions of the GNU Lesser General Public License for more details.
<|file_name|>findARestaurant.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import json import httplib2 import sys import codecs sys.stdout = codecs.getwriter('utf8')(sys.stdout) sys.stderr = codecs.getwriter('utf8')(sys.stderr) foursquare_client_id = 'SMQNYZFVCIOYIRAIXND2D5SYBLQUOPDB4HZTV13TT22AGACD' foursquare_client_secret = 'IHBS4VBHYWJL53NLIY2HSVI5A1144GJ3MDTYYY1KLKTMC4BV' google_api_key = 'AIzaSyBz7r2Kz6x7wO1zV9_O5Rcxmt8NahJ6kos' def getGeocodeLocation(inputString): #Replace Spaces with '+' in URL locationString = inputString.replace(" ", "+") url = ('https://maps.googleapis.com/maps/api/geocode/json?address=%s&key=%s'% (locationString, google_api_key)) h = httplib2.Http() result = json.loads(h.request(url,'GET')[1]) #print response latitude = result['results'][0]['geometry']['location']['lat'] longitude = result['results'][0]['geometry']['location']['lng'] return (latitude,longitude) #This function takes in a string representation of a location and cuisine type, geocodes the location, and then pass in the latitude and longitude coordinates to the Foursquare API def findARestaurant(mealType, location): latitude, longitude = getGeocodeLocation(location) url = ('https://api.foursquare.com/v2/venues/search?client_id=%s&client_secret=%s&v=20130815&ll=%s,%s&query=%s' % (foursquare_client_id, foursquare_client_secret,latitude,longitude,mealType)) h = httplib2.Http() result = json.loads(h.request(url,'GET')[1]) if result['response']['venues']: #Grab the first restaurant restaurant = result['response']['venues'][0] venue_id = restaurant['id'] <|fim▁hole|> address = "" for i in restaurant_address: address += i + " " restaurant_address = address #Get a 300x300 picture of the restaurant using the venue_id (you can change this by altering the 300x300 value in the URL or replacing it with 'orginal' to get the original picture url = ('https://api.foursquare.com/v2/venues/%s/photos?client_id=%s&v=20150603&client_secret=%s' % 
((venue_id,foursquare_client_id,foursquare_client_secret))) result = json.loads(h.request(url,'GET')[1]) #Grab the first image #if no image available, insert default image url if result['response']['photos']['items']: firstpic = result['response']['photos']['items'][0] prefix = firstpic['prefix'] suffix = firstpic['suffix'] imageURL = prefix + "300x300" + suffix else: imageURL = "http://pixabay.com/get/8926af5eb597ca51ca4c/1433440765/cheeseburger-34314_1280.png?direct" restaurantInfo = {'name':restaurant_name, 'address':restaurant_address, 'image':imageURL} #print "Restaurant Name: %s " % restaurantInfo['name'] #print "Restaurant Address: %s " % restaurantInfo['address'] #print "Image: %s \n " % restaurantInfo['image'] return restaurantInfo else: #print "No Restaurants Found for %s" % location return "No Restaurants Found" if __name__ == '__main__': findARestaurant("Pizza", "Tokyo, Japan") findARestaurant("Tacos", "Jakarta, Indonesia") findARestaurant("Tapas", "Maputo, Mozambique") findARestaurant("Falafel", "Cairo, Egypt") findARestaurant("Spaghetti", "New Delhi, India") findARestaurant("Cappuccino", "Geneva, Switzerland") findARestaurant("Sushi", "Los Angeles, California") findARestaurant("Steak", "La Paz, Bolivia") findARestaurant("Gyros", "Sydney Austrailia")<|fim▁end|>
restaurant_name = restaurant['name'] restaurant_address = restaurant['location']['formattedAddress'] #Format the Restaurant Address into one string
<|file_name|>urls.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from django.conf.urls import url from . import views urlpatterns = [ url(r"^/android/setup$", views.android_setup_view, name="notif_android_setup"),<|fim▁hole|>]<|fim▁end|>
url(r"^/chrome/setup$", views.chrome_setup_view, name="notif_chrome_setup"), url(r"^/chrome/getdata$", views.chrome_getdata_view, name="notif_chrome_getdata"), url(r"^/gcm/post$", views.gcm_post_view, name="notif_gcm_post"), url(r"^/gcm/list$", views.gcm_list_view, name="notif_gcm_list")
<|file_name|>calc.py<|end_file_name|><|fim▁begin|># Copyright 2017 Battelle Energy Alliance, LLC<|fim▁hole|># # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import math import numpy def run(self, Input): number_of_steps = 16 self.time = numpy.zeros(number_of_steps) uniform = Input["uniform"] self.out = numpy.zeros(number_of_steps) for i in range(len(self.time)): self.time[i] = 0.25*i time = self.time[i] self.out[i] = math.sin(time+uniform)<|fim▁end|>
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>from django.contrib import auth from django.contrib.auth.models import User from django.test import TestCase from django.urls.base import reverse class TestAccountRegistration(TestCase): def setUp(self): # create one user for convenience response = self.client.post( reverse('account:register'), { 'username': 'Alice', 'email': 'alice@localhost', 'password': 'supasecret', 'password2': 'supasecret', }, follow=True ) self.assertEqual(response.redirect_chain[0][1], 302) self.assertEqual(response.redirect_chain[0][0], reverse('account:login')) self.assertEqual(response.status_code, 200) def test_registration(self): self.assertEqual(len(User.objects.all()), 1) user = User.objects.get(username='Alice') self.assertEqual(user.email, 'alice@localhost') response = self.client.post( reverse('account:register'), { 'username': 'Bob', 'email': 'bob@localhost', 'password': 'foo', 'password2': 'foo', }, follow=True<|fim▁hole|> ) self.assertEqual(response.redirect_chain[0][1], 302) self.assertEqual(response.redirect_chain[0][0], reverse('account:login')) self.assertEqual(response.status_code, 200) self.assertEqual(len(User.objects.all()), 2) def test_duplicate_username(self): response = self.client.post( reverse('account:register'), { 'username': 'Alice', 'email': 'alice2@localhost', 'password': 'supasecret', 'password2': 'supasecret', }, follow=True ) self.assertEqual(response.redirect_chain[0][1], 302) self.assertEqual(response.redirect_chain[0][0], reverse('account:register')) self.assertEqual(response.status_code, 200) self.assertEqual(len(User.objects.all()), 1) def test_duplicate_email(self): response = self.client.post( reverse('account:register'), { 'username': 'Alice2000', 'email': 'alice@localhost', 'password': 'supasecret', 'password2': 'supasecret', }, follow=True ) self.assertEqual(response.redirect_chain[0][1], 302) self.assertEqual(response.redirect_chain[0][0], reverse('account:register')) 
self.assertEqual(response.status_code, 200) self.assertEqual(len(User.objects.all()), 1) def test_non_matching_passwords(self): response = self.client.post( reverse('account:register'), { 'username': 'Bob', 'email': 'bob@localhost', 'password': 'foo', 'password2': 'bar', }, follow=True ) self.assertEqual(response.redirect_chain[0][1], 302) self.assertEqual(response.redirect_chain[0][0], reverse('account:register')) self.assertEqual(response.status_code, 200) self.assertEqual(len(User.objects.all()), 1) def test_form_view(self): response = self.client.get(reverse('account:register')) self.assertEqual(response.status_code, 200) class TestLogin(TestCase): def setUp(self): # create one user for convenience response = self.client.post( reverse('account:register'), { 'username': 'Alice', 'email': 'alice@localhost', 'password': 'supasecret', 'password2': 'supasecret', }, follow=True ) self.assertEqual(response.redirect_chain[0][1], 302) self.assertEqual(response.redirect_chain[0][0], reverse('account:login')) self.assertEqual(response.status_code, 200) def test_login(self): response = self.client.post( reverse('account:login'), {'username': 'Alice', 'password': 'supasecret'}, follow=True ) self.assertEqual(response.redirect_chain[0][1], 302) self.assertEqual(response.redirect_chain[0][0], reverse('account:home')) self.assertEqual(response.status_code, 200) def test_disabled_login(self): user = User.objects.all().update(is_active=False) response = self.client.post( reverse('account:login'), {'username': 'Alice', 'password': 'supasecret'}, follow=True ) self.assertEqual(response.redirect_chain[0][1], 302) self.assertEqual(response.redirect_chain[0][0], reverse('account:login')) self.assertEqual(response.status_code, 200) def test_wrong_credentials(self): response = self.client.post( reverse('account:login'), {'username': 'Alice', 'password': 'wrong'}, follow=True ) self.assertEqual(response.redirect_chain[0][1], 302) self.assertEqual(response.redirect_chain[0][0], 
reverse('account:login')) self.assertEqual(response.status_code, 200) def test_wrong_user(self): response = self.client.post( reverse('account:login'), {'username': 'Bob', 'password': 'supasecret'}, follow=True ) self.assertEqual(response.redirect_chain[0][1], 302) self.assertEqual(response.redirect_chain[0][0], reverse('account:login')) self.assertEqual(response.status_code, 200) def test_login_view(self): response = self.client.get(reverse('account:login')) self.assertEqual(response.status_code, 200) def test_login_view_being_logged_in(self): response = self.client.post( reverse('account:login'), {'username': 'Alice', 'password': 'supasecret'}, follow=True ) response = self.client.get( reverse('account:login'), follow=True ) self.assertEqual(response.redirect_chain[0][1], 302) self.assertEqual(response.redirect_chain[0][0], reverse('account:home')) self.assertEqual(response.status_code, 200) response = self.client.post( reverse('account:login'), {'username': 'Alice', 'password': 'supasecret'}, follow=True ) self.assertEqual(response.redirect_chain[0][1], 302) self.assertEqual(response.redirect_chain[0][0], reverse('account:home')) self.assertEqual(response.status_code, 200) def test_home_view_while_not_logged_in(self): response = self.client.get(reverse('account:home'), follow=True) self.assertEqual(response.redirect_chain[0][1], 302) self.assertTrue(response.redirect_chain[0][0].startswith(reverse('account:login'))) self.assertEqual(response.status_code, 200) def test_home_view_while_logged_in(self): response = self.client.post( reverse('account:login'), {'username': 'Alice', 'password': 'supasecret'}, follow=True ) response = self.client.get(reverse('account:home')) self.assertEqual(response.status_code, 200) def test_register_view_while_logged_in(self): response = self.client.post( reverse('account:login'), {'username': 'Alice', 'password': 'supasecret'}, follow=True ) response = self.client.get(reverse('account:register'), follow=True) 
self.assertEqual(response.redirect_chain[0][1], 302) self.assertTrue(response.redirect_chain[0][0].startswith(reverse('account:home'))) self.assertEqual(response.status_code, 200) def test_logout(self): response = self.client.post( reverse('account:login'), {'username': 'Alice', 'password': 'supasecret'}, follow=True ) user = auth.get_user(self.client) self.assertTrue(user.is_authenticated) response = self.client.get(reverse('account:logout'), follow=True) self.assertEqual(response.redirect_chain[0][1], 302) self.assertTrue(response.redirect_chain[0][0].startswith(reverse('base:home'))) self.assertEqual(response.status_code, 200) user = auth.get_user(self.client) self.assertFalse(user.is_authenticated)<|fim▁end|>
<|file_name|>memory_info.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python """ DragonPy - base memory info ~~~~~~~~~~~~~~~~~~~~~~~~~~~ :created: 2013 by Jens Diemer - www.jensdiemer.de :copyleft: 2013 by the MC6809 team, see AUTHORS for more details. :license: GNU GPL v3 or above, see LICENSE for more details. """ import sys class BaseMemoryInfo: def __init__(self, out_func): self.out_func = out_func def get_shortest(self, addr): shortest = None size = sys.maxsize for start, end, txt in self.MEM_INFO:<|fim▁hole|> if not start <= addr <= end: continue current_size = abs(end - start) if current_size < size: size = current_size shortest = start, end, txt if shortest is None: return f"${addr:x}: UNKNOWN" start, end, txt = shortest if start == end: return f"${addr:x}: {txt}" else: return f"${addr:x}: ${start:x}-${end:x} - {txt}" def __call__(self, addr, info="", shortest=True): if shortest: mem_info = self.get_shortest(addr) if info: self.out_func(f"{info}: {mem_info}") else: self.out_func(mem_info) return mem_info = [] for start, end, txt in self.MEM_INFO: if start <= addr <= end: mem_info.append( (start, end, txt) ) if not mem_info: self.out_func(f"{info} ${addr:x}: UNKNOWN") else: self.out_func(f"{info} ${addr:x}:") for start, end, txt in mem_info: if start == end: self.out_func(f" * ${start:x} - {txt}") else: self.out_func(f" * ${start:x}-${end:x} - {txt}")<|fim▁end|>
<|file_name|>structs.rs<|end_file_name|><|fim▁begin|>use collections::BTreeMap; //JSON support use serialize::json::{ self, ToJson, Json}; // syntax elements use syntax::ast::StructDef; use syntax::ast::StructFieldKind::*; pub struct StructDefinition { name: String, // (name ,type ) fields: Vec<(String, String)>, } <|fim▁hole|> let mut tuple_struct_index = 0i64; let fields: Vec<(String,String)> = struct_def.fields .iter() .map(|field| match field.node.kind { // TODO: handle visibility NamedField(ref ident, ref _visibility) => (super::get_name_from_ident(ident), super::read_type(&field.node.ty.node)), UnnamedField(ref _visibility) => { let t = (tuple_struct_index.to_string(), super::read_type(&field.node.ty.node)); tuple_struct_index += 1; t } } ) .collect(); StructDefinition { name: name, fields: fields } } } // for serialization // eg: struct Test { a: i64, b: String } // => {"name":"Test","fields":[["a","i64"],["b","String"]]} impl ToJson for StructDefinition { fn to_json(&self) -> json::Json { let mut j = BTreeMap::new(); j.insert("name".to_string(), self.name.to_json()); j.insert("fields".to_string(), self.fields.to_json()); j.to_json() } }<|fim▁end|>
impl StructDefinition { pub fn new(name: String, struct_def: &StructDef) -> StructDefinition {
<|file_name|>specularlighting.cpp<|end_file_name|><|fim▁begin|>/** \file * SVG <feSpecularLighting> implementation. * */ /* * Authors: * hugo Rodrigues <[email protected]> * Jean-Rene Reinhard <[email protected]> * Abhishek Sharma * * Copyright (C) 2006 Hugo Rodrigues * 2007 authors * * Released under GNU GPL, read the file 'COPYING' for more information */ #include "strneq.h" #include "attributes.h" #include "svg/svg.h" #include "sp-object.h" #include "svg/svg-color.h" #include "svg/svg-icc-color.h" #include "filters/specularlighting.h" #include "filters/distantlight.h" #include "filters/pointlight.h" #include "filters/spotlight.h" #include "xml/repr.h" #include "display/nr-filter.h" #include "display/nr-filter-specularlighting.h" /* FeSpecularLighting base class */ static void sp_feSpecularLighting_children_modified(SPFeSpecularLighting *sp_specularlighting); SPFeSpecularLighting::SPFeSpecularLighting() : SPFilterPrimitive() { this->surfaceScale = 1; this->specularConstant = 1; this->specularExponent = 1; this->lighting_color = 0xffffffff; this->icc = NULL; //TODO kernelUnit this->renderer = NULL; this->surfaceScale_set = FALSE; this->specularConstant_set = FALSE; this->specularExponent_set = FALSE; this->lighting_color_set = FALSE; } SPFeSpecularLighting::~SPFeSpecularLighting() { } /** * Reads the Inkscape::XML::Node, and initializes SPFeSpecularLighting variables. For this to get called, * our name must be associated with a repr via "sp_object_type_register". Best done through * sp-object-repr.cpp's repr_name_entries array. */ void SPFeSpecularLighting::build(SPDocument *document, Inkscape::XML::Node *repr) { SPFilterPrimitive::build(document, repr); /*LOAD ATTRIBUTES FROM REPR HERE*/ this->readAttr( "surfaceScale" ); this->readAttr( "specularConstant" ); this->readAttr( "specularExponent" ); this->readAttr( "kernelUnitLength" ); this->readAttr( "lighting-color" ); } /** * Drops any allocated memory. 
*/ void SPFeSpecularLighting::release() { SPFilterPrimitive::release(); } /** * Sets a specific value in the SPFeSpecularLighting. */ void SPFeSpecularLighting::set(unsigned int key, gchar const *value) { gchar const *cend_ptr = NULL; gchar *end_ptr = NULL; switch(key) { /*DEAL WITH SETTING ATTRIBUTES HERE*/ //TODO test forbidden values case SP_ATTR_SURFACESCALE: end_ptr = NULL; if (value) { this->surfaceScale = g_ascii_strtod(value, &end_ptr); if (end_ptr) { this->surfaceScale_set = TRUE; } else { g_warning("this: surfaceScale should be a number ... defaulting to 1"); } } //if the attribute is not set or has an unreadable value if (!value || !end_ptr) { this->surfaceScale = 1; this->surfaceScale_set = FALSE; } if (this->renderer) { this->renderer->surfaceScale = this->surfaceScale; } this->parent->requestModified(SP_OBJECT_MODIFIED_FLAG); break; case SP_ATTR_SPECULARCONSTANT: end_ptr = NULL; if (value) { this->specularConstant = g_ascii_strtod(value, &end_ptr); if (end_ptr && this->specularConstant >= 0) { this->specularConstant_set = TRUE; } else { end_ptr = NULL; g_warning("this: specularConstant should be a positive number ... defaulting to 1"); } } if (!value || !end_ptr) { this->specularConstant = 1;<|fim▁hole|> if (this->renderer) { this->renderer->specularConstant = this->specularConstant; } this->parent->requestModified(SP_OBJECT_MODIFIED_FLAG); break; case SP_ATTR_SPECULAREXPONENT: end_ptr = NULL; if (value) { this->specularExponent = g_ascii_strtod(value, &end_ptr); if (this->specularExponent >= 1 && this->specularExponent <= 128) { this->specularExponent_set = TRUE; } else { end_ptr = NULL; g_warning("this: specularExponent should be a number in range [1, 128] ... 
defaulting to 1"); } } if (!value || !end_ptr) { this->specularExponent = 1; this->specularExponent_set = FALSE; } if (this->renderer) { this->renderer->specularExponent = this->specularExponent; } this->parent->requestModified(SP_OBJECT_MODIFIED_FLAG); break; case SP_ATTR_KERNELUNITLENGTH: //TODO kernelUnit //this->kernelUnitLength.set(value); /*TODOif (feSpecularLighting->renderer) { feSpecularLighting->renderer->surfaceScale = feSpecularLighting->renderer; } */ this->parent->requestModified(SP_OBJECT_MODIFIED_FLAG); break; case SP_PROP_LIGHTING_COLOR: cend_ptr = NULL; this->lighting_color = sp_svg_read_color(value, &cend_ptr, 0xffffffff); //if a value was read if (cend_ptr) { while (g_ascii_isspace(*cend_ptr)) { ++cend_ptr; } if (strneq(cend_ptr, "icc-color(", 10)) { if (!this->icc) this->icc = new SVGICCColor(); if ( ! sp_svg_read_icc_color( cend_ptr, this->icc ) ) { delete this->icc; this->icc = NULL; } } this->lighting_color_set = TRUE; } else { //lighting_color already contains the default value this->lighting_color_set = FALSE; } if (this->renderer) { this->renderer->lighting_color = this->lighting_color; } this->parent->requestModified(SP_OBJECT_MODIFIED_FLAG); break; default: SPFilterPrimitive::set(key, value); break; } } /** * Receives update notifications. */ void SPFeSpecularLighting::update(SPCtx *ctx, guint flags) { if (flags & (SP_OBJECT_MODIFIED_FLAG)) { this->readAttr( "surfaceScale" ); this->readAttr( "specularConstant" ); this->readAttr( "specularExponent" ); this->readAttr( "kernelUnitLength" ); this->readAttr( "lighting-color" ); } SPFilterPrimitive::update(ctx, flags); } /** * Writes its settings to an incoming repr object, if any. 
*/ Inkscape::XML::Node* SPFeSpecularLighting::write(Inkscape::XML::Document *doc, Inkscape::XML::Node *repr, guint flags) { /* TODO: Don't just clone, but create a new repr node and write all * relevant values _and children_ into it */ if (!repr) { repr = this->getRepr()->duplicate(doc); //repr = doc->createElement("svg:feSpecularLighting"); } if (this->surfaceScale_set) { sp_repr_set_css_double(repr, "surfaceScale", this->surfaceScale); } if (this->specularConstant_set) { sp_repr_set_css_double(repr, "specularConstant", this->specularConstant); } if (this->specularExponent_set) { sp_repr_set_css_double(repr, "specularExponent", this->specularExponent); } /*TODO kernelUnits */ if (this->lighting_color_set) { gchar c[64]; sp_svg_write_color(c, sizeof(c), this->lighting_color); repr->setAttribute("lighting-color", c); } SPFilterPrimitive::write(doc, repr, flags); return repr; } /** * Callback for child_added event. */ void SPFeSpecularLighting::child_added(Inkscape::XML::Node *child, Inkscape::XML::Node *ref) { SPFilterPrimitive::child_added(child, ref); sp_feSpecularLighting_children_modified(this); this->parent->requestModified(SP_OBJECT_MODIFIED_FLAG); } /** * Callback for remove_child event. 
*/ void SPFeSpecularLighting::remove_child(Inkscape::XML::Node *child) { SPFilterPrimitive::remove_child(child); sp_feSpecularLighting_children_modified(this); this->parent->requestModified(SP_OBJECT_MODIFIED_FLAG); } void SPFeSpecularLighting::order_changed(Inkscape::XML::Node *child, Inkscape::XML::Node *old_ref, Inkscape::XML::Node *new_ref) { SPFilterPrimitive::order_changed(child, old_ref, new_ref); sp_feSpecularLighting_children_modified(this); this->parent->requestModified(SP_OBJECT_MODIFIED_FLAG); } static void sp_feSpecularLighting_children_modified(SPFeSpecularLighting *sp_specularlighting) { if (sp_specularlighting->renderer) { sp_specularlighting->renderer->light_type = Inkscape::Filters::NO_LIGHT; if (SP_IS_FEDISTANTLIGHT(sp_specularlighting->children)) { sp_specularlighting->renderer->light_type = Inkscape::Filters::DISTANT_LIGHT; sp_specularlighting->renderer->light.distant = SP_FEDISTANTLIGHT(sp_specularlighting->children); } if (SP_IS_FEPOINTLIGHT(sp_specularlighting->children)) { sp_specularlighting->renderer->light_type = Inkscape::Filters::POINT_LIGHT; sp_specularlighting->renderer->light.point = SP_FEPOINTLIGHT(sp_specularlighting->children); } if (SP_IS_FESPOTLIGHT(sp_specularlighting->children)) { sp_specularlighting->renderer->light_type = Inkscape::Filters::SPOT_LIGHT; sp_specularlighting->renderer->light.spot = SP_FESPOTLIGHT(sp_specularlighting->children); } } } void SPFeSpecularLighting::build_renderer(Inkscape::Filters::Filter* filter) { g_assert(this != NULL); g_assert(filter != NULL); int primitive_n = filter->add_primitive(Inkscape::Filters::NR_FILTER_SPECULARLIGHTING); Inkscape::Filters::FilterPrimitive *nr_primitive = filter->get_primitive(primitive_n); Inkscape::Filters::FilterSpecularLighting *nr_specularlighting = dynamic_cast<Inkscape::Filters::FilterSpecularLighting*>(nr_primitive); g_assert(nr_specularlighting != NULL); this->renderer = nr_specularlighting; sp_filter_primitive_renderer_common(this, nr_primitive); 
nr_specularlighting->specularConstant = this->specularConstant; nr_specularlighting->specularExponent = this->specularExponent; nr_specularlighting->surfaceScale = this->surfaceScale; nr_specularlighting->lighting_color = this->lighting_color; nr_specularlighting->set_icc(this->icc); //We assume there is at most one child nr_specularlighting->light_type = Inkscape::Filters::NO_LIGHT; if (SP_IS_FEDISTANTLIGHT(this->children)) { nr_specularlighting->light_type = Inkscape::Filters::DISTANT_LIGHT; nr_specularlighting->light.distant = SP_FEDISTANTLIGHT(this->children); } if (SP_IS_FEPOINTLIGHT(this->children)) { nr_specularlighting->light_type = Inkscape::Filters::POINT_LIGHT; nr_specularlighting->light.point = SP_FEPOINTLIGHT(this->children); } if (SP_IS_FESPOTLIGHT(this->children)) { nr_specularlighting->light_type = Inkscape::Filters::SPOT_LIGHT; nr_specularlighting->light.spot = SP_FESPOTLIGHT(this->children); } //nr_offset->set_dx(sp_offset->dx); //nr_offset->set_dy(sp_offset->dy); } /* Local Variables: mode:c++ c-file-style:"stroustrup" c-file-offsets:((innamespace . 0)(inline-open . 0)(case-label . +)) indent-tabs-mode:nil fill-column:99 End: */ // vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:fileencoding=utf-8:textwidth=99 :<|fim▁end|>
this->specularConstant_set = FALSE; }
<|file_name|>max-length.ts<|end_file_name|><|fim▁begin|>import { Validation, ValidationType } from '.'; export class MaxLength extends Validation { <|fim▁hole|> super(ValidationType.MAX_LENGTH, message); } public get value(): number { return this._value; } }<|fim▁end|>
public constructor( message: string, private _value: number ) {
<|file_name|>RegAllocFast.cpp<|end_file_name|><|fim▁begin|>//===- RegAllocFast.cpp - A fast register allocator for debug code --------===// // // The LLVM Compiler Infrastructure // // This file is distributed under the University of Illinois Open Source // License. See LICENSE.TXT for details. // //===----------------------------------------------------------------------===// // /// \file This register allocator allocates registers to a basic block at a /// time, attempting to keep values in registers and reusing registers as /// appropriate. // //===----------------------------------------------------------------------===// #include "llvm/ADT/ArrayRef.h" #include "llvm/ADT/DenseMap.h" #include "llvm/ADT/IndexedMap.h" #include "llvm/ADT/SmallSet.h" #include "llvm/ADT/SmallVector.h" #include "llvm/ADT/SparseSet.h" #include "llvm/ADT/Statistic.h" #include "llvm/CodeGen/MachineBasicBlock.h" #include "llvm/CodeGen/MachineFrameInfo.h" #include "llvm/CodeGen/MachineFunction.h" #include "llvm/CodeGen/MachineFunctionPass.h" #include "llvm/CodeGen/MachineInstr.h" #include "llvm/CodeGen/MachineInstrBuilder.h" #include "llvm/CodeGen/MachineOperand.h" #include "llvm/CodeGen/MachineRegisterInfo.h" #include "llvm/CodeGen/RegAllocRegistry.h" #include "llvm/CodeGen/RegisterClassInfo.h" #include "llvm/CodeGen/TargetInstrInfo.h" #include "llvm/CodeGen/TargetOpcodes.h" #include "llvm/CodeGen/TargetRegisterInfo.h" #include "llvm/CodeGen/TargetSubtargetInfo.h" #include "llvm/IR/DebugLoc.h" #include "llvm/IR/Metadata.h" #include "llvm/MC/MCInstrDesc.h" #include "llvm/MC/MCRegisterInfo.h" #include "llvm/Pass.h" #include "llvm/Support/Casting.h" #include "llvm/Support/Compiler.h" #include "llvm/Support/Debug.h" #include "llvm/Support/ErrorHandling.h" #include "llvm/Support/raw_ostream.h" #include <cassert> #include <tuple> #include <vector> using namespace llvm; #define DEBUG_TYPE "regalloc" STATISTIC(NumStores, "Number of stores added"); STATISTIC(NumLoads , "Number of loads added"); 
STATISTIC(NumCopies, "Number of copies coalesced"); static RegisterRegAlloc fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator); namespace { class RegAllocFast : public MachineFunctionPass { public: static char ID; RegAllocFast() : MachineFunctionPass(ID), StackSlotForVirtReg(-1) {} private: MachineFrameInfo *MFI; MachineRegisterInfo *MRI; const TargetRegisterInfo *TRI; const TargetInstrInfo *TII; RegisterClassInfo RegClassInfo; /// Basic block currently being allocated. MachineBasicBlock *MBB; /// Maps virtual regs to the frame index where these values are spilled. IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg; /// Everything we know about a live virtual register. struct LiveReg { MachineInstr *LastUse = nullptr; ///< Last instr to use reg. unsigned VirtReg; ///< Virtual register number. MCPhysReg PhysReg = 0; ///< Currently held here. unsigned short LastOpNum = 0; ///< OpNum on LastUse. bool Dirty = false; ///< Register needs spill. explicit LiveReg(unsigned v) : VirtReg(v) {} unsigned getSparseSetIndex() const { return TargetRegisterInfo::virtReg2Index(VirtReg); } }; using LiveRegMap = SparseSet<LiveReg>; /// This map contains entries for each virtual register that is currently /// available in a physical register. LiveRegMap LiveVirtRegs; DenseMap<unsigned, SmallVector<MachineInstr *, 4>> LiveDbgValueMap; /// Track the state of a physical register. enum RegState { /// A disabled register is not available for allocation, but an alias may /// be in use. A register can only be moved out of the disabled state if /// all aliases are disabled. regDisabled, /// A free register is not currently in use and can be allocated /// immediately without checking aliases. regFree, /// A reserved register has been assigned explicitly (e.g., setting up a /// call parameter), and it remains reserved until it is used. 
regReserved /// A register state may also be a virtual register number, indication /// that the physical register is currently allocated to a virtual /// register. In that case, LiveVirtRegs contains the inverse mapping. }; /// One of the RegState enums, or a virtreg. std::vector<unsigned> PhysRegState; SmallVector<unsigned, 16> VirtDead; SmallVector<MachineInstr *, 32> Coalesced; /// Set of register units. using UsedInInstrSet = SparseSet<unsigned>; /// Set of register units that are used in the current instruction, and so /// cannot be allocated. UsedInInstrSet UsedInInstr; /// Mark a physreg as used in this instruction. void markRegUsedInInstr(MCPhysReg PhysReg) { for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units) UsedInInstr.insert(*Units); } /// Check if a physreg or any of its aliases are used in this instruction. bool isRegUsedInInstr(MCPhysReg PhysReg) const { for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units) if (UsedInInstr.count(*Units)) return true; return false; } /// This flag is set when LiveRegMap will be cleared completely after /// spilling all live registers. LiveRegMap entries should not be erased. 
bool isBulkSpilling = false; enum : unsigned { spillClean = 1, spillDirty = 100, spillImpossible = ~0u }; public: StringRef getPassName() const override { return "Fast Register Allocator"; } void getAnalysisUsage(AnalysisUsage &AU) const override { AU.setPreservesCFG(); MachineFunctionPass::getAnalysisUsage(AU); } MachineFunctionProperties getRequiredProperties() const override { return MachineFunctionProperties().set( MachineFunctionProperties::Property::NoPHIs); } MachineFunctionProperties getSetProperties() const override { return MachineFunctionProperties().set( MachineFunctionProperties::Property::NoVRegs); } private:<|fim▁hole|> bool runOnMachineFunction(MachineFunction &MF) override; void allocateBasicBlock(MachineBasicBlock &MBB); void handleThroughOperands(MachineInstr &MI, SmallVectorImpl<unsigned> &VirtDead); int getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass &RC); bool isLastUseOfLocalReg(const MachineOperand &MO) const; void addKillFlag(const LiveReg &LRI); void killVirtReg(LiveRegMap::iterator LRI); void killVirtReg(unsigned VirtReg); void spillVirtReg(MachineBasicBlock::iterator MI, LiveRegMap::iterator); void spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg); void usePhysReg(MachineOperand &MO); void definePhysReg(MachineBasicBlock::iterator MI, MCPhysReg PhysReg, RegState NewState); unsigned calcSpillCost(MCPhysReg PhysReg) const; void assignVirtToPhysReg(LiveReg &, MCPhysReg PhysReg); LiveRegMap::iterator findLiveVirtReg(unsigned VirtReg) { return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg)); } LiveRegMap::const_iterator findLiveVirtReg(unsigned VirtReg) const { return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg)); } LiveRegMap::iterator assignVirtToPhysReg(unsigned VirtReg, MCPhysReg PhysReg); LiveRegMap::iterator allocVirtReg(MachineInstr &MI, LiveRegMap::iterator, unsigned Hint); LiveRegMap::iterator defineVirtReg(MachineInstr &MI, unsigned OpNum, unsigned VirtReg, unsigned Hint); 
LiveRegMap::iterator reloadVirtReg(MachineInstr &MI, unsigned OpNum, unsigned VirtReg, unsigned Hint); void spillAll(MachineBasicBlock::iterator MI); bool setPhysReg(MachineInstr &MI, unsigned OpNum, MCPhysReg PhysReg); void dumpState(); }; } // end anonymous namespace char RegAllocFast::ID = 0; INITIALIZE_PASS(RegAllocFast, "regallocfast", "Fast Register Allocator", false, false) /// This allocates space for the specified virtual register to be held on the /// stack. int RegAllocFast::getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass &RC) { // Find the location Reg would belong... int SS = StackSlotForVirtReg[VirtReg]; // Already has space allocated? if (SS != -1) return SS; // Allocate a new stack object for this spill location... unsigned Size = TRI->getSpillSize(RC); unsigned Align = TRI->getSpillAlignment(RC); int FrameIdx = MFI->CreateSpillStackObject(Size, Align); // Assign the slot. StackSlotForVirtReg[VirtReg] = FrameIdx; return FrameIdx; } /// Return true if MO is the only remaining reference to its virtual register, /// and it is guaranteed to be a block-local register. bool RegAllocFast::isLastUseOfLocalReg(const MachineOperand &MO) const { // If the register has ever been spilled or reloaded, we conservatively assume // it is a global register used in multiple blocks. if (StackSlotForVirtReg[MO.getReg()] != -1) return false; // Check that the use/def chain has exactly one operand - MO. MachineRegisterInfo::reg_nodbg_iterator I = MRI->reg_nodbg_begin(MO.getReg()); if (&*I != &MO) return false; return ++I == MRI->reg_nodbg_end(); } /// Set kill flags on last use of a virtual register. 
void RegAllocFast::addKillFlag(const LiveReg &LR) { if (!LR.LastUse) return; MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum); if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) { if (MO.getReg() == LR.PhysReg) MO.setIsKill(); // else, don't do anything we are problably redefining a // subreg of this register and given we don't track which // lanes are actually dead, we cannot insert a kill flag here. // Otherwise we may end up in a situation like this: // ... = (MO) physreg:sub1, implicit killed physreg // ... <== Here we would allow later pass to reuse physreg:sub1 // which is potentially wrong. // LR:sub0 = ... // ... = LR.sub1 <== This is going to use physreg:sub1 } } /// Mark virtreg as no longer available. void RegAllocFast::killVirtReg(LiveRegMap::iterator LRI) { addKillFlag(*LRI); assert(PhysRegState[LRI->PhysReg] == LRI->VirtReg && "Broken RegState mapping"); PhysRegState[LRI->PhysReg] = regFree; // Erase from LiveVirtRegs unless we're spilling in bulk. if (!isBulkSpilling) LiveVirtRegs.erase(LRI); } /// Mark virtreg as no longer available. void RegAllocFast::killVirtReg(unsigned VirtReg) { assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && "killVirtReg needs a virtual register"); LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg); if (LRI != LiveVirtRegs.end()) killVirtReg(LRI); } /// This method spills the value specified by VirtReg into the corresponding /// stack slot if needed. void RegAllocFast::spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg) { assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && "Spilling a physical register is illegal!"); LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg); assert(LRI != LiveVirtRegs.end() && "Spilling unmapped virtual register"); spillVirtReg(MI, LRI); } /// Do the actual work of spilling. 
void RegAllocFast::spillVirtReg(MachineBasicBlock::iterator MI, LiveRegMap::iterator LRI) { LiveReg &LR = *LRI; assert(PhysRegState[LR.PhysReg] == LRI->VirtReg && "Broken RegState mapping"); if (LR.Dirty) { // If this physreg is used by the instruction, we want to kill it on the // instruction, not on the spill. bool SpillKill = MachineBasicBlock::iterator(LR.LastUse) != MI; LR.Dirty = false; LLVM_DEBUG(dbgs() << "Spilling " << printReg(LRI->VirtReg, TRI) << " in " << printReg(LR.PhysReg, TRI)); const TargetRegisterClass &RC = *MRI->getRegClass(LRI->VirtReg); int FI = getStackSpaceFor(LRI->VirtReg, RC); LLVM_DEBUG(dbgs() << " to stack slot #" << FI << "\n"); TII->storeRegToStackSlot(*MBB, MI, LR.PhysReg, SpillKill, FI, &RC, TRI); ++NumStores; // Update statistics // If this register is used by DBG_VALUE then insert new DBG_VALUE to // identify spilled location as the place to find corresponding variable's // value. SmallVectorImpl<MachineInstr *> &LRIDbgValues = LiveDbgValueMap[LRI->VirtReg]; for (MachineInstr *DBG : LRIDbgValues) { MachineInstr *NewDV = buildDbgValueForSpill(*MBB, MI, *DBG, FI); assert(NewDV->getParent() == MBB && "dangling parent pointer"); (void)NewDV; LLVM_DEBUG(dbgs() << "Inserting debug info due to spill:" << "\n" << *NewDV); } // Now this register is spilled there is should not be any DBG_VALUE // pointing to this register because they are all pointing to spilled value // now. LRIDbgValues.clear(); if (SpillKill) LR.LastUse = nullptr; // Don't kill register again } killVirtReg(LRI); } /// Spill all dirty virtregs without killing them. void RegAllocFast::spillAll(MachineBasicBlock::iterator MI) { if (LiveVirtRegs.empty()) return; isBulkSpilling = true; // The LiveRegMap is keyed by an unsigned (the virtreg number), so the order // of spilling here is deterministic, if arbitrary. 
for (LiveRegMap::iterator I = LiveVirtRegs.begin(), E = LiveVirtRegs.end(); I != E; ++I) spillVirtReg(MI, I); LiveVirtRegs.clear(); isBulkSpilling = false; } /// Handle the direct use of a physical register. Check that the register is /// not used by a virtreg. Kill the physreg, marking it free. This may add /// implicit kills to MO->getParent() and invalidate MO. void RegAllocFast::usePhysReg(MachineOperand &MO) { // Ignore undef uses. if (MO.isUndef()) return; unsigned PhysReg = MO.getReg(); assert(TargetRegisterInfo::isPhysicalRegister(PhysReg) && "Bad usePhysReg operand"); markRegUsedInInstr(PhysReg); switch (PhysRegState[PhysReg]) { case regDisabled: break; case regReserved: PhysRegState[PhysReg] = regFree; LLVM_FALLTHROUGH; case regFree: MO.setIsKill(); return; default: // The physreg was allocated to a virtual register. That means the value we // wanted has been clobbered. llvm_unreachable("Instruction uses an allocated register"); } // Maybe a superregister is reserved? for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) { MCPhysReg Alias = *AI; switch (PhysRegState[Alias]) { case regDisabled: break; case regReserved: // Either PhysReg is a subregister of Alias and we mark the // whole register as free, or PhysReg is the superregister of // Alias and we mark all the aliases as disabled before freeing // PhysReg. // In the latter case, since PhysReg was disabled, this means that // its value is defined only by physical sub-registers. This check // is performed by the assert of the default case in this loop. // Note: The value of the superregister may only be partial // defined, that is why regDisabled is a valid state for aliases. assert((TRI->isSuperRegister(PhysReg, Alias) || TRI->isSuperRegister(Alias, PhysReg)) && "Instruction is not using a subregister of a reserved register"); LLVM_FALLTHROUGH; case regFree: if (TRI->isSuperRegister(PhysReg, Alias)) { // Leave the superregister in the working set. 
PhysRegState[Alias] = regFree; MO.getParent()->addRegisterKilled(Alias, TRI, true); return; } // Some other alias was in the working set - clear it. PhysRegState[Alias] = regDisabled; break; default: llvm_unreachable("Instruction uses an alias of an allocated register"); } } // All aliases are disabled, bring register into working set. PhysRegState[PhysReg] = regFree; MO.setIsKill(); } /// Mark PhysReg as reserved or free after spilling any virtregs. This is very /// similar to defineVirtReg except the physreg is reserved instead of /// allocated. void RegAllocFast::definePhysReg(MachineBasicBlock::iterator MI, MCPhysReg PhysReg, RegState NewState) { markRegUsedInInstr(PhysReg); switch (unsigned VirtReg = PhysRegState[PhysReg]) { case regDisabled: break; default: spillVirtReg(MI, VirtReg); LLVM_FALLTHROUGH; case regFree: case regReserved: PhysRegState[PhysReg] = NewState; return; } // This is a disabled register, disable all aliases. PhysRegState[PhysReg] = NewState; for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) { MCPhysReg Alias = *AI; switch (unsigned VirtReg = PhysRegState[Alias]) { case regDisabled: break; default: spillVirtReg(MI, VirtReg); LLVM_FALLTHROUGH; case regFree: case regReserved: PhysRegState[Alias] = regDisabled; if (TRI->isSuperRegister(PhysReg, Alias)) return; break; } } } /// Return the cost of spilling clearing out PhysReg and aliases so it is /// free for allocation. Returns 0 when PhysReg is free or disabled with all /// aliases disabled - it can be allocated directly. /// \returns spillImpossible when PhysReg or an alias can't be spilled. 
unsigned RegAllocFast::calcSpillCost(MCPhysReg PhysReg) const {
  // A register already allocated/used by the current instruction cannot also
  // be spilled for it.
  if (isRegUsedInInstr(PhysReg)) {
    LLVM_DEBUG(dbgs() << printReg(PhysReg, TRI)
                      << " is already used in instr.\n");
    return spillImpossible;
  }
  // Cost depends on the allocation state of PhysReg itself.
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    // Value lives in sub/super-registers; fall through to the alias scan below.
    break;
  case regFree:
    // Nothing to spill.
    return 0;
  case regReserved:
    // Reserved registers may never be evicted.
    LLVM_DEBUG(dbgs() << printReg(VirtReg, TRI) << " corresponding "
                      << printReg(PhysReg, TRI) << " is reserved already.\n");
    return spillImpossible;
  default: {
    // Occupied by a virtual register: dirty values must be written back to
    // the stack slot, clean ones can simply be dropped.
    LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
    assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
    return I->Dirty ? spillDirty : spillClean;
  }
  }

  // This is a disabled register, add up cost of aliases.
  LLVM_DEBUG(dbgs() << printReg(PhysReg, TRI) << " is disabled.\n");
  unsigned Cost = 0;
  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
    MCPhysReg Alias = *AI;
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regFree:
      // A free alias still adds a nominal unit cost, so fully free registers
      // are preferred over disabled ones with free aliases.
      ++Cost;
      break;
    case regReserved:
      // Any reserved alias makes the whole register unusable.
      return spillImpossible;
    default: {
      // Alias holds a virtual register; charge its spill cost as above.
      LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
      assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
      Cost += I->Dirty ? spillDirty : spillClean;
      break;
    }
    }
  }
  return Cost;
}

/// This method updates local state so that we know that PhysReg is the
/// proper container for VirtReg now. The physical register must not be used
/// for anything else when this is called.
void RegAllocFast::assignVirtToPhysReg(LiveReg &LR, MCPhysReg PhysReg) { LLVM_DEBUG(dbgs() << "Assigning " << printReg(LR.VirtReg, TRI) << " to " << printReg(PhysReg, TRI) << "\n"); PhysRegState[PhysReg] = LR.VirtReg; assert(!LR.PhysReg && "Already assigned a physreg"); LR.PhysReg = PhysReg; } RegAllocFast::LiveRegMap::iterator RegAllocFast::assignVirtToPhysReg(unsigned VirtReg, MCPhysReg PhysReg) { LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg); assert(LRI != LiveVirtRegs.end() && "VirtReg disappeared"); assignVirtToPhysReg(*LRI, PhysReg); return LRI; } /// Allocates a physical register for VirtReg. RegAllocFast::LiveRegMap::iterator RegAllocFast::allocVirtReg(MachineInstr &MI, LiveRegMap::iterator LRI, unsigned Hint) { const unsigned VirtReg = LRI->VirtReg; assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && "Can only allocate virtual registers"); // Take hint when possible. const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg); if (TargetRegisterInfo::isPhysicalRegister(Hint) && MRI->isAllocatable(Hint) && RC.contains(Hint)) { // Ignore the hint if we would have to spill a dirty register. unsigned Cost = calcSpillCost(Hint); if (Cost < spillDirty) { if (Cost) definePhysReg(MI, Hint, regFree); // definePhysReg may kill virtual registers and modify LiveVirtRegs. // That invalidates LRI, so run a new lookup for VirtReg. return assignVirtToPhysReg(VirtReg, Hint); } } // First try to find a completely free register. 
ArrayRef<MCPhysReg> AO = RegClassInfo.getOrder(&RC); for (MCPhysReg PhysReg : AO) { if (PhysRegState[PhysReg] == regFree && !isRegUsedInInstr(PhysReg)) { assignVirtToPhysReg(*LRI, PhysReg); return LRI; } } LLVM_DEBUG(dbgs() << "Allocating " << printReg(VirtReg) << " from " << TRI->getRegClassName(&RC) << "\n"); unsigned BestReg = 0; unsigned BestCost = spillImpossible; for (MCPhysReg PhysReg : AO) { unsigned Cost = calcSpillCost(PhysReg); LLVM_DEBUG(dbgs() << "\tRegister: " << printReg(PhysReg, TRI) << "\n"); LLVM_DEBUG(dbgs() << "\tCost: " << Cost << "\n"); LLVM_DEBUG(dbgs() << "\tBestCost: " << BestCost << "\n"); // Cost is 0 when all aliases are already disabled. if (Cost == 0) { assignVirtToPhysReg(*LRI, PhysReg); return LRI; } if (Cost < BestCost) BestReg = PhysReg, BestCost = Cost; } if (BestReg) { definePhysReg(MI, BestReg, regFree); // definePhysReg may kill virtual registers and modify LiveVirtRegs. // That invalidates LRI, so run a new lookup for VirtReg. return assignVirtToPhysReg(VirtReg, BestReg); } // Nothing we can do. Report an error and keep going with a bad allocation. if (MI.isInlineAsm()) MI.emitError("inline assembly requires more registers than available"); else MI.emitError("ran out of registers during register allocation"); definePhysReg(MI, *AO.begin(), regFree); return assignVirtToPhysReg(VirtReg, *AO.begin()); } /// Allocates a register for VirtReg and mark it as dirty. RegAllocFast::LiveRegMap::iterator RegAllocFast::defineVirtReg(MachineInstr &MI, unsigned OpNum, unsigned VirtReg, unsigned Hint) { assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && "Not a virtual register"); LiveRegMap::iterator LRI; bool New; std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg)); if (New) { // If there is no hint, peek at the only use of this register. 
if ((!Hint || !TargetRegisterInfo::isPhysicalRegister(Hint)) && MRI->hasOneNonDBGUse(VirtReg)) { const MachineInstr &UseMI = *MRI->use_instr_nodbg_begin(VirtReg); // It's a copy, use the destination register as a hint. if (UseMI.isCopyLike()) Hint = UseMI.getOperand(0).getReg(); } LRI = allocVirtReg(MI, LRI, Hint); } else if (LRI->LastUse) { // Redefining a live register - kill at the last use, unless it is this // instruction defining VirtReg multiple times. if (LRI->LastUse != &MI || LRI->LastUse->getOperand(LRI->LastOpNum).isUse()) addKillFlag(*LRI); } assert(LRI->PhysReg && "Register not assigned"); LRI->LastUse = &MI; LRI->LastOpNum = OpNum; LRI->Dirty = true; markRegUsedInInstr(LRI->PhysReg); return LRI; } /// Make sure VirtReg is available in a physreg and return it. RegAllocFast::LiveRegMap::iterator RegAllocFast::reloadVirtReg(MachineInstr &MI, unsigned OpNum, unsigned VirtReg, unsigned Hint) { assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && "Not a virtual register"); LiveRegMap::iterator LRI; bool New; std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg)); MachineOperand &MO = MI.getOperand(OpNum); if (New) { LRI = allocVirtReg(MI, LRI, Hint); const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg); int FrameIndex = getStackSpaceFor(VirtReg, RC); LLVM_DEBUG(dbgs() << "Reloading " << printReg(VirtReg, TRI) << " into " << printReg(LRI->PhysReg, TRI) << "\n"); TII->loadRegFromStackSlot(*MBB, MI, LRI->PhysReg, FrameIndex, &RC, TRI); ++NumLoads; } else if (LRI->Dirty) { if (isLastUseOfLocalReg(MO)) { LLVM_DEBUG(dbgs() << "Killing last use: " << MO << "\n"); if (MO.isUse()) MO.setIsKill(); else MO.setIsDead(); } else if (MO.isKill()) { LLVM_DEBUG(dbgs() << "Clearing dubious kill: " << MO << "\n"); MO.setIsKill(false); } else if (MO.isDead()) { LLVM_DEBUG(dbgs() << "Clearing dubious dead: " << MO << "\n"); MO.setIsDead(false); } } else if (MO.isKill()) { // We must remove kill flags from uses of reloaded registers because the // register would 
be killed immediately, and there might be a second use: // %foo = OR killed %x, %x // This would cause a second reload of %x into a different register. LLVM_DEBUG(dbgs() << "Clearing clean kill: " << MO << "\n"); MO.setIsKill(false); } else if (MO.isDead()) { LLVM_DEBUG(dbgs() << "Clearing clean dead: " << MO << "\n"); MO.setIsDead(false); } assert(LRI->PhysReg && "Register not assigned"); LRI->LastUse = &MI; LRI->LastOpNum = OpNum; markRegUsedInInstr(LRI->PhysReg); return LRI; } /// Changes operand OpNum in MI the refer the PhysReg, considering subregs. This /// may invalidate any operand pointers. Return true if the operand kills its /// register. bool RegAllocFast::setPhysReg(MachineInstr &MI, unsigned OpNum, MCPhysReg PhysReg) { MachineOperand &MO = MI.getOperand(OpNum); bool Dead = MO.isDead(); if (!MO.getSubReg()) { MO.setReg(PhysReg); MO.setIsRenamable(true); return MO.isKill() || Dead; } // Handle subregister index. MO.setReg(PhysReg ? TRI->getSubReg(PhysReg, MO.getSubReg()) : 0); MO.setIsRenamable(true); MO.setSubReg(0); // A kill flag implies killing the full register. Add corresponding super // register kill. if (MO.isKill()) { MI.addRegisterKilled(PhysReg, TRI, true); return true; } // A <def,read-undef> of a sub-register requires an implicit def of the full // register. if (MO.isDef() && MO.isUndef()) MI.addRegisterDefined(PhysReg, TRI); return Dead; } // Handles special instruction operand like early clobbers and tied ops when // there are additional physreg defines. 
void RegAllocFast::handleThroughOperands(MachineInstr &MI, SmallVectorImpl<unsigned> &VirtDead) { LLVM_DEBUG(dbgs() << "Scanning for through registers:"); SmallSet<unsigned, 8> ThroughRegs; for (const MachineOperand &MO : MI.operands()) { if (!MO.isReg()) continue; unsigned Reg = MO.getReg(); if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue; if (MO.isEarlyClobber() || (MO.isUse() && MO.isTied()) || (MO.getSubReg() && MI.readsVirtualRegister(Reg))) { if (ThroughRegs.insert(Reg).second) LLVM_DEBUG(dbgs() << ' ' << printReg(Reg)); } } // If any physreg defines collide with preallocated through registers, // we must spill and reallocate. LLVM_DEBUG(dbgs() << "\nChecking for physdef collisions.\n"); for (const MachineOperand &MO : MI.operands()) { if (!MO.isReg() || !MO.isDef()) continue; unsigned Reg = MO.getReg(); if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue; markRegUsedInInstr(Reg); for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) { if (ThroughRegs.count(PhysRegState[*AI])) definePhysReg(MI, *AI, regFree); } } SmallVector<unsigned, 8> PartialDefs; LLVM_DEBUG(dbgs() << "Allocating tied uses.\n"); for (unsigned I = 0, E = MI.getNumOperands(); I != E; ++I) { const MachineOperand &MO = MI.getOperand(I); if (!MO.isReg()) continue; unsigned Reg = MO.getReg(); if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue; if (MO.isUse()) { if (!MO.isTied()) continue; LLVM_DEBUG(dbgs() << "Operand " << I << "(" << MO << ") is tied to operand " << MI.findTiedOperandIdx(I) << ".\n"); LiveRegMap::iterator LRI = reloadVirtReg(MI, I, Reg, 0); MCPhysReg PhysReg = LRI->PhysReg; setPhysReg(MI, I, PhysReg); // Note: we don't update the def operand yet. That would cause the normal // def-scan to attempt spilling. } else if (MO.getSubReg() && MI.readsVirtualRegister(Reg)) { LLVM_DEBUG(dbgs() << "Partial redefine: " << MO << "\n"); // Reload the register, but don't assign to the operand just yet. 
// That would confuse the later phys-def processing pass. LiveRegMap::iterator LRI = reloadVirtReg(MI, I, Reg, 0); PartialDefs.push_back(LRI->PhysReg); } } LLVM_DEBUG(dbgs() << "Allocating early clobbers.\n"); for (unsigned I = 0, E = MI.getNumOperands(); I != E; ++I) { const MachineOperand &MO = MI.getOperand(I); if (!MO.isReg()) continue; unsigned Reg = MO.getReg(); if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue; if (!MO.isEarlyClobber()) continue; // Note: defineVirtReg may invalidate MO. LiveRegMap::iterator LRI = defineVirtReg(MI, I, Reg, 0); MCPhysReg PhysReg = LRI->PhysReg; if (setPhysReg(MI, I, PhysReg)) VirtDead.push_back(Reg); } // Restore UsedInInstr to a state usable for allocating normal virtual uses. UsedInInstr.clear(); for (const MachineOperand &MO : MI.operands()) { if (!MO.isReg() || (MO.isDef() && !MO.isEarlyClobber())) continue; unsigned Reg = MO.getReg(); if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue; LLVM_DEBUG(dbgs() << "\tSetting " << printReg(Reg, TRI) << " as used in instr\n"); markRegUsedInInstr(Reg); } // Also mark PartialDefs as used to avoid reallocation. for (unsigned PartialDef : PartialDefs) markRegUsedInInstr(PartialDef); } #ifndef NDEBUG void RegAllocFast::dumpState() { for (unsigned Reg = 1, E = TRI->getNumRegs(); Reg != E; ++Reg) { if (PhysRegState[Reg] == regDisabled) continue; dbgs() << " " << printReg(Reg, TRI); switch(PhysRegState[Reg]) { case regFree: break; case regReserved: dbgs() << "*"; break; default: { dbgs() << '=' << printReg(PhysRegState[Reg]); LiveRegMap::iterator I = findLiveVirtReg(PhysRegState[Reg]); assert(I != LiveVirtRegs.end() && "Missing VirtReg entry"); if (I->Dirty) dbgs() << "*"; assert(I->PhysReg == Reg && "Bad inverse map"); break; } } } dbgs() << '\n'; // Check that LiveVirtRegs is the inverse. 
for (LiveRegMap::iterator i = LiveVirtRegs.begin(), e = LiveVirtRegs.end(); i != e; ++i) { assert(TargetRegisterInfo::isVirtualRegister(i->VirtReg) && "Bad map key"); assert(TargetRegisterInfo::isPhysicalRegister(i->PhysReg) && "Bad map value"); assert(PhysRegState[i->PhysReg] == i->VirtReg && "Bad inverse map"); } } #endif void RegAllocFast::allocateBasicBlock(MachineBasicBlock &MBB) { this->MBB = &MBB; LLVM_DEBUG(dbgs() << "\nAllocating " << MBB); PhysRegState.assign(TRI->getNumRegs(), regDisabled); assert(LiveVirtRegs.empty() && "Mapping not cleared from last block?"); MachineBasicBlock::iterator MII = MBB.begin(); // Add live-in registers as live. for (const MachineBasicBlock::RegisterMaskPair LI : MBB.liveins()) if (MRI->isAllocatable(LI.PhysReg)) definePhysReg(MII, LI.PhysReg, regReserved); VirtDead.clear(); Coalesced.clear(); // Otherwise, sequentially allocate each instruction in the MBB. for (MachineInstr &MI : MBB) { const MCInstrDesc &MCID = MI.getDesc(); LLVM_DEBUG(dbgs() << "\n>> " << MI << "Regs:"; dumpState()); // Debug values are not allowed to change codegen in any way. if (MI.isDebugValue()) { MachineInstr *DebugMI = &MI; MachineOperand &MO = DebugMI->getOperand(0); // Ignore DBG_VALUEs that aren't based on virtual registers. These are // mostly constants and frame indices. if (!MO.isReg()) continue; unsigned Reg = MO.getReg(); if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue; // See if this virtual register has already been allocated to a physical // register or spilled to a stack slot. LiveRegMap::iterator LRI = findLiveVirtReg(Reg); if (LRI != LiveVirtRegs.end()) setPhysReg(*DebugMI, 0, LRI->PhysReg); else { int SS = StackSlotForVirtReg[Reg]; if (SS != -1) { // Modify DBG_VALUE now that the value is in a spill slot. updateDbgValueForSpill(*DebugMI, SS); LLVM_DEBUG(dbgs() << "Modifying debug info due to spill:" << "\t" << *DebugMI); continue; } // We can't allocate a physreg for a DebugValue, sorry! 
LLVM_DEBUG(dbgs() << "Unable to allocate vreg used by DBG_VALUE"); MO.setReg(0); } // If Reg hasn't been spilled, put this DBG_VALUE in LiveDbgValueMap so // that future spills of Reg will have DBG_VALUEs. LiveDbgValueMap[Reg].push_back(DebugMI); continue; } if (MI.isDebugLabel()) continue; // If this is a copy, we may be able to coalesce. unsigned CopySrcReg = 0; unsigned CopyDstReg = 0; unsigned CopySrcSub = 0; unsigned CopyDstSub = 0; if (MI.isCopy()) { CopyDstReg = MI.getOperand(0).getReg(); CopySrcReg = MI.getOperand(1).getReg(); CopyDstSub = MI.getOperand(0).getSubReg(); CopySrcSub = MI.getOperand(1).getSubReg(); } // Track registers used by instruction. UsedInInstr.clear(); // First scan. // Mark physreg uses and early clobbers as used. // Find the end of the virtreg operands unsigned VirtOpEnd = 0; bool hasTiedOps = false; bool hasEarlyClobbers = false; bool hasPartialRedefs = false; bool hasPhysDefs = false; for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) { MachineOperand &MO = MI.getOperand(i); // Make sure MRI knows about registers clobbered by regmasks. if (MO.isRegMask()) { MRI->addPhysRegsUsedFromRegMask(MO.getRegMask()); continue; } if (!MO.isReg()) continue; unsigned Reg = MO.getReg(); if (!Reg) continue; if (TargetRegisterInfo::isVirtualRegister(Reg)) { VirtOpEnd = i+1; if (MO.isUse()) { hasTiedOps = hasTiedOps || MCID.getOperandConstraint(i, MCOI::TIED_TO) != -1; } else { if (MO.isEarlyClobber()) hasEarlyClobbers = true; if (MO.getSubReg() && MI.readsVirtualRegister(Reg)) hasPartialRedefs = true; } continue; } if (!MRI->isAllocatable(Reg)) continue; if (MO.isUse()) { usePhysReg(MO); } else if (MO.isEarlyClobber()) { definePhysReg(MI, Reg, (MO.isImplicit() || MO.isDead()) ? regFree : regReserved); hasEarlyClobbers = true; } else hasPhysDefs = true; } // The instruction may have virtual register operands that must be allocated // the same register at use-time and def-time: early clobbers and tied // operands. 
If there are also physical defs, these registers must avoid // both physical defs and uses, making them more constrained than normal // operands. // Similarly, if there are multiple defs and tied operands, we must make // sure the same register is allocated to uses and defs. // We didn't detect inline asm tied operands above, so just make this extra // pass for all inline asm. if (MI.isInlineAsm() || hasEarlyClobbers || hasPartialRedefs || (hasTiedOps && (hasPhysDefs || MCID.getNumDefs() > 1))) { handleThroughOperands(MI, VirtDead); // Don't attempt coalescing when we have funny stuff going on. CopyDstReg = 0; // Pretend we have early clobbers so the use operands get marked below. // This is not necessary for the common case of a single tied use. hasEarlyClobbers = true; } // Second scan. // Allocate virtreg uses. for (unsigned I = 0; I != VirtOpEnd; ++I) { const MachineOperand &MO = MI.getOperand(I); if (!MO.isReg()) continue; unsigned Reg = MO.getReg(); if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue; if (MO.isUse()) { LiveRegMap::iterator LRI = reloadVirtReg(MI, I, Reg, CopyDstReg); MCPhysReg PhysReg = LRI->PhysReg; CopySrcReg = (CopySrcReg == Reg || CopySrcReg == PhysReg) ? PhysReg : 0; if (setPhysReg(MI, I, PhysReg)) killVirtReg(LRI); } } // Track registers defined by instruction - early clobbers and tied uses at // this point. UsedInInstr.clear(); if (hasEarlyClobbers) { for (const MachineOperand &MO : MI.operands()) { if (!MO.isReg()) continue; unsigned Reg = MO.getReg(); if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue; // Look for physreg defs and tied uses. if (!MO.isDef() && !MO.isTied()) continue; markRegUsedInInstr(Reg); } } unsigned DefOpEnd = MI.getNumOperands(); if (MI.isCall()) { // Spill all virtregs before a call. This serves one purpose: If an // exception is thrown, the landing pad is going to expect to find // registers in their spill slots. 
// Note: although this is appealing to just consider all definitions // as call-clobbered, this is not correct because some of those // definitions may be used later on and we do not want to reuse // those for virtual registers in between. LLVM_DEBUG(dbgs() << " Spilling remaining registers before call.\n"); spillAll(MI); } // Third scan. // Allocate defs and collect dead defs. for (unsigned I = 0; I != DefOpEnd; ++I) { const MachineOperand &MO = MI.getOperand(I); if (!MO.isReg() || !MO.isDef() || !MO.getReg() || MO.isEarlyClobber()) continue; unsigned Reg = MO.getReg(); if (TargetRegisterInfo::isPhysicalRegister(Reg)) { if (!MRI->isAllocatable(Reg)) continue; definePhysReg(MI, Reg, MO.isDead() ? regFree : regReserved); continue; } LiveRegMap::iterator LRI = defineVirtReg(MI, I, Reg, CopySrcReg); MCPhysReg PhysReg = LRI->PhysReg; if (setPhysReg(MI, I, PhysReg)) { VirtDead.push_back(Reg); CopyDstReg = 0; // cancel coalescing; } else CopyDstReg = (CopyDstReg == Reg || CopyDstReg == PhysReg) ? PhysReg : 0; } // Kill dead defs after the scan to ensure that multiple defs of the same // register are allocated identically. We didn't need to do this for uses // because we are crerating our own kill flags, and they are always at the // last use. for (unsigned VirtReg : VirtDead) killVirtReg(VirtReg); VirtDead.clear(); if (CopyDstReg && CopyDstReg == CopySrcReg && CopyDstSub == CopySrcSub) { LLVM_DEBUG(dbgs() << "-- coalescing: " << MI); Coalesced.push_back(&MI); } else { LLVM_DEBUG(dbgs() << "<< " << MI); } } // Spill all physical registers holding virtual registers now. LLVM_DEBUG(dbgs() << "Spilling live registers at end of block.\n"); spillAll(MBB.getFirstTerminator()); // Erase all the coalesced copies. We are delaying it until now because // LiveVirtRegs might refer to the instrs. for (MachineInstr *MI : Coalesced) MBB.erase(MI); NumCopies += Coalesced.size(); LLVM_DEBUG(MBB.dump()); } /// Allocates registers for a function. 
bool RegAllocFast::runOnMachineFunction(MachineFunction &MF) {
  LLVM_DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
                    << "********** Function: " << MF.getName() << '\n');
  // Cache per-function analyses and target hooks used throughout allocation.
  MRI = &MF.getRegInfo();
  const TargetSubtargetInfo &STI = MF.getSubtarget();
  TRI = STI.getRegisterInfo();
  TII = STI.getInstrInfo();
  MFI = &MF.getFrameInfo();
  // Freeze the reserved-register set so isAllocatable() queries are stable.
  MRI->freezeReservedRegs(MF);
  RegClassInfo.runOnMachineFunction(MF);
  // UsedInInstr tracks register units, not whole registers.
  UsedInInstr.clear();
  UsedInInstr.setUniverse(TRI->getNumRegUnits());

  // initialize the virtual->physical register map to have a 'null'
  // mapping for all virtual registers
  unsigned NumVirtRegs = MRI->getNumVirtRegs();
  StackSlotForVirtReg.resize(NumVirtRegs);
  LiveVirtRegs.setUniverse(NumVirtRegs);

  // Loop over all of the basic blocks, eliminating virtual register references
  for (MachineBasicBlock &MBB : MF)
    allocateBasicBlock(MBB);

  // All machine operands and other references to virtual registers have been
  // replaced. Remove the virtual registers.
  MRI->clearVirtRegs();

  // Drop per-function bookkeeping before the next function is processed.
  StackSlotForVirtReg.clear();
  LiveDbgValueMap.clear();
  return true;
}

/// Factory for the fast register allocator pass.
FunctionPass *llvm::createFastRegisterAllocator() {
  return new RegAllocFast();
}
<|file_name|>hex.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Hex binary-to-text encoding pub use self::FromHexError::*; use std::fmt; use std::error; /// A trait for converting a value to hexadecimal encoding pub trait ToHex { /// Converts the value of `self` to a hex value, returning the owned /// string. fn to_hex(&self) -> String; } const CHARS: &[u8] = b"0123456789abcdef"; impl ToHex for [u8] { /// Turn a vector of `u8` bytes into a hexadecimal string. /// /// # Examples /// /// ``` /// #![feature(rustc_private)] /// /// extern crate serialize; /// use serialize::hex::ToHex; /// /// fn main () { /// let str = [52,32].to_hex(); /// println!("{}", str); /// } /// ``` fn to_hex(&self) -> String { let mut v = Vec::with_capacity(self.len() * 2); for &byte in self { v.push(CHARS[(byte >> 4) as usize]); v.push(CHARS[(byte & 0xf) as usize]); } unsafe { String::from_utf8_unchecked(v) } } } /// A trait for converting hexadecimal encoded values pub trait FromHex { /// Converts the value of `self`, interpreted as hexadecimal encoded data, /// into an owned vector of bytes, returning the vector. 
fn from_hex(&self) -> Result<Vec<u8>, FromHexError>; } /// Errors that can occur when decoding a hex encoded string #[derive(Copy, Clone, Debug)] pub enum FromHexError { /// The input contained a character not part of the hex format<|fim▁hole|> /// The input had an invalid length InvalidHexLength, } impl fmt::Display for FromHexError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { InvalidHexCharacter(ch, idx) => write!(f, "Invalid character '{}' at position {}", ch, idx), InvalidHexLength => write!(f, "Invalid input length"), } } } impl error::Error for FromHexError { fn description(&self) -> &str { match *self { InvalidHexCharacter(..) => "invalid character", InvalidHexLength => "invalid length", } } } impl FromHex for str { /// Convert any hexadecimal encoded string (literal, `@`, `&`, or `~`) /// to the byte values it encodes. /// /// You can use the `String::from_utf8` function to turn a /// `Vec<u8>` into a string with characters corresponding to those values. /// /// # Examples /// /// This converts a string literal to hexadecimal and back. 
/// /// ``` /// #![feature(rustc_private)] /// /// extern crate serialize; /// use serialize::hex::{FromHex, ToHex}; /// /// fn main () { /// let hello_str = "Hello, World".as_bytes().to_hex(); /// println!("{}", hello_str); /// let bytes = hello_str.from_hex().unwrap(); /// println!("{:?}", bytes); /// let result_str = String::from_utf8(bytes).unwrap(); /// println!("{}", result_str); /// } /// ``` fn from_hex(&self) -> Result<Vec<u8>, FromHexError> { // This may be an overestimate if there is any whitespace let mut b = Vec::with_capacity(self.len() / 2); let mut modulus = 0; let mut buf = 0; for (idx, byte) in self.bytes().enumerate() { buf <<= 4; match byte { b'A'..=b'F' => buf |= byte - b'A' + 10, b'a'..=b'f' => buf |= byte - b'a' + 10, b'0'..=b'9' => buf |= byte - b'0', b' '|b'\r'|b'\n'|b'\t' => { buf >>= 4; continue } _ => { let ch = self[idx..].chars().next().unwrap(); return Err(InvalidHexCharacter(ch, idx)) } } modulus += 1; if modulus == 2 { modulus = 0; b.push(buf); } } match modulus { 0 => Ok(b), _ => Err(InvalidHexLength), } } } #[cfg(test)] mod tests { extern crate test; use self::test::Bencher; use hex::{FromHex, ToHex}; #[test] pub fn test_to_hex() { assert_eq!("foobar".as_bytes().to_hex(), "666f6f626172"); } #[test] pub fn test_from_hex_okay() { assert_eq!("666f6f626172".from_hex().unwrap(), b"foobar"); assert_eq!("666F6F626172".from_hex().unwrap(), b"foobar"); } #[test] pub fn test_from_hex_odd_len() { assert!("666".from_hex().is_err()); assert!("66 6".from_hex().is_err()); } #[test] pub fn test_from_hex_invalid_char() { assert!("66y6".from_hex().is_err()); } #[test] pub fn test_from_hex_ignores_whitespace() { assert_eq!("666f 6f6\r\n26172 ".from_hex().unwrap(), b"foobar"); } #[test] pub fn test_to_hex_all_bytes() { for i in 0..256 { assert_eq!([i as u8].to_hex(), format!("{:02x}", i as usize)); } } #[test] pub fn test_from_hex_all_bytes() { for i in 0..256 { let ii: &[u8] = &[i as u8]; assert_eq!(format!("{:02x}", i as usize).from_hex() 
.unwrap(), ii); assert_eq!(format!("{:02X}", i as usize).from_hex() .unwrap(), ii); } } #[bench] pub fn bench_to_hex(b: &mut Bencher) { let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \ ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン"; b.iter(|| { s.as_bytes().to_hex(); }); b.bytes = s.len() as u64; } #[bench] pub fn bench_from_hex(b: &mut Bencher) { let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \ ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン"; let sb = s.as_bytes().to_hex(); b.iter(|| { sb.from_hex().unwrap(); }); b.bytes = sb.len() as u64; } }<|fim▁end|>
InvalidHexCharacter(char, usize),
<|file_name|>PageLoader.java<|end_file_name|><|fim▁begin|>/* * SPDX-License-Identifier: GPL-3.0 * * * (J)ava (M)iscellaneous (U)tilities (L)ibrary * * JMUL is a central repository for utilities which are used in my * other public and private repositories. *<|fim▁hole|> * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * * e-mail: [email protected] */ /* * This section contains meta informations. * * $Id$ */ package jmul.web.page; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.List; import jmul.io.NestedStreams; import jmul.io.NestedStreamsImpl; import jmul.misc.exceptions.MultipleCausesException; import static jmul.string.Constants.FILE_SEPARATOR; import static jmul.string.Constants.SLASH; /** * This class represents an entity which loads web content from the file * system. * * @author Kristian Kutin */ public class PageLoader { /** * The base directory of the web content. */ private final File baseDirectory; /** * The file with the page content. */ private final File file; /** * Creates a new instance of a content loader. * * @param aBaseDirectory * a base directory * @param aFile * a file (i.e. file path) */ public PageLoader(File aBaseDirectory, File aFile) { baseDirectory = aBaseDirectory; file = aFile; } /** * Loads the web content. 
* * @return web content */ public PublishedPage loadContent() { String path = getPath(); NestedStreams nestedStreams = null; try { nestedStreams = openStreams(file); } catch (FileNotFoundException e) { String message = "Unable to load the web content (\"" + file + "\")!"; throw new PageLoaderException(message, e); } byte[] content = null; try { content = loadContent(nestedStreams); } catch (IOException e) { Throwable followupError = null; try { nestedStreams.close(); } catch (IOException f) { followupError = f; } String message = "Error while reading from file (\"" + file + "\")!"; if (followupError != null) { throw new PageLoaderException(message, new MultipleCausesException(e, followupError)); } else { throw new PageLoaderException(message, followupError); } } return new PublishedPage(path, content); } /** * Determines the web path for this file relative to the base directory. * * @param aBaseDirectory * @param aFile * * @return a path * * @throws IOException * is thrown if the specified directory or file cannot be resolved to * absolute paths */ private static String determinePath(File aBaseDirectory, File aFile) throws IOException { String directory = aBaseDirectory.getCanonicalPath(); String fileName = aFile.getCanonicalPath(); String path = fileName.replace(directory, ""); path = path.replace(FILE_SEPARATOR, SLASH); return path; } /** * Opens a stream to read from the specified file. * * @param aFile * * @return an input stream * * @throws FileNotFoundException * is thrown if the specified file doesn't exist */ private static NestedStreams openStreams(File aFile) throws FileNotFoundException { InputStream reader = new FileInputStream(aFile); return new NestedStreamsImpl(reader); } /** * Tries to load the web content from the specified file. 
* * @param someNestedStreams * * @return some web content * * @throws IOException * is thrown if an error occurred while reading from the file */ private static byte[] loadContent(NestedStreams someNestedStreams) throws IOException { InputStream reader = (InputStream) someNestedStreams.getOuterStream(); List<Byte> buffer = new ArrayList<>(); while (true) { int next = reader.read(); if (next == -1) { break; } buffer.add((byte) next); } int size = buffer.size(); byte[] bytes = new byte[size]; for (int a = 0; a < size; a++) { Byte b = buffer.get(a); bytes[a] = b; } return bytes; } /** * Returns the path of the web page. * * @return a path */ public String getPath() { String path = null; try { path = determinePath(baseDirectory, file); } catch (IOException e) { String message = "Unable to resolve paths (\"" + baseDirectory + "\" & \"" + file + "\")!"; throw new PageLoaderException(message, e); } return path; } }<|fim▁end|>
* Copyright (C) 2016 Kristian Kutin * * This program is free software: you can redistribute it and/or modify
<|file_name|>cms_app.py<|end_file_name|><|fim▁begin|>from cms.app_base import CMSApp
from cms.apphook_pool import apphook_pool
from django.utils.translation import ugettext_lazy as _


class ProductApphook(CMSApp):
    """django CMS apphook exposing the ``wlansi_store`` URLs on a CMS page."""

    # Human-readable name shown in the CMS admin; lazily translated.
    name = _("Product Apphook")
    # URLconf module(s) mounted under the page this apphook is attached to.
    urls = ["wlansi_store.urls"]

# Register the apphook so it becomes selectable in the CMS page settings.
<|fim▁hole|>apphook_pool.register(ProductApphook)<|fim▁end|>
<|file_name|>pull_request.go<|end_file_name|><|fim▁begin|>package gitbot import "fmt" // PullRequest object from github API type PullRequest struct { Head *Head `json:"head"` Base *Head `json:"base"` Links *Links `json:"_links"` User *User `json:"user"` MergedBy *User `json:"merged_by"` URL string `json:"url"` HTMLURL string `json:"html_url"` DiffURL string `json:"diff_url"` PatchURL string `json:"patch_url"` IssueURL string `json:"issue_url"` CommitsURL string `json:"commits_url"` ReviewCommentsURL string `json:"review_comments_url"` ReviewCommentURL string `json:"review_comment_url"` CommentsURL string `json:"comments_url"` StatusesURL string `json:"statuses_url"` Number int `json:"number"` State string `json:"state"` Title string `json:"title"` Body string `json:"body"` //CreatedAt NullTime `json:"created_at"` //UpdatedAt NullTime `json:"updated_at"` //ClosedAt NullTime `json:"closed_at"` //MergedAt NullTime `json:"merged_at"` MergeCommitSha string `json:"merge_commit_sha"` Merged bool `json:"merged"` Mergeable bool `json:"mergeable"`<|fim▁hole|> ChangedFiles int `json:"changed_files"` } func (s PullRequest) String() string { return fmt.Sprintf("#%v %s (%s)", s.Number, s.Title, s.HTMLURL) }<|fim▁end|>
Comments int `json:"comments"` Commits int `json:"commits"` Additions int `json:"additions"` Deletions int `json:"deletions"`
<|file_name|>validator.js<|end_file_name|><|fim▁begin|>/*! * Module dependencies. */ 'use strict'; const MongooseError = require('./'); /** * Schema validator error * * @param {Object} properties * @inherits MongooseError * @api private */ function ValidatorError(properties) {<|fim▁hole|> } const message = this.formatMessage(msg, properties); MongooseError.call(this, message); properties = Object.assign({}, properties, { message: message }); this.name = 'ValidatorError'; if (Error.captureStackTrace) { Error.captureStackTrace(this); } else { this.stack = new Error().stack; } this.properties = properties; this.kind = properties.type; this.path = properties.path; this.value = properties.value; this.reason = properties.reason; } /*! * Inherits from MongooseError */ ValidatorError.prototype = Object.create(MongooseError.prototype); ValidatorError.prototype.constructor = MongooseError; /*! * The object used to define this validator. Not enumerable to hide * it from `require('util').inspect()` output re: gh-3925 */ Object.defineProperty(ValidatorError.prototype, 'properties', { enumerable: false, writable: true, value: null }); /*! * Formats error messages */ ValidatorError.prototype.formatMessage = function(msg, properties) { if (typeof msg === 'function') { return msg(properties); } const propertyNames = Object.keys(properties); for (let i = 0; i < propertyNames.length; ++i) { const propertyName = propertyNames[i]; if (propertyName === 'message') { continue; } msg = msg.replace('{' + propertyName.toUpperCase() + '}', properties[propertyName]); } return msg; }; /*! * toString helper */ ValidatorError.prototype.toString = function() { return this.message; }; /*! * exports */ module.exports = ValidatorError;<|fim▁end|>
let msg = properties.message; if (!msg) { msg = MongooseError.messages.general.default;
<|file_name|>grid.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
Simpla CMS 2.3.8 = 0f9e3c6a3ac53725fa229f3620b7ca52
<|file_name|>_helpers.py<|end_file_name|><|fim▁begin|># Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Common logging helpers.""" import logging import requests from google.cloud.logging.entries import LogEntry from google.cloud.logging.entries import ProtobufEntry from google.cloud.logging.entries import StructEntry from google.cloud.logging.entries import TextEntry try: from google.cloud.logging_v2.gapic.enums import LogSeverity except ImportError: # pragma: NO COVER class LogSeverity(object): """Map severities for non-GAPIC usage.""" DEFAULT = 0 DEBUG = 100 INFO = 200<|fim▁hole|> ALERT = 700 EMERGENCY = 800 _NORMALIZED_SEVERITIES = { logging.CRITICAL: LogSeverity.CRITICAL, logging.ERROR: LogSeverity.ERROR, logging.WARNING: LogSeverity.WARNING, logging.INFO: LogSeverity.INFO, logging.DEBUG: LogSeverity.DEBUG, logging.NOTSET: LogSeverity.DEFAULT, } METADATA_URL = "http://metadata.google.internal./computeMetadata/v1/" METADATA_HEADERS = {"Metadata-Flavor": "Google"} def entry_from_resource(resource, client, loggers): """Detect correct entry type from resource and instantiate. :type resource: dict :param resource: One entry resource from API response. :type client: :class:`~google.cloud.logging.client.Client` :param client: Client that owns the log entry. :type loggers: dict :param loggers: A mapping of logger fullnames -> loggers. If the logger that owns the entry is not in ``loggers``, the entry will have a newly-created logger. 
:rtype: :class:`~google.cloud.logging.entries._BaseEntry` :returns: The entry instance, constructed via the resource """ if "textPayload" in resource: return TextEntry.from_api_repr(resource, client, loggers) if "jsonPayload" in resource: return StructEntry.from_api_repr(resource, client, loggers) if "protoPayload" in resource: return ProtobufEntry.from_api_repr(resource, client, loggers) return LogEntry.from_api_repr(resource, client, loggers) def retrieve_metadata_server(metadata_key): """Retrieve the metadata key in the metadata server. See: https://cloud.google.com/compute/docs/storing-retrieving-metadata :type metadata_key: str :param metadata_key: Key of the metadata which will form the url. You can also supply query parameters after the metadata key. e.g. "tags?alt=json" :rtype: str :returns: The value of the metadata key returned by the metadata server. """ url = METADATA_URL + metadata_key try: response = requests.get(url, headers=METADATA_HEADERS) if response.status_code == requests.codes.ok: return response.text except requests.exceptions.RequestException: # Ignore the exception, connection failed means the attribute does not # exist in the metadata server. pass return None def _normalize_severity(stdlib_level): """Normalize a Python stdlib severity to LogSeverity enum. :type stdlib_level: int :param stdlib_level: 'levelno' from a :class:`logging.LogRecord` :rtype: int :returns: Corresponding Stackdriver severity. """ return _NORMALIZED_SEVERITIES.get(stdlib_level, stdlib_level)<|fim▁end|>
NOTICE = 300 WARNING = 400 ERROR = 500 CRITICAL = 600
<|file_name|>CookieNotice.tsx<|end_file_name|><|fim▁begin|>import * as React from "react" import { useEffect, useState } from "react" import classnames from "classnames" import { FontAwesomeIcon } from "@fortawesome/react-fontawesome" import { faCheck } from "@fortawesome/free-solid-svg-icons/faCheck"<|fim▁hole|> accepted, outdated, dispatch, }: { accepted: boolean outdated: boolean dispatch: any }) => { const [mounted, setMounted] = useState(false) useEffect(() => { setTimeout(() => { setMounted(true) }, 200) }, []) return ( <div className={classnames("cookie-notice", { open: mounted && (!accepted || outdated), })} data-test="cookie-notice" > <div className="wrapper"> <div className="owid-row"> <div className="owid-col owid-col--lg-1 explanation"> <p> We use cookies to give you the best experience on our website. By continuing without changing your cookie settings, we assume you agree to this. </p> </div> <div className="owid-col owid-col--lg-0 actions"> <a href="/privacy-policy" className="button"> Manage preferences </a> <button className="button accept" onClick={() => dispatch({ type: Action.Accept, payload: { date: getTodayDate() }, }) } data-test="accept" data-track-note="cookie-notice" > <span className="icon"> <FontAwesomeIcon icon={faCheck} /> </span> I agree </button> </div> </div> </div> </div> ) }<|fim▁end|>
import { Action, getTodayDate } from "./CookiePreferencesManager" export const CookieNotice = ({
<|file_name|>XML.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>org.json.XML<|fim▁end|>
<|file_name|>app_settings.rs<|end_file_name|><|fim▁begin|>extern crate clap; <|fim▁hole|>#[test] fn sub_command_negate_requred() { App::new("sub_command_negate") .setting(AppSettings::SubcommandsNegateReqs) .arg(Arg::with_name("test") .required(true) .index(1)) .subcommand(SubCommand::with_name("sub1")) .subcommand(SubCommand::with_name("sub1")) .get_matches_from(vec!["", "sub1"]); }<|fim▁end|>
use clap::{App, Arg, SubCommand, AppSettings};
<|file_name|>AdviceWithLambdaTest.java<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.processor.interceptor; import org.apache.camel.CamelExecutionException; import org.apache.camel.ContextTestSupport; import org.apache.camel.builder.AdviceWithRouteBuilder; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.model.RouteDefinition; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.fail; public class AdviceWithLambdaTest extends ContextTestSupport { @Test public void testNoAdvised() throws Exception { getMockEndpoint("mock:foo").expectedMessageCount(1); getMockEndpoint("mock:result").expectedMessageCount(1); template.sendBody("direct:start", "Hello World"); assertMockEndpointsSatisfied(); } @Test public void testAdvised() throws Exception { AdviceWithRouteBuilder.adviceWith(context, null, a -> { a.interceptSendToEndpoint("mock:foo").skipSendToOriginalEndpoint().to("log:foo").to("mock:advised"); }); getMockEndpoint("mock:foo").expectedMessageCount(0); getMockEndpoint("mock:advised").expectedMessageCount(1); getMockEndpoint("mock:result").expectedMessageCount(1); 
template.sendBody("direct:start", "Hello World"); assertMockEndpointsSatisfied(); } // END SNIPPET: e1 @Test public void testAdvisedNoLog() throws Exception { AdviceWithRouteBuilder.adviceWith(context, null, false, a -> { a.weaveByToUri("mock:result").remove(); a.weaveAddLast().transform().constant("Bye World"); }); getMockEndpoint("mock:foo").expectedMessageCount(1); getMockEndpoint("mock:result").expectedMessageCount(0); Object out = template.requestBody("direct:start", "Hello World"); assertEquals("Bye World", out); assertMockEndpointsSatisfied(); } @Test public void testAdvisedNoNewRoutesAllowed() throws Exception { try { AdviceWithRouteBuilder.adviceWith(context, 0, a -> {<|fim▁hole|> }); fail("Should have thrown exception"); } catch (IllegalArgumentException e) { // expected } } @Test public void testAdvisedThrowException() throws Exception { AdviceWithRouteBuilder.adviceWith(context, "myRoute", a -> { a.interceptSendToEndpoint("mock:foo").to("mock:advised").throwException(new IllegalArgumentException("Damn")); }); getMockEndpoint("mock:foo").expectedMessageCount(0); getMockEndpoint("mock:advised").expectedMessageCount(1); getMockEndpoint("mock:result").expectedMessageCount(0); try { template.sendBody("direct:start", "Hello World"); fail("Should have thrown exception"); } catch (CamelExecutionException e) { assertIsInstanceOf(IllegalArgumentException.class, e.getCause()); assertEquals("Damn", e.getCause().getMessage()); } assertMockEndpointsSatisfied(); } @Test public void testAdvisedRouteDefinition() throws Exception { AdviceWithRouteBuilder.adviceWith(context, context.getRouteDefinitions().get(0), a -> { a.interceptSendToEndpoint("mock:foo").skipSendToOriginalEndpoint().to("log:foo").to("mock:advised"); }); getMockEndpoint("mock:foo").expectedMessageCount(0); getMockEndpoint("mock:advised").expectedMessageCount(1); getMockEndpoint("mock:result").expectedMessageCount(1); template.sendBody("direct:start", "Hello World"); assertMockEndpointsSatisfied(); } 
@Test public void testAdvisedEmptyRouteDefinition() throws Exception { try { AdviceWithRouteBuilder.adviceWith(context, new RouteDefinition(), a -> { a.interceptSendToEndpoint("mock:foo").skipSendToOriginalEndpoint().to("log:foo").to("mock:advised"); }); fail("Should throw exception"); } catch (IllegalArgumentException e) { // expected } } @Override protected RouteBuilder createRouteBuilder() throws Exception { return new RouteBuilder() { @Override public void configure() throws Exception { from("direct:start").id("myRoute").to("mock:foo").to("mock:result"); } }; } }<|fim▁end|>
a.from("direct:bar").to("mock:bar"); a.interceptSendToEndpoint("mock:foo").skipSendToOriginalEndpoint().to("log:foo").to("mock:advised");
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! A macro that maps unicode names to chars and strings. #![crate_type="dylib"] #![feature(quote, plugin_registrar, plugin, rustc_private)] #![plugin(regex_macros)] <|fim▁hole|> extern crate regex; extern crate unicode_names; use syntax::ast; use syntax::codemap; use syntax::parse::token; use syntax::ext::base::{self, ExtCtxt, MacResult, MacEager, DummyResult}; use syntax::ext::build::AstBuilder; use rustc::plugin::Registry; #[plugin_registrar] #[doc(hidden)] pub fn plugin_registrar(registrar: &mut Registry) { registrar.register_macro("named_char", named_char); registrar.register_macro("named", named); } fn named_char(cx: &mut ExtCtxt, sp: codemap::Span, tts: &[ast::TokenTree]) -> Box<MacResult+'static> { match base::get_single_str_from_tts(cx, sp, tts, "named_char") { None => {} Some(name) => match unicode_names::character(&name) { None => cx.span_err(sp, &format!("`{}` does not name a character", name)), // everything worked! Some(c) => return MacEager::expr(cx.expr_lit(sp, ast::LitChar(c))), } } // failed :( DummyResult::expr(sp) } fn named(cx: &mut ExtCtxt, sp: codemap::Span, tts: &[ast::TokenTree]) -> Box<MacResult+'static> { let string = match base::get_single_str_from_tts(cx, sp, tts, "named") { None => return DummyResult::expr(sp), Some(s) => s }; // make sure unclosed braces don't escape. static NAMES: regex::Regex = regex!(r"\\N\{(.*?)(?:\}|$)"); let new = NAMES.replace_all(&string, |c: &regex::Captures| { let full = c.at(0).unwrap(); if !full.ends_with("}") { cx.span_err(sp, &format!("unclosed escape in `named!`: {}", full)); } else { let name = c.at(1).unwrap(); match unicode_names::character(name) { Some(c) => return c.to_string(), None => { cx.span_err(sp, &format!("`{}` does not name a character", name)); } } } // failed :( String::new() }); MacEager::expr(cx.expr_str(sp, token::intern_and_get_ident(&new))) }<|fim▁end|>
extern crate syntax; extern crate rustc;
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|># -*- coding: utf-8 -*- """ desispec.pipeline ==================== Tools for pipeline creation and running. """ from __future__ import absolute_import, division, print_function from . import tasks from .defs import (task_states, prod_options_name, task_state_to_int, task_int_to_state) from .db import (all_task_types, DataBaseSqlite, DataBasePostgres, check_tasks, load_db) from .prod import (update_prod, load_prod) from .run import (run_task, run_task_simple, run_task_list, run_task_list_db, dry_run) from .scriptgen import (batch_shell, batch_nersc)<|fim▁end|>
# # See top-level LICENSE.rst file for Copyright information #
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from datetime import datetime from django.db import models from django.utils.translation import ugettext_lazy as _ class TestModel(models.Model): text = models.CharField(max_length=10, default=_('Anything')) class Company(models.Model): name = models.CharField(max_length=50)<|fim▁hole|> cents_payed = models.DecimalField(max_digits=4, decimal_places=2) products_delivered = models.IntegerField() __test__ = {'API_TESTS': ''' >>> tm = TestModel() >>> tm.save() ''' }<|fim▁end|>
date_added = models.DateTimeField(default=datetime(1799,1,31,23,59,59,0))
<|file_name|>backend.py<|end_file_name|><|fim▁begin|># # Copyright (c) 2008--2016 Red Hat, Inc. # # This software is licensed to you under the GNU General Public License, # version 2 (GPLv2). There is NO WARRANTY for this software, express or # implied, including the implied warranties of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2 # along with this software; if not, see # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt. # # Red Hat trademarks are not licensed under GPLv2. No permission is # granted to use or replicate Red Hat trademarks that are incorporated # in this software or its documentation. # # # Generic DB backend # import copy import string import sys from spacewalk.common.usix import raise_with_tb from spacewalk.common import rhn_rpm from spacewalk.common.rhnConfig import CFG from spacewalk.common.rhnException import rhnFault from spacewalk.server import rhnSQL, rhnChannel, taskomatic from importLib import Diff, Package, IncompletePackage, Erratum, \ AlreadyUploadedError, InvalidPackageError, TransactionError, \ InvalidSeverityError, SourcePackage from backendLib import TableCollection, sanitizeValue, TableDelete, \ TableUpdate, TableLookup, addHash, TableInsert sequences = { 'rhnPackageCapability': 'rhn_pkg_capability_id_seq', 'rhnPackage': 'rhn_package_id_seq', 'rhnSourceRPM': 'rhn_sourcerpm_id_seq', 'rhnPackageGroup': 'rhn_package_group_id_seq', 'rhnErrata': 'rhn_errata_id_seq', 'rhnChannel': 'rhn_channel_id_seq', 'rhnChannelProduct': 'rhn_channelprod_id_seq', 'rhnPackageSource': 'rhn_package_source_id_seq', 'rhnChannelFamily': 'rhn_channel_family_id_seq', 'rhnCVE': 'rhn_cve_id_seq', 'rhnChannelArch': 'rhn_channel_arch_id_seq', 'rhnPackageArch': 'rhn_package_arch_id_seq', 'rhnServerArch': 'rhn_server_arch_id_seq', 'rhnCPUArch': 'rhn_cpu_arch_id_seq', 'rhnErrataFile': 'rhn_erratafile_id_seq', 'rhnKickstartableTree': 'rhn_kstree_id_seq', 'rhnArchType': 'rhn_archtype_id_seq', 
'rhnPackageChangeLogRec': 'rhn_pkg_cl_id_seq', 'rhnPackageChangeLogData': 'rhn_pkg_cld_id_seq', 'rhnContentSource': 'rhn_chan_content_src_id_seq', } class Backend: # This object is initialized by the specific subclasses (e.g. # OracleBackend) tables = TableCollection() # TODO: Some reason why we're passing a module in here? Seems to # always be rhnSQL anyhow... def __init__(self, dbmodule): self.dbmodule = dbmodule self.sequences = {} # TODO: Why is there a pseudo-constructor here instead of just using # __init__? def init(self): # Initializes the database connection objects # This function has to be called on a newly defined Backend object # Initialize sequences for k, v in sequences.items(): self.sequences[k] = self.dbmodule.Sequence(v) # TODO: Why do we return a reference to ourselves? If somebody called # this method they already have a reference... return self def setDateFormat(self, format): sth = self.dbmodule.prepare("alter session set nls_date_format ='%s'" % format) sth.execute() sth = self.dbmodule.prepare("alter session set nls_timestamp_format ='%s'" % format) sth.execute() # Note: postgres-specific implementation overrides this in PostgresBackend def processCapabilities(self, capabilityHash): h = self.dbmodule.prepare("select lookup_package_capability(:name, :version) as id from dual") for name, version in capabilityHash.keys(): ver = version if version is None or version == '': ver = None h.execute(name=name, version=ver) row = h.fetchone_dict() capabilityHash[(name, version)] = row['id'] def processChangeLog(self, changelogHash): sql = "select id from rhnPackageChangeLogData where name = :name and time = :time and text = :text" h = self.dbmodule.prepare(sql) toinsert = [[], [], [], []] for name, time, text in changelogHash.keys(): val = {} _buildExternalValue(val, {'name': name, 'time': time, 'text': text}, self.tables['rhnPackageChangeLogData']) h.execute(name=val['name'], time=val['time'], text=val['text']) row = h.fetchone_dict() if row: 
changelogHash[(name, time, text)] = row['id'] continue id = self.sequences['rhnPackageChangeLogData'].next() changelogHash[(name, time, text)] = id toinsert[0].append(id) toinsert[1].append(val['name']) toinsert[2].append(val['time']) toinsert[3].append(val['text']) if not toinsert[0]: # Nothing to do return sql = "insert into rhnPackageChangeLogData (id, name, time, text) values (:id, :name, :time, :text)" h = self.dbmodule.prepare(sql) h.executemany(id=toinsert[0], name=toinsert[1], time=toinsert[2], text=toinsert[3]) def processCVEs(self, cveHash): # First figure out which CVE's are already inserted sql = "select id from rhnCVE where name = :name" h = self.dbmodule.prepare(sql) toinsert = [[], []] for cve_name in cveHash.keys(): h.execute(name=cve_name) row = h.fetchone_dict() if row: cveHash[cve_name] = row['id'] continue # Generate an id id = self.sequences['rhnCVE'].next() cveHash[cve_name] = id toinsert[0].append(id) toinsert[1].append(cve_name) if not toinsert[0]: # Nothing to do return sql = "insert into rhnCVE (id, name) values (:id, :name)" h = self.dbmodule.prepare(sql) h.executemany(id=toinsert[0], name=toinsert[1]) def lookupErrataFileTypes(self, hash): hash.clear() h = self.dbmodule.prepare("select id, label from rhnErrataFileType") h.execute() while 1: row = h.fetchone_dict() if not row: break hash[row['label']] = row['id'] return hash def __lookupArches(self, archHash, table): if not archHash: return sql = "select id from %s where label = :name" % table h = self.dbmodule.prepare(sql) for k in archHash.keys(): h.execute(name=str(k)) row = h.fetchone_dict() if row: archHash[k] = row['id'] # Else, it's an unsupported architecture def lookupChannelArches(self, archHash): return self.__lookupArches(archHash, 'rhnChannelArch') def lookupPackageArches(self, archHash): return self.__lookupArches(archHash, 'rhnPackageArch') def lookupServerArches(self, archHash): return self.__lookupArches(archHash, 'rhnServerArch') def lookupArchTypes(self, 
arch_types_hash): h = self.dbmodule.prepare( "select id, name from rhnArchType where label = :label") seq = self.sequences['rhnArchType'] updates = [[], []] inserts = [[], [], []] results = {} for label, name in arch_types_hash.items(): h.execute(label=label) row = h.fetchone_dict() if not row: next_id = seq.next() inserts[0].append(next_id) inserts[1].append(label) inserts[2].append(name) results[label] = next_id continue aid = row['id'] results[label] = aid if name == row['name']: # Nothing to do continue updates[0].append(aid) updates[1].append(name) if inserts[0]: h = self.dbmodule.prepare(""" insert into rhnArchType (id, label, name) values (:id, :label, :name) """) h.executemany(id=inserts[0], label=inserts[1], name=inserts[2]) if updates[0]: h = self.dbmodule.prepare(""" update rhnArchType set name = :name where id = :id """) h.executemany(id=updates[0], name=updates[1]) # Finally, update the hash arch_types_hash.update(results) def _lookupOrg(self): # Returns the org id sql = "select min(id) as id from web_customer" h = self.dbmodule.prepare(sql) h.execute() rows = h.fetchall_dict() if not rows: raise ValueError("No user is created") return rows[0]['id'] def lookupOrg(self, org_name=None): if not org_name: return self._lookupOrg() # Returns id of the org if found, None otherwise sql = "select id from web_customer where name = :name" h = self.dbmodule.prepare(sql) h.execute(name=org_name) row = h.fetchone_dict() if not row: return None return row['id'] def lookupMaster(self, master_label): # Returns the master record (if it exists) sql = "select * from rhnISSMaster where label = :label" h = self.dbmodule.prepare(sql) h.execute(label=master_label) return h.fetchone_dict() def createMaster(self, master_label): # Creates a master record with label master_label sql = """ insert into rhnISSMaster (id, label) values (sequence_nextval('rhn_issmaster_seq'), :label) """ h = self.dbmodule.prepare(sql) h.execute(label=master_label) def createMasterOrgs(self, master, 
orgs): # Create master org records insert = [[], [], []] for org in orgs: insert[0].append(master) insert[1].append(org['id']) insert[2].append(org['name']) sql = """ insert into rhnISSMasterOrgs (id, master_id, master_org_id, master_org_name) values (sequence_nextval('rhn_issmasterorgs_seq'), (select id from rhnISSMaster where label = :label), :id, :name) """ h = self.dbmodule.prepare(sql) h.executemany(label=insert[0], id=insert[1], name=insert[2]) def createOrgs(self, orgs): # Create local org records sql = """ insert into web_customer (id, name) values (sequence_nextval('web_customer_id_seq'), :name) """ h = self.dbmodule.prepare(sql) h.executemany(name=orgs) sql = "select id, name from web_customer" h = self.dbmodule.prepare(sql) h.execute() rows = h.fetchall_dict() ret = {} for row in rows: ret[row['name']] = row['id'] return ret def updateMasterOrgs(self, master_orgs): # Update the master org to local org mapping insert = [[], []] for org in master_orgs: insert[0].append(org['master_id']) insert[1].append(org['local_id']) sql = """ update rhnISSMasterOrgs set local_org_id=:local where master_org_id=:master """ h = self.dbmodule.prepare(sql) h.executemany(master=insert[0], local=insert[1]) def lookupOrgTrusts(self): # Return a hash of org trusts sql = "select org_id, org_trust_id from rhnTrustedOrgs" h = self.dbmodule.prepare(sql) h.execute() rows = h.fetchall_dict() ret = {} if rows: for row in rows: if row['org_id'] not in list(ret.keys()): ret[row['org_id']] = [] ret[row['org_id']].append(row['org_trust_id']) return ret def clearOrgTrusts(self, org_id): # Delete all trusts involving this org; trusts are always # bi-directional sql = """ delete from rhnTrustedOrgs where org_id = :org_id or org_trust_id = :org_id """ h = self.dbmodule.prepare(sql) h.execute(org_id=org_id) def createOrgTrusts(self, trusts): # Create org trusts insert = [[], []] for trust in trusts: insert[0].append(trust['org_id']) insert[1].append(trust['trust']) sql = """ insert into 
rhnTrustedOrgs (org_id, org_trust_id) values (:id, :trust) """ h = self.dbmodule.prepare(sql) h.executemany(id=insert[0], trust=insert[1]) def lookupOrgMap(self, master_label): sql = """ select imo.master_org_id, imo.master_org_name, imo.local_org_id from rhnISSMasterOrgs imo, rhnISSMaster im where im.id = imo.master_id and im.label = :master_label """ h = self.dbmodule.prepare(sql) h.execute(master_label=master_label) rows = h.fetchall_dict() maps = {'master-name-to-master-id': {}, 'master-id-to-local-id': {}} if not rows: return maps mn_to_mi = {} # master org name to master org id map mi_to_li = {} # master org id to local org id map for org in rows: if ('master_org_id' in list(org.keys()) and 'master_org_name' in list(org.keys()) and org['master_org_id'] and org['master_org_name']): mn_to_mi[org['master_org_name']] = org['master_org_id'] if ('master_org_id' in list(org.keys()) and 'local_org_id' in list(org.keys()) and org['master_org_id'] and org['local_org_id']): mi_to_li[org['master_org_id']] = org['local_org_id'] maps['master-name-to-master-id'] = mn_to_mi maps['master-id-to-local-id'] = mi_to_li return maps def lookupChannels(self, hash): if not hash: return sql = "select id, channel_arch_id from rhnChannel where label = :label" h = self.dbmodule.prepare(sql) for k in hash.keys(): h.execute(label=k) row = h.fetchone_dict() if row: hash[k] = row # Else, it's an unsupported channel def lookupChannelPackageArchCompat(self, channelArchHash): # Return all the arches compatible with each key of archHash sql = """ select package_arch_id from rhnChannelPackageArchCompat where channel_arch_id = :channel_arch_id """ h = self.dbmodule.prepare(sql) for channel_arch_id in channelArchHash.keys(): dict = {} h.execute(channel_arch_id=channel_arch_id) while 1: row = h.fetchone_dict() if not row: break dict[row['package_arch_id']] = None channelArchHash[channel_arch_id] = dict def lookupServerGroupTypes(self, entries_hash): sql = """ select id from rhnServerGroupType where 
label = :label """ h = self.dbmodule.prepare(sql) for sgt in entries_hash.keys(): h.execute(label=sgt) row = h.fetchone_dict() if not row: # server group not found continue entries_hash[sgt] = row['id'] def lookupPackageNames(self, nameHash): if not nameHash: return sql = "select LOOKUP_PACKAGE_NAME(:name) id from dual" h = self.dbmodule.prepare(sql) for k in nameHash.keys(): h.execute(name=k) nameHash[k] = h.fetchone_dict()['id'] def lookupErratum(self, erratum): if not erratum: return None sql = """ select advisory from rhnErrata where advisory_name = :advisory_name """ h = self.dbmodule.prepare(sql) h.execute(advisory_name=erratum['advisory_name']) return h.fetchone_dict() def lookupErrataSeverityId(self, erratum): """ for the given severity type retuns the id associated in the rhnErratSeverity table. """ if not erratum: return None sql = """ select id from rhnErrataSeverity where label = :severity """ h = self.dbmodule.prepare(sql) if erratum['security_impact'] == '': return None # concatenate the severity to reflect the db # bz-204374: rhnErrataSeverity tbl has lower case severity values, # so we convert severity in errata hash to lower case to lookup. severity_label = 'errata.sev.label.' 
+ erratum['security_impact'].lower() h.execute(severity=severity_label) row = h.fetchone_dict() if not row: raise InvalidSeverityError("Invalid severity: %s" % erratum['security_impact']) return row['id'] def lookupEVRs(self, evrHash): sql = "select LOOKUP_EVR(:epoch, :version, :release) id from dual" h = self.dbmodule.prepare(sql) for evr in evrHash.keys(): epoch, version, release = evr if epoch == '' or epoch is None: epoch = None else: epoch = str(epoch) h.execute(epoch=epoch, version=version, release=release) row = h.fetchone_dict() if row: evrHash[evr] = row['id'] # Note: postgres-specific implementation overrides this in PostgresBackend def lookupChecksums(self, checksumHash): if not checksumHash: return sql = "select lookup_checksum(:ctype, :csum) id from dual" h = self.dbmodule.prepare(sql) for k in checksumHash.keys(): ctype, csum = k if csum != '': h.execute(ctype=ctype, csum=csum) row = h.fetchone_dict() if row: checksumHash[k] = row['id'] def lookupChecksumTypes(self, checksumTypeHash): if not checksumTypeHash: return sql = "select id from rhnChecksumType where label = :label" h = self.dbmodule.prepare(sql) for l in checksumTypeHash.keys(): h.execute(label=l) row = h.fetchone_dict() if row: checksumTypeHash[l] = row['id'] def lookupPackageNEVRAs(self, nevraHash): sql = "select LOOKUP_PACKAGE_NEVRA(:name, :evr, :arch) id from dual" h = self.dbmodule.prepare(sql) for nevra in nevraHash: name, evr, arch = nevra if arch is None: arch = '' h.execute(name=name, evr=evr, arch=arch) row = h.fetchone_dict() if row: nevraHash[nevra] = row['id'] def lookupPackagesByNEVRA(self, nevraHash): sql = """ select id from rhnPackage where name_id = :name and evr_id = :evr and package_arch_id = :arch """ h = self.dbmodule.prepare(sql) for nevra in nevraHash: name, evr, arch = nevra h.execute(name=name, evr=evr, arch=arch) row = h.fetchone_dict() if row: nevraHash[nevra] = row['id'] def lookupPackageKeyId(self, header): lookup_keyid_sql = rhnSQL.prepare(""" select pk.id from 
rhnPackagekey pk, rhnPackageKeyType pkt, rhnPackageProvider pp where pk.key_id = :key_id and pk.key_type_id = pkt.id and pk.provider_id = pp.id """) sigkeys = rhn_rpm.RPM_Header(header).signatures key_id = None # _key_ids(sigkeys)[0] for sig in sigkeys: if sig['signature_type'] == 'gpg': key_id = sig['key_id'] lookup_keyid_sql.execute(key_id=key_id) keyid = lookup_keyid_sql.fetchall_dict() return keyid[0]['id'] def lookupSourceRPMs(self, hash): self.__processHash('lookup_source_name', hash) def lookupPackageGroups(self, hash): self.__processHash('lookup_package_group', hash) def lookupPackages(self, packages, checksums, ignore_missing=0): # If nevra is enabled use checksum as primary key self.validate_pks() for package in packages: if not isinstance(package, IncompletePackage): raise TypeError("Expected an IncompletePackage instance, found %s" % str(type(package))) for package in packages: # here we need to figure out which checksum we have in the database not_found = None for type, chksum in package['checksums'].items(): package['checksum_type'] = type package['checksum'] = chksum package['checksum_id'] = checksums[(type, chksum)] try: self.__lookupObjectCollection([package], 'rhnPackage') not_found = None break except InvalidPackageError: e = sys.exc_info()[1] not_found = (e, sys.exc_info()[2]) if not_found and not ignore_missing: # package is not in database at all raise_with_tb(not_found[0], not_found[1]) def lookupChannelFamilies(self, hash): if not hash: return sql = "select id from rhnChannelFamily where label = :label" h = self.dbmodule.prepare(sql) for k in hash.keys(): h.execute(label=k) row = h.fetchone_dict() if row: hash[k] = row['id'] # Else, it's an unsupported channel def lookup_kstree_types(self, hash): return self._lookup_in_table('rhnKSTreeType', 'rhn_kstree_type_seq', hash) def lookup_ks_install_types(self, hash): return self._lookup_in_table('rhnKSInstallType', 'rhn_ksinstalltype_id_seq', hash) def _lookup_in_table(self, table_name, 
sequence_name, hash): t = self.dbmodule.Table(table_name, 'label') seq = self.dbmodule.Sequence(sequence_name) to_insert = [] to_update = [] result = {} for label, name in hash.items(): row = t[label] if not row: row_id = seq.next() result[label] = row_id to_insert.append((label, name, row_id)) continue row_id = row['id'] result[label] = row_id if row['name'] != name: to_update.append((label, name)) continue # Entry found in the table - nothing more to do if to_insert: # Have to insert rows row_ids = [] labels = [] names = [] for label, name, row_id in to_insert: row_ids.append(row_id) labels.append(label) names.append(name) sql = """ insert into %s (id, label, name) values (:id, :label, :name) """ h = self.dbmodule.prepare(sql % table_name) h.executemany(id=row_ids, label=labels, name=names) if to_update: labels = [] names = [] for label, name in to_update: labels.append(label) names.append(name) sql = """ update %s set name = :name where label = :label """ h = self.dbmodule.prepare(sql % table_name) h.executemany(label=labels, name=names) # Update the returning value hash.clear() hash.update(result) return hash def processChannelArches(self, arches): self.__processObjectCollection(arches, 'rhnChannelArch', uploadForce=4, ignoreUploaded=1, severityLimit=4) def processPackageArches(self, arches): self.__processObjectCollection(arches, 'rhnPackageArch', uploadForce=4, ignoreUploaded=1, severityLimit=4) def processServerArches(self, arches): self.__processObjectCollection(arches, 'rhnServerArch', uploadForce=4, ignoreUploaded=1, severityLimit=4) def processCPUArches(self, arches): self.__processObjectCollection(arches, 'rhnCPUArch', uploadForce=4, ignoreUploaded=1, severityLimit=4) def processMasterOrgs(self, orgs): self.__processObjectCollection(orgs, 'rhnISSMasterOrgs', uploadForce=4, ignoreUploaded=1, severityLimit=4) def processOrgs(self, orgs): self.__processObjectCollection(orgs, 'web_customer', uploadForce=4, ignoreUploaded=1, severityLimit=4) def 
processServerPackageArchCompatMap(self, entries): self.__populateTable('rhnServerPackageArchCompat', entries, delete_extra=1) def processServerChannelArchCompatMap(self, entries): self.__populateTable('rhnServerChannelArchCompat', entries, delete_extra=1) def processChannelPackageArchCompatMap(self, entries): self.__populateTable('rhnChannelPackageArchCompat', entries, delete_extra=1) def processServerGroupServerArchCompatMap(self, entries): self.__populateTable('rhnServerServerGroupArchCompat', entries, delete_extra=1) def processPackages(self, packages, uploadForce=0, ignoreUploaded=0, forceVerify=0, transactional=0): # Insert/update the packages self.validate_pks() childTables = { 'rhnPackageProvides': 'package_id', 'rhnPackageRequires': 'package_id', 'rhnPackageConflicts': 'package_id', 'rhnPackageObsoletes': 'package_id', 'rhnPackageRecommends': 'package_id', 'rhnPackageSuggests': 'package_id', 'rhnPackageSupplements': 'package_id', 'rhnPackageEnhances': 'package_id', 'rhnPackageBreaks': 'package_id', 'rhnPackagePredepends': 'package_id', 'rhnPackageFile': 'package_id', 'rhnPackageChangeLogRec': 'package_id', } for package in packages: if not isinstance(package, Package): raise TypeError("Expected a Package instance") tableList = copy.deepcopy(childTables) # older sat packages wont have these fields # avoid Null insertions if package['header_start'] is None: package['header_start'] = -1 package['header_end'] = -1 self.__processObjectCollection__([package, ], 'rhnPackage', tableList, uploadForce=uploadForce, forceVerify=forceVerify, ignoreUploaded=ignoreUploaded, severityLimit=1, transactional=transactional) def processErrata(self, errata): # Insert/update the packages childTables = [ 'rhnChannelErrata', 'rhnErrataBugList', 'rhnErrataFile', 'rhnErrataKeyword', 'rhnErrataPackage', 'rhnErrataCVE', ] for erratum in errata: if not isinstance(erratum, Erratum): raise TypeError("Expected an Erratum instance") return self.__processObjectCollection(errata, 'rhnErrata', 
childTables, 'errata_id', uploadForce=4, ignoreUploaded=1, forceVerify=1, transactional=1) def update_channels_affected_by_errata(self, dml): # identify errata that were affected affected_errata_ids = {} for op_type in ['insert', 'update', 'delete']: op_values = getattr(dml, op_type) for table_name, values_hash in op_values.items(): if table_name == 'rhnErrata': field = 'id' elif 'errata_id' in values_hash: field = 'errata_id' # Now we know in which field to look for changes for erratum_id in values_hash[field]: affected_errata_ids[erratum_id] = None # Get affected channels affected_channel_ids = {} h = self.dbmodule.prepare(""" select channel_id from rhnChannelErrata where errata_id = :errata_id """) for errata_id in affected_errata_ids.keys(): h.execute(errata_id=errata_id) channel_ids = h.fetchall_dict() or [] channel_ids = [x['channel_id'] for x in channel_ids] for channel_id in channel_ids: affected_channel_ids[channel_id] = errata_id # Now update the channels update_channel = self.dbmodule.Procedure('rhn_channel.update_channel') invalidate_ss = 0 for channel_id in affected_channel_ids.keys(): update_channel(channel_id, invalidate_ss) h = self.dbmodule.prepare(""" select advisory from rhnErrata where id = :errata_id """) h.execute(errata_id=affected_channel_ids[channel_id]) advisory = h.fetchone()[0] channel = rhnChannel.Channel() channel.load_by_id(channel_id) taskomatic.add_to_repodata_queue(channel.get_label(), "errata", advisory) def processKickstartTrees(self, ks_trees): childTables = [ 'rhnKSTreeFile', #'rhnKSTreeType', #'rhnKSInstallType', ] self.__processObjectCollection(ks_trees, 'rhnKickstartableTree', childTables, 'kstree_id', uploadForce=4, forceVerify=1, ignoreUploaded=1, severityLimit=1, transactional=1) def queue_errata(self, errata, timeout=0): # timeout is the numer of seconds we want the execution to be delayed if not errata: return # Figure out the errata ids errata_channel_ids = [] for erratum in errata: if erratum.ignored: # Skip it 
continue if erratum.diff_result is not None: if erratum.diff_result.level != 0: # New or modified in some way, queue it # XXX we may not want to do this for trivial changes, # but not sure what trivial is for cid in erratum['channels']: errata_channel_ids.append( (erratum.id, cid['channel_id'])) if not errata_channel_ids: # Nothing to do return hdel = self.dbmodule.prepare(""" delete from rhnErrataQueue where errata_id = :errata_id """) h = self.dbmodule.prepare(""" insert into rhnErrataQueue (errata_id, channel_id, next_action) values (:errata_id, :channel_id, current_timestamp + numtodsinterval(:timeout, 'second')) """) errata_ids = [x[0] for x in errata_channel_ids] channel_ids = [x[1] for x in errata_channel_ids] timeouts = [timeout] * len(errata_ids) hdel.executemany(errata_id=errata_ids) return h.executemany(errata_id=errata_ids, channel_id=channel_ids, timeout=timeouts) def processChannels(self, channels, base_channels): childTables = [ 'rhnChannelFamilyMembers', 'rhnReleaseChannelMap', ] if base_channels: childTables.append('rhnDistChannelMap') self.__processObjectCollection(channels, 'rhnChannel', childTables, 'channel_id', uploadForce=4, ignoreUploaded=1, forceVerify=1) def orgTrustExists(self, org_id, trust_id): sql = """ select * from rhnTrustedOrgs where org_id = :org_id and org_trust_id = :trust_id """ h = self.dbmodule.prepare(sql) h.execute(org_id=org_id, trust_id=trust_id) row = h.fetchone_dict() if row: return True return False def clearChannelTrusts(self, label): sql = """ delete from rhnChannelTrust where channel_id = (select id from rhnChannel where label = :label) """ h = self.dbmodule.prepare(sql) h.execute(label=label) def processChannelTrusts(self, channel_trusts): # Create channel trusts insert = [[], []] for trust in channel_trusts: insert[0].append(trust['channel-label']) insert[1].append(trust['org-id']) sql = """ insert into rhnChannelTrust (channel_id, org_trust_id) values ((select id from rhnChannel where label = :label), :org_id) 
""" h = self.dbmodule.prepare(sql) h.executemany(label=insert[0], org_id=insert[1]) def processChannelFamilies(self, channels): childTables = [] self.__processObjectCollection(channels, 'rhnChannelFamily', childTables, 'channel_family_id', uploadForce=4, ignoreUploaded=1, forceVerify=1) def processChannelFamilyMembers(self, channel_families): # Channel families now contain channel memberships too h_lookup_cfid = self.dbmodule.prepare(""" select channel_family_id from rhnChannelFamilyMembers where channel_id = :channel_id """) cf_ids = [] c_ids = [] for cf in channel_families: if 'private-channel-family' in cf['label']: # Its a private channel family and channel family members # will be different from server as this is most likely ISS # sync. Don't compare and delete custom channel families. continue for cid in cf['channel_ids']: # Look up channel families for this channel h_lookup_cfid.execute(channel_id=cid) row = h_lookup_cfid.fetchone_dict() if row and row['channel_family_id'] == cf.id: # Nothing to do here, we already have this mapping continue # need to delete this entry and add the one for the new # channel family cf_ids.append(cf.id) c_ids.append(cid) if not c_ids: # We're done return hdel = self.dbmodule.prepare(""" delete from rhnChannelFamilyMembers where channel_id = :channel_id """) hins = self.dbmodule.prepare(""" insert into rhnChannelFamilyMembers (channel_id, channel_family_id) values (:channel_id, :channel_family_id) """) hdel.executemany(channel_id=c_ids) hins.executemany(channel_family_id=cf_ids, channel_id=c_ids) def processChannelFamilyPermissions(self, channel_families): # Since this is not evaluated in rhn_entitlements anymore, # make channel families without org globally visible cf_ids = [cf.id for cf in channel_families if 'private-channel-family' not in cf['label']] h_public_sel = self.dbmodule.prepare(""" select channel_family_id from rhnPublicChannelFamily """) h_public_sel.execute() public_cf_in_db = [x['channel_family_id'] for x in 
h_public_sel.fetchall_dict() or []] public_cf_to_insert = [x for x in cf_ids if x not in public_cf_in_db] h_public_ins = self.dbmodule.prepare(""" insert into rhnPublicChannelFamily (channel_family_id) values (:channel_family_id) """) h_public_ins.executemany(channel_family_id=public_cf_to_insert) def processDistChannelMap(self, dcms): dcmTable = self.tables['rhnDistChannelMap'] lookup = TableLookup(dcmTable, self.dbmodule) dmlobj = DML([dcmTable.name], self.tables) for dcm in dcms: if dcm.ignored: # Skip it continue h = lookup.query(dcm) row = h.fetchone_dict() if not row: extObject = {} _buildExternalValue(extObject, dcm, dcmTable) addHash(dmlobj.insert[dcmTable.name], extObject) # Since this table has all the columns in unique constraints, we # don't care much about updates self.__doDML(dmlobj) def processChannelProduct(self, channel): """ Associate product with channel """ channel['channel_product'] = channel['product_name'] channel['channel_product_version'] = channel['product_version'] channel['channel_product_beta'] = channel['product_beta'] channel['channel_product_id'] = self.lookupChannelProduct(channel) if not channel['channel_product_id']: # If no channel product dont update return statement = self.dbmodule.prepare(""" UPDATE rhnChannel SET channel_product_id = :channel_product_id WHERE id = :id AND (channel_product_id is NULL OR channel_product_id <> :channel_product_id) """) statement.execute(id=channel.id, channel_product_id=channel['channel_product_id']) def processChannelContentSources(self, channel): """ Associate content sources with channel """ # Which content sources are assigned to this channel select_sql = self.dbmodule.prepare(""" select source_id from rhnChannelContentSource where channel_id = :channel_id """) select_sql.execute(channel_id=channel.id) sources_in_db = [x['source_id'] for x in select_sql.fetchall_dict() or []] # Which content sources should be assigned to this channel sources_needed = [] if 'content-sources' in channel and 
channel['content-sources']: for source in channel['content-sources']: sources_needed.append(self.lookupContentSource(source['label'])) # What to delete and insert sources_to_delete = [x for x in sources_in_db if x not in sources_needed] sources_to_insert = [x for x in sources_needed if x not in sources_in_db] delete_sql = self.dbmodule.prepare(""" delete from rhnChannelContentSource where source_id = :source_id and channel_id = :channel_id """) insert_sql = self.dbmodule.prepare(""" insert into rhnChannelContentSource (source_id, channel_id) values (:source_id, :channel_id) """) for source_id in sources_to_delete: delete_sql.execute(source_id=source_id, channel_id=channel.id) for source_id in sources_to_insert: insert_sql.execute(source_id=source_id, channel_id=channel.id) def processProductNames(self, batch): """ Check if ProductName for channel in batch is already in DB. If not add it there. """ statement = self.dbmodule.prepare(""" insert into rhnProductName (id, label, name) values (sequence_nextval('rhn_productname_id_seq'), :product_label, :product_name) """) for channel in batch: if not self.lookupProductNames(channel['label']): statement.execute(product_label=channel['label'], product_name=channel['name']) def processContentSources(self, batch): """ Insert content source into DB """ childTables = ['rhnContentSourceSsl'] self.__processObjectCollection(batch, 'rhnContentSource', childTables, 'content_source_id', uploadForce=4, ignoreUploaded=1, forceVerify=1) def lookupContentSource(self, label): """ Get id for given content source """ sql = self.dbmodule.prepare(""" select id from rhnContentSource where label = :label and org_id is null """) sql.execute(label=label) content_source = sql.fetchone_dict() if content_source: return content_source['id'] return def lookupContentSourceType(self, label): """ Get id for given content type label """ sql = self.dbmodule.prepare(""" select id from rhnContentSourceType where label = :label """) sql.execute(label=label) 
source_type = sql.fetchone_dict() if source_type: return source_type['id'] return def lookupProductNames(self, label): """ For given label of product return its id. If product do not exist return None """ statement = self.dbmodule.prepare(""" SELECT id FROM rhnProductName WHERE label = :label """) statement.execute(label=label) product = statement.fetchone_dict() if product: return product['id'] return # bug #528227 def lookupChannelOrg(self, label): """For given label of channel return its org_id. If channel with given label does not exist or is NULL, return None. """ statement = self.dbmodule.prepare(""" SELECT org_id FROM rhnChannel WHERE label = :label """) statement.execute(label=label) org_id = statement.fetchone_dict() if org_id: return org_id return def lookupChannelProduct(self, channel): statement = self.dbmodule.prepare(""" SELECT id FROM rhnChannelProduct WHERE product = :product AND version = :version AND beta = :beta """) statement.execute(product=channel['channel_product'], version=channel['channel_product_version'], beta=channel['channel_product_beta']) product = statement.fetchone_dict() if product: return product['id'] return self.createChannelProduct(channel) def createChannelProduct(self, channel): id = self.sequences['rhnChannelProduct'].next() statement = self.dbmodule.prepare(""" INSERT INTO rhnChannelProduct (id, product, version, beta) VALUES (:id, :product, :version, :beta) """) statement.execute(id=id, product=channel['channel_product'], version=channel['channel_product_version'], beta=channel['channel_product_beta']) return id def subscribeToChannels(self, packages, strict=0): hash = { 'package_id': [], 'channel_id': [], } # Keep a list of packages for a channel too, so we can easily compare # what's extra, if strict is 1 channel_packages = {} sql = """ select channel_id from rhnChannelPackage where package_id = :package_id""" affected_channels = {} statement = self.dbmodule.prepare(sql) for package in packages: if package.ignored: # 
Skip it continue if package.id is None: raise InvalidPackageError(package, "Invalid package") # Look it up first statement.execute(package_id=package.id) channels = {} while 1: row = statement.fetchone_dict() if not row: break channels[row['channel_id']] = None for channelId in package['channels'].keys(): # Build the channel-package list if channelId in channel_packages: cp = channel_packages[channelId] else: channel_packages[channelId] = cp = {} cp[package.id] = None if channelId in channels: # Already subscribed continue dict = { 'package_id': package.id, 'channel_id': channelId, } if channelId not in affected_channels: modified_packages = ([], []) affected_channels[channelId] = modified_packages else: modified_packages = affected_channels[channelId] # Package was added to this channel modified_packages[0].append(package.id) addHash(hash, dict) # Packages we'd have to delete extra_cp = { 'package_id': [], 'channel_id': [], } if strict: # if strict remove the extra packages from the DB sql = """ select package_id from rhnChannelPackage where channel_id = :channel_id """ else: # or at least we should delete packages from different org sql = """ select package_id from rhnChannelPackage cp join rhnPackage p on p.id = cp.package_id join rhnChannel c on c.id = cp.channel_id where cp.channel_id = :channel_id and c.org_id != p.org_id """ statement = self.dbmodule.prepare(sql) for channel_id, pid_hash in channel_packages.items(): statement.execute(channel_id=channel_id) while 1: row = statement.fetchone_dict() if not row: break package_id = row['package_id'] if package_id not in pid_hash: # Have to remove it extra_cp['package_id'].append(package_id) extra_cp['channel_id'].append(channel_id) # And mark this channel as being affected if channel_id not in affected_channels: modified_packages = ([], []) affected_channels[channel_id] = modified_packages else: modified_packages = affected_channels[channel_id] # Package was deletef from this channel 
modified_packages[1].append(package_id) self.__doDeleteTable('rhnChannelPackage', extra_cp) self.__doInsertTable('rhnChannelPackage', hash) # This function returns the channels that were affected return affected_channels def update_newest_package_cache(self, caller, affected_channels, name_ids=[]): # affected_channels is a hash keyed on the channel id, and with a # tuple (added_package_list, deleted_package_list) as values refresh_newest_package = self.dbmodule.Procedure('rhn_channel.refresh_newest_package') update_channel = self.dbmodule.Procedure('rhn_channel.update_channel') for channel_id, (added_packages_list, deleted_packages_list) in affected_channels.items(): try: if name_ids: for id in name_ids: refresh_newest_package(channel_id, caller, id) else: refresh_newest_package(channel_id, caller, None) except rhnSQL.SQLError: e = sys.exc_info()[1] raise_with_tb(rhnFault(23, str(e[1]), explain=0), sys.exc_info()[2]) if deleted_packages_list: invalidate_ss = 1 else: invalidate_ss = 0 update_channel(channel_id, invalidate_ss) <|fim▁hole|> childTables = [] for package in packages: if not isinstance(package, SourcePackage): raise TypeError("Expected a Package instance") # Process the packages self.__processObjectCollection(packages, 'rhnPackageSource', childTables, 'package_id', uploadForce=uploadForce, forceVerify=forceVerify, ignoreUploaded=ignoreUploaded, severityLimit=1, transactional=transactional) def commit(self): self.dbmodule.commit() def rollback(self): self.dbmodule.rollback() def __processHash(self, lookup, hash): if not hash: # Nothing to do return h = rhnSQL.prepare("select " + lookup + "(:name) from dual") for k in hash.keys(): h.execute(name=k) # saving id hash[k] = h.fetchone_dict().popitem()[1] def __buildQueries(self, childTables): childTableLookups = {} queryTempl = "select * from %s where %s = :id" for childTableName in childTables: childTableLookups[childTableName] = self.dbmodule.prepare( queryTempl % (childTableName, 
childTables[childTableName])) return childTableLookups def __processObjectCollection(self, objColl, parentTable, childTables=[], colname=None, **kwargs): # Returns the DML object that was processed # This helps identify what the changes were # XXX this is a total hack keeping tranlating the old interface into the # new interface to keep me from having to change all the places in the # code that call this method, as there are 10 of them... childDict = {} for tbl in childTables: childDict[tbl] = colname return self.__processObjectCollection__(objColl, parentTable, childDict, **kwargs) def __processObjectCollection__(self, objColl, parentTable, childTables={}, **kwargs): # Returns the DML object that was processed # This helps identify what the changes were # FIXME I need to break this method apart into smaller method calls that # will allow *different* colname fields for different childTables # NOTE objColl == packages # Process the object collection, starting with parentTable, having # colname as a link column between the parent and child tables # # We create a DML object for the operations we're supposed to perform # on the database kwparams = { # The 'upload force' 'uploadForce': 0, # Raises exceptions when the object is already uploaded 'ignoreUploaded': 0, # Forces a full object verification - including the child tables 'forceVerify': 0, # When the severity is below this limit, the object is not # updated 'severityLimit': 0, # All-or-nothing 'transactional': 0, } for k, v in kwargs.items(): if k not in kwparams: raise TypeError("Unknown keyword parameter %s" % k) if v is not None: # Leave the default values in case of a None kwparams[k] = v uploadForce = kwparams['uploadForce'] ignoreUploaded = kwparams['ignoreUploaded'] severityLimit = kwparams['severityLimit'] transactional = kwparams['transactional'] forceVerify = kwparams['forceVerify'] # All the tables affected tables = [parentTable] + list(childTables.keys()) # Build the hash for the operations on the 
tables dml = DML(tables, self.tables) # Reverse hash: object id to object for already-uploaded objects uploadedObjects = {} # Information related to the parent table parentTableObj = self.tables[parentTable] ptFields = parentTableObj.getFields() severityHash = parentTableObj.getSeverityHash() # A flag that indicates if something has to be modified beyond the # current severity limit brokenTransaction = 0 # Lookup object lookup = TableLookup(parentTableObj, self.dbmodule) # XXX childTableLookups = self.__buildQueries(childTables) # For each valid object in the collection, look it up # if it doesn't exist, insert all the associated information # if it already exists: # save it in the uploadedObjects hash for later processing # the object's diff member will contain data if that object # failed to push; the content should be explicit enough about # what failed # The object's diff_result should reliably say if the object was # different in any way, or if it was new. Each field that gets # compared will present its won severity field (or the default # one if not explicitly specified). The "global" severity is the # max of all severities. 
# New objects will have a diff level of -1 for object in objColl: if object.ignored: # Skip it continue h = lookup.query(object) row = h.fetchone_dict() if not row: # Object does not exist id = self.sequences[parentTable].next() object.id = id extObject = {'id': id} _buildExternalValue(extObject, object, parentTableObj) addHash(dml.insert[parentTable], extObject) # Insert child table information for tname in childTables: tbl = self.tables[tname] # Get the list of objects for this package entry_list = object[tbl.getAttribute()] if entry_list is None: continue for entry in entry_list: extObject = {childTables[tname]: id} seq_col = tbl.sequenceColumn if seq_col: # This table has to insert values in a sequenced # column; since it's a child table and the entry # in the master table is not created yet, there # shouldn't be a problem with uniqueness # constraints new_id = self.sequences[tbl.name].next() extObject[seq_col] = new_id # Make sure we initialize the object's sequenced # column as well entry[seq_col] = new_id _buildExternalValue(extObject, entry, tbl) addHash(dml.insert[tname], extObject) object.diff_result = Diff() # New object object.diff_result.level = -1 continue # Already uploaded if not ignoreUploaded: raise AlreadyUploadedError(object, "Already uploaded") # XXX package id set here!!!!!!!!!! 
object.id = row['id'] # And save the object and the row for later processing uploadedObjects[row['id']] = [object, row] # Deal with already-uploaded objects for objid, (object, row) in uploadedObjects.items(): # Build the external value extObject = {'id': row['id']} _buildExternalValue(extObject, object, parentTableObj) # Build the DB value row = _buildDatabaseValue(row, ptFields) # compare them object.diff = object.diff_result = Diff() diffval = computeDiff(extObject, row, severityHash, object.diff) if not forceVerify: # If there is enough karma, force the full object check # maybe they want the object overwritten if uploadForce < object.diff.level and diffval <= severityLimit: # Same object, or not different enough # not enough karma either continue localDML = self.__processUploaded(objid, object, childTables, childTableLookups) if uploadForce < object.diff.level: # Not enough karma if object.diff.level > severityLimit: # Broken transaction - object is too different brokenTransaction = 1 continue # Clean up the object diff since we pushed the package object.diff = None if diffval: # Different parent object localDML['update'][parentTable] = [extObject] # And transfer the local DML to the global one for k, tablehash in localDML.items(): dmlhash = getattr(dml, k) for tname, vallist in tablehash.items(): for val in vallist: addHash(dmlhash[tname], val) if transactional and brokenTransaction: raise TransactionError("Error uploading package source batch") return self.__doDML(dml) def __processUploaded(self, objid, object, childTables, childTableLookups): # Store the DML operations locally localDML = { 'insert': {}, 'update': {}, 'delete': {}, } # Grab the rest of the information childTablesInfo = self.__getChildTablesInfo(objid, list(childTables.keys()), childTableLookups) # Start computing deltas for childTableName in childTables: # Init the local hashes for k in ['insert', 'update', 'delete']: localDML[k][childTableName] = [] dbside = childTablesInfo[childTableName] 
# The child table object childTableObj = self.tables[childTableName] # The name of the attribute in the parent object parentattr = childTableObj.getAttribute() # The list of entries associated with the attribute linked to # this table entrylist = object[parentattr] fields = childTableObj.getFields() pks = childTableObj.getPK() childSeverityHash = childTableObj.getSeverityHash() if entrylist is None: continue for ent in entrylist: # Build the primary key key = [] for f in pks: if f == childTables[childTableName]: # Special-case it key.append(objid) continue datatype = fields[f] # Get the proper attribute name for this column attr = childTableObj.getObjectAttribute(f) key.append(sanitizeValue(ent[attr], datatype)) key = tuple(key) # Build the value val = {childTables[childTableName]: objid} if childTableObj.sequenceColumn: # Initialize the sequenced column with a dummy value ent[childTableObj.sequenceColumn] = None _buildExternalValue(val, ent, childTableObj) # Look this value up if key not in dbside: if childTableObj.sequenceColumn: # Initialize the sequence column too sc = childTableObj.sequenceColumn nextid = self.sequences[childTableName].next() val[sc] = ent[sc] = nextid # This entry has to be inserted object.diff.append((parentattr, val, None)) # XXX change to a default value object.diff.setLevel(4) localDML['insert'][childTableName].append(val) continue # Already exists in the DB dbval = _buildDatabaseValue(dbside[key], fields) if childTableObj.sequenceColumn: # Copy the sequenced value - we dpn't want it updated sc = childTableObj.sequenceColumn val[sc] = ent[sc] = dbval[sc] # check for updates diffval = computeDiff(val, dbval, childSeverityHash, object.diff, parentattr) if not diffval: # Same value del dbside[key] continue # Different value; have to update the entry localDML['update'][childTableName].append(val) del dbside[key] if childTableName == 'rhnErrataPackage': continue # Anything else should be deleted for key, val in dbside.items(): # Send only the 
PKs hash = {} for k in pks: hash[k] = val[k] # XXX change to a default value object.diff.setLevel(4) localDML['delete'][childTableName].append(hash) object.diff.append((parentattr, None, val)) return localDML def __doDML(self, dml): self.__doDelete(dml.delete, dml.tables) self.__doUpdate(dml.update, dml.tables) self.__doInsert(dml.insert, dml.tables) return dml def __doInsert(self, hash, tables): for tname in tables: dict = hash[tname] try: self.__doInsertTable(tname, dict) except rhnSQL.SQLError: e = sys.exc_info()[1] raise_with_tb(rhnFault(54, str(e[1]), explain=0), sys.exc_info()[2]) def __doInsertTable(self, table, hash): if not hash: return tab = self.tables[table] k = list(hash.keys())[0] if not hash[k]: # Nothing to do return insertObj = TableInsert(tab, self.dbmodule) insertObj.query(hash) return def __doDelete(self, hash, tables): for tname in tables: dict = hash[tname] self.__doDeleteTable(tname, dict) def __doDeleteTable(self, tname, hash): if not hash: return tab = self.tables[tname] # Need to extract the primary keys and look for items to delete only # in those columns, the other ones may not be populated # See bug 154216 for details (misa 2005-04-08) pks = tab.getPK() k = pks[0] if not hash[k]: # Nothing to do return deleteObj = TableDelete(tab, self.dbmodule) deleteObj.query(hash) def __doUpdate(self, hash, tables): for tname in tables: dict = hash[tname] self.__doUpdateTable(tname, dict) def __doUpdateTable(self, tname, hash): if not hash: return tab = self.tables[tname] # See bug 154216 for details (misa 2005-04-08) pks = tab.getPK() k = pks[0] if not hash[k]: # Nothing to do return updateObj = TableUpdate(tab, self.dbmodule) updateObj.query(hash) return def __lookupObjectCollection(self, objColl, tableName, ignore_missing=0): # Looks the object up in tableName, and fills in its id lookup = TableLookup(self.tables[tableName], self.dbmodule) for object in objColl: if object.ignored: # Skip it continue h = lookup.query(object) row = h.fetchone_dict() 
if not row: if ignore_missing: # Ignore the missing objects object.ignored = 1 continue # Invalid raise InvalidPackageError(object, "Could not find object %s in table %s" % (object, tableName)) object.id = row['id'] def __getChildTablesInfo(self, id, tables, queries): # Returns a hash with the information about package id from tables result = {} for tname in tables: tableobj = self.tables[tname] fields = tableobj.getFields() q = queries[tname] q.execute(id=id) hash = {} while 1: row = q.fetchone_dict() if not row: break pks = tableobj.getPK() key = [] for f in pks: value = row[f] datatype = fields[f] value = sanitizeValue(value, datatype) key.append(value) val = {} for f, datatype in fields.items(): value = row[f] value = sanitizeValue(value, datatype) val[f] = value hash[tuple(key)] = val result[tname] = hash return result def __populateTable(self, table_name, data, delete_extra=1): table = self.tables[table_name] fields = table.getFields() # Build a hash with the incoming data incoming = {} for entry in data: t = hash2tuple(entry, fields) incoming[t] = entry # Build the query to dump the table's contents h = self.dbmodule.prepare("select * from %s" % table.name) h.execute() deletes = {} inserts = {} for f in fields.keys(): inserts[f] = [] deletes[f] = [] while 1: row = h.fetchone_dict() if not row: break t = hash2tuple(row, fields) if t in incoming: # we already have this value uploaded del incoming[t] continue addHash(deletes, row) for row in incoming.values(): addHash(inserts, row) if delete_extra: self.__doDeleteTable(table.name, deletes) self.__doInsertTable(table.name, inserts) # This function does a diff on the specified table name for the presented # data, using pk_fields as unique fields def _do_diff(self, data, table_name, uq_fields, fields): first_uq_col = uq_fields[0] uq_col_values = {} all_fields = uq_fields + fields for entry in data: for f in all_fields: if f not in entry: raise Exception("Missing field %s" % f) val = entry[first_uq_col] if val not 
in uq_col_values: valhash = {} uq_col_values[val] = valhash else: valhash = uq_col_values[val] key = build_key(entry, uq_fields) valhash[key] = entry query = "select %s from %s where %s = :%s" % ( string.join(all_fields, ", "), table_name, first_uq_col, first_uq_col, ) h = self.dbmodule.prepare(query) updates = [] deletes = [] for val, valhash in uq_col_values.items(): params = {first_uq_col: val} h.execute(**params) while 1: row = h.fetchone_dict() if not row: break key = build_key(row, uq_fields) if key not in valhash: # Need to delete this one deletes.append(row) continue entry = valhash[key] for f in fields: if entry[f] != row[f]: # Different, we have to update break else: # Same value, remove it from valhash del valhash[key] continue # Need to update updates.append(entry) inserts = [] list(map(inserts.extend, [list(x.values()) for x in list(uq_col_values.values())])) if deletes: params = transpose(deletes, uq_fields) query = "delete from %s where %s" % ( table_name, string.join(["%s = :%s" % (x, x) for x in uq_fields], ' and '), ) h = self.dbmodule.prepare(query) h.executemany(**params) if inserts: params = transpose(inserts, all_fields) query = "insert into %s (%s) values (%s)" % ( table_name, string.join(all_fields, ', '), string.join([":" + x for x in all_fields], ', '), ) h = self.dbmodule.prepare(query) h.executemany(**params) if updates: params = transpose(updates, all_fields) query = "update % set %s where %s" % ( table_name, string.join(["%s = :s" + (x, x) for x in fields], ', '), string.join(["%s = :%s" % (x, x) for x in uq_fields], ' and '), ) h = self.dbmodule.prepare(query) h.executemany(**params) def validate_pks(self): # If nevra is enabled use checksum as primary key tbs = self.tables['rhnPackage'] if not CFG.ENABLE_NVREA: # remove checksum from a primary key if nevra is disabled. 
if 'checksum_id' in tbs.pk: tbs.pk.remove('checksum_id') # Returns a tuple for the hash's values def build_key(hash, fields): return tuple(map(lambda x, h=hash: h[x], fields)) def transpose(arrhash, fields): params = {} for f in fields: params[f] = [] for h in arrhash: for f in fields: params[f].append(h[f]) return params def hash2tuple(hash, fields): # Converts the hash into a tuple, with the fields ordered as presented in # the fields list result = [] for fname, ftype in fields.items(): result.append(sanitizeValue(hash[fname], ftype)) return tuple(result) class DML: def __init__(self, tables, tableHash): self.update = {} self.delete = {} self.insert = {} self.tables = tables for k in ('insert', 'update', 'delete'): dmlhash = {} setattr(self, k, dmlhash) for tname in tables: hash = {} for f in tableHash[tname].getFields().keys(): hash[f] = [] dmlhash[tname] = hash def _buildDatabaseValue(row, fieldsHash): # Returns a dictionary containing the interesting values of the row, # sanitized dict = {} for f, datatype in fieldsHash.items(): dict[f] = sanitizeValue(row[f], datatype) return dict def _buildExternalValue(dict, entry, tableObj): # updates dict with values from entry # entry is a hash-like object (non-db) for f, datatype in tableObj.getFields().items(): if f in dict: # initialized somewhere else continue # Get the attribute's name attr = tableObj.getObjectAttribute(f) # Sanitize the value according to its datatype if attr not in entry: entry[attr] = None dict[f] = sanitizeValue(entry[attr], datatype) def computeDiff(hash1, hash2, diffHash, diffobj, prefix=None): # Compare if the key-values of hash1 are a subset of hash2's difference = 0 ignore_keys = ['last_modified'] for k, v in hash1.items(): if k in ignore_keys: # Dont decide the diff based on last_modified # as this obviously wont match due to our db # other triggers. 
continue if hash2[k] == v: # Same values continue if k == 'installed_size' and v is not None and hash2[k] is None: # Skip installed_size which might not have been populated continue if k in diffHash: diffval = diffHash[k] if diffval == 0: # Completely ignore this key continue else: diffval = diffobj.level + 1 if prefix: diffkey = prefix + '::' + k else: diffkey = k diffobj.setLevel(diffval) diffobj.append((diffkey, v, hash2[k])) difference = diffobj.level return difference<|fim▁end|>
def processSourcePackages(self, packages, uploadForce=0, ignoreUploaded=0, forceVerify=0, transactional=0): # Insert/update the packages
<|file_name|>rax.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # (c) 2013, Jesse Keating <[email protected],<|fim▁hole|># # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. """ Rackspace Cloud Inventory Authors: Jesse Keating <[email protected], Paul Durivage <[email protected]>, Matt Martz <[email protected]> Description: Generates inventory that Ansible can understand by making API request to Rackspace Public Cloud API When run against a specific host, this script returns variables similar to: rax_os-ext-sts_task_state rax_addresses rax_links rax_image rax_os-ext-sts_vm_state rax_flavor rax_id rax_rax-bandwidth_bandwidth rax_user_id rax_os-dcf_diskconfig rax_accessipv4 rax_accessipv6 rax_progress rax_os-ext-sts_power_state rax_metadata rax_status rax_updated rax_hostid rax_name rax_created rax_tenant_id rax_loaded Configuration: rax.py can be configured using a rax.ini file or via environment variables. The rax.ini file should live in the same directory along side this script. The section header for configuration values related to this inventory plugin is [rax] [rax] creds_file = ~/.rackspace_cloud_credentials regions = IAD,ORD,DFW env = prod meta_prefix = meta access_network = public access_ip_version = 4 Each of these configurations also has a corresponding environment variable. An environment variable will override a configuration file value. 
creds_file: Environment Variable: RAX_CREDS_FILE An optional configuration that points to a pyrax-compatible credentials file. If not supplied, rax.py will look for a credentials file at ~/.rackspace_cloud_credentials. It uses the Rackspace Python SDK, and therefore requires a file formatted per the SDK's specifications. https://github.com/rackspace/pyrax/blob/master/docs/getting_started.md regions: Environment Variable: RAX_REGION An optional environment variable to narrow inventory search scope. If used, needs a value like ORD, DFW, SYD (a Rackspace datacenter) and optionally accepts a comma-separated list. environment: Environment Variable: RAX_ENV A configuration that will use an environment as configured in ~/.pyrax.cfg, see https://github.com/rackspace/pyrax/blob/master/docs/getting_started.md meta_prefix: Environment Variable: RAX_META_PREFIX Default: meta A configuration that changes the prefix used for meta key/value groups. For compatibility with ec2.py set to "tag" access_network: Environment Variable: RAX_ACCESS_NETWORK Default: public A configuration that will tell the inventory script to use a specific server network to determine the ansible_ssh_host value. If no address is found, ansible_ssh_host will not be set. Accepts a comma-separated list of network names, the first found wins. access_ip_version: Environment Variable: RAX_ACCESS_IP_VERSION Default: 4 A configuration related to "access_network" that will attempt to determine the ansible_ssh_host value for either IPv4 or IPv6. If no address is found, ansible_ssh_host will not be set. Acceptable values are: 4 or 6. Values other than 4 or 6 will be ignored, and 4 will be used. Accepts a comma-separated list, the first found wins. 
Examples: List server instances $ RAX_CREDS_FILE=~/.raxpub rax.py --list List servers in ORD datacenter only $ RAX_CREDS_FILE=~/.raxpub RAX_REGION=ORD rax.py --list List servers in ORD and DFW datacenters $ RAX_CREDS_FILE=~/.raxpub RAX_REGION=ORD,DFW rax.py --list Get server details for server named "server.example.com" $ RAX_CREDS_FILE=~/.raxpub rax.py --host server.example.com Use the instance private IP to connect (instead of public IP) $ RAX_CREDS_FILE=~/.raxpub RAX_ACCESS_NETWORK=private rax.py --list """ import os import re import sys import argparse import warnings import collections from ansible.module_utils.six import iteritems from ansible.module_utils.six.moves import configparser as ConfigParser import json try: import pyrax from pyrax.utils import slugify except ImportError: sys.exit('pyrax is required for this module') from time import time from ansible.constants import get_config from ansible.module_utils.parsing.convert_bool import boolean from ansible.module_utils.six import text_type NON_CALLABLES = (text_type, str, bool, dict, int, list, type(None)) def load_config_file(): p = ConfigParser.ConfigParser() config_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'rax.ini') try: p.read(config_file) except ConfigParser.Error: return None else: return p def rax_slugify(value): return 'rax_%s' % (re.sub(r'[^\w-]', '_', value).lower().lstrip('_')) def to_dict(obj): instance = {} for key in dir(obj): value = getattr(obj, key) if isinstance(value, NON_CALLABLES) and not key.startswith('_'): key = rax_slugify(key) instance[key] = value return instance def host(regions, hostname): hostvars = {} for region in regions: # Connect to the region cs = pyrax.connect_to_cloudservers(region=region) for server in cs.servers.list(): if server.name == hostname: for key, value in to_dict(server).items(): hostvars[key] = value # And finally, add an IP address hostvars['ansible_ssh_host'] = server.accessIPv4 print(json.dumps(hostvars, sort_keys=True, 
indent=4)) def _list_into_cache(regions): groups = collections.defaultdict(list) hostvars = collections.defaultdict(dict) images = {} cbs_attachments = collections.defaultdict(dict) prefix = get_config(p, 'rax', 'meta_prefix', 'RAX_META_PREFIX', 'meta') try: # Ansible 2.3+ networks = get_config(p, 'rax', 'access_network', 'RAX_ACCESS_NETWORK', 'public', value_type='list') except TypeError: # Ansible 2.2.x and below # pylint: disable=unexpected-keyword-arg networks = get_config(p, 'rax', 'access_network', 'RAX_ACCESS_NETWORK', 'public', islist=True) try: try: # Ansible 2.3+ ip_versions = map(int, get_config(p, 'rax', 'access_ip_version', 'RAX_ACCESS_IP_VERSION', 4, value_type='list')) except TypeError: # Ansible 2.2.x and below # pylint: disable=unexpected-keyword-arg ip_versions = map(int, get_config(p, 'rax', 'access_ip_version', 'RAX_ACCESS_IP_VERSION', 4, islist=True)) except Exception: ip_versions = [4] else: ip_versions = [v for v in ip_versions if v in [4, 6]] if not ip_versions: ip_versions = [4] # Go through all the regions looking for servers for region in regions: # Connect to the region cs = pyrax.connect_to_cloudservers(region=region) if cs is None: warnings.warn( 'Connecting to Rackspace region "%s" has caused Pyrax to ' 'return None. Is this a valid region?' 
% region, RuntimeWarning) continue for server in cs.servers.list(): # Create a group on region groups[region].append(server.name) # Check if group metadata key in servers' metadata group = server.metadata.get('group') if group: groups[group].append(server.name) for extra_group in server.metadata.get('groups', '').split(','): if extra_group: groups[extra_group].append(server.name) # Add host metadata for key, value in to_dict(server).items(): hostvars[server.name][key] = value hostvars[server.name]['rax_region'] = region for key, value in iteritems(server.metadata): groups['%s_%s_%s' % (prefix, key, value)].append(server.name) groups['instance-%s' % server.id].append(server.name) groups['flavor-%s' % server.flavor['id']].append(server.name) # Handle boot from volume if not server.image: if not cbs_attachments[region]: cbs = pyrax.connect_to_cloud_blockstorage(region) for vol in cbs.list(): if boolean(vol.bootable, strict=False): for attachment in vol.attachments: metadata = vol.volume_image_metadata server_id = attachment['server_id'] cbs_attachments[region][server_id] = { 'id': metadata['image_id'], 'name': slugify(metadata['image_name']) } image = cbs_attachments[region].get(server.id) if image: server.image = {'id': image['id']} hostvars[server.name]['rax_image'] = server.image hostvars[server.name]['rax_boot_source'] = 'volume' images[image['id']] = image['name'] else: hostvars[server.name]['rax_boot_source'] = 'local' try: imagegroup = 'image-%s' % images[server.image['id']] groups[imagegroup].append(server.name) groups['image-%s' % server.image['id']].append(server.name) except KeyError: try: image = cs.images.get(server.image['id']) except cs.exceptions.NotFound: groups['image-%s' % server.image['id']].append(server.name) else: images[image.id] = image.human_id groups['image-%s' % image.human_id].append(server.name) groups['image-%s' % server.image['id']].append(server.name) # And finally, add an IP address ansible_ssh_host = None # use accessIPv[46] instead 
of looping address for 'public' for network_name in networks: if ansible_ssh_host: break if network_name == 'public': for version_name in ip_versions: if ansible_ssh_host: break if version_name == 6 and server.accessIPv6: ansible_ssh_host = server.accessIPv6 elif server.accessIPv4: ansible_ssh_host = server.accessIPv4 if not ansible_ssh_host: addresses = server.addresses.get(network_name, []) for address in addresses: for version_name in ip_versions: if ansible_ssh_host: break if address.get('version') == version_name: ansible_ssh_host = address.get('addr') break if ansible_ssh_host: hostvars[server.name]['ansible_ssh_host'] = ansible_ssh_host if hostvars: groups['_meta'] = {'hostvars': hostvars} with open(get_cache_file_path(regions), 'w') as cache_file: json.dump(groups, cache_file) def get_cache_file_path(regions): regions_str = '.'.join([reg.strip().lower() for reg in regions]) ansible_tmp_path = os.path.join(os.path.expanduser("~"), '.ansible', 'tmp') if not os.path.exists(ansible_tmp_path): os.makedirs(ansible_tmp_path) return os.path.join(ansible_tmp_path, 'ansible-rax-%s-%s.cache' % ( pyrax.identity.username, regions_str)) def _list(regions, refresh_cache=True): cache_max_age = int(get_config(p, 'rax', 'cache_max_age', 'RAX_CACHE_MAX_AGE', 600)) if (not os.path.exists(get_cache_file_path(regions)) or refresh_cache or (time() - os.stat(get_cache_file_path(regions))[-1]) > cache_max_age): # Cache file doesn't exist or older than 10m or refresh cache requested _list_into_cache(regions) with open(get_cache_file_path(regions), 'r') as cache_file: groups = json.load(cache_file) print(json.dumps(groups, sort_keys=True, indent=4)) def parse_args(): parser = argparse.ArgumentParser(description='Ansible Rackspace Cloud ' 'inventory module') group = parser.add_mutually_exclusive_group(required=True) group.add_argument('--list', action='store_true', help='List active servers') group.add_argument('--host', help='List details about the specific host') 
parser.add_argument('--refresh-cache', action='store_true', default=False, help=('Force refresh of cache, making API requests to' 'RackSpace (default: False - use cache files)')) return parser.parse_args() def setup(): default_creds_file = os.path.expanduser('~/.rackspace_cloud_credentials') env = get_config(p, 'rax', 'environment', 'RAX_ENV', None) if env: pyrax.set_environment(env) keyring_username = pyrax.get_setting('keyring_username') # Attempt to grab credentials from environment first creds_file = get_config(p, 'rax', 'creds_file', 'RAX_CREDS_FILE', None) if creds_file is not None: creds_file = os.path.expanduser(creds_file) else: # But if that fails, use the default location of # ~/.rackspace_cloud_credentials if os.path.isfile(default_creds_file): creds_file = default_creds_file elif not keyring_username: sys.exit('No value in environment variable %s and/or no ' 'credentials file at %s' % ('RAX_CREDS_FILE', default_creds_file)) identity_type = pyrax.get_setting('identity_type') pyrax.set_setting('identity_type', identity_type or 'rackspace') region = pyrax.get_setting('region') try: if keyring_username: pyrax.keyring_auth(keyring_username, region=region) else: pyrax.set_credential_file(creds_file, region=region) except Exception as e: sys.exit("%s: %s" % (e, e.message)) regions = [] if region: regions.append(region) else: try: # Ansible 2.3+ region_list = get_config(p, 'rax', 'regions', 'RAX_REGION', 'all', value_type='list') except TypeError: # Ansible 2.2.x and below # pylint: disable=unexpected-keyword-arg region_list = get_config(p, 'rax', 'regions', 'RAX_REGION', 'all', islist=True) for region in region_list: region = region.strip().upper() if region == 'ALL': regions = pyrax.regions break elif region not in pyrax.regions: sys.exit('Unsupported region %s' % region) elif region not in regions: regions.append(region) return regions def main(): args = parse_args() regions = setup() if args.list: _list(regions, refresh_cache=args.refresh_cache) elif 
args.host: host(regions, args.host) sys.exit(0) p = load_config_file() if __name__ == '__main__': main()<|fim▁end|>
# Paul Durivage <[email protected]>, # Matt Martz <[email protected]> # # This file is part of Ansible.
<|file_name|>error_handling.py<|end_file_name|><|fim▁begin|>from itty import * @error(500) def my_great_500(request, exception): html_output = """ <html> <head> <title>Application Error! OH NOES!</title> </head> <body> <h1>OH NOES!</h1> <p>Yep, you broke it.</p> <p>Exception: %s</p> </body> </html> """ % exception response = Response(html_output, status=500) return response.send(request._start_response) @get('/hello') def hello(request): return 'Hello errors!' @get('/test_404') def test_404(request): raise NotFound('Not here, sorry.') return 'This should never happen.' @get('/test_500') def test_500(request): raise AppError('Oops.') return 'This should never happen either.' @get('/test_other')<|fim▁hole|>@get('/test_403') def test_403(request): raise Forbidden('No soup for you!') return 'This should never happen either either either.' @get('/test_redirect') def test_redirect(request): raise Redirect('/hello') run_itty()<|fim▁end|>
def test_other(request): raise RuntimeError('Oops.') return 'This should never happen either either.'
<|file_name|>kendo.culture.zh-SG.js<|end_file_name|><|fim▁begin|>module.exports = /******/ (function(modules) { // webpackBootstrap /******/ // The module cache /******/ var installedModules = {}; /******/ // The require function /******/ function __webpack_require__(moduleId) { /******/ // Check if module is in cache /******/ if(installedModules[moduleId]) /******/ return installedModules[moduleId].exports; /******/ // Create a new module (and put it into the cache) /******/ var module = installedModules[moduleId] = { /******/ exports: {}, /******/ id: moduleId, /******/ loaded: false /******/ }; /******/ // Execute the module function /******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); /******/ // Flag the module as loaded /******/ module.loaded = true; /******/ // Return the exports of the module /******/ return module.exports; /******/ } /******/ // expose the modules object (__webpack_modules__) /******/ __webpack_require__.m = modules; /******/ // expose the module cache<|fim▁hole|>/******/ __webpack_require__.p = ""; /******/ // Load entry module and return exports /******/ return __webpack_require__(0); /******/ }) /************************************************************************/ /******/ ({ /***/ 0: /***/ (function(module, exports, __webpack_require__) { __webpack_require__(430); module.exports = __webpack_require__(430); /***/ }), /***/ 430: /***/ (function(module, exports) { (function( window, undefined ) { kendo.cultures["zh-SG"] = { name: "zh-SG", numberFormat: { pattern: ["-n"], decimals: 2, ",": ",", ".": ".", groupSize: [3], percent: { pattern: ["-n%","n%"], decimals: 2, ",": ",", ".": ".", groupSize: [3], symbol: "%" }, currency: { name: "Singapore Dollar", abbr: "SGD", pattern: ["($n)","$n"], decimals: 2, ",": ",", ".": ".", groupSize: [3], symbol: "$" } }, calendars: { standard: { days: { names: ["星期日","星期一","星期二","星期三","星期四","星期五","星期六"], namesAbbr: ["周日","周一","周二","周三","周四","周五","周六"], 
namesShort: ["日","一","二","三","四","五","六"] }, months: { names: ["一月","二月","三月","四月","五月","六月","七月","八月","九月","十月","十一月","十二月"], namesAbbr: ["一月","二月","三月","四月","五月","六月","七月","八月","九月","十月","十一月","十二月"] }, AM: ["上午","上午","上午"], PM: ["下午","下午","下午"], patterns: { d: "d/M/yyyy", D: "yyyy'年'M'月'd'日'", F: "yyyy'年'M'月'd'日' tt h:mm:ss", g: "d/M/yyyy tt h:mm", G: "d/M/yyyy tt h:mm:ss", m: "M'月'd'日'", M: "M'月'd'日'", s: "yyyy'-'MM'-'dd'T'HH':'mm':'ss", t: "tt h:mm", T: "tt h:mm:ss", u: "yyyy'-'MM'-'dd HH':'mm':'ss'Z'", y: "yyyy'年'M'月'", Y: "yyyy'年'M'月'" }, "/": "/", ":": ":", firstDay: 0 } } } })(this); /***/ }) /******/ });<|fim▁end|>
/******/ __webpack_require__.c = installedModules; /******/ // __webpack_public_path__
<|file_name|>blocking.rs<|end_file_name|><|fim▁begin|>// Copyright 2021 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Private submodules holding the implementation; their public items are
// surfaced to callers through the `pub use` re-exports later in this file.
mod block_on;
mod pool;

<|fim▁hole|><|fim▁end|>
// Flatten the module's API: re-export all public items of `block_on` and
// `pool` so callers can reference them directly on this module instead of
// naming the submodules.
pub use block_on::*;
pub use pool::*;
<|file_name|>axle.cpp<|end_file_name|><|fim▁begin|>/*************************************************************************** file : axle.cpp created : Sun Mar 19 00:05:09 CET 2000 copyright : (C) 2000 by Eric Espie email : [email protected] version : $Id$ ***************************************************************************/ /*************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * ***************************************************************************/ #include "sim.h" static const char *AxleSect[2] = {SECT_FRNTAXLE, SECT_REARAXLE}; void SimAxleConfig(tCar *car, int index) { void *hdle = car->params; tdble rollCenter; tAxle *axle = &(car->axle[index]); axle->xpos = GfParmGetNum(hdle, AxleSect[index], PRM_XPOS, (char*)NULL, 0); axle->I = GfParmGetNum(hdle, AxleSect[index], PRM_INERTIA, (char*)NULL, 0.15); rollCenter = GfParmGetNum(hdle, AxleSect[index], PRM_ROLLCENTER, (char*)NULL, 0.15); car->wheel[index*2].rollCenter = car->wheel[index*2+1].rollCenter = rollCenter; if (index == 0) { SimSuspConfig(hdle, SECT_FRNTARB, &(axle->arbSusp), 0, 0); } else { SimSuspConfig(hdle, SECT_REARARB, &(axle->arbSusp), 0, 0); } car->wheel[index*2].feedBack.I += axle->I / 2.0; car->wheel[index*2+1].feedBack.I += axle->I / 2.0; } void SimAxleUpdate(tCar *car, int index) { tAxle *axle = &(car->axle[index]); tdble str, stl, sgn; str = car->wheel[index*2].susp.x; stl = car->wheel[index*2+1].susp.x;<|fim▁hole|> SimSuspCheckIn(&(axle->arbSusp)); SimSuspUpdate(&(axle->arbSusp)); #else axle->arbSusp.x = fabs(stl - str); if (axle->arbSusp.x > axle->arbSusp.spring.xMax) { axle->arbSusp.x = axle->arbSusp.spring.xMax; } axle->arbSusp.force = - axle->arbSusp.x *axle->arbSusp.spring.K; 
//axle->arbSusp.force = pow (axle->arbSusp.x *axle->arbSusp.spring.K , 4.0); #endif car->wheel[index*2].axleFz = sgn * axle->arbSusp.force; car->wheel[index*2+1].axleFz = - sgn * axle->arbSusp.force; // printf ("%f %f %f ", stl, str, axle->arbSusp.force); // if (index==0) { // printf ("# SUSP\n"); // } }<|fim▁end|>
sgn = SIGN(stl - str); #if 0 axle->arbSusp.x = fabs(stl - str);
<|file_name|>buster-test.js<|end_file_name|><|fim▁begin|>/** @license MIT License (c) copyright B Cavalier & J Hann */ /** * when * A lightweight CommonJS Promises/A and when() implementation * * when is part of the cujo.js family of libraries (http://cujojs.com/) * * Licensed under the MIT License at: * http://www.opensource.org/licenses/mit-license.php * * @version 1.3.0 */ (function(define) { define(function() { var freeze, reduceArray, slice, undef; // // Public API // when.defer = defer; when.reject = reject; when.isPromise = isPromise; when.all = all; when.some = some; when.any = any; when.map = map; when.reduce = reduce; when.chain = chain; /** Object.freeze */ freeze = Object.freeze || function(o) { return o; }; /** * Trusted Promise constructor. A Promise created from this constructor is * a trusted when.js promise. Any other duck-typed promise is considered * untrusted. * * @constructor */ function Promise() {} Promise.prototype = freeze({ always: function(alwaysback, progback) { return this.then(alwaysback, alwaysback, progback); }, otherwise: function(errback) { return this.then(undef, errback); } }); /** * Create an already-resolved promise for the supplied value * @private * * @param value anything * @return {Promise} */ function resolved(value) { var p = new Promise(); p.then = function(callback) { try { return promise(callback ? callback(value) : value); } catch(e) { return rejected(e); } }; return freeze(p); } /** * Create an already-rejected {@link Promise} with the supplied * rejection reason. * @private * * @param reason rejection reason * @return {Promise} */ function rejected(reason) { var p = new Promise(); p.then = function(callback, errback) { try { return errback ? promise(errback(reason)) : rejected(reason); } catch(e) { return rejected(e); } }; return freeze(p); } /** * Returns a rejected promise for the supplied promiseOrValue. If * promiseOrValue is a value, it will be the rejection value of the * returned promise. 
If promiseOrValue is a promise, its * completion value will be the rejected value of the returned promise * * @param promiseOrValue {*} the rejected value of the returned {@link Promise} * * @return {Promise} rejected {@link Promise} */ function reject(promiseOrValue) { return when(promiseOrValue, function(value) { return rejected(value); }); } /** * Creates a new, CommonJS compliant, Deferred with fully isolated * resolver and promise parts, either or both of which may be given out * safely to consumers. * The Deferred itself has the full API: resolve, reject, progress, and * then. The resolver has resolve, reject, and progress. The promise * only has then. * * @memberOf when * @function * * @returns {Deferred} */ function defer() { var deferred, promise, listeners, progressHandlers, _then, _progress, complete; listeners = []; progressHandlers = []; /** * Pre-resolution then() that adds the supplied callback, errback, and progback * functions to the registered listeners * * @private * * @param [callback] {Function} resolution handler * @param [errback] {Function} rejection handler * @param [progback] {Function} progress handler * * @throws {Error} if any argument is not null, undefined, or a Function */ _then = function unresolvedThen(callback, errback, progback) { var deferred = defer(); listeners.push(function(promise) { promise.then(callback, errback) .then(deferred.resolve, deferred.reject, deferred.progress); }); progback && progressHandlers.push(progback); return deferred.promise; }; /** * Registers a handler for this {@link Deferred}'s {@link Promise}. Even though all arguments * are optional, each argument that *is* supplied must be null, undefined, or a Function. * Any other value will cause an Error to be thrown. 
* * @memberOf Promise * * @param [callback] {Function} resolution handler * @param [errback] {Function} rejection handler * @param [progback] {Function} progress handler * * @throws {Error} if any argument is not null, undefined, or a Function */ function then(callback, errback, progback) { return _then(callback, errback, progback); } /** * Resolves this {@link Deferred}'s {@link Promise} with val as the * resolution value. * * @memberOf Resolver * * @param val anything */ function resolve(val) { complete(resolved(val)); } /** * Rejects this {@link Deferred}'s {@link Promise} with err as the * reason. * * @memberOf Resolver * * @param err anything */ function reject(err) { complete(rejected(err)); } /** * @private * @param update */ _progress = function(update) { var progress, i = 0; while (progress = progressHandlers[i++]) progress(update); }; /** * Emits a progress update to all progress observers registered with * this {@link Deferred}'s {@link Promise} * * @memberOf Resolver * * @param update anything */ function progress(update) { _progress(update); } /** * Transition from pre-resolution state to post-resolution state, notifying * all listeners of the resolution or rejection * * @private * * @param completed {Promise} the completed value of this deferred */ complete = function(completed) { var listener, i = 0; // Replace _then with one that directly notifies with the result. _then = completed.then; // Replace complete so that this Deferred can only be completed // once. Also Replace _progress, so that subsequent attempts to issue // progress throw. 
complete = _progress = function alreadyCompleted() { // TODO: Consider silently returning here so that parties who // have a reference to the resolver cannot tell that the promise // has been resolved using try/catch throw new Error("already completed"); }; // Free progressHandlers array since we'll never issue progress events // for this promise again now that it's completed progressHandlers = undef; // Notify listeners // Traverse all listeners registered directly with this Deferred while (listener = listeners[i++]) { listener(completed); } listeners = []; }; /** * The full Deferred object, with both {@link Promise} and {@link Resolver} * parts * @class Deferred * @name Deferred */ deferred = {}; // Promise and Resolver parts // Freeze Promise and Resolver APIs promise = new Promise(); promise.then = deferred.then = then; /** * The {@link Promise} for this {@link Deferred} * @memberOf Deferred * @name promise * @type {Promise} */ deferred.promise = freeze(promise); /** * The {@link Resolver} for this {@link Deferred} * @memberOf Deferred * @name resolver * @class Resolver */ deferred.resolver = freeze({ resolve: (deferred.resolve = resolve), reject: (deferred.reject = reject), progress: (deferred.progress = progress) }); return deferred; } /** * Determines if promiseOrValue is a promise or not. Uses the feature * test from http://wiki.commonjs.org/wiki/Promises/A to determine if * promiseOrValue is a promise. * * @param promiseOrValue anything * * @returns {Boolean} true if promiseOrValue is a {@link Promise} */ function isPromise(promiseOrValue) { return promiseOrValue && typeof promiseOrValue.then === 'function'; } /** * Register an observer for a promise or immediate value. * * @function * @name when * @namespace * * @param promiseOrValue anything * @param {Function} [callback] callback to be called when promiseOrValue is * successfully resolved. If promiseOrValue is an immediate value, callback * will be invoked immediately. 
 * @param {Function} [errback] callback to be called when promiseOrValue is
 * rejected.
 * @param {Function} [progressHandler] callback to be called when progress updates
 * are issued for promiseOrValue.
 *
 * @returns {Promise} a new {@link Promise} that will complete with the return
 * value of callback or errback or the completion value of promiseOrValue if
 * callback and/or errback is not supplied.
 */
function when(promiseOrValue, callback, errback, progressHandler) {
	// Get a promise for the input promiseOrValue
	// See promise()
	var trustedPromise = promise(promiseOrValue);

	// Register promise handlers
	return trustedPromise.then(callback, errback, progressHandler);
}

/**
 * Returns promiseOrValue if promiseOrValue is a {@link Promise}, a new Promise if
 * promiseOrValue is a foreign promise, or a new, already-resolved {@link Promise}
 * whose resolution value is promiseOrValue if promiseOrValue is an immediate value.
 *
 * Note that this function is not safe to export since it will return its
 * input when promiseOrValue is a {@link Promise}
 *
 * @private
 *
 * @param promiseOrValue anything
 *
 * @returns Guaranteed to return a trusted Promise. If promiseOrValue is a when.js {@link Promise}
 * returns promiseOrValue, otherwise, returns a new, already-resolved, when.js {@link Promise}
 * whose resolution value is:
 * * the resolution value of promiseOrValue if it's a foreign promise, or
 * * promiseOrValue if it's a value
 */
function promise(promiseOrValue) {
	var promise, deferred;

	if(promiseOrValue instanceof Promise) {
		// It's a when.js promise, so we trust it
		promise = promiseOrValue;
	} else {
		// It's not a when.js promise. Check to see if it's a foreign promise
		// or a value.
		// NOTE: defer() is defined earlier in this module (outside this chunk).
		deferred = defer();
		if(isPromise(promiseOrValue)) {
			// It's a compliant promise, but we don't know where it came from,
			// so we don't trust its implementation entirely. Introduce a trusted
			// middleman when.js promise
			// IMPORTANT: This is the only place when.js should ever call .then() on
			// an untrusted promise.
			promiseOrValue.then(deferred.resolve, deferred.reject, deferred.progress);
			promise = deferred.promise;
		} else {
			// It's a value, not a promise. Create an already-resolved promise
			// for it.
			deferred.resolve(promiseOrValue);
			promise = deferred.promise;
		}
	}

	return promise;
}

/**
 * Return a promise that will resolve when howMany of the supplied promisesOrValues
 * have resolved. The resolution value of the returned promise will be an array of
 * length howMany containing the resolutions values of the triggering promisesOrValues.
 *
 * @memberOf when
 *
 * @param promisesOrValues {Array} array of anything, may contain a mix
 * of {@link Promise}s and values
 * @param howMany
 * @param [callback]
 * @param [errback]
 * @param [progressHandler]
 *
 * @returns {Promise}
 */
function some(promisesOrValues, howMany, callback, errback, progressHandler) {

	checkCallbacks(2, arguments);

	return when(promisesOrValues, function(promisesOrValues) {

		var toResolve, results, ret, deferred, resolver, rejecter, handleProgress,
			len, i;

		// >>> 0 coerces length to a uint32, mirroring ES5 array semantics
		len = promisesOrValues.length >>> 0;

		toResolve = Math.max(0, Math.min(howMany, len));
		results = [];
		deferred = defer();
		ret = when(deferred, callback, errback, progressHandler);

		// Wrapper so that resolver can be replaced
		function resolve(val) {
			resolver(val);
		}

		// Wrapper so that rejecter can be replaced
		function reject(err) {
			rejecter(err);
		}

		// Wrapper so that progress can be replaced
		function progress(update) {
			handleProgress(update);
		}

		// Swaps all three handlers for no-ops so late-settling promises
		// beyond the first `howMany` are ignored.
		function complete() {
			resolver = rejecter = handleProgress = noop;
		}

		// No items in the input, resolve immediately
		if (!toResolve) {
			deferred.resolve(results);

		} else {
			// Resolver for promises. Captures the value and resolves
			// the returned promise when toResolve reaches zero.
			// Overwrites resolver var with a noop once promise has
			// be resolved to cover case where n < promises.length
			resolver = function(val) {
				// This orders the values based on promise resolution order
				// Another strategy would be to use the original position of
				// the corresponding promise.
				results.push(val);

				if (!--toResolve) {
					complete();
					deferred.resolve(results);
				}
			};

			// Rejecter for promises. Rejects returned promise
			// immediately, and overwrites rejecter var with a noop
			// once promise to cover case where n < promises.length.
			// TODO: Consider rejecting only when N (or promises.length - N?)
			// promises have been rejected instead of only one?
			rejecter = function(err) {
				complete();
				deferred.reject(err);
			};

			handleProgress = deferred.progress;

			// TODO: Replace while with forEach
			for(i = 0; i < len; ++i) {
				if(i in promisesOrValues) {
					when(promisesOrValues[i], resolve, reject, progress);
				}
			}
		}

		return ret;
	});
}

/**
 * Return a promise that will resolve only once all the supplied promisesOrValues
 * have resolved. The resolution value of the returned promise will be an array
 * containing the resolution values of each of the promisesOrValues.
 *
 * @memberOf when
 *
 * @param promisesOrValues {Array|Promise} array of anything, may contain a mix
 * of {@link Promise}s and values
 * @param [callback] {Function}
 * @param [errback] {Function}
 * @param [progressHandler] {Function}
 *
 * @returns {Promise}
 */
function all(promisesOrValues, callback, errback, progressHandler) {

	checkCallbacks(1, arguments);

	return when(promisesOrValues, function(promisesOrValues) {
		return _reduce(promisesOrValues, reduceIntoArray, []);
	}).then(callback, errback, progressHandler);
}

// Reducer used by all() (and, via _reduce, by _map): stores each resolved
// value at its index in the accumulator array and returns the accumulator.
function reduceIntoArray(current, val, i) {
	current[i] = val;
	return current;
}

/**
 * Return a promise that will resolve when any one of the supplied promisesOrValues
 * has resolved. The resolution value of the returned promise will be the resolution
 * value of the triggering promiseOrValue.
 *
 * @memberOf when
 *
 * @param promisesOrValues {Array|Promise} array of anything, may contain a mix
 * of {@link Promise}s and values
 * @param [callback] {Function}
 * @param [errback] {Function}
 * @param [progressHandler] {Function}
 *
 * @returns {Promise}
 */
function any(promisesOrValues, callback, errback, progressHandler) {

	// some() resolves with an array of length 1; unwrap it so callers of
	// any() receive the single triggering value directly.
	function unwrapSingleResult(val) {
		return callback ? callback(val[0]) : val[0];
	}

	return some(promisesOrValues, 1, unwrapSingleResult, errback, progressHandler);
}

/**
 * Traditional map function, similar to `Array.prototype.map()`, but allows
 * input to contain {@link Promise}s and/or values, and mapFunc may return
 * either a value or a {@link Promise}
 *
 * @memberOf when
 *
 * @param promise {Array|Promise} array of anything, may contain a mix
 * of {@link Promise}s and values
 * @param mapFunc {Function} mapping function mapFunc(value) which may return
 * either a {@link Promise} or value
 *
 * @returns {Promise} a {@link Promise} that will resolve to an array containing
 * the mapped output values.
 */
function map(promise, mapFunc) {
	return when(promise, function(array) {
		return _map(array, mapFunc);
	});
}

/**
 * Private map helper to map an array of promises
 * @private
 *
 * @param promisesOrValues {Array}
 * @param mapFunc {Function}
 * @return {Promise}
 */
function _map(promisesOrValues, mapFunc) {

	var results, len, i;

	// Since we know the resulting length, we can preallocate the results
	// array to avoid array expansions.
	len = promisesOrValues.length >>> 0;
	results = new Array(len);

	// Since mapFunc may be async, get all invocations of it into flight
	// asap, and then use reduce() to collect all the results
	for(i = 0; i < len; i++) {
		// `i in` skips holes in sparse arrays
		if(i in promisesOrValues)
			results[i] = when(promisesOrValues[i], mapFunc);
	}

	// Could use all() here, but that would result in another array
	// being allocated, i.e. map() would end up allocating 2 arrays
	// of size len instead of just 1. Since all() uses reduce()
	// anyway, avoid the additional allocation by calling reduce
	// directly.
	return _reduce(results, reduceIntoArray, results);
}

/**
 * Traditional reduce function, similar to `Array.prototype.reduce()`, but
 * input may contain {@link Promise}s and/or values, and reduceFunc
 * may return either a value or a {@link Promise}, *and* initialValue may
 * be a {@link Promise} for the starting value.
 *
 * @memberOf when
 *
 * @param promise {Array|Promise} array of anything, may contain a mix
 * of {@link Promise}s and values. May also be a {@link Promise} for
 * an array.
 * @param reduceFunc {Function} reduce function reduce(currentValue, nextValue, index, total),
 * where total is the total number of items being reduced, and will be the same
 * in each call to reduceFunc.
 * @param initialValue starting value, or a {@link Promise} for the starting value
 *
 * @returns {Promise} that will resolve to the final reduced value
 */
function reduce(promise, reduceFunc, initialValue) {
	// Capture reduceFunc (and initialValue, if supplied) to forward to _reduce
	var args = slice.call(arguments, 1);

	return when(promise, function(array) {
		return _reduce.apply(undef, [array].concat(args));
	});
}

/**
 * Private reduce to reduce an array of promises
 * @private
 *
 * @param promisesOrValues {Array}
 * @param reduceFunc {Function}
 * @param initialValue {*}
 * @return {Promise}
 */
function _reduce(promisesOrValues, reduceFunc, initialValue) {

	var total, args;

	total = promisesOrValues.length;

	// Skip promisesOrValues, since it will be used as 'this' in the call
	// to the actual reduce engine below.

	// Wrap the supplied reduceFunc with one that handles promises and then
	// delegates to the supplied.
	args = [
		function (current, val, i) {
			return when(current, function (c) {
				return when(val, function (value) {
					return reduceFunc(c, value, i, total);
				});
			});
		}
	];

	// Only pass initialValue through when the caller actually supplied one,
	// preserving Array.prototype.reduce's arity-sensitive behavior.
	if (arguments.length > 2) args.push(initialValue);

	return reduceArray.apply(promisesOrValues, args);
}

/**
 * Ensure that resolution of promiseOrValue will complete resolver with the completion
 * value of promiseOrValue, or instead with resolveValue if it is provided.
 *
 * @memberOf when
 *
 * @param promiseOrValue
 * @param resolver {Resolver}
 * @param [resolveValue] anything
 *
 * @returns {Promise}
 */
function chain(promiseOrValue, resolver, resolveValue) {
	// Arity check distinguishes "no override" from an explicit undefined override
	var useResolveValue = arguments.length > 2;

	return when(promiseOrValue,
		function(val) {
			if(useResolveValue) val = resolveValue;
			resolver.resolve(val);
			return val;
		},
		function(e) {
			resolver.reject(e);
			return rejected(e);
		},
		resolver.progress
	);
}

//
// Utility functions
//

/**
 * Helper that checks arrayOfCallbacks to ensure that each element is either
 * a function, or null or undefined.
 *
 * @private
 *
 * @param arrayOfCallbacks {Array} array to check
 * @throws {Error} if any element of arrayOfCallbacks is something other than
 * a Functions, null, or undefined.
 */
function checkCallbacks(start, arrayOfCallbacks) {
	var arg, i = arrayOfCallbacks.length;
	while(i > start) {
		arg = arrayOfCallbacks[--i];
		if (arg != null && typeof arg != 'function') throw new Error('callback is not a function');
	}
}

/**
 * No-Op function used in method replacement
 * @private
 */
function noop() {}

// Cached reference to Array.prototype.slice, used by reduce()
slice = [].slice;

// ES5 reduce implementation if native not available
// See: http://es5.github.com/#x15.4.4.21 as there are many
// specifics and edge cases.
reduceArray = [].reduce ||
	function(reduceFunc /*, initialValue */) {
		// ES5 dictates that reduce.length === 1

		// This implementation deviates from ES5 spec in the following ways:
		// 1. It does not check if reduceFunc is a Callable

		var arr, args, reduced, len, i;

		i = 0;
		arr = Object(this);
		len = arr.length >>> 0;
		args = arguments;

		// If no initialValue, use first item of array (we know length !== 0 here)
		// and adjust i to start at second item
		if(args.length <= 1) {
			// Skip to the first real element in the array
			for(;;) {
				if(i in arr) {
					reduced = arr[i++];
					break;
				}

				// If we reached the end of the array without finding any real
				// elements, it's a TypeError
				if(++i >= len) {
					throw new TypeError();
				}
			}
		} else {
			// If initialValue provided, use it
			reduced = args[1];
		}

		// Do the actual reduce
		for(;i < len; ++i) {
			// Skip holes
			if(i in arr) reduced = reduceFunc(reduced, arr[i], i, arr);
		}

		return reduced;
	};

return when;
});
})(typeof define == 'function'
	? define
	: function (factory) { typeof module != 'undefined'
		? (module.exports = factory())
		: (this.when = factory());
	}
	// Boilerplate for AMD, Node, and browser global
);

/**
 * @license
 * Lo-Dash 1.2.0 <http://lodash.com/>
 * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/>
 * Based on Underscore.js 1.4.4 <http://underscorejs.org/>
 * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud Inc.
 * Available under MIT license <http://lodash.com/license>
 */
;(function(window) {

  /** Used as a safe reference for `undefined` in pre ES5 environments */
  var undefined;

  /** Detect free variable `exports` */
  var freeExports = typeof exports == 'object' && exports;

  /** Detect free variable `module` */
  var freeModule = typeof module == 'object' && module && module.exports == freeExports && module;

  /** Detect free variable `global`, from Node.js or Browserified code, and use it as `window` */
  var freeGlobal = typeof global == 'object' && global;
  if (freeGlobal.global === freeGlobal || freeGlobal.window === freeGlobal) {
    window = freeGlobal;
  }

  /** Used to generate unique IDs */
  var idCounter = 0;

  /** Used internally to indicate various things */
  var indicatorObject = {};

  /** Used to prefix keys to avoid issues with `__proto__` and properties on `Object.prototype` */
  var keyPrefix = +new Date + '';

  /** Used as the size when optimizations are enabled for large arrays */
  var largeArraySize = 200;

  /** Used to match empty string literals in compiled template source */
  var reEmptyStringLeading = /\b__p \+= '';/g,
      reEmptyStringMiddle = /\b(__p \+=) '' \+/g,
      reEmptyStringTrailing = /(__e\(.*?\)|\b__t\)) \+\n'';/g;

  /** Used to match HTML entities */
  var reEscapedHtml = /&(?:amp|lt|gt|quot|#39);/g;

  /**
   * Used to match ES6 template delimiters
   * http://people.mozilla.org/~jorendorff/es6-draft.html#sec-7.8.6
   */
  var reEsTemplate = /\$\{([^\\}]*(?:\\.[^\\}]*)*)\}/g;

  /** Used to match regexp flags from their coerced string values */
  var reFlags = /\w*$/;

  /** Used to match "interpolate" template delimiters */
  var reInterpolate = /<%=([\s\S]+?)%>/g;

  /** Used to match leading zeros to be removed */
  var reLeadingZeros = /^0+(?=.$)/;

  /** Used to ensure capturing order of template delimiters */
  var reNoMatch = /($^)/;

  /** Used to match HTML characters */
  var reUnescapedHtml = /[&<>"']/g;

  /** Used to match unescaped characters in compiled string literals */
  var reUnescapedString = /['\n\r\t\u2028\u2029\\]/g;

  /** Used to assign default `context` object properties */
  var contextProps = [
    'Array', 'Boolean', 'Date', 'Function', 'Math', 'Number', 'Object',
    'RegExp', 'String', '_', 'attachEvent', 'clearTimeout', 'isFinite',
    'isNaN', 'parseInt', 'setImmediate', 'setTimeout'
  ];

  /** Used to fix the JScript [[DontEnum]] bug */
  var shadowedProps = [
    'constructor', 'hasOwnProperty', 'isPrototypeOf', 'propertyIsEnumerable',
    'toLocaleString', 'toString', 'valueOf'
  ];

  /** Used to make template sourceURLs easier to identify */
  var templateCounter = 0;

  /** `Object#toString` result shortcuts */
  var argsClass = '[object Arguments]',
      arrayClass = '[object Array]',
      boolClass = '[object Boolean]',
      dateClass = '[object Date]',
      funcClass = '[object Function]',
      numberClass = '[object Number]',
      objectClass = '[object Object]',
      regexpClass = '[object RegExp]',
      stringClass = '[object String]';

  /** Used to identify object classifications that `_.clone` supports */
  var cloneableClasses = {};
  cloneableClasses[funcClass] = false;
  cloneableClasses[argsClass] = cloneableClasses[arrayClass] =
  cloneableClasses[boolClass] = cloneableClasses[dateClass] =
  cloneableClasses[numberClass] = cloneableClasses[objectClass] =
  cloneableClasses[regexpClass] = cloneableClasses[stringClass] = true;

  /** Used to determine if values are of the language type Object */
  var objectTypes = {
    'boolean': false,
    'function': true,
    'object': true,
    'number': false,
    'string': false,
    'undefined': false
  };

  /** Used to escape characters for inclusion in compiled string literals */
  var stringEscapes = {
    '\\': '\\',
    "'": "'",
    '\n': 'n',
    '\r': 'r',
    '\t': 't',
    '\u2028': 'u2028',
    '\u2029': 'u2029'
  };

  /*--------------------------------------------------------------------------*/

  /**
   * Create a new `lodash` function using the given `context` object.
   *
   * @static
   * @memberOf _
   * @category Utilities
   * @param {Object} [context=window] The context object.
   * @returns {Function} Returns the `lodash` function.
   */
  function runInContext(context) {
    // Avoid issues with some ES3 environments that attempt to use values, named
    // after built-in constructors like `Object`, for the creation of literals.
    // ES5 clears this up by stating that literals must use built-in constructors.
    // See http://es5.github.com/#x11.1.5.
    context = context ? _.defaults(window.Object(), context, _.pick(window, contextProps)) : window;

    /** Native constructor references */
    var Array = context.Array,
        Boolean = context.Boolean,
        Date = context.Date,
        Function = context.Function,
        Math = context.Math,
        Number = context.Number,
        Object = context.Object,
        RegExp = context.RegExp,
        String = context.String,
        TypeError = context.TypeError;

    /** Used for `Array` and `Object` method references */
    var arrayRef = Array(),
        objectRef = Object();

    /** Used to restore the original `_` reference in `noConflict` */
    var oldDash = context._;

    /** Used to detect if a method is native */
    var reNative = RegExp('^' +
      String(objectRef.valueOf)
        .replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
        .replace(/valueOf|for [^\]]+/g, '.+?') + '$'
    );

    /** Native method shortcuts */
    var ceil = Math.ceil,
        clearTimeout = context.clearTimeout,
        concat = arrayRef.concat,
        floor = Math.floor,
        getPrototypeOf = reNative.test(getPrototypeOf = Object.getPrototypeOf) && getPrototypeOf,
        hasOwnProperty = objectRef.hasOwnProperty,
        push = arrayRef.push,
        setImmediate = context.setImmediate,
        setTimeout = context.setTimeout,
        toString = objectRef.toString;

    /* Native method shortcuts for methods with the same name as other `lodash` methods */
    var nativeBind = reNative.test(nativeBind = toString.bind) && nativeBind,
        nativeIsArray = reNative.test(nativeIsArray = Array.isArray) && nativeIsArray,
        nativeIsFinite = context.isFinite,
        nativeIsNaN = context.isNaN,
        nativeKeys = reNative.test(nativeKeys = Object.keys) && nativeKeys,
        nativeMax = Math.max,
        nativeMin = Math.min,
        nativeParseInt = context.parseInt,
        nativeRandom = Math.random,
        nativeSlice = arrayRef.slice;

    /** Detect various environments */
    var isIeOpera = reNative.test(context.attachEvent),
        isV8 = nativeBind && !/\n|true/.test(nativeBind + isIeOpera);

    /** Used to lookup a built-in constructor by [[Class]] */
    var ctorByClass = {};
    ctorByClass[arrayClass] = Array;
    ctorByClass[boolClass] = Boolean;
    ctorByClass[dateClass] = Date;
    ctorByClass[objectClass] = Object;
    ctorByClass[numberClass] = Number;
    ctorByClass[regexpClass] = RegExp;
    ctorByClass[stringClass] = String;

    /*--------------------------------------------------------------------------*/

    /**
     * Creates a `lodash` object, which wraps the given `value`, to enable method
     * chaining.
     *
     * In addition to Lo-Dash methods, wrappers also have the following `Array` methods:
     * `concat`, `join`, `pop`, `push`, `reverse`, `shift`, `slice`, `sort`, `splice`,
     * and `unshift`
     *
     * Chaining is supported in custom builds as long as the `value` method is
     * implicitly or explicitly included in the build.
     *
     * The chainable wrapper functions are:
     * `after`, `assign`, `bind`, `bindAll`, `bindKey`, `chain`, `compact`,
     * `compose`, `concat`, `countBy`, `createCallback`, `debounce`, `defaults`,
     * `defer`, `delay`, `difference`, `filter`, `flatten`, `forEach`, `forIn`,
     * `forOwn`, `functions`, `groupBy`, `initial`, `intersection`, `invert`,
     * `invoke`, `keys`, `map`, `max`, `memoize`, `merge`, `min`, `object`, `omit`,
     * `once`, `pairs`, `partial`, `partialRight`, `pick`, `pluck`, `push`, `range`,
     * `reject`, `rest`, `reverse`, `shuffle`, `slice`, `sort`, `sortBy`, `splice`,
     * `tap`, `throttle`, `times`, `toArray`, `union`, `uniq`, `unshift`, `unzip`,
     * `values`, `where`, `without`, `wrap`, and `zip`
     *
     * The non-chainable wrapper functions are:
     * `clone`, `cloneDeep`, `contains`, `escape`, `every`, `find`, `has`,
     * `identity`, `indexOf`, `isArguments`, `isArray`, `isBoolean`, `isDate`,
     * `isElement`, `isEmpty`, `isEqual`, `isFinite`, `isFunction`, `isNaN`,
     * `isNull`, `isNumber`, `isObject`, `isPlainObject`, `isRegExp`, `isString`,
     * `isUndefined`, `join`, `lastIndexOf`, `mixin`, `noConflict`, `parseInt`,
     * `pop`, `random`, `reduce`, `reduceRight`, `result`, `shift`, `size`, `some`,
     * `sortedIndex`, `runInContext`, `template`, `unescape`, `uniqueId`, and `value`
     *
     * The wrapper functions `first` and `last` return wrapped values when `n` is
     * passed, otherwise they return unwrapped values.
     *
     * @name _
     * @constructor
     * @category Chaining
     * @param {Mixed} value The value to wrap in a `lodash` instance.
     * @returns {Object} Returns a `lodash` instance.
     * @example
     *
     * var wrapped = _([1, 2, 3]);
     *
     * // returns an unwrapped value
     * wrapped.reduce(function(sum, num) {
     *   return sum + num;
     * });
     * // => 6
     *
     * // returns a wrapped value
     * var squares = wrapped.map(function(num) {
     *   return num * num;
     * });
     *
     * _.isArray(squares);
     * // => false
     *
     * _.isArray(squares.value());
     * // => true
     */
    function lodash(value) {
      // don't wrap if already wrapped, even if wrapped by a different `lodash` constructor
      return (value && typeof value == 'object' && !isArray(value) && hasOwnProperty.call(value, '__wrapped__'))
        ? value
        : new lodashWrapper(value);
    }

    /**
     * An object used to flag environments features.
     *
     * @static
     * @memberOf _
     * @type Object
     */
    var support = lodash.support = {};

    (function() {
      var ctor = function() { this.x = 1; },
          object = { '0': 1, 'length': 1 },
          props = [];

      ctor.prototype = { 'valueOf': 1, 'y': 1 };
      for (var prop in new ctor) { props.push(prop); }
      for (prop in arguments) { }

      /**
       * Detect if `arguments` objects are `Object` objects (all but Narwhal and Opera < 10.5).
       *
       * @memberOf _.support
       * @type Boolean
       */
      support.argsObject = arguments.constructor == Object && !(arguments instanceof Array);

      /**
       * Detect if an `arguments` object's [[Class]] is resolvable (all but Firefox < 4, IE < 9).
       *
       * @memberOf _.support
       * @type Boolean
       */
      support.argsClass = isArguments(arguments);

      /**
       * Detect if `prototype` properties are enumerable by default.
       *
       * Firefox < 3.6, Opera > 9.50 - Opera < 11.60, and Safari < 5.1
       * (if the prototype or a property on the prototype has been set)
       * incorrectly sets a function's `prototype` property [[Enumerable]]
       * value to `true`.
       *
       * @memberOf _.support
       * @type Boolean
       */
      support.enumPrototypes = ctor.propertyIsEnumerable('prototype');

      /**
       * Detect if `Function#bind` exists and is inferred to be fast (all but V8).
       *
       * @memberOf _.support
       * @type Boolean
       */
      support.fastBind = nativeBind && !isV8;

      /**
       * Detect if own properties are iterated after inherited properties (all but IE < 9).
       *
       * @memberOf _.support
       * @type Boolean
       */
      support.ownLast = props[0] != 'x';

      /**
       * Detect if `arguments` object indexes are non-enumerable
       * (Firefox < 4, IE < 9, PhantomJS, Safari < 5.1).
       *
       * @memberOf _.support
       * @type Boolean
       */
      support.nonEnumArgs = prop != 0;

      /**
       * Detect if properties shadowing those on `Object.prototype` are non-enumerable.
       *
       * In IE < 9 an objects own properties, shadowing non-enumerable ones, are
       * made non-enumerable as well (a.k.a the JScript [[DontEnum]] bug).
       *
       * @memberOf _.support
       * @type Boolean
       */
      support.nonEnumShadows = !/valueOf/.test(props);

      /**
       * Detect if `Array#shift` and `Array#splice` augment array-like objects correctly.
       *
       * Firefox < 10, IE compatibility mode, and IE < 9 have buggy Array `shift()`
       * and `splice()` functions that fail to remove the last element, `value[0]`,
       * of array-like objects even though the `length` property is set to `0`.
       * The `shift()` method is buggy in IE 8 compatibility mode, while `splice()`
       * is buggy regardless of mode in IE < 9 and buggy in compatibility mode in IE 9.
       *
       * @memberOf _.support
       * @type Boolean
       */
      support.spliceObjects = (arrayRef.splice.call(object, 0, 1), !object[0]);

      /**
       * Detect lack of support for accessing string characters by index.
       *
       * IE < 8 can't access characters by index and IE 8 can only access
       * characters by index on string literals.
       *
       * @memberOf _.support
       * @type Boolean
       */
      support.unindexedChars = ('x'[0] + Object('x')[0]) != 'xx';

      /**
       * Detect if a DOM node's [[Class]] is resolvable (all but IE < 9)
       * and that the JS engine errors when attempting to coerce an object to
       * a string without a `toString` function.
       *
       * @memberOf _.support
       * @type Boolean
       */
      try {
        support.nodeClass = !(toString.call(document) == objectClass && !({ 'toString': 0 } + ''));
      } catch(e) {
        support.nodeClass = true;
      }
    }(1));

    /**
     * By default, the template delimiters used by Lo-Dash are similar to those in
     * embedded Ruby (ERB). Change the following template settings to use alternative
     * delimiters.
     *
     * @static
     * @memberOf _
     * @type Object
     */
    lodash.templateSettings = {

      /**
       * Used to detect `data` property values to be HTML-escaped.
       *
       * @memberOf _.templateSettings
       * @type RegExp
       */
      'escape': /<%-([\s\S]+?)%>/g,

      /**
       * Used to detect code to be evaluated.
       *
       * @memberOf _.templateSettings
       * @type RegExp
       */
      'evaluate': /<%([\s\S]+?)%>/g,

      /**
       * Used to detect `data` property values to inject.
       *
       * @memberOf _.templateSettings
       * @type RegExp
       */
      'interpolate': reInterpolate,

      /**
       * Used to reference the data object in the template text.
       *
       * @memberOf _.templateSettings
       * @type String
       */
      'variable': '',

      /**
       * Used to import variables into the compiled template.
       *
       * @memberOf _.templateSettings
       * @type Object
       */
      'imports': {

        /**
         * A reference to the `lodash` function.
         *
         * @memberOf _.templateSettings.imports
         * @type Function
         */
        '_': lodash
      }
    };

    /*--------------------------------------------------------------------------*/

    /**
     * The template used to create iterator functions.
     *
     * @private
     * @param {Object} data The data object used to populate the text.
     * @returns {String} Returns the interpolated text.
     */
    var iteratorTemplate = template(
      // the `iterable` may be reassigned by the `top` snippet
      'var index, iterable = <%= firstArg %>, ' +

      // assign the `result` variable an initial value
      'result = <%= init %>;\n' +

      // exit early if the first argument is falsey
      'if (!iterable) return result;\n' +

      // add code before the iteration branches
      '<%= top %>;\n' +

      // array-like iteration:
      '<% if (arrays) { %>' +
      'var length = iterable.length; index = -1;\n' +
      'if (<%= arrays %>) {' +

      // add support for accessing string characters by index if needed
      '  <% if (support.unindexedChars) { %>\n' +
      '  if (isString(iterable)) {\n' +
      "    iterable = iterable.split('')\n" +
      '  }' +
      '  <% } %>\n' +

      // iterate over the array-like value
      '  while (++index < length) {\n' +
      '    <%= loop %>\n' +
      '  }\n' +
      '}\n' +
      'else {' +

      // object iteration:
      // add support for iterating over `arguments` objects if needed
      '  <% } else if (support.nonEnumArgs) { %>\n' +
      '  var length = iterable.length; index = -1;\n' +
      '  if (length && isArguments(iterable)) {\n' +
      '    while (++index < length) {\n' +
      "      index += '';\n" +
      '      <%= loop %>\n' +
      '    }\n' +
      '  } else {' +
      '  <% } %>' +

      // avoid iterating over `prototype` properties in older Firefox, Opera, and Safari
      '  <% if (support.enumPrototypes) { %>\n' +
      "  var skipProto = typeof iterable == 'function';\n" +
      '  <% } %>' +

      // iterate own properties using `Object.keys` if it's fast
      '  <% if (useHas && useKeys) { %>\n' +
      '  var ownIndex = -1,\n' +
      '      ownProps = objectTypes[typeof iterable] ? keys(iterable) : [],\n' +
      '      length = ownProps.length;\n\n' +
      '  while (++ownIndex < length) {\n' +
      '    index = ownProps[ownIndex];\n' +
      "    <% if (support.enumPrototypes) { %>if (!(skipProto && index == 'prototype')) {\n  <% } %>" +
      '    <%= loop %>\n' +
      '    <% if (support.enumPrototypes) { %>}\n<% } %>' +
      '  }' +

      // else using a for-in loop
      '  <% } else { %>\n' +
      '  for (index in iterable) {<%' +
      '    if (support.enumPrototypes || useHas) { %>\n    if (<%' +
      "      if (support.enumPrototypes) { %>!(skipProto && index == 'prototype')<% }" +
      '      if (support.enumPrototypes && useHas) { %> && <% }' +
      '      if (useHas) { %>hasOwnProperty.call(iterable, index)<% }' +
      '    %>) {' +
      '    <% } %>\n' +
      '    <%= loop %>;' +
      '    <% if (support.enumPrototypes || useHas) { %>\n    }<% } %>\n' +
      '  }' +

      // Because IE < 9 can't set the `[[Enumerable]]` attribute of an
      // existing property and the `constructor` property of a prototype
      // defaults to non-enumerable, Lo-Dash skips the `constructor`
      // property when it infers it's iterating over a `prototype` object.
      '  <% if (support.nonEnumShadows) { %>\n\n' +
      '  var ctor = iterable.constructor;\n' +
      '  <% for (var k = 0; k < 7; k++) { %>\n' +
      "  index = '<%= shadowedProps[k] %>';\n" +
      '  if (<%' +
      "    if (shadowedProps[k] == 'constructor') {" +
      '      %>!(ctor && ctor.prototype === iterable) && <%' +
      '    } %>hasOwnProperty.call(iterable, index)) {\n' +
      '    <%= loop %>\n' +
      '  }' +
      '  <% } %>' +
      '  <% } %>' +
      '  <% } %>' +
      '  <% if (arrays || support.nonEnumArgs) { %>\n}<% } %>\n' +

      // add code to the bottom of the iteration function
      '<%= bottom %>;\n' +

      // finally, return the `result`
      'return result'
    );

    /** Reusable iterator options for `assign` and `defaults` */
    var defaultsIteratorOptions = {
      'args': 'object, source, guard',
      'top':
        'var args = arguments,\n' +
        '    argsIndex = 0,\n' +
        "    argsLength = typeof guard == 'number' ? 2 : args.length;\n" +
        'while (++argsIndex < argsLength) {\n' +
        '  iterable = args[argsIndex];\n' +
        '  if (iterable && objectTypes[typeof iterable]) {',
      'loop': "if (typeof result[index] == 'undefined') result[index] = iterable[index]",
      'bottom': '  }\n}'
    };

    /** Reusable iterator options shared by `each`, `forIn`, and `forOwn` */
    var eachIteratorOptions = {
      'args': 'collection, callback, thisArg',
      'top': "callback = callback && typeof thisArg == 'undefined' ? callback : lodash.createCallback(callback, thisArg)",
      'arrays': "typeof length == 'number'",
      'loop': 'if (callback(iterable[index], index, collection) === false) return result'
    };

    /** Reusable iterator options for `forIn` and `forOwn` */
    var forOwnIteratorOptions = {
      'top': 'if (!objectTypes[typeof iterable]) return result;\n' + eachIteratorOptions.top,
      'arrays': false
    };

    /*--------------------------------------------------------------------------*/

    /**
     * Creates a function optimized to search large arrays for a given `value`,
     * starting at `fromIndex`, using strict equality for comparisons, i.e. `===`.
     *
     * @private
     * @param {Array} array The array to search.
     * @param {Mixed} value The value to search for.
     * @returns {Boolean} Returns `true`, if `value` is found, else `false`.
     */
    function cachedContains(array) {
      var length = array.length,
          isLarge = length >= largeArraySize;

      if (isLarge) {
        // bucket values by their string form for O(1)-ish membership checks
        var cache = {},
            index = -1;

        while (++index < length) {
          // `keyPrefix` avoids collisions with `__proto__` and Object.prototype keys
          var key = keyPrefix + array[index];
          (cache[key] || (cache[key] = [])).push(array[index]);
        }
      }
      return function(value) {
        if (isLarge) {
          var key = keyPrefix + value;
          return cache[key] && indexOf(cache[key], value) > -1;
        }
        return indexOf(array, value) > -1;
      }
    }

    /**
     * Used by `_.max` and `_.min` as the default `callback` when a given
     * `collection` is a string value.
     *
     * @private
     * @param {String} value The character to inspect.
     * @returns {Number} Returns the code unit of given character.
     */
    function charAtCallback(value) {
      return value.charCodeAt(0);
    }

    /**
     * Used by `sortBy` to compare transformed `collection` values, stable sorting
     * them in ascending order.
     *
     * @private
     * @param {Object} a The object to compare to `b`.
     * @param {Object} b The object to compare to `a`.
     * @returns {Number} Returns the sort order indicator of `1` or `-1`.
     */
    function compareAscending(a, b) {
      var ai = a.index,
          bi = b.index;

      a = a.criteria;
      b = b.criteria;

      // ensure a stable sort in V8 and other engines
      // http://code.google.com/p/v8/issues/detail?id=90
      if (a !== b) {
        if (a > b || typeof a == 'undefined') {
          return 1;
        }
        if (a < b || typeof b == 'undefined') {
          return -1;
        }
      }
      // equal criteria: fall back to original index for stability
      return ai < bi ? -1 : 1;
    }

    /**
     * Creates a function that, when called, invokes `func` with the `this` binding
     * of `thisArg` and prepends any `partialArgs` to the arguments passed to the
     * bound function.
     *
     * @private
     * @param {Function|String} func The function to bind or the method name.
     * @param {Mixed} [thisArg] The `this` binding of `func`.
     * @param {Array} partialArgs An array of arguments to be partially applied.
     * @param {Object} [indicator] Used to indicate binding by key or partially
     *  applying arguments from the right.
     * @returns {Function} Returns the new bound function.
     */
    function createBound(func, thisArg, partialArgs, indicator) {
      var isFunc = isFunction(func),
          isPartial = !partialArgs,
          key = thisArg;

      // juggle arguments
      if (isPartial) {
        var rightIndicator = indicator;
        partialArgs = thisArg;
      }
      else if (!isFunc) {
        // binding by key requires the indicator object
        if (!indicator) {
          throw new TypeError;
        }
        thisArg = func;
      }

      function bound() {
        // `Function#bind` spec
        // http://es5.github.com/#x15.3.4.5
        var args = arguments,
            thisBinding = isPartial ? this : thisArg;

        if (!isFunc) {
          // bind-by-key: re-resolve the method each call so later
          // reassignment of thisArg[key] is picked up
          func = thisArg[key];
        }
        if (partialArgs.length) {
          args = args.length
            ? (args = nativeSlice.call(args), rightIndicator ? args.concat(partialArgs) : partialArgs.concat(args))
            : partialArgs;
        }
        if (this instanceof bound) {
          // ensure `new bound` is an instance of `func`
          noop.prototype = func.prototype;
          thisBinding = new noop;
          noop.prototype = null;

          // mimic the constructor's `return` behavior
          // http://es5.github.com/#x13.2.2
          var result = func.apply(thisBinding, args);
          return isObject(result) ? result : thisBinding;
        }
        return func.apply(thisBinding, args);
      }
      return bound;
    }

    /**
     * Creates compiled iteration functions.
     *
     * @private
     * @param {Object} [options1, options2, ...] The compile options object(s).
     *  arrays - A string of code to determine if the iterable is an array or array-like.
     *  useHas - A boolean to specify using `hasOwnProperty` checks in the object loop.
     *  useKeys - A boolean to specify using `_.keys` for own property iteration.
     *  args - A string of comma separated arguments the iteration function will accept.
     *  top - A string of code to execute before the iteration branches.
     *  loop - A string of code to execute in the object loop.
     *  bottom - A string of code to execute after the iteration branches.
     * @returns {Function} Returns the compiled function.
*/ function createIterator() { var data = { // data properties 'shadowedProps': shadowedProps, 'support': support, // iterator options 'arrays': 'isArray(iterable)', 'bottom': '', 'init': 'iterable', 'loop': '', 'top': '', 'useHas': true, 'useKeys': !!keys }; // merge options into a template data object for (var object, index = 0; object = arguments[index]; index++) { for (var key in object) { data[key] = object[key]; } } var args = data.args; data.firstArg = /^[^,]+/.exec(args)[0]; // create the function factory var factory = Function( 'hasOwnProperty, isArguments, isArray, isString, keys, ' + 'lodash, objectTypes', 'return function(' + args + ') {\n' + iteratorTemplate(data) + '\n}' ); // return the compiled function return factory( hasOwnProperty, isArguments, isArray, isString, keys, lodash, objectTypes ); } /** * Used by `template` to escape characters for inclusion in compiled * string literals. * * @private * @param {String} match The matched character to escape. * @returns {String} Returns the escaped character. */ function escapeStringChar(match) { return '\\' + stringEscapes[match]; } /** * Used by `escape` to convert characters to HTML entities. * * @private * @param {String} match The matched character to escape. * @returns {String} Returns the escaped character. */ function escapeHtmlChar(match) { return htmlEscapes[match]; } /** * Checks if `value` is a DOM node in IE < 9. * * @private * @param {Mixed} value The value to check. * @returns {Boolean} Returns `true` if the `value` is a DOM node, else `false`. */ function isNode(value) { // IE < 9 presents DOM nodes as `Object` objects except they have `toString` // methods that are `typeof` "string" and still can coerce nodes to strings return typeof value.toString != 'function' && typeof (value + '') == 'string'; } /** * A fast path for creating `lodash` wrapper objects. * * @private * @param {Mixed} value The value to wrap in a `lodash` instance. * @returns {Object} Returns a `lodash` instance. 
 */
function lodashWrapper(value) {
  this.__wrapped__ = value;
}
// ensure `new lodashWrapper` is an instance of `lodash`
lodashWrapper.prototype = lodash.prototype;

/**
 * A no-operation function.
 *
 * @private
 */
function noop() {
  // no operation performed
}

/**
 * A fallback implementation of `isPlainObject` which checks if a given `value`
 * is an object created by the `Object` constructor, assuming objects created
 * by the `Object` constructor have no inherited enumerable properties and that
 * there are no `Object.prototype` extensions.
 *
 * @private
 * @param {Mixed} value The value to check.
 * @returns {Boolean} Returns `true`, if `value` is a plain object, else `false`.
 */
function shimIsPlainObject(value) {
  // avoid non-objects and false positives for `arguments` objects
  var result = false;
  if (!(value && toString.call(value) == objectClass) ||
      (!support.argsClass && isArguments(value))) {
    return result;
  }
  // check that the constructor is `Object` (i.e. `Object instanceof Object`)
  var ctor = value.constructor;
  if (isFunction(ctor) ? ctor instanceof ctor : (support.nodeClass || !isNode(value))) {
    // IE < 9 iterates inherited properties before own properties. If the first
    // iterated property is an object's own property then there are no inherited
    // enumerable properties.
    if (support.ownLast) {
      forIn(value, function(value, key, object) {
        result = hasOwnProperty.call(object, key);
        return false;
      });
      return result === true;
    }
    // In most environments an object's own properties are iterated before
    // its inherited properties. If the last iterated property is an object's
    // own property then there are no inherited enumerable properties.
    forIn(value, function(value, key) {
      result = key;
    });
    // here `result` holds the last iterated key, or `false` for an empty object
    return result === false || hasOwnProperty.call(value, result);
  }
  return result;
}

/**
 * Slices the `collection` from the `start` index up to, but not including,
 * the `end` index.
 *
 * Note: This function is used, instead of `Array#slice`, to support node lists
 * in IE < 9 and to ensure dense arrays are returned.
 *
 * @private
 * @param {Array|Object|String} array The array-like collection to slice.
 * @param {Number} [start=0] The start index.
 * @param {Number} [end=array.length] The end index.
 * @returns {Array} Returns the new array.
 */
function slice(array, start, end) {
  start || (start = 0);
  if (typeof end == 'undefined') {
    end = array ? array.length : 0;
  }
  var index = -1,
      // a negative range yields an empty (but still dense) array
      length = end - start || 0,
      result = Array(length < 0 ? 0 : length);

  while (++index < length) {
    result[index] = array[start + index];
  }
  return result;
}

/**
 * Used by `unescape` to convert HTML entities to characters.
 *
 * @private
 * @param {String} match The matched character to unescape.
 * @returns {String} Returns the unescaped character.
 */
function unescapeHtmlChar(match) {
  return htmlUnescapes[match];
}

/*--------------------------------------------------------------------------*/

/**
 * Checks if `value` is an `arguments` object.
 *
 * @static
 * @memberOf _
 * @category Objects
 * @param {Mixed} value The value to check.
 * @returns {Boolean} Returns `true`, if the `value` is an `arguments` object, else `false`.
 * @example
 *
 * (function() { return _.isArguments(arguments); })(1, 2, 3);
 * // => true
 *
 * _.isArguments([1, 2, 3]);
 * // => false
 */
function isArguments(value) {
  return toString.call(value) == argsClass;
}
// fallback for browsers that can't detect `arguments` objects by [[Class]]
if (!support.argsClass) {
  isArguments = function(value) {
    // detect by the own `callee` property instead of the [[Class]] name
    return value ? hasOwnProperty.call(value, 'callee') : false;
  };
}

/**
 * A fallback implementation of `Object.keys` which produces an array of the
 * given object's own enumerable property names.
 *
 * @private
 * @type Function
 * @param {Object} object The object to inspect.
 * @returns {Array} Returns a new array of property names.
*/ var shimKeys = createIterator({ 'args': 'object', 'init': '[]', 'top': 'if (!(objectTypes[typeof object])) return result', 'loop': 'result.push(index)', 'arrays': false }); /** * Creates an array composed of the own enumerable property names of `object`. * * @static * @memberOf _ * @category Objects * @param {Object} object The object to inspect. * @returns {Array} Returns a new array of property names. * @example * * _.keys({ 'one': 1, 'two': 2, 'three': 3 }); * // => ['one', 'two', 'three'] (order is not guaranteed) */ var keys = !nativeKeys ? shimKeys : function(object) { if (!isObject(object)) { return []; } if ((support.enumPrototypes && typeof object == 'function') || (support.nonEnumArgs && object.length && isArguments(object))) { return shimKeys(object); } return nativeKeys(object); }; /** * A function compiled to iterate `arguments` objects, arrays, objects, and * strings consistenly across environments, executing the `callback` for each * element in the `collection`. The `callback` is bound to `thisArg` and invoked * with three arguments; (value, index|key, collection). Callbacks may exit * iteration early by explicitly returning `false`. * * @private * @type Function * @param {Array|Object|String} collection The collection to iterate over. * @param {Function} [callback=identity] The function called per iteration. * @param {Mixed} [thisArg] The `this` binding of `callback`. * @returns {Array|Object|String} Returns `collection`. */ var each = createIterator(eachIteratorOptions); /** * Used to convert characters to HTML entities: * * Though the `>` character is escaped for symmetry, characters like `>` and `/` * don't require escaping in HTML and have no special meaning unless they're part * of a tag or an unquoted attribute value. 
* http://mathiasbynens.be/notes/ambiguous-ampersands (under "semi-related fun fact") */ var htmlEscapes = { '&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;', "'": '&#39;' }; /** Used to convert HTML entities to characters */ var htmlUnescapes = invert(htmlEscapes); /*--------------------------------------------------------------------------*/ /** * Assigns own enumerable properties of source object(s) to the destination * object. Subsequent sources will overwrite property assignments of previous * sources. If a `callback` function is passed, it will be executed to produce * the assigned values. The `callback` is bound to `thisArg` and invoked with * two arguments; (objectValue, sourceValue). * * @static * @memberOf _ * @type Function * @alias extend * @category Objects * @param {Object} object The destination object. * @param {Object} [source1, source2, ...] The source objects. * @param {Function} [callback] The function to customize assigning values. * @param {Mixed} [thisArg] The `this` binding of `callback`. * @returns {Object} Returns the destination object. * @example * * _.assign({ 'name': 'moe' }, { 'age': 40 }); * // => { 'name': 'moe', 'age': 40 } * * var defaults = _.partialRight(_.assign, function(a, b) { * return typeof a == 'undefined' ? b : a; * }); * * var food = { 'name': 'apple' }; * defaults(food, { 'name': 'banana', 'type': 'fruit' }); * // => { 'name': 'apple', 'type': 'fruit' } */ var assign = createIterator(defaultsIteratorOptions, { 'top': defaultsIteratorOptions.top.replace(';', ';\n' + "if (argsLength > 3 && typeof args[argsLength - 2] == 'function') {\n" + ' var callback = lodash.createCallback(args[--argsLength - 1], args[argsLength--], 2);\n' + "} else if (argsLength > 2 && typeof args[argsLength - 1] == 'function') {\n" + ' callback = args[--argsLength];\n' + '}' ), 'loop': 'result[index] = callback ? callback(result[index], iterable[index]) : iterable[index]' }); /** * Creates a clone of `value`. 
If `deep` is `true`, nested objects will also * be cloned, otherwise they will be assigned by reference. If a `callback` * function is passed, it will be executed to produce the cloned values. If * `callback` returns `undefined`, cloning will be handled by the method instead. * The `callback` is bound to `thisArg` and invoked with one argument; (value). * * @static * @memberOf _ * @category Objects * @param {Mixed} value The value to clone. * @param {Boolean} [deep=false] A flag to indicate a deep clone. * @param {Function} [callback] The function to customize cloning values. * @param {Mixed} [thisArg] The `this` binding of `callback`. * @param- {Array} [stackA=[]] Tracks traversed source objects. * @param- {Array} [stackB=[]] Associates clones with source counterparts. * @returns {Mixed} Returns the cloned `value`. * @example * * var stooges = [ * { 'name': 'moe', 'age': 40 }, * { 'name': 'larry', 'age': 50 } * ]; * * var shallow = _.clone(stooges); * shallow[0] === stooges[0]; * // => true * * var deep = _.clone(stooges, true); * deep[0] === stooges[0]; * // => false * * _.mixin({ * 'clone': _.partialRight(_.clone, function(value) { * return _.isElement(value) ? value.cloneNode(false) : undefined; * }) * }); * * var clone = _.clone(document.body); * clone.childNodes.length; * // => 0 */ function clone(value, deep, callback, thisArg, stackA, stackB) { var result = value; // allows working with "Collections" methods without using their `callback` // argument, `index|key`, for this method's `callback` if (typeof deep == 'function') { thisArg = callback; callback = deep; deep = false; } if (typeof callback == 'function') { callback = (typeof thisArg == 'undefined') ? 
callback : lodash.createCallback(callback, thisArg, 1); result = callback(result); if (typeof result != 'undefined') { return result; } result = value; } // inspect [[Class]] var isObj = isObject(result); if (isObj) { var className = toString.call(result); if (!cloneableClasses[className] || (!support.nodeClass && isNode(result))) { return result; } var isArr = isArray(result); } // shallow clone if (!isObj || !deep) { return isObj ? (isArr ? slice(result) : assign({}, result)) : result; } var ctor = ctorByClass[className]; switch (className) { case boolClass: case dateClass: return new ctor(+result); case numberClass: case stringClass: return new ctor(result); case regexpClass: return ctor(result.source, reFlags.exec(result)); } // check for circular references and return corresponding clone stackA || (stackA = []); stackB || (stackB = []); var length = stackA.length; while (length--) { if (stackA[length] == value) { return stackB[length]; } } // init cloned object result = isArr ? ctor(result.length) : {}; // add array properties assigned by `RegExp#exec` if (isArr) { if (hasOwnProperty.call(value, 'index')) { result.index = value.index; } if (hasOwnProperty.call(value, 'input')) { result.input = value.input; } } // add the source value to the stack of traversed objects // and associate it with its clone stackA.push(value); stackB.push(result); // recursively populate clone (susceptible to call stack limits) (isArr ? forEach : forOwn)(value, function(objValue, key) { result[key] = clone(objValue, deep, callback, undefined, stackA, stackB); }); return result; } /** * Creates a deep clone of `value`. If a `callback` function is passed, * it will be executed to produce the cloned values. If `callback` returns * `undefined`, cloning will be handled by the method instead. The `callback` * is bound to `thisArg` and invoked with one argument; (value). * * Note: This function is loosely based on the structured clone algorithm. 
Functions * and DOM nodes are **not** cloned. The enumerable properties of `arguments` objects and * objects created by constructors other than `Object` are cloned to plain `Object` objects. * See http://www.w3.org/TR/html5/infrastructure.html#internal-structured-cloning-algorithm. * * @static * @memberOf _ * @category Objects * @param {Mixed} value The value to deep clone. * @param {Function} [callback] The function to customize cloning values. * @param {Mixed} [thisArg] The `this` binding of `callback`. * @returns {Mixed} Returns the deep cloned `value`. * @example * * var stooges = [ * { 'name': 'moe', 'age': 40 }, * { 'name': 'larry', 'age': 50 } * ]; * * var deep = _.cloneDeep(stooges); * deep[0] === stooges[0]; * // => false * * var view = { * 'label': 'docs', * 'node': element * }; * * var clone = _.cloneDeep(view, function(value) { * return _.isElement(value) ? value.cloneNode(true) : undefined; * }); * * clone.node == view.node; * // => false */ function cloneDeep(value, callback, thisArg) { return clone(value, true, callback, thisArg); } /** * Assigns own enumerable properties of source object(s) to the destination * object for all destination properties that resolve to `undefined`. Once a * property is set, additional defaults of the same property will be ignored. * * @static * @memberOf _ * @type Function * @category Objects * @param {Object} object The destination object. * @param {Object} [source1, source2, ...] The source objects. * @param- {Object} [guard] Allows working with `_.reduce` without using its * callback's `key` and `object` arguments as sources. * @returns {Object} Returns the destination object. 
* @example * * var food = { 'name': 'apple' }; * _.defaults(food, { 'name': 'banana', 'type': 'fruit' }); * // => { 'name': 'apple', 'type': 'fruit' } */ var defaults = createIterator(defaultsIteratorOptions); /** * This method is similar to `_.find`, except that it returns the key of the * element that passes the callback check, instead of the element itself. * * @static * @memberOf _ * @category Objects * @param {Object} object The object to search. * @param {Function|Object|String} [callback=identity] The function called per * iteration. If a property name or object is passed, it will be used to create * a "_.pluck" or "_.where" style callback, respectively. * @param {Mixed} [thisArg] The `this` binding of `callback`. * @returns {Mixed} Returns the key of the found element, else `undefined`. * @example * * _.findKey({ 'a': 1, 'b': 2, 'c': 3, 'd': 4 }, function(num) { * return num % 2 == 0; * }); * // => 'b' */ function findKey(object, callback, thisArg) { var result; callback = lodash.createCallback(callback, thisArg); forOwn(object, function(value, key, object) { if (callback(value, key, object)) { result = key; return false; } }); return result; } /** * Iterates over `object`'s own and inherited enumerable properties, executing * the `callback` for each property. The `callback` is bound to `thisArg` and * invoked with three arguments; (value, key, object). Callbacks may exit iteration * early by explicitly returning `false`. * * @static * @memberOf _ * @type Function * @category Objects * @param {Object} object The object to iterate over. * @param {Function} [callback=identity] The function called per iteration. * @param {Mixed} [thisArg] The `this` binding of `callback`. * @returns {Object} Returns `object`. 
* @example * * function Dog(name) { * this.name = name; * } * * Dog.prototype.bark = function() { * alert('Woof, woof!'); * }; * * _.forIn(new Dog('Dagny'), function(value, key) { * alert(key); * }); * // => alerts 'name' and 'bark' (order is not guaranteed) */ var forIn = createIterator(eachIteratorOptions, forOwnIteratorOptions, { 'useHas': false }); /** * Iterates over an object's own enumerable properties, executing the `callback` * for each property. The `callback` is bound to `thisArg` and invoked with three * arguments; (value, key, object). Callbacks may exit iteration early by explicitly * returning `false`. * * @static * @memberOf _ * @type Function * @category Objects * @param {Object} object The object to iterate over. * @param {Function} [callback=identity] The function called per iteration. * @param {Mixed} [thisArg] The `this` binding of `callback`. * @returns {Object} Returns `object`. * @example * * _.forOwn({ '0': 'zero', '1': 'one', 'length': 2 }, function(num, key) { * alert(key); * }); * // => alerts '0', '1', and 'length' (order is not guaranteed) */ var forOwn = createIterator(eachIteratorOptions, forOwnIteratorOptions); /** * Creates a sorted array of all enumerable properties, own and inherited, * of `object` that have function values. * * @static * @memberOf _ * @alias methods * @category Objects * @param {Object} object The object to inspect. * @returns {Array} Returns a new array of property names that have function values. * @example * * _.functions(_); * // => ['all', 'any', 'bind', 'bindAll', 'clone', 'compact', 'compose', ...] */ function functions(object) { var result = []; forIn(object, function(value, key) { if (isFunction(value)) { result.push(key); } }); return result.sort(); } /** * Checks if the specified object `property` exists and is a direct property, * instead of an inherited property. * * @static * @memberOf _ * @category Objects * @param {Object} object The object to check. 
 * @param {String} property The property to check for.
 * @returns {Boolean} Returns `true` if key is a direct property, else `false`.
 * @example
 *
 * _.has({ 'a': 1, 'b': 2, 'c': 3 }, 'b');
 * // => true
 */
function has(object, property) {
  // guard against `null`/`undefined` so `hasOwnProperty.call` can't throw
  return object ? hasOwnProperty.call(object, property) : false;
}

/**
 * Creates an object composed of the inverted keys and values of the given `object`.
 *
 * @static
 * @memberOf _
 * @category Objects
 * @param {Object} object The object to invert.
 * @returns {Object} Returns the created inverted object.
 * @example
 *
 * _.invert({ 'first': 'moe', 'second': 'larry' });
 * // => { 'moe': 'first', 'larry': 'second' }
 */
function invert(object) {
  var index = -1,
      props = keys(object),
      length = props.length,
      result = {};

  // when two keys map to the same value, the later key wins
  while (++index < length) {
    var key = props[index];
    result[object[key]] = key;
  }
  return result;
}

/**
 * Checks if `value` is an array.
 *
 * @static
 * @memberOf _
 * @category Objects
 * @param {Mixed} value The value to check.
 * @returns {Boolean} Returns `true`, if the `value` is an array, else `false`.
 * @example
 *
 * (function() { return _.isArray(arguments); })();
 * // => false
 *
 * _.isArray([1, 2, 3]);
 * // => true
 */
function isArray(value) {
  // `instanceof` may cause a memory leak in IE 7 if `value` is a host object
  // http://ajaxian.com/archives/working-aroung-the-instanceof-memory-leak
  return (support.argsObject && value instanceof Array) ||
    (nativeIsArray ? nativeIsArray(value) : toString.call(value) == arrayClass);
}

/**
 * Checks if `value` is a boolean value.
 *
 * @static
 * @memberOf _
 * @category Objects
 * @param {Mixed} value The value to check.
 * @returns {Boolean} Returns `true`, if the `value` is a boolean value, else `false`.
 * @example
 *
 * _.isBoolean(null);
 * // => false
 */
function isBoolean(value) {
  return value === true || value === false || toString.call(value) == boolClass;
}

/**
 * Checks if `value` is a date.
 *
 * @static
 * @memberOf _
 * @category Objects
 * @param {Mixed} value The value to check.
 * @returns {Boolean} Returns `true`, if the `value` is a date, else `false`.
 * @example
 *
 * _.isDate(new Date);
 * // => true
 */
function isDate(value) {
  return value instanceof Date || toString.call(value) == dateClass;
}

/**
 * Checks if `value` is a DOM element.
 *
 * @static
 * @memberOf _
 * @category Objects
 * @param {Mixed} value The value to check.
 * @returns {Boolean} Returns `true`, if the `value` is a DOM element, else `false`.
 * @example
 *
 * _.isElement(document.body);
 * // => true
 */
function isElement(value) {
  // nodeType 1 is ELEMENT_NODE; other node kinds (text, comment, ...) are rejected
  return value ? value.nodeType === 1 : false;
}

/**
 * Checks if `value` is empty. Arrays, strings, or `arguments` objects with a
 * length of `0` and objects with no own enumerable properties are considered
 * "empty".
 *
 * @static
 * @memberOf _
 * @category Objects
 * @param {Array|Object|String} value The value to inspect.
 * @returns {Boolean} Returns `true`, if the `value` is empty, else `false`.
 * @example
 *
 * _.isEmpty([1, 2, 3]);
 * // => false
 *
 * _.isEmpty({});
 * // => true
 *
 * _.isEmpty('');
 * // => true
 */
function isEmpty(value) {
  var result = true;
  if (!value) {
    return result;
  }
  var className = toString.call(value),
      length = value.length;

  // arrays, strings, `arguments` objects, and array-like objects (those with a
  // numeric `length` and a `splice` method) are judged by their length alone
  if ((className == arrayClass || className == stringClass ||
      (support.argsClass ? className == argsClass : isArguments(value))) ||
      (className == objectClass && typeof length == 'number' && isFunction(value.splice))) {
    return !length;
  }
  // plain objects: any own enumerable property makes them non-empty
  forOwn(value, function() {
    return (result = false);
  });
  return result;
}

/**
 * Performs a deep comparison between two values to determine if they are
 * equivalent to each other. If `callback` is passed, it will be executed to
 * compare values. If `callback` returns `undefined`, comparisons will be handled
 * by the method instead. The `callback` is bound to `thisArg` and invoked with
 * two arguments; (a, b).
 *
 * @static
 * @memberOf _
 * @category Objects
 * @param {Mixed} a The value to compare.
 * @param {Mixed} b The other value to compare.
* @param {Function} [callback] The function to customize comparing values. * @param {Mixed} [thisArg] The `this` binding of `callback`. * @param- {Array} [stackA=[]] Tracks traversed `a` objects. * @param- {Array} [stackB=[]] Tracks traversed `b` objects. * @returns {Boolean} Returns `true`, if the values are equivalent, else `false`. * @example * * var moe = { 'name': 'moe', 'age': 40 }; * var copy = { 'name': 'moe', 'age': 40 }; * * moe == copy; * // => false * * _.isEqual(moe, copy); * // => true * * var words = ['hello', 'goodbye']; * var otherWords = ['hi', 'goodbye']; * * _.isEqual(words, otherWords, function(a, b) { * var reGreet = /^(?:hello|hi)$/i, * aGreet = _.isString(a) && reGreet.test(a), * bGreet = _.isString(b) && reGreet.test(b); * * return (aGreet || bGreet) ? (aGreet == bGreet) : undefined; * }); * // => true */ function isEqual(a, b, callback, thisArg, stackA, stackB) { // used to indicate that when comparing objects, `a` has at least the properties of `b` var whereIndicator = callback === indicatorObject; if (typeof callback == 'function' && !whereIndicator) { callback = lodash.createCallback(callback, thisArg, 2); var result = callback(a, b); if (typeof result != 'undefined') { return !!result; } } // exit early for identical values if (a === b) { // treat `+0` vs. 
`-0` as not equal return a !== 0 || (1 / a == 1 / b); } var type = typeof a, otherType = typeof b; // exit early for unlike primitive values if (a === a && (!a || (type != 'function' && type != 'object')) && (!b || (otherType != 'function' && otherType != 'object'))) { return false; } // exit early for `null` and `undefined`, avoiding ES3's Function#call behavior // http://es5.github.com/#x15.3.4.4 if (a == null || b == null) { return a === b; } // compare [[Class]] names var className = toString.call(a), otherClass = toString.call(b); if (className == argsClass) { className = objectClass; } if (otherClass == argsClass) { otherClass = objectClass; } if (className != otherClass) { return false; } switch (className) { case boolClass: case dateClass: // coerce dates and booleans to numbers, dates to milliseconds and booleans // to `1` or `0`, treating invalid dates coerced to `NaN` as not equal return +a == +b; case numberClass: // treat `NaN` vs. `NaN` as equal return (a != +a) ? b != +b // but treat `+0` vs. `-0` as not equal : (a == 0 ? (1 / a == 1 / b) : a == +b); case regexpClass: case stringClass: // coerce regexes to strings (http://es5.github.com/#x15.10.6.4) // treat string primitives and their corresponding object instances as equal return a == String(b); } var isArr = className == arrayClass; if (!isArr) { // unwrap any `lodash` wrapped values if (hasOwnProperty.call(a, '__wrapped__ ') || hasOwnProperty.call(b, '__wrapped__')) { return isEqual(a.__wrapped__ || a, b.__wrapped__ || b, callback, thisArg, stackA, stackB); } // exit for functions and DOM nodes if (className != objectClass || (!support.nodeClass && (isNode(a) || isNode(b)))) { return false; } // in older versions of Opera, `arguments` objects have `Array` constructors var ctorA = !support.argsObject && isArguments(a) ? Object : a.constructor, ctorB = !support.argsObject && isArguments(b) ? 
Object : b.constructor; // non `Object` object instances with different constructors are not equal if (ctorA != ctorB && !( isFunction(ctorA) && ctorA instanceof ctorA && isFunction(ctorB) && ctorB instanceof ctorB )) { return false; } } // assume cyclic structures are equal // the algorithm for detecting cyclic structures is adapted from ES 5.1 // section 15.12.3, abstract operation `JO` (http://es5.github.com/#x15.12.3) stackA || (stackA = []); stackB || (stackB = []); var length = stackA.length; while (length--) { if (stackA[length] == a) { return stackB[length] == b; } } var size = 0; result = true; // add `a` and `b` to the stack of traversed objects stackA.push(a); stackB.push(b); // recursively compare objects and arrays (susceptible to call stack limits) if (isArr) { length = a.length; size = b.length; // compare lengths to determine if a deep comparison is necessary result = size == a.length; if (!result && !whereIndicator) { return result; } // deep compare the contents, ignoring non-numeric properties while (size--) { var index = length, value = b[size]; if (whereIndicator) { while (index--) { if ((result = isEqual(a[index], value, callback, thisArg, stackA, stackB))) { break; } } } else if (!(result = isEqual(a[size], value, callback, thisArg, stackA, stackB))) { break; } } return result; } // deep compare objects using `forIn`, instead of `forOwn`, to avoid `Object.keys` // which, in this case, is more costly forIn(b, function(value, key, b) { if (hasOwnProperty.call(b, key)) { // count the number of properties. size++; // deep compare each property value. 
return (result = hasOwnProperty.call(a, key) && isEqual(a[key], value, callback, thisArg, stackA, stackB)); } }); if (result && !whereIndicator) { // ensure both objects have the same number of properties forIn(a, function(value, key, a) { if (hasOwnProperty.call(a, key)) { // `size` will be `-1` if `a` has more properties than `b` return (result = --size > -1); } }); } return result; } /** * Checks if `value` is, or can be coerced to, a finite number. * * Note: This is not the same as native `isFinite`, which will return true for * booleans and empty strings. See http://es5.github.com/#x15.1.2.5. * * @static * @memberOf _ * @category Objects * @param {Mixed} value The value to check. * @returns {Boolean} Returns `true`, if the `value` is finite, else `false`. * @example * * _.isFinite(-101); * // => true * * _.isFinite('10'); * // => true * * _.isFinite(true); * // => false * * _.isFinite(''); * // => false * * _.isFinite(Infinity); * // => false */ function isFinite(value) { return nativeIsFinite(value) && !nativeIsNaN(parseFloat(value)); } /** * Checks if `value` is a function. * * @static * @memberOf _ * @category Objects * @param {Mixed} value The value to check. * @returns {Boolean} Returns `true`, if the `value` is a function, else `false`. * @example * * _.isFunction(_); * // => true */ function isFunction(value) { return typeof value == 'function'; } // fallback for older versions of Chrome and Safari if (isFunction(/x/)) { isFunction = function(value) { return value instanceof Function || toString.call(value) == funcClass; }; } /** * Checks if `value` is the language type of Object. * (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) * * @static * @memberOf _ * @category Objects * @param {Mixed} value The value to check. * @returns {Boolean} Returns `true`, if the `value` is an object, else `false`. 
* @example * * _.isObject({}); * // => true * * _.isObject([1, 2, 3]); * // => true * * _.isObject(1); * // => false */ function isObject(value) { // check if the value is the ECMAScript language type of Object // http://es5.github.com/#x8 // and avoid a V8 bug // http://code.google.com/p/v8/issues/detail?id=2291 return value ? objectTypes[typeof value] : false; } /** * Checks if `value` is `NaN`. * * Note: This is not the same as native `isNaN`, which will return `true` for * `undefined` and other values. See http://es5.github.com/#x15.1.2.4. * * @static * @memberOf _ * @category Objects * @param {Mixed} value The value to check. * @returns {Boolean} Returns `true`, if the `value` is `NaN`, else `false`. * @example * * _.isNaN(NaN); * // => true * * _.isNaN(new Number(NaN)); * // => true * * isNaN(undefined); * // => true * * _.isNaN(undefined); * // => false */ function isNaN(value) { // `NaN` as a primitive is the only value that is not equal to itself // (perform the [[Class]] check first to avoid errors with some host objects in IE) return isNumber(value) && value != +value } /** * Checks if `value` is `null`. * * @static * @memberOf _ * @category Objects * @param {Mixed} value The value to check. * @returns {Boolean} Returns `true`, if the `value` is `null`, else `false`. * @example * * _.isNull(null); * // => true * * _.isNull(undefined); * // => false */ function isNull(value) { return value === null; } /** * Checks if `value` is a number. * * @static * @memberOf _ * @category Objects * @param {Mixed} value The value to check. * @returns {Boolean} Returns `true`, if the `value` is a number, else `false`. * @example * * _.isNumber(8.4 * 5); * // => true */ function isNumber(value) { return typeof value == 'number' || toString.call(value) == numberClass; } /** * Checks if a given `value` is an object created by the `Object` constructor. * * @static * @memberOf _ * @category Objects * @param {Mixed} value The value to check. 
* @returns {Boolean} Returns `true`, if `value` is a plain object, else `false`. * @example * * function Stooge(name, age) { * this.name = name; * this.age = age; * } * * _.isPlainObject(new Stooge('moe', 40)); * // => false * * _.isPlainObject([1, 2, 3]); * // => false * * _.isPlainObject({ 'name': 'moe', 'age': 40 }); * // => true */ var isPlainObject = !getPrototypeOf ? shimIsPlainObject : function(value) { if (!(value && toString.call(value) == objectClass) || (!support.argsClass && isArguments(value))) { return false; } var valueOf = value.valueOf, objProto = typeof valueOf == 'function' && (objProto = getPrototypeOf(valueOf)) && getPrototypeOf(objProto); return objProto ? (value == objProto || getPrototypeOf(value) == objProto) : shimIsPlainObject(value); }; /** * Checks if `value` is a regular expression. * * @static * @memberOf _ * @category Objects * @param {Mixed} value The value to check. * @returns {Boolean} Returns `true`, if the `value` is a regular expression, else `false`. * @example * * _.isRegExp(/moe/); * // => true */ function isRegExp(value) { return value instanceof RegExp || toString.call(value) == regexpClass; } /** * Checks if `value` is a string. * * @static * @memberOf _ * @category Objects * @param {Mixed} value The value to check. * @returns {Boolean} Returns `true`, if the `value` is a string, else `false`. * @example * * _.isString('moe'); * // => true */ function isString(value) { return typeof value == 'string' || toString.call(value) == stringClass; } /** * Checks if `value` is `undefined`. * * @static * @memberOf _ * @category Objects * @param {Mixed} value The value to check. * @returns {Boolean} Returns `true`, if the `value` is `undefined`, else `false`. * @example * * _.isUndefined(void 0); * // => true */ function isUndefined(value) { return typeof value == 'undefined'; } /** * Recursively merges own enumerable properties of the source object(s), that * don't resolve to `undefined`, into the destination object. 
Subsequent sources * will overwrite property assignments of previous sources. If a `callback` function * is passed, it will be executed to produce the merged values of the destination * and source properties. If `callback` returns `undefined`, merging will be * handled by the method instead. The `callback` is bound to `thisArg` and * invoked with two arguments; (objectValue, sourceValue). * * @static * @memberOf _ * @category Objects * @param {Object} object The destination object. * @param {Object} [source1, source2, ...] The source objects. * @param {Function} [callback] The function to customize merging properties. * @param {Mixed} [thisArg] The `this` binding of `callback`. * @param- {Object} [deepIndicator] Indicates that `stackA` and `stackB` are * arrays of traversed objects, instead of source objects. * @param- {Array} [stackA=[]] Tracks traversed source objects. * @param- {Array} [stackB=[]] Associates values with source counterparts. * @returns {Object} Returns the destination object. * @example * * var names = { * 'stooges': [ * { 'name': 'moe' }, * { 'name': 'larry' } * ] * }; * * var ages = { * 'stooges': [ * { 'age': 40 }, * { 'age': 50 } * ] * }; * * _.merge(names, ages); * // => { 'stooges': [{ 'name': 'moe', 'age': 40 }, { 'name': 'larry', 'age': 50 }] } * * var food = { * 'fruits': ['apple'], * 'vegetables': ['beet'] * }; * * var otherFood = { * 'fruits': ['banana'], * 'vegetables': ['carrot'] * }; * * _.merge(food, otherFood, function(a, b) { * return _.isArray(a) ? 
a.concat(b) : undefined; * }); * // => { 'fruits': ['apple', 'banana'], 'vegetables': ['beet', 'carrot] } */ function merge(object, source, deepIndicator) { var args = arguments, index = 0, length = 2; if (!isObject(object)) { return object; } if (deepIndicator === indicatorObject) { var callback = args[3], stackA = args[4], stackB = args[5]; } else { stackA = []; stackB = []; // allows working with `_.reduce` and `_.reduceRight` without // using their `callback` arguments, `index|key` and `collection` if (typeof deepIndicator != 'number') { length = args.length; } if (length > 3 && typeof args[length - 2] == 'function') { callback = lodash.createCallback(args[--length - 1], args[length--], 2); } else if (length > 2 && typeof args[length - 1] == 'function') { callback = args[--length]; } } while (++index < length) { (isArray(args[index]) ? forEach : forOwn)(args[index], function(source, key) { var found, isArr, result = source, value = object[key]; if (source && ((isArr = isArray(source)) || isPlainObject(source))) { // avoid merging previously merged cyclic sources var stackLength = stackA.length; while (stackLength--) { if ((found = stackA[stackLength] == source)) { value = stackB[stackLength]; break; } } if (!found) { var isShallow; if (callback) { result = callback(value, source); if ((isShallow = typeof result != 'undefined')) { value = result; } } if (!isShallow) { value = isArr ? (isArray(value) ? value : []) : (isPlainObject(value) ? 
value : {}); } // add `source` and associated `value` to the stack of traversed objects stackA.push(source); stackB.push(value); // recursively merge objects and arrays (susceptible to call stack limits) if (!isShallow) { value = merge(value, source, indicatorObject, callback, stackA, stackB); } } } else { if (callback) { result = callback(value, source); if (typeof result == 'undefined') { result = source; } } if (typeof result != 'undefined') { value = result; } } object[key] = value; }); } return object; } /** * Creates a shallow clone of `object` excluding the specified properties. * Property names may be specified as individual arguments or as arrays of * property names. If a `callback` function is passed, it will be executed * for each property in the `object`, omitting the properties `callback` * returns truthy for. The `callback` is bound to `thisArg` and invoked * with three arguments; (value, key, object). * * @static * @memberOf _ * @category Objects * @param {Object} object The source object. * @param {Function|String} callback|[prop1, prop2, ...] The properties to omit * or the function called per iteration. * @param {Mixed} [thisArg] The `this` binding of `callback`. * @returns {Object} Returns an object without the omitted properties. * @example * * _.omit({ 'name': 'moe', 'age': 40 }, 'age'); * // => { 'name': 'moe' } * * _.omit({ 'name': 'moe', 'age': 40 }, function(value) { * return typeof value == 'number'; * }); * // => { 'name': 'moe' } */ function omit(object, callback, thisArg) { var isFunc = typeof callback == 'function', result = {}; if (isFunc) { callback = lodash.createCallback(callback, thisArg); } else { var props = concat.apply(arrayRef, nativeSlice.call(arguments, 1)); } forIn(object, function(value, key, object) { if (isFunc ? !callback(value, key, object) : indexOf(props, key) < 0 ) { result[key] = value; } }); return result; } /** * Creates a two dimensional array of the given object's key-value pairs, * i.e. 
`[[key1, value1], [key2, value2]]`. * * @static * @memberOf _ * @category Objects * @param {Object} object The object to inspect. * @returns {Array} Returns new array of key-value pairs. * @example * * _.pairs({ 'moe': 30, 'larry': 40 }); * // => [['moe', 30], ['larry', 40]] (order is not guaranteed) */ function pairs(object) { var index = -1, props = keys(object), length = props.length, result = Array(length); while (++index < length) { var key = props[index]; result[index] = [key, object[key]]; } return result; } /** * Creates a shallow clone of `object` composed of the specified properties. * Property names may be specified as individual arguments or as arrays of property * names. If `callback` is passed, it will be executed for each property in the * `object`, picking the properties `callback` returns truthy for. The `callback` * is bound to `thisArg` and invoked with three arguments; (value, key, object). * * @static * @memberOf _ * @category Objects * @param {Object} object The source object. * @param {Array|Function|String} callback|[prop1, prop2, ...] The function called * per iteration or properties to pick, either as individual arguments or arrays. * @param {Mixed} [thisArg] The `this` binding of `callback`. * @returns {Object} Returns an object composed of the picked properties. * @example * * _.pick({ 'name': 'moe', '_userid': 'moe1' }, 'name'); * // => { 'name': 'moe' } * * _.pick({ 'name': 'moe', '_userid': 'moe1' }, function(value, key) { * return key.charAt(0) != '_'; * }); * // => { 'name': 'moe' } */ function pick(object, callback, thisArg) { var result = {}; if (typeof callback != 'function') { var index = -1, props = concat.apply(arrayRef, nativeSlice.call(arguments, 1)), length = isObject(object) ? 
props.length : 0; while (++index < length) { var key = props[index]; if (key in object) { result[key] = object[key]; } } } else { callback = lodash.createCallback(callback, thisArg); forIn(object, function(value, key, object) { if (callback(value, key, object)) { result[key] = value; } }); } return result; } /** * Creates an array composed of the own enumerable property values of `object`. * * @static * @memberOf _ * @category Objects * @param {Object} object The object to inspect. * @returns {Array} Returns a new array of property values. * @example * * _.values({ 'one': 1, 'two': 2, 'three': 3 }); * // => [1, 2, 3] (order is not guaranteed) */ function values(object) { var index = -1, props = keys(object), length = props.length, result = Array(length); while (++index < length) { result[index] = object[props[index]]; } return result; } /*--------------------------------------------------------------------------*/ /** * Creates an array of elements from the specified indexes, or keys, of the * `collection`. Indexes may be specified as individual arguments or as arrays * of indexes. * * @static * @memberOf _ * @category Collections * @param {Array|Object|String} collection The collection to iterate over. * @param {Array|Number|String} [index1, index2, ...] The indexes of * `collection` to retrieve, either as individual arguments or arrays. * @returns {Array} Returns a new array of elements corresponding to the * provided indexes. 
* @example * * _.at(['a', 'b', 'c', 'd', 'e'], [0, 2, 4]); * // => ['a', 'c', 'e'] * * _.at(['moe', 'larry', 'curly'], 0, 2); * // => ['moe', 'curly'] */ function at(collection) { var index = -1, props = concat.apply(arrayRef, nativeSlice.call(arguments, 1)), length = props.length, result = Array(length); if (support.unindexedChars && isString(collection)) { collection = collection.split(''); } while(++index < length) { result[index] = collection[props[index]]; } return result; } /** * Checks if a given `target` element is present in a `collection` using strict * equality for comparisons, i.e. `===`. If `fromIndex` is negative, it is used * as the offset from the end of the collection. * * @static * @memberOf _ * @alias include * @category Collections * @param {Array|Object|String} collection The collection to iterate over. * @param {Mixed} target The value to check for. * @param {Number} [fromIndex=0] The index to search from. * @returns {Boolean} Returns `true` if the `target` element is found, else `false`. * @example * * _.contains([1, 2, 3], 1); * // => true * * _.contains([1, 2, 3], 1, 2); * // => false * * _.contains({ 'name': 'moe', 'age': 40 }, 'moe'); * // => true * * _.contains('curly', 'ur'); * // => true */ function contains(collection, target, fromIndex) { var index = -1, length = collection ? collection.length : 0, result = false; fromIndex = (fromIndex < 0 ? nativeMax(0, length + fromIndex) : fromIndex) || 0; if (typeof length == 'number') { result = (isString(collection) ? collection.indexOf(target, fromIndex) : indexOf(collection, target, fromIndex) ) > -1; } else { each(collection, function(value) { if (++index >= fromIndex) { return !(result = value === target); } }); } return result; } /** * Creates an object composed of keys returned from running each element of the * `collection` through the given `callback`. The corresponding value of each key * is the number of times the key was returned by the `callback`. 
The `callback` * is bound to `thisArg` and invoked with three arguments; (value, index|key, collection). * * If a property name is passed for `callback`, the created "_.pluck" style * callback will return the property value of the given element. * * If an object is passed for `callback`, the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @category Collections * @param {Array|Object|String} collection The collection to iterate over. * @param {Function|Object|String} [callback=identity] The function called per * iteration. If a property name or object is passed, it will be used to create * a "_.pluck" or "_.where" style callback, respectively. * @param {Mixed} [thisArg] The `this` binding of `callback`. * @returns {Object} Returns the composed aggregate object. * @example * * _.countBy([4.3, 6.1, 6.4], function(num) { return Math.floor(num); }); * // => { '4': 1, '6': 2 } * * _.countBy([4.3, 6.1, 6.4], function(num) { return this.floor(num); }, Math); * // => { '4': 1, '6': 2 } * * _.countBy(['one', 'two', 'three'], 'length'); * // => { '3': 2, '5': 1 } */ function countBy(collection, callback, thisArg) { var result = {}; callback = lodash.createCallback(callback, thisArg); forEach(collection, function(value, key, collection) { key = String(callback(value, key, collection)); (hasOwnProperty.call(result, key) ? result[key]++ : result[key] = 1); }); return result; } /** * Checks if the `callback` returns a truthy value for **all** elements of a * `collection`. The `callback` is bound to `thisArg` and invoked with three * arguments; (value, index|key, collection). * * If a property name is passed for `callback`, the created "_.pluck" style * callback will return the property value of the given element. 
* * If an object is passed for `callback`, the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @alias all * @category Collections * @param {Array|Object|String} collection The collection to iterate over. * @param {Function|Object|String} [callback=identity] The function called per * iteration. If a property name or object is passed, it will be used to create * a "_.pluck" or "_.where" style callback, respectively. * @param {Mixed} [thisArg] The `this` binding of `callback`. * @returns {Boolean} Returns `true` if all elements pass the callback check, * else `false`. * @example * * _.every([true, 1, null, 'yes'], Boolean); * // => false * * var stooges = [ * { 'name': 'moe', 'age': 40 }, * { 'name': 'larry', 'age': 50 } * ]; * * // using "_.pluck" callback shorthand * _.every(stooges, 'age'); * // => true * * // using "_.where" callback shorthand * _.every(stooges, { 'age': 50 }); * // => false */ function every(collection, callback, thisArg) { var result = true; callback = lodash.createCallback(callback, thisArg); if (isArray(collection)) { var index = -1, length = collection.length; while (++index < length) { if (!(result = !!callback(collection[index], index, collection))) { break; } } } else { each(collection, function(value, index, collection) { return (result = !!callback(value, index, collection)); }); } return result; } /** * Examines each element in a `collection`, returning an array of all elements * the `callback` returns truthy for. The `callback` is bound to `thisArg` and * invoked with three arguments; (value, index|key, collection). * * If a property name is passed for `callback`, the created "_.pluck" style * callback will return the property value of the given element. * * If an object is passed for `callback`, the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. 
* * @static * @memberOf _ * @alias select * @category Collections * @param {Array|Object|String} collection The collection to iterate over. * @param {Function|Object|String} [callback=identity] The function called per * iteration. If a property name or object is passed, it will be used to create * a "_.pluck" or "_.where" style callback, respectively. * @param {Mixed} [thisArg] The `this` binding of `callback`. * @returns {Array} Returns a new array of elements that passed the callback check. * @example * * var evens = _.filter([1, 2, 3, 4, 5, 6], function(num) { return num % 2 == 0; }); * // => [2, 4, 6] * * var food = [ * { 'name': 'apple', 'organic': false, 'type': 'fruit' }, * { 'name': 'carrot', 'organic': true, 'type': 'vegetable' } * ]; * * // using "_.pluck" callback shorthand * _.filter(food, 'organic'); * // => [{ 'name': 'carrot', 'organic': true, 'type': 'vegetable' }] * * // using "_.where" callback shorthand * _.filter(food, { 'type': 'fruit' }); * // => [{ 'name': 'apple', 'organic': false, 'type': 'fruit' }] */ function filter(collection, callback, thisArg) { var result = []; callback = lodash.createCallback(callback, thisArg); if (isArray(collection)) { var index = -1, length = collection.length; while (++index < length) { var value = collection[index]; if (callback(value, index, collection)) { result.push(value); } } } else { each(collection, function(value, index, collection) { if (callback(value, index, collection)) { result.push(value); } }); } return result; } /** * Examines each element in a `collection`, returning the first that the `callback` * returns truthy for. The `callback` is bound to `thisArg` and invoked with three * arguments; (value, index|key, collection). * * If a property name is passed for `callback`, the created "_.pluck" style * callback will return the property value of the given element. 
* * If an object is passed for `callback`, the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @alias detect * @category Collections * @param {Array|Object|String} collection The collection to iterate over. * @param {Function|Object|String} [callback=identity] The function called per * iteration. If a property name or object is passed, it will be used to create * a "_.pluck" or "_.where" style callback, respectively. * @param {Mixed} [thisArg] The `this` binding of `callback`. * @returns {Mixed} Returns the found element, else `undefined`. * @example * * _.find([1, 2, 3, 4], function(num) { * return num % 2 == 0; * }); * // => 2 * * var food = [ * { 'name': 'apple', 'organic': false, 'type': 'fruit' }, * { 'name': 'banana', 'organic': true, 'type': 'fruit' }, * { 'name': 'beet', 'organic': false, 'type': 'vegetable' } * ]; * * // using "_.where" callback shorthand * _.find(food, { 'type': 'vegetable' }); * // => { 'name': 'beet', 'organic': false, 'type': 'vegetable' } * * // using "_.pluck" callback shorthand * _.find(food, 'organic'); * // => { 'name': 'banana', 'organic': true, 'type': 'fruit' } */ function find(collection, callback, thisArg) { callback = lodash.createCallback(callback, thisArg); if (isArray(collection)) { var index = -1, length = collection.length; while (++index < length) { var value = collection[index]; if (callback(value, index, collection)) { return value; } } } else { var result; each(collection, function(value, index, collection) { if (callback(value, index, collection)) { result = value; return false; } }); return result; } } /** * Iterates over a `collection`, executing the `callback` for each element in * the `collection`. The `callback` is bound to `thisArg` and invoked with three * arguments; (value, index|key, collection). Callbacks may exit iteration early * by explicitly returning `false`. 
* * @static * @memberOf _ * @alias each * @category Collections * @param {Array|Object|String} collection The collection to iterate over. * @param {Function} [callback=identity] The function called per iteration. * @param {Mixed} [thisArg] The `this` binding of `callback`. * @returns {Array|Object|String} Returns `collection`. * @example * * _([1, 2, 3]).forEach(alert).join(','); * // => alerts each number and returns '1,2,3' * * _.forEach({ 'one': 1, 'two': 2, 'three': 3 }, alert); * // => alerts each number value (order is not guaranteed) */ function forEach(collection, callback, thisArg) { if (callback && typeof thisArg == 'undefined' && isArray(collection)) { var index = -1, length = collection.length; while (++index < length) { if (callback(collection[index], index, collection) === false) { break; } } } else { each(collection, callback, thisArg); } return collection; } /** * Creates an object composed of keys returned from running each element of the * `collection` through the `callback`. The corresponding value of each key is * an array of elements passed to `callback` that returned the key. The `callback` * is bound to `thisArg` and invoked with three arguments; (value, index|key, collection). * * If a property name is passed for `callback`, the created "_.pluck" style * callback will return the property value of the given element. * * If an object is passed for `callback`, the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false` * * @static * @memberOf _ * @category Collections * @param {Array|Object|String} collection The collection to iterate over. * @param {Function|Object|String} [callback=identity] The function called per * iteration. If a property name or object is passed, it will be used to create * a "_.pluck" or "_.where" style callback, respectively. * @param {Mixed} [thisArg] The `this` binding of `callback`. * @returns {Object} Returns the composed aggregate object. 
* @example * * _.groupBy([4.2, 6.1, 6.4], function(num) { return Math.floor(num); }); * // => { '4': [4.2], '6': [6.1, 6.4] } * * _.groupBy([4.2, 6.1, 6.4], function(num) { return this.floor(num); }, Math); * // => { '4': [4.2], '6': [6.1, 6.4] } * * // using "_.pluck" callback shorthand * _.groupBy(['one', 'two', 'three'], 'length'); * // => { '3': ['one', 'two'], '5': ['three'] } */ function groupBy(collection, callback, thisArg) { var result = {}; callback = lodash.createCallback(callback, thisArg); forEach(collection, function(value, key, collection) { key = String(callback(value, key, collection)); (hasOwnProperty.call(result, key) ? result[key] : result[key] = []).push(value); }); return result; } /** * Invokes the method named by `methodName` on each element in the `collection`, * returning an array of the results of each invoked method. Additional arguments * will be passed to each invoked method. If `methodName` is a function, it will * be invoked for, and `this` bound to, each element in the `collection`. * * @static * @memberOf _ * @category Collections * @param {Array|Object|String} collection The collection to iterate over. * @param {Function|String} methodName The name of the method to invoke or * the function invoked per iteration. * @param {Mixed} [arg1, arg2, ...] Arguments to invoke the method with. * @returns {Array} Returns a new array of the results of each invoked method. * @example * * _.invoke([[5, 1, 7], [3, 2, 1]], 'sort'); * // => [[1, 5, 7], [1, 2, 3]] * * _.invoke([123, 456], String.prototype.split, ''); * // => [['1', '2', '3'], ['4', '5', '6']] */ function invoke(collection, methodName) { var args = nativeSlice.call(arguments, 2), index = -1, isFunc = typeof methodName == 'function', length = collection ? collection.length : 0, result = Array(typeof length == 'number' ? length : 0); forEach(collection, function(value) { result[++index] = (isFunc ? 
methodName : value[methodName]).apply(value, args); }); return result; } /** * Creates an array of values by running each element in the `collection` * through the `callback`. The `callback` is bound to `thisArg` and invoked with * three arguments; (value, index|key, collection). * * If a property name is passed for `callback`, the created "_.pluck" style * callback will return the property value of the given element. * * If an object is passed for `callback`, the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @alias collect * @category Collections * @param {Array|Object|String} collection The collection to iterate over. * @param {Function|Object|String} [callback=identity] The function called per * iteration. If a property name or object is passed, it will be used to create * a "_.pluck" or "_.where" style callback, respectively. * @param {Mixed} [thisArg] The `this` binding of `callback`. * @returns {Array} Returns a new array of the results of each `callback` execution. * @example * * _.map([1, 2, 3], function(num) { return num * 3; }); * // => [3, 6, 9] * * _.map({ 'one': 1, 'two': 2, 'three': 3 }, function(num) { return num * 3; }); * // => [3, 6, 9] (order is not guaranteed) * * var stooges = [ * { 'name': 'moe', 'age': 40 }, * { 'name': 'larry', 'age': 50 } * ]; * * // using "_.pluck" callback shorthand * _.map(stooges, 'name'); * // => ['moe', 'larry'] */ function map(collection, callback, thisArg) { var index = -1, length = collection ? collection.length : 0, result = Array(typeof length == 'number' ? 
length : 0); callback = lodash.createCallback(callback, thisArg); if (isArray(collection)) { while (++index < length) { result[index] = callback(collection[index], index, collection); } } else { each(collection, function(value, key, collection) { result[++index] = callback(value, key, collection); }); } return result; } /** * Retrieves the maximum value of an `array`. If `callback` is passed, * it will be executed for each value in the `array` to generate the * criterion by which the value is ranked. The `callback` is bound to * `thisArg` and invoked with three arguments; (value, index, collection). * * If a property name is passed for `callback`, the created "_.pluck" style * callback will return the property value of the given element. * * If an object is passed for `callback`, the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @category Collections * @param {Array|Object|String} collection The collection to iterate over. * @param {Function|Object|String} [callback=identity] The function called per * iteration. If a property name or object is passed, it will be used to create * a "_.pluck" or "_.where" style callback, respectively. * @param {Mixed} [thisArg] The `this` binding of `callback`. * @returns {Mixed} Returns the maximum value. 
* @example * * _.max([4, 2, 8, 6]); * // => 8 * * var stooges = [ * { 'name': 'moe', 'age': 40 }, * { 'name': 'larry', 'age': 50 } * ]; * * _.max(stooges, function(stooge) { return stooge.age; }); * // => { 'name': 'larry', 'age': 50 }; * * // using "_.pluck" callback shorthand * _.max(stooges, 'age'); * // => { 'name': 'larry', 'age': 50 }; */ function max(collection, callback, thisArg) { var computed = -Infinity, result = computed; if (!callback && isArray(collection)) { var index = -1, length = collection.length; while (++index < length) { var value = collection[index]; if (value > result) { result = value; } } } else { callback = (!callback && isString(collection)) ? charAtCallback : lodash.createCallback(callback, thisArg); each(collection, function(value, index, collection) { var current = callback(value, index, collection); if (current > computed) { computed = current; result = value; } }); } return result; } /** * Retrieves the minimum value of an `array`. If `callback` is passed, * it will be executed for each value in the `array` to generate the * criterion by which the value is ranked. The `callback` is bound to `thisArg` * and invoked with three arguments; (value, index, collection). * * If a property name is passed for `callback`, the created "_.pluck" style * callback will return the property value of the given element. * * If an object is passed for `callback`, the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @category Collections * @param {Array|Object|String} collection The collection to iterate over. * @param {Function|Object|String} [callback=identity] The function called per * iteration. If a property name or object is passed, it will be used to create * a "_.pluck" or "_.where" style callback, respectively. * @param {Mixed} [thisArg] The `this` binding of `callback`. * @returns {Mixed} Returns the minimum value. 
* @example * * _.min([4, 2, 8, 6]); * // => 2 * * var stooges = [ * { 'name': 'moe', 'age': 40 }, * { 'name': 'larry', 'age': 50 } * ]; * * _.min(stooges, function(stooge) { return stooge.age; }); * // => { 'name': 'moe', 'age': 40 }; * * // using "_.pluck" callback shorthand * _.min(stooges, 'age'); * // => { 'name': 'moe', 'age': 40 }; */ function min(collection, callback, thisArg) { var computed = Infinity, result = computed; if (!callback && isArray(collection)) { var index = -1, length = collection.length; while (++index < length) { var value = collection[index]; if (value < result) { result = value; } } } else { callback = (!callback && isString(collection)) ? charAtCallback : lodash.createCallback(callback, thisArg); each(collection, function(value, index, collection) { var current = callback(value, index, collection); if (current < computed) { computed = current; result = value; } }); } return result; } /** * Retrieves the value of a specified property from all elements in the `collection`. * * @static * @memberOf _ * @type Function * @category Collections * @param {Array|Object|String} collection The collection to iterate over. * @param {String} property The property to pluck. * @returns {Array} Returns a new array of property values. * @example * * var stooges = [ * { 'name': 'moe', 'age': 40 }, * { 'name': 'larry', 'age': 50 } * ]; * * _.pluck(stooges, 'name'); * // => ['moe', 'larry'] */ var pluck = map; /** * Reduces a `collection` to a value which is the accumulated result of running * each element in the `collection` through the `callback`, where each successive * `callback` execution consumes the return value of the previous execution. * If `accumulator` is not passed, the first element of the `collection` will be * used as the initial `accumulator` value. The `callback` is bound to `thisArg` * and invoked with four arguments; (accumulator, value, index|key, collection). 
* * @static * @memberOf _ * @alias foldl, inject * @category Collections * @param {Array|Object|String} collection The collection to iterate over. * @param {Function} [callback=identity] The function called per iteration. * @param {Mixed} [accumulator] Initial value of the accumulator. * @param {Mixed} [thisArg] The `this` binding of `callback`. * @returns {Mixed} Returns the accumulated value. * @example * * var sum = _.reduce([1, 2, 3], function(sum, num) { * return sum + num; * }); * // => 6 * * var mapped = _.reduce({ 'a': 1, 'b': 2, 'c': 3 }, function(result, num, key) { * result[key] = num * 3; * return result; * }, {}); * // => { 'a': 3, 'b': 6, 'c': 9 } */ function reduce(collection, callback, accumulator, thisArg) { var noaccum = arguments.length < 3; callback = lodash.createCallback(callback, thisArg, 4); if (isArray(collection)) { var index = -1, length = collection.length; if (noaccum) { accumulator = collection[++index]; } while (++index < length) { accumulator = callback(accumulator, collection[index], index, collection); } } else { each(collection, function(value, index, collection) { accumulator = noaccum ? (noaccum = false, value) : callback(accumulator, value, index, collection) }); } return accumulator; } /** * This method is similar to `_.reduce`, except that it iterates over a * `collection` from right to left. * * @static * @memberOf _ * @alias foldr * @category Collections * @param {Array|Object|String} collection The collection to iterate over. * @param {Function} [callback=identity] The function called per iteration. * @param {Mixed} [accumulator] Initial value of the accumulator. * @param {Mixed} [thisArg] The `this` binding of `callback`. * @returns {Mixed} Returns the accumulated value. 
* @example * * var list = [[0, 1], [2, 3], [4, 5]]; * var flat = _.reduceRight(list, function(a, b) { return a.concat(b); }, []); * // => [4, 5, 2, 3, 0, 1] */ function reduceRight(collection, callback, accumulator, thisArg) { var iterable = collection, length = collection ? collection.length : 0, noaccum = arguments.length < 3; if (typeof length != 'number') { var props = keys(collection); length = props.length; } else if (support.unindexedChars && isString(collection)) { iterable = collection.split(''); } callback = lodash.createCallback(callback, thisArg, 4); forEach(collection, function(value, index, collection) { index = props ? props[--length] : --length; accumulator = noaccum ? (noaccum = false, iterable[index]) : callback(accumulator, iterable[index], index, collection); }); return accumulator; } /** * The opposite of `_.filter`, this method returns the elements of a * `collection` that `callback` does **not** return truthy for. * * If a property name is passed for `callback`, the created "_.pluck" style * callback will return the property value of the given element. * * If an object is passed for `callback`, the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @category Collections * @param {Array|Object|String} collection The collection to iterate over. * @param {Function|Object|String} [callback=identity] The function called per * iteration. If a property name or object is passed, it will be used to create * a "_.pluck" or "_.where" style callback, respectively. * @param {Mixed} [thisArg] The `this` binding of `callback`. * @returns {Array} Returns a new array of elements that did **not** pass the * callback check. 
* @example * * var odds = _.reject([1, 2, 3, 4, 5, 6], function(num) { return num % 2 == 0; }); * // => [1, 3, 5] * * var food = [ * { 'name': 'apple', 'organic': false, 'type': 'fruit' }, * { 'name': 'carrot', 'organic': true, 'type': 'vegetable' } * ]; * * // using "_.pluck" callback shorthand * _.reject(food, 'organic'); * // => [{ 'name': 'apple', 'organic': false, 'type': 'fruit' }] * * // using "_.where" callback shorthand * _.reject(food, { 'type': 'fruit' }); * // => [{ 'name': 'carrot', 'organic': true, 'type': 'vegetable' }] */ function reject(collection, callback, thisArg) { callback = lodash.createCallback(callback, thisArg); return filter(collection, function(value, index, collection) { return !callback(value, index, collection); }); } /** * Creates an array of shuffled `array` values, using a version of the * Fisher-Yates shuffle. See http://en.wikipedia.org/wiki/Fisher-Yates_shuffle. * * @static * @memberOf _ * @category Collections * @param {Array|Object|String} collection The collection to shuffle. * @returns {Array} Returns a new shuffled collection. * @example * * _.shuffle([1, 2, 3, 4, 5, 6]); * // => [4, 1, 6, 3, 5, 2] */ function shuffle(collection) { var index = -1, length = collection ? collection.length : 0, result = Array(typeof length == 'number' ? length : 0); forEach(collection, function(value) { var rand = floor(nativeRandom() * (++index + 1)); result[index] = result[rand]; result[rand] = value; }); return result; } /** * Gets the size of the `collection` by returning `collection.length` for arrays * and array-like objects or the number of own enumerable properties for objects. * * @static * @memberOf _ * @category Collections * @param {Array|Object|String} collection The collection to inspect. * @returns {Number} Returns `collection.length` or number of own enumerable properties. 
* @example * * _.size([1, 2]); * // => 2 * * _.size({ 'one': 1, 'two': 2, 'three': 3 }); * // => 3 * * _.size('curly'); * // => 5 */ function size(collection) { var length = collection ? collection.length : 0; return typeof length == 'number' ? length : keys(collection).length; } /** * Checks if the `callback` returns a truthy value for **any** element of a * `collection`. The function returns as soon as it finds passing value, and * does not iterate over the entire `collection`. The `callback` is bound to * `thisArg` and invoked with three arguments; (value, index|key, collection). * * If a property name is passed for `callback`, the created "_.pluck" style * callback will return the property value of the given element. * * If an object is passed for `callback`, the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @alias any * @category Collections * @param {Array|Object|String} collection The collection to iterate over. * @param {Function|Object|String} [callback=identity] The function called per * iteration. If a property name or object is passed, it will be used to create * a "_.pluck" or "_.where" style callback, respectively. * @param {Mixed} [thisArg] The `this` binding of `callback`. * @returns {Boolean} Returns `true` if any element passes the callback check, * else `false`. 
* @example * * _.some([null, 0, 'yes', false], Boolean); * // => true * * var food = [ * { 'name': 'apple', 'organic': false, 'type': 'fruit' }, * { 'name': 'carrot', 'organic': true, 'type': 'vegetable' } * ]; * * // using "_.pluck" callback shorthand * _.some(food, 'organic'); * // => true * * // using "_.where" callback shorthand * _.some(food, { 'type': 'meat' }); * // => false */ function some(collection, callback, thisArg) { var result; callback = lodash.createCallback(callback, thisArg); if (isArray(collection)) { var index = -1, length = collection.length; while (++index < length) { if ((result = callback(collection[index], index, collection))) { break; } } } else { each(collection, function(value, index, collection) { return !(result = callback(value, index, collection)); }); } return !!result; } /** * Creates an array of elements, sorted in ascending order by the results of * running each element in the `collection` through the `callback`. This method * performs a stable sort, that is, it will preserve the original sort order of * equal elements. The `callback` is bound to `thisArg` and invoked with three * arguments; (value, index|key, collection). * * If a property name is passed for `callback`, the created "_.pluck" style * callback will return the property value of the given element. * * If an object is passed for `callback`, the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @category Collections * @param {Array|Object|String} collection The collection to iterate over. * @param {Function|Object|String} [callback=identity] The function called per * iteration. If a property name or object is passed, it will be used to create * a "_.pluck" or "_.where" style callback, respectively. * @param {Mixed} [thisArg] The `this` binding of `callback`. * @returns {Array} Returns a new array of sorted elements. 
 * @example
 *
 * _.sortBy([1, 2, 3], function(num) { return Math.sin(num); });
 * // => [3, 1, 2]
 *
 * _.sortBy([1, 2, 3], function(num) { return this.sin(num); }, Math);
 * // => [3, 1, 2]
 *
 * // using "_.pluck" callback shorthand
 * _.sortBy(['banana', 'strawberry', 'apple'], 'length');
 * // => ['apple', 'banana', 'strawberry']
 */
function sortBy(collection, callback, thisArg) {
  var index = -1,
      length = collection ? collection.length : 0,
      result = Array(typeof length == 'number' ? length : 0);

  callback = lodash.createCallback(callback, thisArg);
  // wrap each value with its sort criteria and original position so the
  // comparator can fall back to `index` for a stable sort of equal criteria
  forEach(collection, function(value, key, collection) {
    result[++index] = {
      'criteria': callback(value, key, collection),
      'index': index,
      'value': value
    };
  });

  length = result.length;
  result.sort(compareAscending);
  // unwrap in place: replace each wrapper object with its original value
  while (length--) {
    result[length] = result[length].value;
  }
  return result;
}

/**
 * Converts the `collection` to an array.
 *
 * @static
 * @memberOf _
 * @category Collections
 * @param {Array|Object|String} collection The collection to convert.
 * @returns {Array} Returns the new converted array.
 * @example
 *
 * (function() { return _.toArray(arguments).slice(1); })(1, 2, 3, 4);
 * // => [2, 3, 4]
 */
function toArray(collection) {
  if (collection && typeof collection.length == 'number') {
    // array-like: copy by slicing; split strings when index access
    // is unsupported (`support.unindexedChars`)
    return (support.unindexedChars && isString(collection))
      ? collection.split('')
      : slice(collection);
  }
  // plain object: array of its own enumerable property values
  return values(collection);
}

/**
 * Examines each element in a `collection`, returning an array of all elements
 * that have the given `properties`. When checking `properties`, this method
 * performs a deep comparison between values to determine if they are equivalent
 * to each other.
 *
 * @static
 * @memberOf _
 * @type Function
 * @category Collections
 * @param {Array|Object|String} collection The collection to iterate over.
 * @param {Object} properties The object of property values to filter by.
 * @returns {Array} Returns a new array of elements that have the given `properties`.
* @example * * var stooges = [ * { 'name': 'moe', 'age': 40 }, * { 'name': 'larry', 'age': 50 } * ]; * * _.where(stooges, { 'age': 40 }); * // => [{ 'name': 'moe', 'age': 40 }] */ var where = filter; /*--------------------------------------------------------------------------*/ /** * Creates an array with all falsey values of `array` removed. The values * `false`, `null`, `0`, `""`, `undefined` and `NaN` are all falsey. * * @static * @memberOf _ * @category Arrays * @param {Array} array The array to compact. * @returns {Array} Returns a new filtered array. * @example * * _.compact([0, 1, false, 2, '', 3]); * // => [1, 2, 3] */ function compact(array) { var index = -1, length = array ? array.length : 0, result = []; while (++index < length) { var value = array[index]; if (value) { result.push(value); } } return result; } /** * Creates an array of `array` elements not present in the other arrays * using strict equality for comparisons, i.e. `===`. * * @static * @memberOf _ * @category Arrays * @param {Array} array The array to process. * @param {Array} [array1, array2, ...] Arrays to check. * @returns {Array} Returns a new array of `array` elements not present in the * other arrays. * @example * * _.difference([1, 2, 3, 4, 5], [5, 2, 10]); * // => [1, 3, 4] */ function difference(array) { var index = -1, length = array ? array.length : 0, flattened = concat.apply(arrayRef, nativeSlice.call(arguments, 1)), contains = cachedContains(flattened), result = []; while (++index < length) { var value = array[index]; if (!contains(value)) { result.push(value); } } return result; } /** * This method is similar to `_.find`, except that it returns the index of * the element that passes the callback check, instead of the element itself. * * @static * @memberOf _ * @category Arrays * @param {Array} array The array to search. * @param {Function|Object|String} [callback=identity] The function called per * iteration. 
If a property name or object is passed, it will be used to create * a "_.pluck" or "_.where" style callback, respectively. * @param {Mixed} [thisArg] The `this` binding of `callback`. * @returns {Mixed} Returns the index of the found element, else `-1`. * @example * * _.findIndex(['apple', 'banana', 'beet'], function(food) { * return /^b/.test(food); * }); * // => 1 */ function findIndex(array, callback, thisArg) { var index = -1, length = array ? array.length : 0; callback = lodash.createCallback(callback, thisArg); while (++index < length) { if (callback(array[index], index, array)) { return index; } } return -1; } /** * Gets the first element of the `array`. If a number `n` is passed, the first * `n` elements of the `array` are returned. If a `callback` function is passed, * elements at the beginning of the array are returned as long as the `callback` * returns truthy. The `callback` is bound to `thisArg` and invoked with three * arguments; (value, index, array). * * If a property name is passed for `callback`, the created "_.pluck" style * callback will return the property value of the given element. * * If an object is passed for `callback`, the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @alias head, take * @category Arrays * @param {Array} array The array to query. * @param {Function|Object|Number|String} [callback|n] The function called * per element or the number of elements to return. If a property name or * object is passed, it will be used to create a "_.pluck" or "_.where" * style callback, respectively. * @param {Mixed} [thisArg] The `this` binding of `callback`. * @returns {Mixed} Returns the first element(s) of `array`. 
 * @example
 *
 * _.first([1, 2, 3]);
 * // => 1
 *
 * _.first([1, 2, 3], 2);
 * // => [1, 2]
 *
 * _.first([1, 2, 3], function(num) {
 *   return num < 3;
 * });
 * // => [1, 2]
 *
 * var food = [
 *   { 'name': 'banana', 'organic': true },
 *   { 'name': 'beet',   'organic': false },
 * ];
 *
 * // using "_.pluck" callback shorthand
 * _.first(food, 'organic');
 * // => [{ 'name': 'banana', 'organic': true }]
 *
 * var food = [
 *   { 'name': 'apple',  'type': 'fruit' },
 *   { 'name': 'banana', 'type': 'fruit' },
 *   { 'name': 'beet',   'type': 'vegetable' }
 * ];
 *
 * // using "_.where" callback shorthand
 * _.first(food, { 'type': 'fruit' });
 * // => [{ 'name': 'apple', 'type': 'fruit' }, { 'name': 'banana', 'type': 'fruit' }]
 */
function first(array, callback, thisArg) {
  if (array) {
    var n = 0,
        length = array.length;

    if (typeof callback != 'number' && callback != null) {
      // predicate form: count leading elements while the callback holds
      var index = -1;
      callback = lodash.createCallback(callback, thisArg);
      while (++index < length && callback(array[index], index, array)) {
        n++;
      }
    } else {
      // numeric / absent form; with no `n` (or a `thisArg` signalling
      // callback-style use) return just the first element, not an array
      n = callback;
      if (n == null || thisArg) {
        return array[0];
      }
    }
    // clamp `n` to [0, length] before slicing
    return slice(array, 0, nativeMin(nativeMax(0, n), length));
  }
  // falsey `array` falls through, returning `undefined`
}

/**
 * Flattens a nested array (the nesting can be to any depth). If `isShallow`
 * is truthy, `array` will only be flattened a single level. If `callback`
 * is passed, each element of `array` is passed through a callback` before
 * flattening. The `callback` is bound to `thisArg` and invoked with three
 * arguments; (value, index, array).
 *
 * If a property name is passed for `callback`, the created "_.pluck" style
 * callback will return the property value of the given element.
 *
 * If an object is passed for `callback`, the created "_.where" style callback
 * will return `true` for elements that have the properties of the given object,
 * else `false`.
 *
 * @static
 * @memberOf _
 * @category Arrays
 * @param {Array} array The array to flatten.
 * @param {Boolean} [isShallow=false] A flag to indicate only flattening a single level.
 * @param {Function|Object|String} [callback=identity] The function called per
 *  iteration. If a property name or object is passed, it will be used to create
 *  a "_.pluck" or "_.where" style callback, respectively.
 * @param {Mixed} [thisArg] The `this` binding of `callback`.
 * @returns {Array} Returns a new flattened array.
 * @example
 *
 * _.flatten([1, [2], [3, [[4]]]]);
 * // => [1, 2, 3, 4];
 *
 * _.flatten([1, [2], [3, [[4]]]], true);
 * // => [1, 2, 3, [[4]]];
 *
 * var stooges = [
 *   { 'name': 'curly', 'quotes': ['Oh, a wise guy, eh?', 'Poifect!'] },
 *   { 'name': 'moe', 'quotes': ['Spread out!', 'You knucklehead!'] }
 * ];
 *
 * // using "_.pluck" callback shorthand
 * _.flatten(stooges, 'quotes');
 * // => ['Oh, a wise guy, eh?', 'Poifect!', 'Spread out!', 'You knucklehead!']
 */
function flatten(array, isShallow, callback, thisArg) {
  var index = -1,
      length = array ? array.length : 0,
      result = [];

  // juggle arguments: `_.flatten(array, callback, thisArg)` was called
  // with the callback in the `isShallow` slot
  if (typeof isShallow != 'boolean' && isShallow != null) {
    thisArg = callback;
    callback = isShallow;
    isShallow = false;
  }
  if (callback != null) {
    callback = lodash.createCallback(callback, thisArg);
  }
  while (++index < length) {
    var value = array[index];
    if (callback) {
      value = callback(value, index, array);
    }
    // recursively flatten arrays (susceptible to call stack limits)
    if (isArray(value)) {
      push.apply(result, isShallow ? value : flatten(value));
    } else {
      result.push(value);
    }
  }
  return result;
}

/**
 * Gets the index at which the first occurrence of `value` is found using
 * strict equality for comparisons, i.e. `===`. If the `array` is already
 * sorted, passing `true` for `fromIndex` will run a faster binary search.
 *
 * @static
 * @memberOf _
 * @category Arrays
 * @param {Array} array The array to search.
 * @param {Mixed} value The value to search for.
 * @param {Boolean|Number} [fromIndex=0] The index to search from or `true` to
 *  perform a binary search on a sorted `array`.
 * @returns {Number} Returns the index of the matched value or `-1`.
 * @example
 *
 * _.indexOf([1, 2, 3, 1, 2, 3], 2);
 * // => 1
 *
 * _.indexOf([1, 2, 3, 1, 2, 3], 2, 3);
 * // => 4
 *
 * _.indexOf([1, 1, 2, 2, 3, 3], 2, true);
 * // => 2
 */
function indexOf(array, value, fromIndex) {
  var index = -1,
      length = array ? array.length : 0;

  if (typeof fromIndex == 'number') {
    // negative `fromIndex` counts back from the end, clamped at 0;
    // subtract 1 because the scan below pre-increments
    index = (fromIndex < 0
      ? nativeMax(0, length + fromIndex)
      : fromIndex || 0) - 1;
  } else if (fromIndex) {
    // truthy non-number `fromIndex` (i.e. `true`): binary search a sorted array
    index = sortedIndex(array, value);
    return array[index] === value ? index : -1;
  }
  // linear scan with strict equality
  while (++index < length) {
    if (array[index] === value) {
      return index;
    }
  }
  return -1;
}

/**
 * Gets all but the last element of `array`. If a number `n` is passed, the
 * last `n` elements are excluded from the result. If a `callback` function
 * is passed, elements at the end of the array are excluded from the result
 * as long as the `callback` returns truthy. The `callback` is bound to
 * `thisArg` and invoked with three arguments; (value, index, array).
 *
 * If a property name is passed for `callback`, the created "_.pluck" style
 * callback will return the property value of the given element.
 *
 * If an object is passed for `callback`, the created "_.where" style callback
 * will return `true` for elements that have the properties of the given object,
 * else `false`.
 *
 * @static
 * @memberOf _
 * @category Arrays
 * @param {Array} array The array to query.
 * @param {Function|Object|Number|String} [callback|n=1] The function called
 *  per element or the number of elements to exclude. If a property name or
 *  object is passed, it will be used to create a "_.pluck" or "_.where"
 *  style callback, respectively.
 * @param {Mixed} [thisArg] The `this` binding of `callback`.
 * @returns {Array} Returns a slice of `array`.
 * @example
 *
 * _.initial([1, 2, 3]);
 * // => [1, 2]
 *
 * _.initial([1, 2, 3], 2);
 * // => [1]
 *
 * _.initial([1, 2, 3], function(num) {
 *   return num > 1;
 * });
 * // => [1]
 *
 * var food = [
 *   { 'name': 'beet',   'organic': false },
 *   { 'name': 'carrot', 'organic': true }
 * ];
 *
 * // using "_.pluck" callback shorthand
 * _.initial(food, 'organic');
 * // => [{ 'name': 'beet', 'organic': false }]
 *
 * var food = [
 *   { 'name': 'banana', 'type': 'fruit' },
 *   { 'name': 'beet',   'type': 'vegetable' },
 *   { 'name': 'carrot', 'type': 'vegetable' }
 * ];
 *
 * // using "_.where" callback shorthand
 * _.initial(food, { 'type': 'vegetable' });
 * // => [{ 'name': 'banana', 'type': 'fruit' }]
 */
function initial(array, callback, thisArg) {
  if (!array) {
    return [];
  }
  var n = 0,
      length = array.length;

  if (typeof callback != 'number' && callback != null) {
    // predicate form: count trailing elements while the callback holds
    var index = length;
    callback = lodash.createCallback(callback, thisArg);
    while (index-- && callback(array[index], index, array)) {
      n++;
    }
  } else {
    // numeric / absent form; default to excluding one element
    n = (callback == null || thisArg) ? 1 : callback || n;
  }
  // clamp so the slice end never goes below 0 or above `length`
  return slice(array, 0, nativeMin(nativeMax(0, length - n), length));
}

/**
 * Computes the intersection of all the passed-in arrays using strict equality
 * for comparisons, i.e. `===`.
 *
 * @static
 * @memberOf _
 * @category Arrays
 * @param {Array} [array1, array2, ...] Arrays to process.
 * @returns {Array} Returns a new array of unique elements that are present
 *  in **all** of the arrays.
 * @example
 *
 * _.intersection([1, 2, 3], [101, 2, 1, 10], [2, 1]);
 * // => [1, 2]
 */
function intersection(array) {
  var args = arguments,
      argsLength = args.length,
      // cache[0] buckets values already seen in the first array;
      // cache[i>0] will memoize contains-testers for the other arrays
      cache = { '0': {} },
      index = -1,
      length = array ? array.length : 0,
      isLarge = length >= largeArraySize,
      result = [],
      seen = result;

  outer:
  while (++index < length) {
    var value = array[index];
    if (isLarge) {
      // hash-bucket dedup: `inited` is truthy only when this is the first
      // time the key is seen (a fresh bucket array was just created)
      var key = keyPrefix + value;
      var inited = cache[0][key]
        ? !(seen = cache[0][key])
        : (seen = cache[0][key] = []);
    }
    if (inited || indexOf(seen, value) < 0) {
      if (isLarge) {
        seen.push(value);
      }
      // require the value to be present in every other argument array
      var argsIndex = argsLength;
      while (--argsIndex) {
        if (!(cache[argsIndex] || (cache[argsIndex] = cachedContains(args[argsIndex])))(value)) {
          continue outer;
        }
      }
      result.push(value);
    }
  }
  return result;
}

/**
 * Gets the last element of the `array`. If a number `n` is passed, the
 * last `n` elements of the `array` are returned. If a `callback` function
 * is passed, elements at the end of the array are returned as long as the
 * `callback` returns truthy. The `callback` is bound to `thisArg` and
 * invoked with three arguments;(value, index, array).
 *
 *
 * If a property name is passed for `callback`, the created "_.pluck" style
 * callback will return the property value of the given element.
 *
 * If an object is passed for `callback`, the created "_.where" style callback
 * will return `true` for elements that have the properties of the given object,
 * else `false`.
 *
 * @static
 * @memberOf _
 * @category Arrays
 * @param {Array} array The array to query.
 * @param {Function|Object|Number|String} [callback|n] The function called
 *  per element or the number of elements to return. If a property name or
 *  object is passed, it will be used to create a "_.pluck" or "_.where"
 *  style callback, respectively.
 * @param {Mixed} [thisArg] The `this` binding of `callback`.
 * @returns {Mixed} Returns the last element(s) of `array`.
 * @example
 *
 * _.last([1, 2, 3]);
 * // => 3
 *
 * _.last([1, 2, 3], 2);
 * // => [2, 3]
 *
 * _.last([1, 2, 3], function(num) {
 *   return num > 1;
 * });
 * // => [2, 3]
 *
 * var food = [
 *   { 'name': 'beet',   'organic': false },
 *   { 'name': 'carrot', 'organic': true }
 * ];
 *
 * // using "_.pluck" callback shorthand
 * _.last(food, 'organic');
 * // => [{ 'name': 'carrot', 'organic': true }]
 *
 * var food = [
 *   { 'name': 'banana', 'type': 'fruit' },
 *   { 'name': 'beet',   'type': 'vegetable' },
 *   { 'name': 'carrot', 'type': 'vegetable' }
 * ];
 *
 * // using "_.where" callback shorthand
 * _.last(food, { 'type': 'vegetable' });
 * // => [{ 'name': 'beet', 'type': 'vegetable' }, { 'name': 'carrot', 'type': 'vegetable' }]
 */
function last(array, callback, thisArg) {
  if (array) {
    var n = 0,
        length = array.length;

    if (typeof callback != 'number' && callback != null) {
      // predicate form: count trailing elements while the callback holds
      var index = length;
      callback = lodash.createCallback(callback, thisArg);
      while (index-- && callback(array[index], index, array)) {
        n++;
      }
    } else {
      // numeric / absent form; with no `n` (or a `thisArg` signalling
      // callback-style use) return just the last element, not an array
      n = callback;
      if (n == null || thisArg) {
        return array[length - 1];
      }
    }
    return slice(array, nativeMax(0, length - n));
  }
  // falsey `array` falls through, returning `undefined`
}

/**
 * Gets the index at which the last occurrence of `value` is found using strict
 * equality for comparisons, i.e. `===`. If `fromIndex` is negative, it is used
 * as the offset from the end of the collection.
 *
 * @static
 * @memberOf _
 * @category Arrays
 * @param {Array} array The array to search.
 * @param {Mixed} value The value to search for.
 * @param {Number} [fromIndex=array.length-1] The index to search from.
 * @returns {Number} Returns the index of the matched value or `-1`.
 * @example
 *
 * _.lastIndexOf([1, 2, 3, 1, 2, 3], 2);
 * // => 4
 *
 * _.lastIndexOf([1, 2, 3, 1, 2, 3], 2, 3);
 * // => 1
 */
function lastIndexOf(array, value, fromIndex) {
  var index = array ? array.length : 0;
  if (typeof fromIndex == 'number') {
    // clamp `fromIndex` to [0, length - 1]; add 1 because the scan
    // below pre-decrements
    index = (fromIndex < 0
      ? nativeMax(0, index + fromIndex)
      : nativeMin(fromIndex, index - 1)) + 1;
  }
  // scan backwards with strict equality
  while (index--) {
    if (array[index] === value) {
      return index;
    }
  }
  return -1;
}

/**
 * Creates an array of numbers (positive and/or negative) progressing from
 * `start` up to but not including `end`.
 *
 * @static
 * @memberOf _
 * @category Arrays
 * @param {Number} [start=0] The start of the range.
 * @param {Number} end The end of the range.
 * @param {Number} [step=1] The value to increment or decrement by.
 * @returns {Array} Returns a new range array.
 * @example
 *
 * _.range(10);
 * // => [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
 *
 * _.range(1, 11);
 * // => [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
 *
 * _.range(0, 30, 5);
 * // => [0, 5, 10, 15, 20, 25]
 *
 * _.range(0, -10, -1);
 * // => [0, -1, -2, -3, -4, -5, -6, -7, -8, -9]
 *
 * _.range(0);
 * // => []
 */
function range(start, end, step) {
  // coerce arguments to numbers; `+step || 1` also guards against step 0
  start = +start || 0;
  step = +step || 1;

  // single argument: range from 0 up to `start`
  if (end == null) {
    end = start;
    start = 0;
  }
  // use `Array(length)` so V8 will avoid the slower "dictionary" mode
  // http://youtu.be/XAqIpGU8ZZk#t=17m25s
  var index = -1,
      length = nativeMax(0, ceil((end - start) / step)),
      result = Array(length);

  while (++index < length) {
    result[index] = start;
    start += step;
  }
  return result;
}

/**
 * The opposite of `_.initial`, this method gets all but the first value of
 * `array`. If a number `n` is passed, the first `n` values are excluded from
 * the result. If a `callback` function is passed, elements at the beginning
 * of the array are excluded from the result as long as the `callback` returns
 * truthy. The `callback` is bound to `thisArg` and invoked with three
 * arguments; (value, index, array).
 *
 * If a property name is passed for `callback`, the created "_.pluck" style
 * callback will return the property value of the given element.
 *
 * If an object is passed for `callback`, the created "_.where" style callback
 * will return `true` for elements that have the properties of the given object,
 * else `false`.
 *
 * @static
 * @memberOf _
 * @alias drop, tail
 * @category Arrays
 * @param {Array} array The array to query.
 * @param {Function|Object|Number|String} [callback|n=1] The function called
 *  per element or the number of elements to exclude. If a property name or
 *  object is passed, it will be used to create a "_.pluck" or "_.where"
 *  style callback, respectively.
 * @param {Mixed} [thisArg] The `this` binding of `callback`.
 * @returns {Array} Returns a slice of `array`.
 * @example
 *
 * _.rest([1, 2, 3]);
 * // => [2, 3]
 *
 * _.rest([1, 2, 3], 2);
 * // => [3]
 *
 * _.rest([1, 2, 3], function(num) {
 *   return num < 3;
 * });
 * // => [3]
 *
 * var food = [
 *   { 'name': 'banana', 'organic': true },
 *   { 'name': 'beet',   'organic': false },
 * ];
 *
 * // using "_.pluck" callback shorthand
 * _.rest(food, 'organic');
 * // => [{ 'name': 'beet', 'organic': false }]
 *
 * var food = [
 *   { 'name': 'apple',  'type': 'fruit' },
 *   { 'name': 'banana', 'type': 'fruit' },
 *   { 'name': 'beet',   'type': 'vegetable' }
 * ];
 *
 * // using "_.where" callback shorthand
 * _.rest(food, { 'type': 'fruit' });
 * // => [{ 'name': 'beet', 'type': 'vegetable' }]
 */
function rest(array, callback, thisArg) {
  if (typeof callback != 'number' && callback != null) {
    // predicate form: count leading elements while the callback holds.
    // NOTE: `var n` is hoisted, so the `else` branch below can assign it.
    var n = 0,
        index = -1,
        length = array ? array.length : 0;

    callback = lodash.createCallback(callback, thisArg);
    while (++index < length && callback(array[index], index, array)) {
      n++;
    }
  } else {
    // numeric / absent form; default to dropping one element
    n = (callback == null || thisArg) ? 1 : nativeMax(0, callback);
  }
  return slice(array, n);
}

/**
 * Uses a binary search to determine the smallest index at which the `value`
 * should be inserted into `array` in order to maintain the sort order of the
 * sorted `array`. If `callback` is passed, it will be executed for `value` and
 * each element in `array` to compute their sort ranking. The `callback` is
 * bound to `thisArg` and invoked with one argument; (value).
* * If a property name is passed for `callback`, the created "_.pluck" style * callback will return the property value of the given element. * * If an object is passed for `callback`, the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @category Arrays * @param {Array} array The array to inspect. * @param {Mixed} value The value to evaluate. * @param {Function|Object|String} [callback=identity] The function called per * iteration. If a property name or object is passed, it will be used to create * a "_.pluck" or "_.where" style callback, respectively. * @param {Mixed} [thisArg] The `this` binding of `callback`. * @returns {Number} Returns the index at which the value should be inserted * into `array`. * @example * * _.sortedIndex([20, 30, 50], 40); * // => 2 * * // using "_.pluck" callback shorthand * _.sortedIndex([{ 'x': 20 }, { 'x': 30 }, { 'x': 50 }], { 'x': 40 }, 'x'); * // => 2 * * var dict = { * 'wordToNumber': { 'twenty': 20, 'thirty': 30, 'fourty': 40, 'fifty': 50 } * }; * * _.sortedIndex(['twenty', 'thirty', 'fifty'], 'fourty', function(word) { * return dict.wordToNumber[word]; * }); * // => 2 * * _.sortedIndex(['twenty', 'thirty', 'fifty'], 'fourty', function(word) { * return this.wordToNumber[word]; * }, dict); * // => 2 */ function sortedIndex(array, value, callback, thisArg) { var low = 0, high = array ? array.length : low; // explicitly reference `identity` for better inlining in Firefox callback = callback ? lodash.createCallback(callback, thisArg, 1) : identity; value = callback(value); while (low < high) { var mid = (low + high) >>> 1; (callback(array[mid]) < value) ? low = mid + 1 : high = mid; } return low; } /** * Computes the union of the passed-in arrays using strict equality for * comparisons, i.e. `===`. * * @static * @memberOf _ * @category Arrays * @param {Array} [array1, array2, ...] Arrays to process. 
 * @returns {Array} Returns a new array of unique values, in order, that are
 *  present in one or more of the arrays.
 * @example
 *
 * _.union([1, 2, 3], [101, 2, 1, 10], [2, 1]);
 * // => [1, 2, 3, 101, 10]
 */
function union(array) {
  // normalize a non-array first argument in place (writing through the
  // `arguments` object) so `concat` below spreads it correctly
  if (!isArray(array)) {
    arguments[0] = array ? nativeSlice.call(array) : arrayRef;
  }
  // concatenate everything, then dedupe while preserving first-seen order
  return uniq(concat.apply(arrayRef, arguments));
}

/**
 * Creates a duplicate-value-free version of the `array` using strict equality
 * for comparisons, i.e. `===`. If the `array` is already sorted, passing `true`
 * for `isSorted` will run a faster algorithm. If `callback` is passed, each
 * element of `array` is passed through a callback` before uniqueness is computed.
 * The `callback` is bound to `thisArg` and invoked with three arguments; (value, index, array).
 *
 * If a property name is passed for `callback`, the created "_.pluck" style
 * callback will return the property value of the given element.
 *
 * If an object is passed for `callback`, the created "_.where" style callback
 * will return `true` for elements that have the properties of the given object,
 * else `false`.
 *
 * @static
 * @memberOf _
 * @alias unique
 * @category Arrays
 * @param {Array} array The array to process.
 * @param {Boolean} [isSorted=false] A flag to indicate that the `array` is already sorted.
 * @param {Function|Object|String} [callback=identity] The function called per
 *  iteration. If a property name or object is passed, it will be used to create
 *  a "_.pluck" or "_.where" style callback, respectively.
 * @param {Mixed} [thisArg] The `this` binding of `callback`.
 * @returns {Array} Returns a duplicate-value-free array.
 * @example
 *
 * _.uniq([1, 2, 1, 3, 1]);
 * // => [1, 2, 3]
 *
 * _.uniq([1, 1, 2, 2, 3], true);
 * // => [1, 2, 3]
 *
 * _.uniq([1, 2, 1.5, 3, 2.5], function(num) { return Math.floor(num); });
 * // => [1, 2, 3]
 *
 * _.uniq([1, 2, 1.5, 3, 2.5], function(num) { return this.floor(num); }, Math);
 * // => [1, 2, 3]
 *
 * // using "_.pluck" callback shorthand
 * _.uniq([{ 'x': 1 }, { 'x': 2 }, { 'x': 1 }], 'x');
 * // => [{ 'x': 1 }, { 'x': 2 }]
 */
function uniq(array, isSorted, callback, thisArg) {
  var index = -1,
      length = array ? array.length : 0,
      result = [],
      seen = result;

  // juggle arguments: `_.uniq(array, callback, thisArg)` was called
  // with the callback in the `isSorted` slot
  if (typeof isSorted != 'boolean' && isSorted != null) {
    thisArg = callback;
    callback = isSorted;
    isSorted = false;
  }
  // init value cache for large arrays
  var isLarge = !isSorted && length >= largeArraySize;
  if (isLarge) {
    var cache = {};
  }
  if (callback != null) {
    // computed values are tracked in a separate `seen` list so `result`
    // can still hold the original values
    seen = [];
    callback = lodash.createCallback(callback, thisArg);
  }
  while (++index < length) {
    var value = array[index],
        computed = callback ? callback(value, index, array) : value;

    if (isLarge) {
      // hash-bucket dedup: `inited` is truthy only when this is the first
      // time the key is seen (a fresh bucket array was just created)
      var key = keyPrefix + computed;
      var inited = cache[key]
        ? !(seen = cache[key])
        : (seen = cache[key] = []);
    }
    if (isSorted
          ? !index || seen[seen.length - 1] !== computed
          : inited || indexOf(seen, computed) < 0
        ) {
      if (callback || isLarge) {
        seen.push(computed);
      }
      result.push(value);
    }
  }
  return result;
}

/**
 * The inverse of `_.zip`, this method splits groups of elements into arrays
 * composed of elements from each group at their corresponding indexes.
 *
 * @static
 * @memberOf _
 * @category Arrays
 * @param {Array} array The array to process.
 * @returns {Array} Returns a new array of the composed arrays.
 * @example
 *
 * _.unzip([['moe', 30, true], ['larry', 40, false]]);
 * // => [['moe', 'larry'], [30, 40], [true, false]];
 */
function unzip(array) {
  var index = -1,
      length = array ? array.length : 0,
      // result width = longest tuple in the input
      tupleLength = length ? max(pluck(array, 'length')) : 0,
      result = Array(tupleLength);

  while (++index < length) {
    var tupleIndex = -1,
        tuple = array[index];

    while (++tupleIndex < tupleLength) {
      // lazily create each output row, then transpose the element into it
      (result[tupleIndex] || (result[tupleIndex] = Array(length)))[index] = tuple[tupleIndex];
    }
  }
  return result;
}

/**
 * Creates an array with all occurrences of the passed values removed using
 * strict equality for comparisons, i.e. `===`.
 *
 * @static
 * @memberOf _
 * @category Arrays
 * @param {Array} array The array to filter.
 * @param {Mixed} [value1, value2, ...] Values to remove.
 * @returns {Array} Returns a new filtered array.
 * @example
 *
 * _.without([1, 2, 1, 0, 3, 1, 4], 0, 1);
 * // => [2, 3, 4]
 */
function without(array) {
  // `without(array, a, b)` is just `difference(array, [a, b])`
  return difference(array, nativeSlice.call(arguments, 1));
}

/**
 * Groups the elements of each array at their corresponding indexes. Useful for
 * separate data sources that are coordinated through matching array indexes.
 * For a matrix of nested arrays, `_.zip.apply(...)` can transpose the matrix
 * in a similar fashion.
 *
 * @static
 * @memberOf _
 * @category Arrays
 * @param {Array} [array1, array2, ...] Arrays to process.
 * @returns {Array} Returns a new array of grouped elements.
 * @example
 *
 * _.zip(['moe', 'larry'], [30, 40], [true, false]);
 * // => [['moe', 30, true], ['larry', 40, false]]
 */
function zip(array) {
  var index = -1,
      // output length = longest of the argument arrays
      length = array ? max(pluck(arguments, 'length')) : 0,
      result = Array(length);

  while (++index < length) {
    // `pluck(arguments, index)` gathers the i-th element of every array
    result[index] = pluck(arguments, index);
  }
  return result;
}

/**
 * Creates an object composed from arrays of `keys` and `values`. Pass either
 * a single two dimensional array, i.e. `[[key1, value1], [key2, value2]]`, or
 * two arrays, one of `keys` and one of corresponding `values`.
 *
 * @static
 * @memberOf _
 * @alias object
 * @category Arrays
 * @param {Array} keys The array of keys.
 * @param {Array} [values=[]] The array of values.
 * @returns {Object} Returns an object composed of the given keys and
 *  corresponding values.
* @example * * _.zipObject(['moe', 'larry'], [30, 40]); * // => { 'moe': 30, 'larry': 40 } */ function zipObject(keys, values) { var index = -1, length = keys ? keys.length : 0, result = {}; while (++index < length) { var key = keys[index]; if (values) { result[key] = values[index]; } else { result[key[0]] = key[1]; } } return result; } /*--------------------------------------------------------------------------*/ /** * If `n` is greater than `0`, a function is created that is restricted to * executing `func`, with the `this` binding and arguments of the created * function, only after it is called `n` times. If `n` is less than `1`, * `func` is executed immediately, without a `this` binding or additional * arguments, and its result is returned. * * @static * @memberOf _ * @category Functions * @param {Number} n The number of times the function must be called before * it is executed. * @param {Function} func The function to restrict. * @returns {Function} Returns the new restricted function. * @example * * var renderNotes = _.after(notes.length, render); * _.forEach(notes, function(note) { * note.asyncSave({ 'success': renderNotes }); * }); * // `renderNotes` is run once, after all notes have saved */ function after(n, func) { if (n < 1) { return func(); } return function() { if (--n < 1) { return func.apply(this, arguments); } }; } /** * Creates a function that, when called, invokes `func` with the `this` * binding of `thisArg` and prepends any additional `bind` arguments to those * passed to the bound function. * * @static * @memberOf _ * @category Functions * @param {Function} func The function to bind. * @param {Mixed} [thisArg] The `this` binding of `func`. * @param {Mixed} [arg1, arg2, ...] Arguments to be partially applied. * @returns {Function} Returns the new bound function. 
* @example * * var func = function(greeting) { * return greeting + ' ' + this.name; * }; * * func = _.bind(func, { 'name': 'moe' }, 'hi'); * func(); * // => 'hi moe' */ function bind(func, thisArg) { // use `Function#bind` if it exists and is fast // (in V8 `Function#bind` is slower except when partially applied) return support.fastBind || (nativeBind && arguments.length > 2) ? nativeBind.call.apply(nativeBind, arguments) : createBound(func, thisArg, nativeSlice.call(arguments, 2)); } /** * Binds methods on `object` to `object`, overwriting the existing method. * Method names may be specified as individual arguments or as arrays of method * names. If no method names are provided, all the function properties of `object` * will be bound. * * @static * @memberOf _ * @category Functions * @param {Object} object The object to bind and assign the bound methods to. * @param {String} [methodName1, methodName2, ...] Method names on the object to bind. * @returns {Object} Returns `object`. * @example * * var view = { * 'label': 'docs', * 'onClick': function() { alert('clicked ' + this.label); } * }; * * _.bindAll(view); * jQuery('#docs').on('click', view.onClick); * // => alerts 'clicked docs', when the button is clicked */ function bindAll(object) { var funcs = arguments.length > 1 ? concat.apply(arrayRef, nativeSlice.call(arguments, 1)) : functions(object), index = -1, length = funcs.length; while (++index < length) { var key = funcs[index]; object[key] = bind(object[key], object); } return object; } /** * Creates a function that, when called, invokes the method at `object[key]` * and prepends any additional `bindKey` arguments to those passed to the bound * function. This method differs from `_.bind` by allowing bound functions to * reference methods that will be redefined or don't yet exist. * See http://michaux.ca/articles/lazy-function-definition-pattern. * * @static * @memberOf _ * @category Functions * @param {Object} object The object the method belongs to. 
* @param {String} key The key of the method. * @param {Mixed} [arg1, arg2, ...] Arguments to be partially applied. * @returns {Function} Returns the new bound function. * @example * * var object = { * 'name': 'moe', * 'greet': function(greeting) { * return greeting + ' ' + this.name; * } * }; * * var func = _.bindKey(object, 'greet', 'hi'); * func(); * // => 'hi moe' * * object.greet = function(greeting) { * return greeting + ', ' + this.name + '!'; * }; * * func(); * // => 'hi, moe!' */ function bindKey(object, key) { return createBound(object, key, nativeSlice.call(arguments, 2), indicatorObject); } /** * Creates a function that is the composition of the passed functions, * where each function consumes the return value of the function that follows. * For example, composing the functions `f()`, `g()`, and `h()` produces `f(g(h()))`. * Each function is executed with the `this` binding of the composed function. * * @static * @memberOf _ * @category Functions * @param {Function} [func1, func2, ...] Functions to compose. * @returns {Function} Returns the new composed function. * @example * * var greet = function(name) { return 'hi ' + name; }; * var exclaim = function(statement) { return statement + '!'; }; * var welcome = _.compose(exclaim, greet); * welcome('moe'); * // => 'hi moe!' */ function compose() { var funcs = arguments; return function() { var args = arguments, length = funcs.length; while (length--) { args = [funcs[length].apply(this, args)]; } return args[0]; }; } /** * Produces a callback bound to an optional `thisArg`. If `func` is a property * name, the created callback will return the property value for a given element. * If `func` is an object, the created callback will return `true` for elements * that contain the equivalent object properties, otherwise it will return `false`. * * Note: All Lo-Dash methods, that accept a `callback` argument, use `_.createCallback`. 
 *
 * @static
 * @memberOf _
 * @category Functions
 * @param {Mixed} [func=identity] The value to convert to a callback.
 * @param {Mixed} [thisArg] The `this` binding of the created callback.
 * @param {Number} [argCount=3] The number of arguments the callback accepts.
 * @returns {Function} Returns a callback function.
 * @example
 *
 * var stooges = [
 *   { 'name': 'moe', 'age': 40 },
 *   { 'name': 'larry', 'age': 50 }
 * ];
 *
 * // create mixins with support for "_.pluck" and "_.where" callback shorthands
 * _.mixin({
 *   'toLookup': function(collection, callback, thisArg) {
 *     callback = _.createCallback(callback, thisArg);
 *     return _.reduce(collection, function(result, value, index, collection) {
 *       return (result[callback(value, index, collection)] = value, result);
 *     }, {});
 *   }
 * });
 *
 * _.toLookup(stooges, 'name');
 * // => { 'moe': { 'name': 'moe', 'age': 40 }, 'larry': { 'name': 'larry', 'age': 50 } }
 */
function createCallback(func, thisArg, argCount) {
  // a `null`/`undefined` callback degenerates to the identity function
  if (func == null) {
    return identity;
  }
  var type = typeof func;
  if (type != 'function') {
    // "_.pluck"-style shorthand: a property name becomes a property accessor
    if (type != 'object') {
      return function(object) {
        return object[func];
      };
    }
    // "_.where"-style shorthand: an object becomes a property matcher
    var props = keys(func);
    return function(object) {
      var length = props.length,
          result = false;
      // bail out on the first property that fails the deep comparison
      while (length--) {
        if (!(result = isEqual(object[props[length]], func[props[length]], indicatorObject))) {
          break;
        }
      }
      return result;
    };
  }
  if (typeof thisArg != 'undefined') {
    // fixed-arity wrappers avoid `apply` overhead for common callback arities
    if (argCount === 1) {
      return function(value) {
        return func.call(thisArg, value);
      };
    }
    if (argCount === 2) {
      return function(a, b) {
        return func.call(thisArg, a, b);
      };
    }
    if (argCount === 4) {
      return function(accumulator, value, index, collection) {
        return func.call(thisArg, accumulator, value, index, collection);
      };
    }
    return function(value, index, collection) {
      return func.call(thisArg, value, index, collection);
    };
  }
  // no `thisArg` given: the function can be used as-is
  return func;
}

/**
 * Creates a function that will delay the execution of `func` until after
 * `wait` milliseconds have elapsed since the last time it was invoked. Pass
 * an `options` object to indicate that `func` should be invoked on the leading
 * and/or trailing edge of the `wait` timeout. Subsequent calls to the debounced
 * function will return the result of the last `func` call.
 *
 * @static
 * @memberOf _
 * @category Functions
 * @param {Function} func The function to debounce.
 * @param {Number} wait The number of milliseconds to delay.
 * @param {Object} options The options object.
 *  [leading=false] A boolean to specify execution on the leading edge of the timeout.
 *  [trailing=true] A boolean to specify execution on the trailing edge of the timeout.
 * @returns {Function} Returns the new debounced function.
 * @example
 *
 * var lazyLayout = _.debounce(calculateLayout, 300);
 * jQuery(window).on('resize', lazyLayout);
 */
function debounce(func, wait, options) {
  var args,
      result,
      thisArg,
      timeoutId,
      trailing = true;

  // timer callback: fires once the burst of calls has gone quiet
  function delayed() {
    timeoutId = null;
    if (trailing) {
      result = func.apply(thisArg, args);
    }
  }
  if (options === true) {
    // `options === true` is shorthand for leading-edge-only behavior;
    // NOTE: `leading` is `var`-hoisted to function scope even though it is
    // declared inside this block
    var leading = true;
    trailing = false;
  } else if (options && objectTypes[typeof options]) {
    leading = options.leading;
    trailing = 'trailing' in options ? options.trailing : trailing;
  }
  return function() {
    // invoke on the leading edge only when no timer is currently pending
    var isLeading = leading && !timeoutId;
    args = arguments;
    thisArg = this;

    // every call resets the quiet-period timer
    clearTimeout(timeoutId);
    timeoutId = setTimeout(delayed, wait);

    if (isLeading) {
      result = func.apply(thisArg, args);
    }
    return result;
  };
}

/**
 * Defers executing the `func` function until the current call stack has cleared.
 * Additional arguments will be passed to `func` when it is invoked.
* * @static * @memberOf _ * @category Functions * @param {Function} func The function to defer. * @param {Mixed} [arg1, arg2, ...] Arguments to invoke the function with. * @returns {Number} Returns the timer id. * @example * * _.defer(function() { alert('deferred'); }); * // returns from the function before `alert` is called */ function defer(func) { var args = nativeSlice.call(arguments, 1); return setTimeout(function() { func.apply(undefined, args); }, 1); } // use `setImmediate` if it's available in Node.js if (isV8 && freeModule && typeof setImmediate == 'function') { defer = bind(setImmediate, context); } /** * Executes the `func` function after `wait` milliseconds. Additional arguments * will be passed to `func` when it is invoked. * * @static * @memberOf _ * @category Functions * @param {Function} func The function to delay. * @param {Number} wait The number of milliseconds to delay execution. * @param {Mixed} [arg1, arg2, ...] Arguments to invoke the function with. * @returns {Number} Returns the timer id. * @example * * var log = _.bind(console.log, console); * _.delay(log, 1000, 'logged later'); * // => 'logged later' (Appears after one second.) */ function delay(func, wait) { var args = nativeSlice.call(arguments, 2); return setTimeout(function() { func.apply(undefined, args); }, wait); } /** * Creates a function that memoizes the result of `func`. If `resolver` is * passed, it will be used to determine the cache key for storing the result * based on the arguments passed to the memoized function. By default, the first * argument passed to the memoized function is used as the cache key. The `func` * is executed with the `this` binding of the memoized function. * * @static * @memberOf _ * @category Functions * @param {Function} func The function to have its output memoized. * @param {Function} [resolver] A function used to resolve the cache key. * @returns {Function} Returns the new memoizing function. 
* @example * * var fibonacci = _.memoize(function(n) { * return n < 2 ? n : fibonacci(n - 1) + fibonacci(n - 2); * }); */ function memoize(func, resolver) { var cache = {}; return function() { var key = keyPrefix + (resolver ? resolver.apply(this, arguments) : arguments[0]); return hasOwnProperty.call(cache, key) ? cache[key] : (cache[key] = func.apply(this, arguments)); }; } /** * Creates a function that is restricted to execute `func` once. Repeat calls to * the function will return the value of the first call. The `func` is executed * with the `this` binding of the created function. * * @static * @memberOf _ * @category Functions * @param {Function} func The function to restrict. * @returns {Function} Returns the new restricted function. * @example * * var initialize = _.once(createApplication); * initialize(); * initialize(); * // `initialize` executes `createApplication` once */ function once(func) { var ran, result; return function() { if (ran) { return result; } ran = true; result = func.apply(this, arguments); // clear the `func` variable so the function may be garbage collected func = null; return result; }; } /** * Creates a function that, when called, invokes `func` with any additional * `partial` arguments prepended to those passed to the new function. This * method is similar to `_.bind`, except it does **not** alter the `this` binding. * * @static * @memberOf _ * @category Functions * @param {Function} func The function to partially apply arguments to. * @param {Mixed} [arg1, arg2, ...] Arguments to be partially applied. * @returns {Function} Returns the new partially applied function. * @example * * var greet = function(greeting, name) { return greeting + ' ' + name; }; * var hi = _.partial(greet, 'hi'); * hi('moe'); * // => 'hi moe' */ function partial(func) { return createBound(func, nativeSlice.call(arguments, 1)); } /** * This method is similar to `_.partial`, except that `partial` arguments are * appended to those passed to the new function. 
* * @static * @memberOf _ * @category Functions * @param {Function} func The function to partially apply arguments to. * @param {Mixed} [arg1, arg2, ...] Arguments to be partially applied. * @returns {Function} Returns the new partially applied function. * @example * * var defaultsDeep = _.partialRight(_.merge, _.defaults); * * var options = { * 'variable': 'data', * 'imports': { 'jq': $ } * }; * * defaultsDeep(options, _.templateSettings); * * options.variable * // => 'data' * * options.imports * // => { '_': _, 'jq': $ } */ function partialRight(func) { return createBound(func, nativeSlice.call(arguments, 1), null, indicatorObject); } /** * Creates a function that, when executed, will only call the `func` function * at most once per every `wait` milliseconds. If the throttled function is * invoked more than once during the `wait` timeout, `func` will also be called * on the trailing edge of the timeout. Pass an `options` object to indicate * that `func` should be invoked on the leading and/or trailing edge of the * `wait` timeout. Subsequent calls to the throttled function will return * the result of the last `func` call. * * @static * @memberOf _ * @category Functions * @param {Function} func The function to throttle. * @param {Number} wait The number of milliseconds to throttle executions to. * @param {Object} options The options object. * [leading=true] A boolean to specify execution on the leading edge of the timeout. * [trailing=true] A boolean to specify execution on the trailing edge of the timeout. * @returns {Function} Returns the new throttled function. 
 * @example
 *
 * var throttled = _.throttle(updatePosition, 100);
 * jQuery(window).on('scroll', throttled);
 */
function throttle(func, wait, options) {
  var args,
      result,
      thisArg,
      timeoutId,
      lastCalled = 0,
      leading = true,
      trailing = true;

  // timer callback: performs the deferred trailing-edge invocation
  function trailingCall() {
    lastCalled = new Date;
    timeoutId = null;
    if (trailing) {
      result = func.apply(thisArg, args);
    }
  }
  if (options === false) {
    // `options === false` is shorthand for disabling leading-edge calls
    leading = false;
  } else if (options && objectTypes[typeof options]) {
    leading = 'leading' in options ? options.leading : leading;
    trailing = 'trailing' in options ? options.trailing : trailing;
  }
  return function() {
    var now = new Date;
    // when leading calls are disabled, restart the window on the first call
    if (!timeoutId && !leading) {
      lastCalled = now;
    }
    var remaining = wait - (now - lastCalled);
    args = arguments;
    thisArg = this;

    if (remaining <= 0) {
      // window expired: invoke immediately and start a new window
      clearTimeout(timeoutId);
      timeoutId = null;
      lastCalled = now;
      result = func.apply(thisArg, args);
    }
    else if (!timeoutId) {
      // schedule a trailing-edge call for when the window closes
      timeoutId = setTimeout(trailingCall, remaining);
    }
    return result;
  };
}

/**
 * Creates a function that passes `value` to the `wrapper` function as its
 * first argument. Additional arguments passed to the function are appended
 * to those passed to the `wrapper` function. The `wrapper` is executed with
 * the `this` binding of the created function.
 *
 * @static
 * @memberOf _
 * @category Functions
 * @param {Mixed} value The value to wrap.
 * @param {Function} wrapper The wrapper function.
 * @returns {Function} Returns the new function.
 * @example
 *
 * var hello = function(name) { return 'hello ' + name; };
 * hello = _.wrap(hello, function(func) {
 *   return 'before, ' + func('moe') + ', after';
 * });
 * hello();
 * // => 'before, hello moe, after'
 */
function wrap(value, wrapper) {
  return function() {
    // prepend `value`, then forward any caller-supplied arguments
    var args = [value];
    push.apply(args, arguments);
    return wrapper.apply(this, args);
  };
}

/*--------------------------------------------------------------------------*/

/**
 * Converts the characters `&`, `<`, `>`, `"`, and `'` in `string` to their
 * corresponding HTML entities.
* * @static * @memberOf _ * @category Utilities * @param {String} string The string to escape. * @returns {String} Returns the escaped string. * @example * * _.escape('Moe, Larry & Curly'); * // => 'Moe, Larry &amp; Curly' */ function escape(string) { return string == null ? '' : String(string).replace(reUnescapedHtml, escapeHtmlChar); } /** * This function returns the first argument passed to it. * * @static * @memberOf _ * @category Utilities * @param {Mixed} value Any value. * @returns {Mixed} Returns `value`. * @example * * var moe = { 'name': 'moe' }; * moe === _.identity(moe); * // => true */ function identity(value) { return value; } /** * Adds functions properties of `object` to the `lodash` function and chainable * wrapper. * * @static * @memberOf _ * @category Utilities * @param {Object} object The object of function properties to add to `lodash`. * @example * * _.mixin({ * 'capitalize': function(string) { * return string.charAt(0).toUpperCase() + string.slice(1).toLowerCase(); * } * }); * * _.capitalize('moe'); * // => 'Moe' * * _('moe').capitalize(); * // => 'Moe' */ function mixin(object) { forEach(functions(object), function(methodName) { var func = lodash[methodName] = object[methodName]; lodash.prototype[methodName] = function() { var value = this.__wrapped__, args = [value]; push.apply(args, arguments); var result = func.apply(lodash, args); return (value && typeof value == 'object' && value == result) ? this : new lodashWrapper(result); }; }); } /** * Reverts the '_' variable to its previous value and returns a reference to * the `lodash` function. * * @static * @memberOf _ * @category Utilities * @returns {Function} Returns the `lodash` function. * @example * * var lodash = _.noConflict(); */ function noConflict() { context._ = oldDash; return this; } /** * Converts the given `value` into an integer of the specified `radix`. * * Note: This method avoids differences in native ES3 and ES5 `parseInt` * implementations. See http://es5.github.com/#E. 
* * @static * @memberOf _ * @category Utilities * @param {Mixed} value The value to parse. * @returns {Number} Returns the new integer value. * @example * * _.parseInt('08'); * // => 8 */ var parseInt = nativeParseInt('08') == 8 ? nativeParseInt : function(value, radix) { // Firefox and Opera still follow the ES3 specified implementation of `parseInt` return nativeParseInt(isString(value) ? value.replace(reLeadingZeros, '') : value, radix || 0); }; /** * Produces a random number between `min` and `max` (inclusive). If only one * argument is passed, a number between `0` and the given number will be returned. * * @static * @memberOf _ * @category Utilities * @param {Number} [min=0] The minimum possible value. * @param {Number} [max=1] The maximum possible value. * @returns {Number} Returns a random number. * @example * * _.random(0, 5); * // => a number between 0 and 5 * * _.random(5); * // => also a number between 0 and 5 */ function random(min, max) { if (min == null && max == null) { max = 1; } min = +min || 0; if (max == null) { max = min; min = 0; } return min + floor(nativeRandom() * ((+max || 0) - min + 1)); } /** * Resolves the value of `property` on `object`. If `property` is a function, * it will be invoked with the `this` binding of `object` and its result returned, * else the property value is returned. If `object` is falsey, then `undefined` * is returned. * * @static * @memberOf _ * @category Utilities * @param {Object} object The object to inspect. * @param {String} property The property to get the value of. * @returns {Mixed} Returns the resolved value. * @example * * var object = { * 'cheese': 'crumpets', * 'stuff': function() { * return 'nonsense'; * } * }; * * _.result(object, 'cheese'); * // => 'crumpets' * * _.result(object, 'stuff'); * // => 'nonsense' */ function result(object, property) { var value = object ? object[property] : undefined; return isFunction(value) ? 
object[property]() : value; } /** * A micro-templating method that handles arbitrary delimiters, preserves * whitespace, and correctly escapes quotes within interpolated code. * * Note: In the development build, `_.template` utilizes sourceURLs for easier * debugging. See http://www.html5rocks.com/en/tutorials/developertools/sourcemaps/#toc-sourceurl * * For more information on precompiling templates see: * http://lodash.com/#custom-builds * * For more information on Chrome extension sandboxes see: * http://developer.chrome.com/stable/extensions/sandboxingEval.html * * @static * @memberOf _ * @category Utilities * @param {String} text The template text. * @param {Object} data The data object used to populate the text. * @param {Object} options The options object. * escape - The "escape" delimiter regexp. * evaluate - The "evaluate" delimiter regexp. * interpolate - The "interpolate" delimiter regexp. * sourceURL - The sourceURL of the template's compiled source. * variable - The data object variable name. * @returns {Function|String} Returns a compiled function when no `data` object * is given, else it returns the interpolated text. * @example * * // using a compiled template * var compiled = _.template('hello <%= name %>'); * compiled({ 'name': 'moe' }); * // => 'hello moe' * * var list = '<% _.forEach(people, function(name) { %><li><%= name %></li><% }); %>'; * _.template(list, { 'people': ['moe', 'larry'] }); * // => '<li>moe</li><li>larry</li>' * * // using the "escape" delimiter to escape HTML in data property values * _.template('<b><%- value %></b>', { 'value': '<script>' }); * // => '<b>&lt;script&gt;</b>' * * // using the ES6 delimiter as an alternative to the default "interpolate" delimiter * _.template('hello ${ name }', { 'name': 'curly' }); * // => 'hello curly' * * // using the internal `print` function in "evaluate" delimiters * _.template('<% print("hello " + epithet); %>!', { 'epithet': 'stooge' }); * // => 'hello stooge!' 
 *
 * // using custom template delimiters
 * _.templateSettings = {
 *   'interpolate': /{{([\s\S]+?)}}/g
 * };
 *
 * _.template('hello {{ name }}!', { 'name': 'mustache' });
 * // => 'hello mustache!'
 *
 * // using the `sourceURL` option to specify a custom sourceURL for the template
 * var compiled = _.template('hello <%= name %>', null, { 'sourceURL': '/basic/greeting.jst' });
 * compiled(data);
 * // => find the source of "greeting.jst" under the Sources tab or Resources panel of the web inspector
 *
 * // using the `variable` option to ensure a with-statement isn't used in the compiled template
 * var compiled = _.template('hi <%= data.name %>!', null, { 'variable': 'data' });
 * compiled.source;
 * // => function(data) {
 *   var __t, __p = '', __e = _.escape;
 *   __p += 'hi ' + ((__t = ( data.name )) == null ? '' : __t) + '!';
 *   return __p;
 * }
 *
 * // using the `source` property to inline compiled templates for meaningful
 * // line numbers in error messages and a stack trace
 * fs.writeFileSync(path.join(cwd, 'jst.js'), '\
 *   var JST = {\
 *     "main": ' + _.template(mainText).source + '\
 *   };\
 * ');
 */
function template(text, data, options) {
  // based on John Resig's `tmpl` implementation
  // http://ejohn.org/blog/javascript-micro-templating/
  // and Laura Doktorova's doT.js
  // https://github.com/olado/doT
  var settings = lodash.templateSettings;
  text || (text = '');

  // avoid missing dependencies when `iteratorTemplate` is not defined
  options = iteratorTemplate ? defaults({}, options, settings) : settings;

  var imports = iteratorTemplate && defaults({}, options.imports, settings.imports),
      importsKeys = iteratorTemplate ? keys(imports) : ['_'],
      importsValues = iteratorTemplate ? values(imports) : [lodash];

  var isEvaluating,
      index = 0,
      interpolate = options.interpolate || reNoMatch,
      source = "__p += '";

  // compile the regexp to match each delimiter; the ES6 delimiter is only
  // recognized when the default "interpolate" delimiter is in effect
  var reDelimiters = RegExp(
    (options.escape || reNoMatch).source + '|' +
    interpolate.source + '|' +
    (interpolate === reInterpolate ? reEsTemplate : reNoMatch).source + '|' +
    (options.evaluate || reNoMatch).source + '|$'
  , 'g');

  text.replace(reDelimiters, function(match, escapeValue, interpolateValue, esTemplateValue, evaluateValue, offset) {
    interpolateValue || (interpolateValue = esTemplateValue);

    // escape characters that cannot be included in string literals
    source += text.slice(index, offset).replace(reUnescapedString, escapeStringChar);

    // replace delimiters with snippets
    if (escapeValue) {
      source += "' +\n__e(" + escapeValue + ") +\n'";
    }
    if (evaluateValue) {
      isEvaluating = true;
      source += "';\n" + evaluateValue + ";\n__p += '";
    }
    if (interpolateValue) {
      source += "' +\n((__t = (" + interpolateValue + ")) == null ? '' : __t) +\n'";
    }
    index = offset + match.length;

    // the JS engine embedded in Adobe products requires returning the `match`
    // string in order to produce the correct `offset` value
    return match;
  });

  source += "';\n";

  // if `variable` is not specified, wrap a with-statement around the generated
  // code to add the data object to the top of the scope chain
  var variable = options.variable,
      hasVariable = variable;

  if (!hasVariable) {
    variable = 'obj';
    source = 'with (' + variable + ') {\n' + source + '\n}\n';
  }
  // cleanup code by stripping empty strings
  source = (isEvaluating ? source.replace(reEmptyStringLeading, '') : source)
    .replace(reEmptyStringMiddle, '$1')
    .replace(reEmptyStringTrailing, '$1;');

  // frame code as the function body
  source = 'function(' + variable + ') {\n' +
    (hasVariable ? '' : variable + ' || (' + variable + ' = {});\n') +
    "var __t, __p = '', __e = _.escape" +
    (isEvaluating
      ? ', __j = Array.prototype.join;\n' +
        "function print() { __p += __j.call(arguments, '') }\n"
      : ';\n'
    ) +
    source +
    'return __p\n}';

  // Use a sourceURL for easier debugging and wrap in a multi-line comment to
  // avoid issues with Narwhal, IE conditional compilation, and the JS engine
  // embedded in Adobe products.
  // http://www.html5rocks.com/en/tutorials/developertools/sourcemaps/#toc-sourceurl
  var sourceURL = '\n/*\n//@ sourceURL=' + (options.sourceURL || '/lodash/template/source[' + (templateCounter++) + ']') + '\n*/';

  try {
    // compile the generated source into a real function, with the imports
    // supplied as named parameters
    var result = Function(importsKeys, 'return ' + source + sourceURL).apply(undefined, importsValues);
  } catch(e) {
    // attach the generated source to aid debugging compile failures
    e.source = source;
    throw e;
  }
  if (data) {
    return result(data);
  }
  // provide the compiled function's source via its `toString` method, in
  // supported environments, or the `source` property as a convenience for
  // inlining compiled templates during the build process
  result.source = source;
  return result;
}

/**
 * Executes the `callback` function `n` times, returning an array of the results
 * of each `callback` execution. The `callback` is bound to `thisArg` and invoked
 * with one argument; (index).
 *
 * @static
 * @memberOf _
 * @category Utilities
 * @param {Number} n The number of times to execute the callback.
 * @param {Function} callback The function called per iteration.
 * @param {Mixed} [thisArg] The `this` binding of `callback`.
 * @returns {Array} Returns a new array of the results of each `callback` execution.
 * @example
 *
 * var diceRolls = _.times(3, _.partial(_.random, 1, 6));
 * // => [3, 6, 4]
 *
 * _.times(3, function(n) { mage.castSpell(n); });
 * // => calls `mage.castSpell(n)` three times, passing `n` of `0`, `1`, and `2` respectively
 *
 * _.times(3, function(n) { this.cast(n); }, mage);
 * // => also calls `mage.castSpell(n)` three times
 */
function times(n, callback, thisArg) {
  // coerce `n` to a non-negative number (NaN and negatives become 0)
  n = (n = +n) > -1 ? n : 0;
  var index = -1,
      result = Array(n);

  // normalize `callback` once; arity 1 because only `index` is passed
  callback = lodash.createCallback(callback, thisArg, 1);
  while (++index < n) {
    result[index] = callback(index);
  }
  return result;
}

/**
 * The inverse of `_.escape`, this method converts the HTML entities
 * `&amp;`, `&lt;`, `&gt;`, `&quot;`, and `&#39;` in `string` to their
 * corresponding characters.
 *
 * @static
 * @memberOf _
 * @category Utilities
 * @param {String} string The string to unescape.
* @returns {String} Returns the unescaped string. * @example * * _.unescape('Moe, Larry &amp; Curly'); * // => 'Moe, Larry & Curly' */ function unescape(string) { return string == null ? '' : String(string).replace(reEscapedHtml, unescapeHtmlChar); } /** * Generates a unique ID. If `prefix` is passed, the ID will be appended to it. * * @static * @memberOf _ * @category Utilities * @param {String} [prefix] The value to prefix the ID with. * @returns {String} Returns the unique ID. * @example * * _.uniqueId('contact_'); * // => 'contact_104' * * _.uniqueId(); * // => '105' */ function uniqueId(prefix) { var id = ++idCounter; return String(prefix == null ? '' : prefix) + id; } /*--------------------------------------------------------------------------*/ /** * Invokes `interceptor` with the `value` as the first argument, and then * returns `value`. The purpose of this method is to "tap into" a method chain, * in order to perform operations on intermediate results within the chain. * * @static * @memberOf _ * @category Chaining * @param {Mixed} value The value to pass to `interceptor`. * @param {Function} interceptor The function to invoke. * @returns {Mixed} Returns `value`. * @example * * _([1, 2, 3, 4]) * .filter(function(num) { return num % 2 == 0; }) * .tap(alert) * .map(function(num) { return num * num; }) * .value(); * // => // [2, 4] (alerted) * // => [4, 16] */ function tap(value, interceptor) { interceptor(value); return value; } /** * Produces the `toString` result of the wrapped value. * * @name toString * @memberOf _ * @category Chaining * @returns {String} Returns the string result. * @example * * _([1, 2, 3]).toString(); * // => '1,2,3' */ function wrapperToString() { return String(this.__wrapped__); } /** * Extracts the wrapped value. * * @name valueOf * @memberOf _ * @alias value * @category Chaining * @returns {Mixed} Returns the wrapped value. 
* @example * * _([1, 2, 3]).valueOf(); * // => [1, 2, 3] */ function wrapperValueOf() { return this.__wrapped__; } /*--------------------------------------------------------------------------*/ // add functions that return wrapped values when chaining lodash.after = after; lodash.assign = assign; lodash.at = at; lodash.bind = bind; lodash.bindAll = bindAll; lodash.bindKey = bindKey; lodash.compact = compact; lodash.compose = compose; lodash.countBy = countBy; lodash.createCallback = createCallback; lodash.debounce = debounce; lodash.defaults = defaults; lodash.defer = defer; lodash.delay = delay; lodash.difference = difference; lodash.filter = filter; lodash.flatten = flatten; lodash.forEach = forEach; lodash.forIn = forIn; lodash.forOwn = forOwn; lodash.functions = functions; lodash.groupBy = groupBy; lodash.initial = initial; lodash.intersection = intersection; lodash.invert = invert; lodash.invoke = invoke; lodash.keys = keys; lodash.map = map; lodash.max = max; lodash.memoize = memoize; lodash.merge = merge; lodash.min = min; lodash.omit = omit; lodash.once = once; lodash.pairs = pairs; lodash.partial = partial; lodash.partialRight = partialRight; lodash.pick = pick; lodash.pluck = pluck; lodash.range = range; lodash.reject = reject; lodash.rest = rest; lodash.shuffle = shuffle; lodash.sortBy = sortBy; lodash.tap = tap; lodash.throttle = throttle; lodash.times = times; lodash.toArray = toArray; lodash.union = union; lodash.uniq = uniq; lodash.unzip = unzip; lodash.values = values; lodash.where = where; lodash.without = without; lodash.wrap = wrap; lodash.zip = zip; lodash.zipObject = zipObject; // add aliases lodash.collect = map; lodash.drop = rest; lodash.each = forEach; lodash.extend = assign; lodash.methods = functions; lodash.object = zipObject; lodash.select = filter; lodash.tail = rest; lodash.unique = uniq; // add functions to `lodash.prototype` mixin(lodash); /*--------------------------------------------------------------------------*/ // add 
functions that return unwrapped values when chaining lodash.clone = clone; lodash.cloneDeep = cloneDeep; lodash.contains = contains; lodash.escape = escape; lodash.every = every; lodash.find = find; lodash.findIndex = findIndex; lodash.findKey = findKey; lodash.has = has; lodash.identity = identity; lodash.indexOf = indexOf; lodash.isArguments = isArguments; lodash.isArray = isArray; lodash.isBoolean = isBoolean; lodash.isDate = isDate; lodash.isElement = isElement; lodash.isEmpty = isEmpty; lodash.isEqual = isEqual; lodash.isFinite = isFinite; lodash.isFunction = isFunction; lodash.isNaN = isNaN; lodash.isNull = isNull; lodash.isNumber = isNumber; lodash.isObject = isObject; lodash.isPlainObject = isPlainObject; lodash.isRegExp = isRegExp; lodash.isString = isString; lodash.isUndefined = isUndefined; lodash.lastIndexOf = lastIndexOf; lodash.mixin = mixin; lodash.noConflict = noConflict; lodash.parseInt = parseInt; lodash.random = random; lodash.reduce = reduce; lodash.reduceRight = reduceRight; lodash.result = result; lodash.runInContext = runInContext; lodash.size = size; lodash.some = some; lodash.sortedIndex = sortedIndex; lodash.template = template; lodash.unescape = unescape; lodash.uniqueId = uniqueId; // add aliases lodash.all = every; lodash.any = some; lodash.detect = find; lodash.foldl = reduce; lodash.foldr = reduceRight; lodash.include = contains; lodash.inject = reduce; forOwn(lodash, function(func, methodName) { if (!lodash.prototype[methodName]) { lodash.prototype[methodName] = function() { var args = [this.__wrapped__]; push.apply(args, arguments); return func.apply(lodash, args); }; } }); /*--------------------------------------------------------------------------*/ // add functions capable of returning wrapped and unwrapped values when chaining lodash.first = first; lodash.last = last; // add aliases lodash.take = first; lodash.head = first; forOwn(lodash, function(func, methodName) { if (!lodash.prototype[methodName]) { 
lodash.prototype[methodName]= function(callback, thisArg) { var result = func(this.__wrapped__, callback, thisArg); return callback == null || (thisArg && typeof callback != 'function') ? result : new lodashWrapper(result); }; } }); /*--------------------------------------------------------------------------*/ /** * The semantic version number. * * @static * @memberOf _ * @type String */ lodash.VERSION = '1.2.0'; // add "Chaining" functions to the wrapper lodash.prototype.toString = wrapperToString; lodash.prototype.value = wrapperValueOf; lodash.prototype.valueOf = wrapperValueOf; // add `Array` functions that return unwrapped values each(['join', 'pop', 'shift'], function(methodName) { var func = arrayRef[methodName]; lodash.prototype[methodName] = function() { return func.apply(this.__wrapped__, arguments); }; }); // add `Array` functions that return the wrapped value each(['push', 'reverse', 'sort', 'unshift'], function(methodName) { var func = arrayRef[methodName]; lodash.prototype[methodName] = function() { func.apply(this.__wrapped__, arguments); return this; }; }); // add `Array` functions that return new wrapped values each(['concat', 'slice', 'splice'], function(methodName) { var func = arrayRef[methodName]; lodash.prototype[methodName] = function() { return new lodashWrapper(func.apply(this.__wrapped__, arguments)); }; }); // avoid array-like object bugs with `Array#shift` and `Array#splice` // in Firefox < 10 and IE < 9 if (!support.spliceObjects) { each(['pop', 'shift', 'splice'], function(methodName) { var func = arrayRef[methodName], isSplice = methodName == 'splice'; lodash.prototype[methodName] = function() { var value = this.__wrapped__, result = func.apply(value, arguments); if (value.length === 0) { delete value[0]; } return isSplice ? 
new lodashWrapper(result) : result; }; }); } // add pseudo private property to be used and removed during the build process lodash._each = each; lodash._iteratorTemplate = iteratorTemplate; lodash._shimKeys = shimKeys; return lodash; } /*--------------------------------------------------------------------------*/ // expose Lo-Dash var _ = runInContext(); // some AMD build optimizers, like r.js, check for specific condition patterns like the following: if (typeof define == 'function' && typeof define.amd == 'object' && define.amd) { // Expose Lo-Dash to the global object even when an AMD loader is present in // case Lo-Dash was injected by a third-party script and not intended to be // loaded as a module. The global assignment can be reverted in the Lo-Dash // module via its `noConflict()` method. window._ = _; // define as an anonymous module so, through path mapping, it can be // referenced as the "underscore" module define(function() { return _; }); } // check for `exports` after `define` in case a build optimizer adds an `exports` object else if (freeExports && !freeExports.nodeType) { // in Node.js or RingoJS v0.8.0+ if (freeModule) { (freeModule.exports = _)._ = _; } // in Narwhal or RingoJS v0.7.0- else { freeExports._ = _; } } else { // in a browser or Rhino window._ = _; } }(this)); /*global setImmediate: false, setTimeout: false, console: false */ (function () { var async = {}; // global on the server, window in the browser var root, previous_async; root = this; if (root != null) { previous_async = root.async; } async.noConflict = function () { root.async = previous_async; return async; }; function only_once(fn) { var called = false; return function() { if (called) throw new Error("Callback was already called."); called = true; fn.apply(root, arguments); } } //// cross-browser compatiblity functions //// var _each = function (arr, iterator) { if (arr.forEach) { return arr.forEach(iterator); } for (var i = 0; i < arr.length; i += 1) { iterator(arr[i], i, 
arr); } }; var _map = function (arr, iterator) { if (arr.map) { return arr.map(iterator); } var results = []; _each(arr, function (x, i, a) { results.push(iterator(x, i, a)); }); return results; }; var _reduce = function (arr, iterator, memo) { if (arr.reduce) { return arr.reduce(iterator, memo); } _each(arr, function (x, i, a) { memo = iterator(memo, x, i, a); }); return memo; }; var _keys = function (obj) { if (Object.keys) { return Object.keys(obj); } var keys = []; for (var k in obj) { if (obj.hasOwnProperty(k)) { keys.push(k); } } return keys; }; //// exported async module functions //// //// nextTick implementation with browser-compatible fallback //// if (typeof process === 'undefined' || !(process.nextTick)) { if (typeof setImmediate === 'function') { async.setImmediate = setImmediate; async.nextTick = setImmediate; } else { async.setImmediate = async.nextTick; async.nextTick = function (fn) { setTimeout(fn, 0); }; } } else { async.nextTick = process.nextTick; if (typeof setImmediate !== 'undefined') { async.setImmediate = setImmediate; } else { async.setImmediate = async.nextTick; } } async.each = function (arr, iterator, callback) { callback = callback || function () {}; if (!arr.length) { return callback(); } var completed = 0; _each(arr, function (x) { iterator(x, only_once(function (err) { if (err) { callback(err); callback = function () {}; } else { completed += 1; if (completed >= arr.length) { callback(null); } } })); }); }; async.forEach = async.each; async.eachSeries = function (arr, iterator, callback) { callback = callback || function () {}; if (!arr.length) { return callback(); } var completed = 0; var iterate = function () { iterator(arr[completed], function (err) { if (err) { callback(err); callback = function () {}; } else { completed += 1; if (completed >= arr.length) { callback(null); } else { iterate(); } } }); }; iterate(); }; async.forEachSeries = async.eachSeries; async.eachLimit = function (arr, limit, iterator, callback) { var fn = 
_eachLimit(limit); fn.apply(null, [arr, iterator, callback]); }; async.forEachLimit = async.eachLimit; var _eachLimit = function (limit) { return function (arr, iterator, callback) { callback = callback || function () {}; if (!arr.length || limit <= 0) { return callback(); } var completed = 0; var started = 0; var running = 0; (function replenish () { if (completed >= arr.length) { return callback(); } while (running < limit && started < arr.length) { started += 1; running += 1; iterator(arr[started - 1], function (err) { if (err) { callback(err); callback = function () {}; } else { completed += 1; running -= 1; if (completed >= arr.length) { callback(); } else { replenish(); } } }); } })(); }; }; var doParallel = function (fn) { return function () { var args = Array.prototype.slice.call(arguments); return fn.apply(null, [async.each].concat(args)); }; }; var doParallelLimit = function(limit, fn) { return function () { var args = Array.prototype.slice.call(arguments); return fn.apply(null, [_eachLimit(limit)].concat(args)); }; }; var doSeries = function (fn) { return function () { var args = Array.prototype.slice.call(arguments); return fn.apply(null, [async.eachSeries].concat(args)); }; }; var _asyncMap = function (eachfn, arr, iterator, callback) { var results = []; arr = _map(arr, function (x, i) { return {index: i, value: x}; }); eachfn(arr, function (x, callback) { iterator(x.value, function (err, v) { results[x.index] = v; callback(err); }); }, function (err) { callback(err, results); }); }; async.map = doParallel(_asyncMap); async.mapSeries = doSeries(_asyncMap); async.mapLimit = function (arr, limit, iterator, callback) { return _mapLimit(limit)(arr, iterator, callback); }; var _mapLimit = function(limit) { return doParallelLimit(limit, _asyncMap); }; // reduce only has a series version, as doing reduce in parallel won't // work in many situations. 
async.reduce = function (arr, memo, iterator, callback) { async.eachSeries(arr, function (x, callback) { iterator(memo, x, function (err, v) { memo = v; callback(err); }); }, function (err) { callback(err, memo); }); }; // inject alias async.inject = async.reduce; // foldl alias async.foldl = async.reduce; async.reduceRight = function (arr, memo, iterator, callback) { var reversed = _map(arr, function (x) { return x; }).reverse(); async.reduce(reversed, memo, iterator, callback); }; // foldr alias async.foldr = async.reduceRight; var _filter = function (eachfn, arr, iterator, callback) { var results = []; arr = _map(arr, function (x, i) { return {index: i, value: x}; }); eachfn(arr, function (x, callback) { iterator(x.value, function (v) { if (v) { results.push(x); } callback(); }); }, function (err) { callback(_map(results.sort(function (a, b) { return a.index - b.index; }), function (x) { return x.value; })); }); }; async.filter = doParallel(_filter); async.filterSeries = doSeries(_filter); // select alias async.select = async.filter; async.selectSeries = async.filterSeries; var _reject = function (eachfn, arr, iterator, callback) { var results = []; arr = _map(arr, function (x, i) { return {index: i, value: x}; }); eachfn(arr, function (x, callback) { iterator(x.value, function (v) { if (!v) { results.push(x); } callback(); }); }, function (err) { callback(_map(results.sort(function (a, b) { return a.index - b.index; }), function (x) { return x.value; })); }); }; async.reject = doParallel(_reject); async.rejectSeries = doSeries(_reject); var _detect = function (eachfn, arr, iterator, main_callback) { eachfn(arr, function (x, callback) { iterator(x, function (result) { if (result) { main_callback(x); main_callback = function () {}; } else { callback(); } }); }, function (err) { main_callback(); }); }; async.detect = doParallel(_detect); async.detectSeries = doSeries(_detect); async.some = function (arr, iterator, main_callback) { async.each(arr, function (x, 
callback) { iterator(x, function (v) { if (v) { main_callback(true); main_callback = function () {}; } callback(); }); }, function (err) { main_callback(false); }); }; // any alias async.any = async.some; async.every = function (arr, iterator, main_callback) { async.each(arr, function (x, callback) { iterator(x, function (v) { if (!v) { main_callback(false); main_callback = function () {}; } callback(); }); }, function (err) { main_callback(true); }); }; // all alias async.all = async.every; async.sortBy = function (arr, iterator, callback) { async.map(arr, function (x, callback) { iterator(x, function (err, criteria) { if (err) { callback(err); } else { callback(null, {value: x, criteria: criteria}); } }); }, function (err, results) { if (err) { return callback(err); } else { var fn = function (left, right) { var a = left.criteria, b = right.criteria; return a < b ? -1 : a > b ? 1 : 0; }; callback(null, _map(results.sort(fn), function (x) { return x.value; })); } }); }; async.auto = function (tasks, callback) { callback = callback || function () {}; var keys = _keys(tasks); if (!keys.length) { return callback(null); } var results = {}; var listeners = []; var addListener = function (fn) { listeners.unshift(fn); }; var removeListener = function (fn) { for (var i = 0; i < listeners.length; i += 1) { if (listeners[i] === fn) { listeners.splice(i, 1); return; } } }; var taskComplete = function () { _each(listeners.slice(0), function (fn) { fn(); }); }; addListener(function () { if (_keys(results).length === keys.length) { callback(null, results); callback = function () {}; } }); _each(keys, function (k) { var task = (tasks[k] instanceof Function) ? 
[tasks[k]]: tasks[k]; var taskCallback = function (err) { var args = Array.prototype.slice.call(arguments, 1); if (args.length <= 1) { args = args[0]; } if (err) { var safeResults = {}; _each(_keys(results), function(rkey) { safeResults[rkey] = results[rkey]; }); safeResults[k] = args; callback(err, safeResults); // stop subsequent errors hitting callback multiple times callback = function () {}; } else { results[k] = args; async.setImmediate(taskComplete); } }; var requires = task.slice(0, Math.abs(task.length - 1)) || []; var ready = function () { return _reduce(requires, function (a, x) { return (a && results.hasOwnProperty(x)); }, true) && !results.hasOwnProperty(k); }; if (ready()) { task[task.length - 1](taskCallback, results); } else { var listener = function () { if (ready()) { removeListener(listener); task[task.length - 1](taskCallback, results); } }; addListener(listener); } }); }; async.waterfall = function (tasks, callback) { callback = callback || function () {}; if (tasks.constructor !== Array) { var err = new Error('First argument to waterfall must be an array of functions'); return callback(err); } if (!tasks.length) { return callback(); } var wrapIterator = function (iterator) { return function (err) { if (err) { callback.apply(null, arguments); callback = function () {}; } else { var args = Array.prototype.slice.call(arguments, 1); var next = iterator.next(); if (next) { args.push(wrapIterator(next)); } else { args.push(callback); } async.setImmediate(function () { iterator.apply(null, args); }); } }; }; wrapIterator(async.iterator(tasks))(); }; var _parallel = function(eachfn, tasks, callback) { callback = callback || function () {}; if (tasks.constructor === Array) { eachfn.map(tasks, function (fn, callback) { if (fn) { fn(function (err) { var args = Array.prototype.slice.call(arguments, 1); if (args.length <= 1) { args = args[0]; } callback.call(null, err, args); }); } }, callback); } else { var results = {}; eachfn.each(_keys(tasks), function 
(k, callback) { tasks[k](function (err) { var args = Array.prototype.slice.call(arguments, 1); if (args.length <= 1) { args = args[0]; } results[k] = args; callback(err); }); }, function (err) { callback(err, results); }); } }; async.parallel = function (tasks, callback) { _parallel({ map: async.map, each: async.each }, tasks, callback); }; async.parallelLimit = function(tasks, limit, callback) { _parallel({ map: _mapLimit(limit), each: _eachLimit(limit) }, tasks, callback); }; async.series = function (tasks, callback) { callback = callback || function () {}; if (tasks.constructor === Array) { async.mapSeries(tasks, function (fn, callback) { if (fn) { fn(function (err) { var args = Array.prototype.slice.call(arguments, 1); if (args.length <= 1) { args = args[0]; } callback.call(null, err, args); }); } }, callback); } else { var results = {}; async.eachSeries(_keys(tasks), function (k, callback) { tasks[k](function (err) { var args = Array.prototype.slice.call(arguments, 1); if (args.length <= 1) { args = args[0]; } results[k] = args; callback(err); }); }, function (err) { callback(err, results); }); } }; async.iterator = function (tasks) { var makeCallback = function (index) { var fn = function () { if (tasks.length) { tasks[index].apply(null, arguments); } return fn.next(); }; fn.next = function () { return (index < tasks.length - 1) ? 
makeCallback(index + 1): null; }; return fn; }; return makeCallback(0); }; async.apply = function (fn) { var args = Array.prototype.slice.call(arguments, 1); return function () { return fn.apply( null, args.concat(Array.prototype.slice.call(arguments)) ); }; }; var _concat = function (eachfn, arr, fn, callback) { var r = []; eachfn(arr, function (x, cb) { fn(x, function (err, y) { r = r.concat(y || []); cb(err); }); }, function (err) { callback(err, r); }); }; async.concat = doParallel(_concat); async.concatSeries = doSeries(_concat); async.whilst = function (test, iterator, callback) { if (test()) { iterator(function (err) { if (err) { return callback(err); } async.whilst(test, iterator, callback); }); } else { callback(); } }; async.doWhilst = function (iterator, test, callback) { iterator(function (err) { if (err) { return callback(err); } if (test()) { async.doWhilst(iterator, test, callback); } else { callback(); } }); }; async.until = function (test, iterator, callback) { if (!test()) { iterator(function (err) { if (err) { return callback(err); } async.until(test, iterator, callback); }); } else { callback(); } }; async.doUntil = function (iterator, test, callback) { iterator(function (err) { if (err) { return callback(err); } if (!test()) { async.doUntil(iterator, test, callback); } else { callback(); } }); }; async.queue = function (worker, concurrency) { if (concurrency === undefined) { concurrency = 1; } function _insert(q, data, pos, callback) { if(data.constructor !== Array) { data = [data]; } _each(data, function(task) { var item = { data: task, callback: typeof callback === 'function' ? 
callback : null }; if (pos) { q.tasks.unshift(item); } else { q.tasks.push(item); } if (q.saturated && q.tasks.length === concurrency) { q.saturated(); } async.setImmediate(q.process); }); } var workers = 0; var q = { tasks: [], concurrency: concurrency, saturated: null, empty: null, drain: null, push: function (data, callback) { _insert(q, data, false, callback); }, unshift: function (data, callback) { _insert(q, data, true, callback); }, process: function () { if (workers < q.concurrency && q.tasks.length) { var task = q.tasks.shift(); if (q.empty && q.tasks.length === 0) { q.empty(); } workers += 1; var next = function () { workers -= 1; if (task.callback) { task.callback.apply(task, arguments); } if (q.drain && q.tasks.length + workers === 0) { q.drain(); } q.process(); }; var cb = only_once(next); worker(task.data, cb); } }, length: function () { return q.tasks.length; }, running: function () { return workers; } }; return q; }; async.cargo = function (worker, payload) { var working = false, tasks = []; var cargo = { tasks: tasks, payload: payload, saturated: null, empty: null, drain: null, push: function (data, callback) { if(data.constructor !== Array) { data = [data]; } _each(data, function(task) { tasks.push({ data: task, callback: typeof callback === 'function' ? callback : null }); if (cargo.saturated && tasks.length === payload) { cargo.saturated(); } }); async.setImmediate(cargo.process); }, process: function process() { if (working) return; if (tasks.length === 0) { if(cargo.drain) cargo.drain(); return; } var ts = typeof payload === 'number' ? 
tasks.splice(0, payload) : tasks.splice(0); var ds = _map(ts, function (task) { return task.data; }); if(cargo.empty) cargo.empty(); working = true; worker(ds, function () { working = false; var args = arguments; _each(ts, function (data) { if (data.callback) { data.callback.apply(null, args); } }); process(); }); }, length: function () { return tasks.length; }, running: function () { return working; } }; return cargo; }; var _console_fn = function (name) { return function (fn) { var args = Array.prototype.slice.call(arguments, 1); fn.apply(null, args.concat([function (err) { var args = Array.prototype.slice.call(arguments, 1); if (typeof console !== 'undefined') { if (err) { if (console.error) { console.error(err); } } else if (console[name]) { _each(args, function (x) { console[name](x); }); } } }])); }; }; async.log = _console_fn('log'); async.dir = _console_fn('dir'); /*async.info = _console_fn('info'); async.warn = _console_fn('warn'); async.error = _console_fn('error');*/ async.memoize = function (fn, hasher) { var memo = {}; var queues = {}; hasher = hasher || function (x) { return x; }; var memoized = function () { var args = Array.prototype.slice.call(arguments); var callback = args.pop(); var key = hasher.apply(null, args); if (key in memo) { callback.apply(null, memo[key]); } else if (key in queues) { queues[key].push(callback); } else { queues[key] = [callback]; fn.apply(null, args.concat([function () { memo[key] = arguments; var q = queues[key]; delete queues[key]; for (var i = 0, l = q.length; i < l; i++) { q[i].apply(null, arguments); } }])); } }; memoized.memo = memo; memoized.unmemoized = fn; return memoized; }; async.unmemoize = function (fn) { return function () { return (fn.unmemoized || fn).apply(null, arguments); }; }; async.times = function (count, iterator, callback) { var counter = []; for (var i = 0; i < count; i++) { counter.push(i); } return async.map(counter, iterator, callback); }; async.timesSeries = function (count, iterator, 
callback) { var counter = []; for (var i = 0; i < count; i++) { counter.push(i); } return async.mapSeries(counter, iterator, callback); }; async.compose = function (/* functions... */) { var fns = Array.prototype.reverse.call(arguments); return function () { var that = this; var args = Array.prototype.slice.call(arguments); var callback = args.pop(); async.reduce(fns, args, function (newargs, fn, cb) { fn.apply(that, newargs.concat([function () { var err = arguments[0]; var nextargs = Array.prototype.slice.call(arguments, 1); cb(err, nextargs); }])) }, function (err, results) { callback.apply(that, [err].concat(results)); }); }; }; var _applyEach = function (eachfn, fns /*args...*/) { var go = function () { var that = this; var args = Array.prototype.slice.call(arguments); var callback = args.pop(); return eachfn(fns, function (fn, cb) { fn.apply(that, args.concat([cb])); }, callback); }; if (arguments.length > 2) { var args = Array.prototype.slice.call(arguments, 2); return go.apply(this, args); } else { return go; } }; async.applyEach = doParallel(_applyEach); async.applyEachSeries = doSeries(_applyEach); async.forever = function (fn, callback) { function next(err) { if (err) { if (callback) { return callback(err); } throw err; } fn(next); } next(); }; // AMD / RequireJS if (typeof define !== 'undefined' && define.amd) { define([], function () { return async; }); } // Node.js else if (typeof module !== 'undefined' && module.exports) { module.exports = async; } // included directly via <script> tag else { root.async = async; } }()); /*! 
* Platform.js v1.0.0 <http://mths.be/platform> * Copyright 2010-2012 John-David Dalton <http://allyoucanleet.com/> * Available under MIT license <http://mths.be/mit> */ ;(function(window) { 'use strict'; /** Backup possible window/global object */ var oldWin = window; /** Detect free variable `exports` */ var freeExports = typeof exports == 'object' && exports; /** Detect free variable `global` */ var freeGlobal = typeof global == 'object' && global && (global == global.global ? (window = global) : global); /** Opera regexp */ var reOpera = /Opera/; /** Used to resolve a value's internal [[Class]] */ var toString = {}.toString; /** Detect Java environment */ var java = /Java/.test(getClassOf(window.java)) && window.java; /** A character to represent alpha */ var alpha = java ? 'a' : '\u03b1'; /** A character to represent beta */ var beta = java ? 'b' : '\u03b2'; /** Browser document object */ var doc = window.document || {}; /** Used to check for own properties of an object */ var hasOwnProperty = {}.hasOwnProperty; /** Browser navigator object */ var nav = window.navigator || {}; /** * Detect Opera browser * http://www.howtocreate.co.uk/operaStuff/operaObject.html * http://dev.opera.com/articles/view/opera-mini-web-content-authoring-guidelines/#operamini */ var opera = window.operamini || window.opera; /** Opera [[Class]] */ var operaClass = reOpera.test(operaClass = getClassOf(opera)) ? operaClass : (opera = null); /** Possible global object */ var thisBinding = this; /** Browser user agent string */ var userAgent = nav.userAgent || ''; /*--------------------------------------------------------------------------*/ /** * Capitalizes a string value. * * @private * @param {String} string The string to capitalize. * @returns {String} The capitalized string. */ function capitalize(string) { string = String(string); return string.charAt(0).toUpperCase() + string.slice(1); } /** * An iteration utility for arrays and objects. 
* * @private * @param {Array|Object} object The object to iterate over. * @param {Function} callback The function called per iteration. */ function each(object, callback) { var index = -1, length = object.length; if (length == length >>> 0) { while (++index < length) { callback(object[index], index, object); } } else { forOwn(object, callback); } } /** * Trim and conditionally capitalize string values. * * @private * @param {String} string The string to format. * @returns {String} The formatted string. */ function format(string) { string = trim(string); return /^(?:webOS|i(?:OS|P))/.test(string) ? string : capitalize(string); } /** * Iterates over an object's own properties, executing the `callback` for each. * * @private * @param {Object} object The object to iterate over. * @param {Function} callback The function executed per own property. */ function forOwn(object, callback) { for (var key in object) { hasKey(object, key) && callback(object[key], key, object); } } /** * Gets the internal [[Class]] of a value. * * @private * @param {Mixed} value The value. * @returns {String} The [[Class]]. */ function getClassOf(value) { return value == null ? capitalize(value) : toString.call(value).slice(8, -1); } /** * Checks if an object has the specified key as a direct property. * * @private * @param {Object} object The object to check. * @param {String} key The key to check for. * @returns {Boolean} Returns `true` if key is a direct property, else `false`. 
*/ function hasKey() { // lazy define for others (not as accurate) hasKey = function(object, key) { var parent = object != null && (object.constructor || Object).prototype; return !!parent && key in Object(object) && !(key in parent && object[key] === parent[key]); }; // for modern browsers if (getClassOf(hasOwnProperty) == 'Function') { hasKey = function(object, key) { return object != null && hasOwnProperty.call(object, key); }; } // for Safari 2 else if ({}.__proto__ == Object.prototype) { hasKey = function(object, key) { var result = false; if (object != null) { object = Object(object); object.__proto__ = [object.__proto__, object.__proto__ = null, result = key in object][0]; } return result; }; } return hasKey.apply(this, arguments); } /** * Host objects can return type values that are different from their actual * data type. The objects we are concerned with usually return non-primitive * types of object, function, or unknown. * * @private * @param {Mixed} object The owner of the property. * @param {String} property The property to check. * @returns {Boolean} Returns `true` if the property value is a non-primitive, else `false`. */ function isHostType(object, property) { var type = object != null ? typeof object[property] : 'number'; return !/^(?:boolean|number|string|undefined)$/.test(type) && (type == 'object' ? !!object[property] : true); } /** * Prepares a string for use in a RegExp constructor by making hyphens and * spaces optional. * * @private * @param {String} string The string to qualify. * @returns {String} The qualified string. */ function qualify(string) { return String(string).replace(/([ -])(?!$)/g, '$1?'); } /** * A bare-bones` Array#reduce` like utility function. * * @private * @param {Array} array The array to iterate over. * @param {Function} callback The function called per iteration. * @param {Mixed} accumulator Initial value of the accumulator. * @returns {Mixed} The accumulator. 
*/ function reduce(array, callback) { var accumulator = null; each(array, function(value, index) { accumulator = callback(accumulator, value, index, array); }); return accumulator; } /** * Removes leading and trailing whitespace from a string. * * @private * @param {String} string The string to trim. * @returns {String} The trimmed string. */ function trim(string) { return String(string).replace(/^ +| +$/g, ''); } /*--------------------------------------------------------------------------*/ /** * Creates a new platform object. * * @memberOf platform * @param {String} [ua = navigator.userAgent] The user agent string. * @returns {Object} A platform object. */ function parse(ua) { ua || (ua = userAgent); /** Temporary variable used over the script's lifetime */ var data; /** The CPU architecture */ var arch = ua; /** Platform description array */ var description = []; /** Platform alpha/beta indicator */ var prerelease = null; /** A flag to indicate that environment features should be used to resolve the platform */ var useFeatures = ua == userAgent; /** The browser/environment version */ var version = useFeatures && opera && typeof opera.version == 'function' && opera.version(); /* Detectable layout engines (order is important) */ var layout = getLayout([ { 'label': 'WebKit', 'pattern': 'AppleWebKit' }, 'iCab', 'Presto', 'NetFront', 'Tasman', 'Trident', 'KHTML', 'Gecko' ]); /* Detectable browser names (order is important) */ var name = getName([ 'Adobe AIR', 'Arora', 'Avant Browser', 'Camino', 'Epiphany', 'Fennec', 'Flock', 'Galeon', 'GreenBrowser', 'iCab', 'Iceweasel', 'Iron', 'K-Meleon', 'Konqueror', 'Lunascape', 'Maxthon', 'Midori', 'Nook Browser', 'PhantomJS', 'Raven', 'Rekonq', 'RockMelt', 'SeaMonkey', { 'label': 'Silk', 'pattern': '(?:Cloud9|Silk-Accelerated)' }, 'Sleipnir', 'SlimBrowser', 'Sunrise', 'Swiftfox', 'WebPositive', 'Opera Mini', 'Opera', 'Chrome', { 'label': 'Chrome Mobile', 'pattern': '(?:CriOS|CrMo)' }, { 'label': 'Firefox', 'pattern': 
'(?:Firefox|Minefield)' }, { 'label': 'IE', 'pattern': 'MSIE' }, 'Safari' ]); /* Detectable products (order is important) */ var product = getProduct([ 'BlackBerry', { 'label': 'Galaxy S', 'pattern': 'GT-I9000' }, { 'label': 'Galaxy S2', 'pattern': 'GT-I9100' }, 'Google TV', 'iPad', 'iPod', 'iPhone', 'Kindle', { 'label': 'Kindle Fire', 'pattern': '(?:Cloud9|Silk-Accelerated)' }, 'Nook', 'PlayBook', 'PlayStation Vita', 'TouchPad', 'Transformer', 'Xoom' ]); /* Detectable manufacturers */ var manufacturer = getManufacturer({ 'Apple': { 'iPad': 1, 'iPhone': 1, 'iPod': 1 }, 'Amazon': { 'Kindle': 1, 'Kindle Fire': 1 }, 'Asus': { 'Transformer': 1 }, 'Barnes & Noble': { 'Nook': 1 }, 'BlackBerry': { 'PlayBook': 1 }, 'Google': { 'Google TV': 1 }, 'HP': { 'TouchPad': 1 }, 'LG': { }, 'Motorola': { 'Xoom': 1 }, 'Nokia': { }, 'Samsung': { 'Galaxy S': 1, 'Galaxy S2': 1 }, 'Sony': { 'PlayStation Vita': 1 } }); /* Detectable OSes (order is important) */ var os = getOS([ 'Android', 'CentOS', 'Debian', 'Fedora', 'FreeBSD', 'Gentoo', 'Haiku', 'Kubuntu', 'Linux Mint', 'Red Hat', 'SuSE', 'Ubuntu', 'Xubuntu', 'Cygwin', 'Symbian OS', 'hpwOS', 'webOS ', 'webOS', 'Tablet OS', 'Linux', 'Mac OS X', 'Macintosh', 'Mac', 'Windows 98;', 'Windows ' ]); /*------------------------------------------------------------------------*/ /** * Picks the layout engine from an array of guesses. * * @private * @param {Array} guesses An array of guesses. * @returns {String|Null} The detected layout engine. */ function getLayout(guesses) { return reduce(guesses, function(result, guess) { return result || RegExp('\\b' + ( guess.pattern || qualify(guess) ) + '\\b', 'i').exec(ua) && (guess.label || guess); }); } /** * Picks the manufacturer from an array of guesses. * * @private * @param {Array} guesses An array of guesses. * @returns {String|Null} The detected manufacturer. 
*/ function getManufacturer(guesses) { return reduce(guesses, function(result, value, key) { // lookup the manufacturer by product or scan the UA for the manufacturer return result || ( value[product] || value[0/*Opera 9.25 fix*/, /^[a-z]+(?: +[a-z]+\b)*/i.exec(product)] || RegExp('\\b' + (key.pattern || qualify(key)) + '(?:\\b|\\w*\\d)', 'i').exec(ua) ) && (key.label || key); }); } /** * Picks the browser name from an array of guesses. * * @private * @param {Array} guesses An array of guesses. * @returns {String|Null} The detected browser name. */ function getName(guesses) { return reduce(guesses, function(result, guess) { return result || RegExp('\\b' + ( guess.pattern || qualify(guess) ) + '\\b', 'i').exec(ua) && (guess.label || guess); }); } /** * Picks the OS name from an array of guesses. * * @private * @param {Array} guesses An array of guesses. * @returns {String|Null} The detected OS name. */ function getOS(guesses) { return reduce(guesses, function(result, guess) { var pattern = guess.pattern || qualify(guess); if (!result && (result = RegExp('\\b' + pattern + '(?:/[\\d.]+|[ \\w.]*)', 'i').exec(ua))) { // platform tokens defined at // http://msdn.microsoft.com/en-us/library/ms537503(VS.85).aspx // http://web.archive.org/web/20081122053950/http://msdn.microsoft.com/en-us/library/ms537503(VS.85).aspx data = { '6.2': '8', '6.1': 'Server 2008 R2 / 7', '6.0': 'Server 2008 / Vista', '5.2': 'Server 2003 / XP 64-bit', '5.1': 'XP', '5.01': '2000 SP1', '5.0': '2000', '4.0': 'NT', '4.90': 'ME' }; // detect Windows version from platform tokens if (/^Win/i.test(result) && (data = data[0/*Opera 9.25 fix*/, /[\d.]+$/.exec(result)])) { result = 'Windows ' + data; } // correct character case and cleanup result = format(String(result) .replace(RegExp(pattern, 'i'), guess.label || guess) .replace(/ ce$/i, ' CE') .replace(/hpw/i, 'web') .replace(/Macintosh/, 'Mac OS') .replace(/_PowerPC/i, ' OS') .replace(/(OS X) [^ \d]+/i, '$1') .replace(/\/(\d)/, ' $1') .replace(/_/g, '.') 
.replace(/(?: BePC|[ .]*fc[ \d.]+)$/i, '') .replace(/x86\.64/gi, 'x86_64') .split(' on ')[0]); } return result; }); } /** * Picks the product name from an array of guesses. * * @private * @param {Array} guesses An array of guesses. * @returns {String|Null} The detected product name. */ function getProduct(guesses) { return reduce(guesses, function(result, guess) { var pattern = guess.pattern || qualify(guess); if (!result && (result = RegExp('\\b' + pattern + ' *\\d+[.\\w_]*', 'i').exec(ua) || RegExp('\\b' + pattern + '(?:; *(?:[a-z]+[_-])?[a-z]+\\d+|[^ ();-]*)', 'i').exec(ua) )) { // split by forward slash and append product version if needed if ((result = String(guess.label || result).split('/'))[1] && !/[\d.]+/.test(result[0])) { result[0] += ' ' + result[1]; } // correct character case and cleanup guess = guess.label || guess; result = format(result[0] .replace(RegExp(pattern, 'i'), guess) .replace(RegExp('; *(?:' + guess + '[_-])?', 'i'), ' ') .replace(RegExp('(' + guess + ')(\\w)', 'i'), '$1 $2')); } return result; }); } /** * Resolves the version using an array of UA patterns. * * @private * @param {Array} patterns An array of UA patterns. * @returns {String|Null} The detected version. */ function getVersion(patterns) { return reduce(patterns, function(result, pattern) { return result || (RegExp(pattern + '(?:-[\\d.]+/|(?: for [\\w-]+)?[ /-])([\\d.]+[^ ();/_-]*)', 'i').exec(ua) || 0)[1] || null; }); } /*------------------------------------------------------------------------*/ /** * Returns `platform.description` when the platform object is coerced to a string. * * @name toString * @memberOf platform * @returns {String} Returns `platform.description` if available, else an empty string. 
*/ function toStringPlatform() { return this.description || ''; } /*------------------------------------------------------------------------*/ // convert layout to an array so we can add extra details layout && (layout = [layout]); // detect product names that contain their manufacturer's name if (manufacturer && !product) { product = getProduct([manufacturer]); } // clean up Google TV if ((data = /Google TV/.exec(product))) { product = data[0]; } // detect simulators if (/\bSimulator\b/i.test(ua)) { product = (product ? product + ' ' : '') + 'Simulator'; } // detect iOS if (/^iP/.test(product)) { name || (name = 'Safari'); os = 'iOS' + ((data = / OS ([\d_]+)/i.exec(ua)) ? ' ' + data[1].replace(/_/g, '.') : ''); } // detect Kubuntu else if (name == 'Konqueror' && !/buntu/i.test(os)) { os = 'Kubuntu'; } // detect Android browsers else if (manufacturer && manufacturer != 'Google' && /Chrome|Vita/.test(name + ';' + product)) { name = 'Android Browser'; os = /Android/.test(os) ? os : 'Android'; } // detect false positives for Firefox/Safari else if (!name || (data = !/\bMinefield\b/i.test(ua) && /Firefox|Safari/.exec(name))) { // escape the `/` for Firefox 1 if (name && !product && /[\/,]|^[^(]+?\)/.test(ua.slice(ua.indexOf(data + '/') + 8))) { // clear name of false positives name = null; } // reassign a generic name if ((data = product || manufacturer || os) && (product || manufacturer || /Android|Symbian OS|Tablet OS|webOS/.test(os))) { name = /[a-z]+(?: Hat)?/i.exec(/Android/.test(os) ? 
os : data) + ' Browser'; } } // detect non-Opera versions (order is important) if (!version) { version = getVersion([ '(?:Cloud9|CriOS|CrMo|Opera ?Mini|Raven|Silk(?!/[\\d.]+$))', 'Version', qualify(name), '(?:Firefox|Minefield|NetFront)' ]); } // detect stubborn layout engines if (layout == 'iCab' && parseFloat(version) > 3) { layout = ['WebKit']; } else if (data = /Opera/.test(name) && 'Presto' || /\b(?:Midori|Nook|Safari)\b/i.test(ua) && 'WebKit' || !layout && /\bMSIE\b/i.test(ua) && (/^Mac/.test(os) ? 'Tasman' : 'Trident')) { layout = [data]; } // leverage environment features if (useFeatures) { // detect server-side environments // Rhino has a global function while others have a global object if (isHostType(window, 'global')) { if (java) { data = java.lang.System; arch = data.getProperty('os.arch'); os = os || data.getProperty('os.name') + ' ' + data.getProperty('os.version'); } if (typeof exports == 'object' && exports) { // if `thisBinding` is the [ModuleScope] if (thisBinding == oldWin && typeof system == 'object' && (data = [system])[0]) { os || (os = data[0].os || null); try { data[1] = require('ringo/engine').version; version = data[1].join('.'); name = 'RingoJS'; } catch(e) { if (data[0].global == freeGlobal) { name = 'Narwhal'; } } } else if (typeof process == 'object' && (data = process)) { name = 'Node.js'; arch = data.arch; os = data.platform; version = /[\d.]+/.exec(data.version)[0]; } } else if (getClassOf(window.environment) == 'Environment') { name = 'Rhino'; } } // detect Adobe AIR else if (getClassOf(data = window.runtime) == 'ScriptBridgingProxyObject') { name = 'Adobe AIR'; os = data.flash.system.Capabilities.os; } // detect PhantomJS else if (getClassOf(data = window.phantom) == 'RuntimeObject') { name = 'PhantomJS'; version = (data = data.version || null) && (data.major + '.' + data.minor + '.' 
+ data.patch); } // detect IE compatibility modes else if (typeof doc.documentMode == 'number' && (data = /\bTrident\/(\d+)/i.exec(ua))) { // we're in compatibility mode when the Trident version + 4 doesn't // equal the document mode version = [version, doc.documentMode]; if ((data = +data[1] + 4) != version[1]) { description.push('IE ' + version[1] + ' mode'); layout[1] = ''; version[1] = data; } version = name == 'IE' ? String(version[1].toFixed(1)) : version[0]; } os = os && format(os); } // detect prerelease phases if (version && (data = /(?:[ab]|dp|pre|[ab]\d+pre)(?:\d+\+?)?$/i.exec(version) || /(?:alpha|beta)(?: ?\d)?/i.exec(ua + ';' + (useFeatures && nav.appMinorVersion)) || /\bMinefield\b/i.test(ua) && 'a')) { prerelease = /b/i.test(data) ? 'beta' : 'alpha'; version = version.replace(RegExp(data + '\\+?$'), '') + (prerelease == 'beta' ? beta : alpha) + (/\d+\+?/.exec(data) || ''); } // rename code name "Fennec" if (name == 'Fennec') { name = 'Firefox Mobile'; } // obscure Maxthon's unreliable version else if (name == 'Maxthon' && version) { version = version.replace(/\.[\d.]+/, '.x'); } // detect Silk desktop/accelerated modes else if (name == 'Silk') { if (!/Mobi/i.test(ua)) { os = 'Android'; description.unshift('desktop mode'); } if (/Accelerated *= *true/i.test(ua)) { description.unshift('accelerated'); } } // detect Windows Phone desktop mode else if (name == 'IE' && (data = (/; *(?:XBLWP|ZuneWP)(\d+)/i.exec(ua) || 0)[1])) { name += ' Mobile'; os = 'Windows Phone OS ' + data + '.x'; description.unshift('desktop mode'); } // add mobile postfix else if ((name == 'IE' || name && !product && !/Browser|Mobi/.test(name)) && (os == 'Windows CE' || /Mobi/i.test(ua))) { name += ' Mobile'; } // detect IE platform preview else if (name == 'IE' && useFeatures && typeof external == 'object' && !external) { description.unshift('platform preview'); } // detect BlackBerry OS version // 
http://docs.blackberry.com/en/developers/deliverables/18169/HTTP_headers_sent_by_BB_Browser_1234911_11.jsp else if (/BlackBerry/.test(product) && (data = (RegExp(product.replace(/ +/g, ' *') + '/([.\\d]+)', 'i').exec(ua) || 0)[1] || version)) { os = 'Device Software ' + data; version = null; } // detect Opera identifying/masking itself as another browser // http://www.opera.com/support/kb/view/843/ else if (this != forOwn && ( (useFeatures && opera) || (/Opera/.test(name) && /\b(?:MSIE|Firefox)\b/i.test(ua)) || (name == 'Firefox' && /OS X (?:\d+\.){2,}/.test(os)) || (name == 'IE' && ( (os && !/^Win/.test(os) && version > 5.5) || /Windows XP/.test(os) && version > 8 || version == 8 && !/Trident/.test(ua) )) ) && !reOpera.test(data = parse.call(forOwn, ua.replace(reOpera, '') + ';')) && data.name) { // when "indentifying", the UA contains both Opera and the other browser's name data = 'ing as ' + data.name + ((data = data.version) ? ' ' + data : ''); if (reOpera.test(name)) { if (/IE/.test(data) && os == 'Mac OS') { os = null; } data = 'identify' + data; } // when "masking", the UA contains only the other browser's name else { data = 'mask' + data; if (operaClass) { name = format(operaClass.replace(/([a-z])([A-Z])/g, '$1 $2')); } else { name = 'Opera'; } if (/IE/.test(data)) { os = null; } if (!useFeatures) { version = null; } } layout = ['Presto']; description.push(data); } // detect WebKit Nightly and approximate Chrome/Safari versions if ((data = (/\bAppleWebKit\/([\d.]+\+?)/i.exec(ua) || 0)[1])) { // correct build for numeric comparison // (e.g. 
"532.5" becomes "532.05") data = [parseFloat(data.replace(/\.(\d)$/, '.0$1')), data]; // nightly builds are postfixed with a `+` if (name == 'Safari' && data[1].slice(-1) == '+') { name = 'WebKit Nightly'; prerelease = 'alpha'; version = data[1].slice(0, -1); } // clear incorrect browser versions else if (version == data[1] || version == (/\bSafari\/([\d.]+\+?)/i.exec(ua) || 0)[1]) { version = null; } // use the full Chrome version when available data = [data[0], (/\bChrome\/([\d.]+)/i.exec(ua) || 0)[1]]; // detect JavaScriptCore // http://stackoverflow.com/questions/6768474/how-can-i-detect-which-javascript-engine-v8-or-jsc-is-used-at-runtime-in-androi if (!useFeatures || (/internal|\n/i.test(toString.toString()) && !data[1])) { layout[1] = 'like Safari'; data = (data = data[0], data < 400 ? 1 : data < 500 ? 2 : data < 526 ? 3 : data < 533 ? 4 : data < 534 ? '4+' : data < 535 ? 5 : '5'); } else { layout[1] = 'like Chrome'; data = data[1] || (data = data[0], data < 530 ? 1 : data < 532 ? 2 : data < 532.05 ? 3 : data < 533 ? 4 : data < 534.03 ? 5 : data < 534.07 ? 6 : data < 534.10 ? 7 : data < 534.13 ? 8 : data < 534.16 ? 9 : data < 534.24 ? 10 : data < 534.30 ? 11 : data < 535.01 ? 12 : data < 535.02 ? '13+' : data < 535.07 ? 15 : data < 535.11 ? 16 : data < 535.19 ? 17 : data < 536.05 ? 18 : data < 536.10 ? 19 : data < 537.01 ? 20 : '21'); } // add the postfix of ".x" or "+" for approximate versions layout[1] += ' ' + (data += typeof data == 'number' ? '.x' : /[.+]/.test(data) ? 
'' : '+'); // obscure version for some Safari 1-2 releases if (name == 'Safari' && (!version || parseInt(version) > 45)) { version = data; } } // detect Opera desktop modes if (name == 'Opera' && (data = /(?:zbov|zvav)$/.exec(os))) { name += ' '; description.unshift('desktop mode'); if (data == 'zvav') { name += 'Mini'; version = null; } else { name += 'Mobile'; } } // detect Chrome desktop mode else if (name == 'Safari' && /Chrome/.exec(layout[1])) { description.unshift('desktop mode'); name = 'Chrome Mobile'; version = null; if (/Mac OS X/.test(os)) { manufacturer = 'Apple'; os = 'iOS 4.3+'; } else { os = null; } } // strip incorrect OS versions if (version && version.indexOf(data = /[\d.]+$/.exec(os)) == 0 && ua.indexOf('/' + data + '-') > -1) { os = trim(os.replace(data, '')); } // add layout engine if (layout && !/Avant|Nook/.test(name) && ( /Browser|Lunascape|Maxthon/.test(name) || /^(?:Adobe|Arora|Midori|Phantom|Rekonq|Rock|Sleipnir|Web)/.test(name) && layout[1])) { // don't add layout details to description if they are falsey (data = layout[layout.length - 1]) && description.push(data); } // combine contextual information if (description.length) { description = ['(' + description.join('; ') + ')']; } // append manufacturer if (manufacturer && product && product.indexOf(manufacturer) < 0) { description.push('on ' + manufacturer); } // append product if (product) { description.push((/^on /.test(description[description.length -1]) ? '' : 'on ') + product); } // parse OS into an object if (os) { data = / ([\d.+]+)$/.exec(os); os = { 'architecture': 32, 'family': data ? os.replace(data[0], '') : os, 'version': data ? data[1] : null, 'toString': function() { var version = this.version; return this.family + (version ? ' ' + version : '') + (this.architecture == 64 ? 
' 64-bit' : ''); } }; } // add browser/OS architecture if ((data = / (?:AMD|IA|Win|WOW|x86_|x)64\b/i.exec(arch)) && !/\bi686\b/i.test(arch)) { if (os) { os.architecture = 64; os.family = os.family.replace(data, ''); } if (name && (/WOW64/i.test(ua) || (useFeatures && /\w(?:86|32)$/.test(nav.cpuClass || nav.platform)))) { description.unshift('32-bit'); } } ua || (ua = null); /*------------------------------------------------------------------------*/ /** * The platform object. * * @name platform * @type Object */ return { /** * The browser/environment version. * * @memberOf platform * @type String|Null */ 'version': name && version && (description.unshift(version), version), /** * The name of the browser/environment. * * @memberOf platform * @type String|Null */ 'name': name && (description.unshift(name), name), /** * The name of the operating system. * * @memberOf platform * @type Object */ 'os': os ? (name && !(os == String(os).split(' ')[0] && (os == name.split(' ')[0] || product)) && description.push(product ? '(' + os + ')' : 'on ' + os), os) : { /** * The CPU architecture the OS is built for. * * @memberOf platform.os * @type String|Null */ 'architecture': null, /** * The family of the OS. * * @memberOf platform.os * @type String|Null */ 'family': null, /** * The version of the OS. * * @memberOf platform.os * @type String|Null */ 'version': null, /** * Returns the OS string. * * @memberOf platform.os * @returns {String} The OS string. */ 'toString': function() { return 'null'; } }, /** * The platform description. * * @memberOf platform * @type String|Null */ 'description': description.length ? description.join(' ') : ua, /** * The name of the browser layout engine. * * @memberOf platform * @type String|Null */ 'layout': layout && layout[0], /** * The name of the product's manufacturer. * * @memberOf platform * @type String|Null */ 'manufacturer': manufacturer, /** * The alpha/beta release indicator. 
* * @memberOf platform * @type String|Null */ 'prerelease': prerelease, /** * The name of the product hosting the browser. * * @memberOf platform * @type String|Null */ 'product': product, /** * The browser's user agent string. * * @memberOf platform * @type String|Null */ 'ua': ua, // parses a user agent string into a platform object 'parse': parse, // returns the platform description 'toString': toStringPlatform }; } /*--------------------------------------------------------------------------*/ // expose platform // some AMD build optimizers, like r.js, check for specific condition patterns like the following: if (typeof define == 'function' && typeof define.amd == 'object' && define.amd) { // define as an anonymous module so, through path mapping, it can be aliased define(function() { return parse(); }); } // check for `exports` after `define` in case a build optimizer adds an `exports` object else if (freeExports) { // in Narwhal, Node.js, or RingoJS forOwn(parse(), function(value, key) { freeExports[key] = value; }); } // in a browser or Rhino else { // use square bracket notation so Closure Compiler won't munge `platform` // http://code.google.com/closure/compiler/docs/api-tutorial3.html#export window['platform'] = parse(); } }(this)); var buster = this.buster = this.buster || {}; buster._ = buster.lodash = this._; buster.when = this.when; buster.async = this.async; buster.platform = this.platform; this.define = (function () { function resolve(ns) { var pieces = ns.replace(/^buster-test\//, "").replace(/^buster-/, "").replace(/-(.)/g, function (m, l) { return l.toUpperCase(); }).split("/"); return { property: pieces.pop(), object: buster._.reduce(pieces, function (ctx, name) { if (!ctx[name]) { ctx[name] = {}; } return ctx[name]; }, buster) }; } return function (id, dependencies, factory) { if (arguments.length === 2) { factory = dependencies; dependencies = []; } var deps = [], dep; for (var j, i = 0, l = dependencies.length; i < l; ++i) { dep = 
resolve(dependencies[i]); if (!dep.object[dep.property]) { throw new Error(id + " depends on unknown module " + dep.property); } deps.push(dep.object[dep.property]); } dep = resolve(id); dep.object[dep.property] = factory.apply(this, deps); }; }()); this.define.amd = true; ((typeof define === "function" && define.amd && function (m) { define("bane", m); }) || (typeof module === "object" && function (m) { module.exports = m(); }) || function (m) { this.bane = m(); } )(function () { "use strict"; var slice = Array.prototype.slice; function handleError(event, error, errbacks) { var i, l = errbacks.length; if (l > 0) { for (i = 0; i < l; ++i) { errbacks[i](event, error); } return; } setTimeout(function () { error.message = event + " listener threw error: " + error.message; throw error; }, 0); } function assertFunction(fn) { if (typeof fn !== "function") { throw new TypeError("Listener is not function"); } return fn; } function supervisors(object) { if (!object.supervisors) { object.supervisors = []; } return object.supervisors; } function listeners(object, event) { if (!object.listeners) { object.listeners = {}; } if (event && !object.listeners[event]) { object.listeners[event] = []; } return event ? object.listeners[event] : object.listeners; } function errbacks(object) { if (!object.errbacks) { object.errbacks = []; } return object.errbacks; } /** * @signature var emitter = bane.createEmitter([object]); * * Create a new event emitter. If an object is passed, it will be modified * by adding the event emitter methods (see below). 
*/ function createEventEmitter(object) { object = object || {}; function notifyListener(event, listener, args) { try { listener.listener.apply(listener.thisp || object, args); } catch (e) { handleError(event, e, errbacks(object)); } } object.on = function (event, listener, thisp) { if (typeof event === "function") { return supervisors(this).push({ listener: event, thisp: listener }); } listeners(this, event).push({ listener: assertFunction(listener), thisp: thisp }); }; object.off = function (event, listener) { var fns, events, i, l; if (!event) { fns = supervisors(this); fns.splice(0, fns.length); events = listeners(this); for (i in events) { if (events.hasOwnProperty(i)) { fns = listeners(this, i); fns.splice(0, fns.length); } } fns = errbacks(this); fns.splice(0, fns.length); return; } if (typeof event === "function") { fns = supervisors(this); listener = event; } else { fns = listeners(this, event); } if (!listener) { fns.splice(0, fns.length); return; } for (i = 0, l = fns.length; i < l; ++i) { if (fns[i].listener === listener) { fns.splice(i, 1); return; } } }; object.once = function (event, listener, thisp) { var wrapper = function () { object.off(event, wrapper); listener.apply(this, arguments); }; object.on(event, wrapper, thisp); }; object.bind = function (object, events) { var prop, i, l; if (!events) { for (prop in object) { if (typeof object[prop] === "function") { this.on(prop, object[prop], object); } } } else { for (i = 0, l = events.length; i < l; ++i) { if (typeof object[events[i]] === "function") { this.on(events[i], object[events[i]], object); } else { throw new Error("No such method " + events[i]); } } } return object; }; object.emit = function (event) { var toNotify = supervisors(this); var args = slice.call(arguments), i, l; for (i = 0, l = toNotify.length; i < l; ++i) { notifyListener(event, toNotify[i], args); } toNotify = listeners(this, event).slice(); args = slice.call(arguments, 1); for (i = 0, l = toNotify.length; i < l; ++i) { 
notifyListener(event, toNotify[i], args); } }; object.errback = function (listener) { if (!this.errbacks) { this.errbacks = []; } this.errbacks.push(assertFunction(listener)); }; return object; } return { createEventEmitter: createEventEmitter }; }); ((typeof define === "function" && define.amd && function (m) { define("samsam", m); }) || (typeof module === "object" && function (m) { module.exports = m(); }) || // Node function (m) { this.samsam = m(); } // Browser globals )(function () { var o = Object.prototype; var div = typeof document !== "undefined" && document.createElement("div"); function isNaN(value) { // Unlike global isNaN, this avoids type coercion // typeof check avoids IE host object issues, hat tip to // lodash var val = value; // JsLint thinks value !== value is "weird" return typeof value === "number" && value !== val; } function getClass(value) { // Returns the internal [[Class]] by calling Object.prototype.toString // with the provided value as this. Return value is a string, naming the // internal class, e.g. "Array" return o.toString.call(value).split(/[ \]]/)[1]; } /** * @name samsam.isArguments * @param Object object * * Returns ``true`` if ``object`` is an ``arguments`` object, * ``false`` otherwise. */ function isArguments(object) { if (typeof object !== "object" || typeof object.length !== "number" || getClass(object) === "Array") { return false; } if (typeof object.callee == "function") { return true; } try { object[object.length] = 6; delete object[object.length]; } catch (e) { return true; } return false; } /** * @name samsam.isElement * @param Object object * * Returns ``true`` if ``object`` is a DOM element node. Unlike * Underscore.js/lodash, this function will return ``false`` if ``object`` * is an *element-like* object, i.e. a regular object with a ``nodeType`` * property that holds the value ``1``. 
*/ function isElement(object) { if (!object || object.nodeType !== 1 || !div) { return false; } try { object.appendChild(div); object.removeChild(div); } catch (e) { return false; } return true; } /** * @name samsam.keys * @param Object object * * Return an array of own property names. */ function keys(object) { var ks = [], prop; for (prop in object) { if (o.hasOwnProperty.call(object, prop)) { ks.push(prop); } } return ks; } /** * @name samsam.isDate * @param Object value * * Returns true if the object is a ``Date``, or *date-like*. Duck typing * of date objects work by checking that the object has a ``getTime`` * function whose return value equals the return value from the object's * ``valueOf``. */ function isDate(value) { return typeof value.getTime == "function" && value.getTime() == value.valueOf(); } /** * @name samsam.isNegZero * @param Object value * * Returns ``true`` if ``value`` is ``-0``. */ function isNegZero(value) { return value === 0 && 1 / value === -Infinity; } /** * @name samsam.equal * @param Object obj1 * @param Object obj2 * * Returns ``true`` if two objects are strictly equal. Compared to * ``===`` there are two exceptions: * * - NaN is considered equal to NaN * - -0 and +0 are not considered equal */ function identical(obj1, obj2) { if (obj1 === obj2 || (isNaN(obj1) && isNaN(obj2))) { return obj1 !== 0 || isNegZero(obj1) === isNegZero(obj2); } } /** * @name samsam.deepEqual * @param Object obj1 * @param Object obj2 * * Deep equal comparison. Two values are "deep equal" if: * * - They are equal, according to samsam.identical * - They are both date objects representing the same time * - They are both arrays containing elements that are all deepEqual * - They are objects with the same set of properties, and each property * in ``obj1`` is deepEqual to the corresponding property in ``obj2`` * * Supports cyclic objects. 
*/
/**
 * Deep-equality comparison that tolerates cyclic structures.
 *
 * Recursively walks obj1 and obj2.  Every visited object is remembered
 * together with the path at which it was first seen (e.g. "$1['a']['b']"),
 * so that when a reference cycle is encountered the already-compared pair
 * is treated as equal instead of recursing forever.
 *
 * NOTE(review): relies on several module-local helpers that are defined
 * earlier in this file, outside this chunk: isNaN, identical, isElement,
 * isDate, getClass, keys, isArguments, and the object `o` (presumably a
 * cached Object.prototype — verify against the top of the samsam module).
 */
function deepEqualCyclic(obj1, obj2) {
    // used for cyclic comparison
    // contain already visited objects
    var objects1 = [],
        objects2 = [],
        // contain pathes (position in the object structure)
        // of the already visited objects
        // indexes same as in objects arrays
        paths1 = [],
        paths2 = [],
        // contains combinations of already compared objects
        // in the manner: { "$1['ref']$2['ref']": true }
        compared = {};

    /**
     * used to check, if the value of a property is an object
     * (cyclic logic is only needed for objects)
     * only needed for cyclic logic
     */
    function isObject(value) {
        if (typeof value === 'object' && value !== null &&
                !(value instanceof Boolean) &&
                !(value instanceof Date) &&
                !(value instanceof Number) &&
                !(value instanceof RegExp) &&
                !(value instanceof String)) {
            return true;
        }
        return false;
    }

    /**
     * returns the index of the given object in the
     * given objects array, -1 if not contained
     * only needed for cyclic logic
     */
    function getIndex(objects, obj) {
        var i;
        for (i = 0; i < objects.length; i++) {
            if (objects[i] === obj) {
                return i;
            }
        }
        return -1;
    }

    // does the recursion for the deep equal check
    return (function deepEqual(obj1, obj2, path1, path2) {
        var type1 = typeof obj1;
        var type2 = typeof obj2;

        // Primitive / trivial cases are delegated to `identical`.
        // == null also matches undefined
        if (obj1 === obj2 ||
                isNaN(obj1) || isNaN(obj2) ||
                obj1 == null || obj2 == null ||
                type1 !== "object" || type2 !== "object") {
            return identical(obj1, obj2);
        }

        // Elements are only equal if identical(expected, actual)
        if (isElement(obj1) || isElement(obj2)) {
            return false;
        }

        // Dates compare by timestamp; a Date is never equal to a non-Date.
        var isDate1 = isDate(obj1), isDate2 = isDate(obj2);
        if (isDate1 || isDate2) {
            if (!isDate1 || !isDate2 || obj1.getTime() !== obj2.getTime()) {
                return false;
            }
        }

        // RegExps compare by their source/flags string form.
        if (obj1 instanceof RegExp && obj2 instanceof RegExp) {
            if (obj1.toString() !== obj2.toString()) {
                return false;
            }
        }

        var class1 = getClass(obj1);
        var class2 = getClass(obj2);
        var keys1 = keys(obj1);
        var keys2 = keys(obj2);

        // arguments objects only need matching lengths; everything else
        // must agree on type, [[Class]] and own-key count before the
        // per-key comparison below.
        if (isArguments(obj1) || isArguments(obj2)) {
            if (obj1.length !== obj2.length) {
                return false;
            }
        } else {
            if (type1 !== type2 || class1 !== class2 ||
                    keys1.length !== keys2.length) {
                return false;
            }
        }

        var key, i, l,
            // following vars are used for the cyclic logic
            value1, value2,
            isObject1, isObject2,
            index1, index2,
            newPath1, newPath2;

        for (i = 0, l = keys1.length; i < l; i++) {
            key = keys1[i];
            if (!o.hasOwnProperty.call(obj2, key)) {
                return false;
            }

            // Start of the cyclic logic

            value1 = obj1[key];
            value2 = obj2[key];

            isObject1 = isObject(value1);
            isObject2 = isObject(value2);

            // determine, if the objects were already visited
            // (it's faster to check for isObject first, than to
            // get -1 from getIndex for non objects)
            index1 = isObject1 ? getIndex(objects1, value1) : -1;
            index2 = isObject2 ? getIndex(objects2, value2) : -1;

            // determine the new pathes of the objects
            // - for non cyclic objects the current path will be extended
            //   by current property name
            // - for cyclic objects the stored path is taken
            newPath1 = index1 !== -1 ?
                paths1[index1] :
                path1 + '[' + JSON.stringify(key) + ']';
            newPath2 = index2 !== -1 ?
                paths2[index2] :
                path2 + '[' + JSON.stringify(key) + ']';

            // stop recursion if current objects are already compared
            if (compared[newPath1 + newPath2]) {
                return true;
            }

            // remember the current objects and their pathes
            if (index1 === -1 && isObject1) {
                objects1.push(value1);
                paths1.push(newPath1);
            }
            if (index2 === -1 && isObject2) {
                objects2.push(value2);
                paths2.push(newPath2);
            }

            // remember that the current objects are already compared
            if (isObject1 && isObject2) {
                compared[newPath1 + newPath2] = true;
            }

            // End of cyclic logic

            // neither value1 nor value2 is a cycle
            // continue with next level
            if (!deepEqual(value1, value2, newPath1, newPath2)) {
                return false;
            }
        }

        return true;

    }(obj1, obj2, '$1', '$2'));
}

// Forward declaration: `match` is assigned below but already referenced
// by arrayContains (mutual dependency).
var match;

// Returns true if `subset` occurs as a contiguous run in `array`,
// element-wise compared via `match`, starting at the FIRST index whose
// element matches subset[0].  Visible behavior worth noting: if that
// first candidate run fails part-way, the function returns false
// immediately rather than retrying later start positions; and an empty
// `subset` against a non-empty array calls match() with an undefined
// matcher, which throws — presumably callers never pass an empty pattern.
function arrayContains(array, subset) {
    var i, l, j, k;
    for (i = 0, l = array.length; i < l; ++i) {
        if (match(array[i], subset[0])) {
            for (j = 0, k = subset.length; j < k; ++j) {
                if (!match(array[i + j], subset[j])) { return false; }
            }
            return true;
        }
    }
    return false;
}

/**
 * @name samsam.match
 * @param Object object
 * @param Object matcher
 *
 * Compare arbitrary value ``object`` with matcher.
*/ match = function match(object, matcher) { if (matcher && typeof matcher.test === "function") { return matcher.test(object); } if (typeof matcher === "function") { return matcher(object) === true; } if (typeof matcher === "string") { matcher = matcher.toLowerCase(); var notNull = typeof object === "string" || !!object; return notNull && (String(object)).toLowerCase().indexOf(matcher) >= 0; } if (typeof matcher === "number") { return matcher === object; } if (typeof matcher === "boolean") { return matcher === object; } if (getClass(object) === "Array" && getClass(matcher) === "Array") { return arrayContains(object, matcher); } if (matcher && typeof matcher === "object") { var prop; for (prop in matcher) { if (!match(object[prop], matcher[prop])) { return false; } } return true; } throw new Error("Matcher was not a string, a number, a " + "function, a boolean or an object"); }; return { isArguments: isArguments, isElement: isElement, isDate: isDate, isNegZero: isNegZero, identical: identical, deepEqual: deepEqualCyclic, match: match }; }); ((typeof define === "function" && define.amd && function (m) { define("evented-logger", ["lodash", "bane"], m); }) || (typeof module === "object" && typeof require === "function" && function (m) { module.exports = m(require("lodash"), require("bane")); }) || function (m) { this.eventedLogger = m(this._, this.bane); } )(function (_, bane) { "use strict"; function formatMessage(logger, message) { if (!logger.logFunctions && typeof message === "function") { return logger.format(message()); } return logger.format(message); } function createLogger(name, level) { return function () { if (level > _.indexOf(this.levels, this.level)) { return; } var self = this; var message = _.reduce(arguments, function (memo, arg) { return memo.concat(formatMessage(self, arg)); }, []).join(" "); this.emit("log", { message: message, level: this.levels[level] }); }; } function Logger(opt) { var logger = this; logger.levels = opt.levels || ["error", 
"warn", "log", "debug"]; logger.level = opt.level || logger.levels[logger.levels.length - 1]; _.each(logger.levels, function (level, i) { logger[level] = createLogger(level, i); }); if (opt.formatter) { logger.format = opt.formatter; } logger.logFunctions = !!opt.logFunctions; } Logger.prototype = bane.createEventEmitter({ create: function (opt) { return new Logger(opt || {}); }, format: function (obj) { if (typeof obj !== "object") { return String(obj); } try { return JSON.stringify(obj); } catch (e) { return String(obj); } } }); return Logger.prototype; }); ((typeof define === "function" && define.amd && function (m) { define("referee", ["lodash", "samsam", "bane"], m); }) || (typeof module === "object" && function (m) { module.exports = m(require("lodash"), require("samsam"), require("bane")); }) || function (m) { this.referee = m(this._, this.samsam, this.bane); } )(function (_, samsam, bane) { "use strict"; var toString = Object.prototype.toString; var slice = Array.prototype.slice; var assert, refute, referee = bane.createEventEmitter(); referee.countAssertion = function countAssertion() { if (typeof referee.count !== "number") { referee.count = 0; } referee.count += 1; }; function interpolate(string, prop, value) { return string.replace(new RegExp("\\$\\{" + prop + "\\}", "g"), value); } // Interpolate positional arguments. Replaces occurences of ${<index>} in // the string with the corresponding entry in values[<index>] function interpolatePosArg(message, values) { return _.reduce(values, function (msg, value, index) { return interpolate(msg, index, referee.format(value)); }, message); } function interpolateProperties(message, properties) { return _.reduce(_.keys(properties), function (msg, name) { return interpolate(msg, name, referee.format(properties[name])); }, message || ""); } // Fail an assertion. 
Interpolates message before calling referee.fail function fail(type, assertion, msg) { delete this.fail; var message = interpolateProperties(interpolatePosArg( referee[type][assertion][msg] || msg, [].slice.call(arguments, 3) ), this); referee.fail("[" + type + "." + assertion + "] " + message); } // Internal helper. Used throughout to fail assertions if they receive // too few arguments. The name is provided for a helpful error message. function assertArgNum(name, args, num) { if (args.length < num) { referee.fail("[" + name + "] Expected to receive at least " + num + " argument" + (num > 1 ? "s" : "")); return false; } return true; } // Internal helper. Not the most elegant of functions, but it takes // care of all the nitty-gritty of assertion functions: counting, // verifying parameter count, interpolating messages with actual // values and so on. function defineAssertion(type, name, func, minArgs, messageValues) { referee[type][name] = function () { referee.countAssertion(); var fullName = type + "." 
+ name, failed = false; if (!assertArgNum(fullName, arguments, minArgs || func.length)) { return; } var ctx = { fail: function () { failed = true; var failArgs = [type, name].concat(slice.call(arguments)); fail.apply(this, failArgs); return true; } }; var args = slice.call(arguments, 0); if (typeof messageValues === "function") { args = messageValues.apply(this, args); } if (!func.apply(ctx, arguments)) { return fail.apply(ctx, [type, name, "message"].concat(args)); } if (!failed) { referee.emit.apply(referee, ["pass", fullName].concat(args)); } }; } referee.add = function (name, opt) { var refuteArgs; if (opt.refute) { refuteArgs = opt.refute.length; } else { refuteArgs = opt.assert.length; opt.refute = function () { return !opt.assert.apply(this, arguments); }; } var values = opt.values; defineAssertion("assert", name, opt.assert, opt.assert.length, values); defineAssertion("refute", name, opt.refute, refuteArgs, values); assert[name].message = opt.assertMessage; refute[name].message = opt.refuteMessage; if (opt.expectation) { if (referee.expect && referee.expect.wrapAssertion) { referee.expect.wrapAssertion(name, opt.expectation); } else { assert[name].expectationName = opt.expectation; refute[name].expectationName = opt.expectation; } } }; assert = referee.assert = function assert(actual, message) { referee.countAssertion(); if (!assertArgNum("assert", arguments, 1)) { return; } if (!actual) { var v = referee.format(actual); referee.fail(message || "[assert] Expected " + v + " to be truthy"); } else { referee.emit("pass", "assert", message || "", actual); } }; assert.toString = function () { return "referee.assert()"; }; refute = referee.refute = function (actual, message) { referee.countAssertion(); if (!assertArgNum("refute", arguments, 1)) { return; } if (actual) { var v = referee.format(actual); referee.fail(message || "[refute] Expected " + v + " to be falsy"); } else { referee.emit("pass", "refute", message || "", actual); } }; assert.message = "[assert] 
Expected ${0} to be truthy"; referee.count = 0; referee.fail = function (message) { var exception = new Error(message); exception.name = "AssertionError"; try { throw exception; } catch (e) { referee.emit("failure", e); } if (typeof referee.throwOnFailure !== "boolean" || referee.throwOnFailure) { throw exception; } }; referee.format = function (object) { return String(object); }; function msg(message) { if (!message) { return ""; } return message + (/[.:!?]$/.test(message) ? " " : ": "); } referee.prepareMessage = msg; function actualAndExpectedMessageValues(actual, expected, message) { return [actual, expected, msg(message)]; } function actualMessageValues(actual, message) { return [actual, msg(message)]; } function actualAndTypeOfMessageValues(actual, message) { return [actual, typeof actual, msg(message)]; } referee.add("same", { assert: function (actual, expected) { return samsam.identical(actual, expected); }, refute: function (actual, expected) { return !samsam.identical(actual, expected); }, assertMessage: "${2}${0} expected to be the same object as ${1}", refuteMessage: "${2}${0} expected not to be the same object as ${1}", expectation: "toBe", values: actualAndExpectedMessageValues }); // Extract/replace with separate module that does a more detailed // visualization of multi-line strings function multiLineStringDiff(actual, expected, message) { if (actual === expected) { return true; } var heading = assert.equals.multiLineStringHeading; message = interpolatePosArg(heading, [message]); var actualLines = actual.split("\n"); var expectedLines = expected.split("\n"); var lineCount = Math.max(expectedLines.length, actualLines.length); var i, lines = []; for (i = 0; i < lineCount; ++i) { if (expectedLines[i] !== actualLines[i]) { lines.push("line " + (i + 1) + ": " + (expectedLines[i] || "") + "\nwas: " + (actualLines[i] || "")); } } referee.fail("[assert.equals] " + message + lines.join("\n\n")); return false; } referee.add("equals", { // Uses arguments[2] 
because the function's .length is used to determine // the minimum required number of arguments. assert: function (actual, expected) { if (typeof actual === "string" && typeof expected === "string" && (actual.indexOf("\n") >= 0 || expected.indexOf("\n") >= 0)) { var message = msg(arguments[2]); return multiLineStringDiff(actual, expected, message); } return samsam.deepEqual(actual, expected); }, refute: function (actual, expected) { return !samsam.deepEqual(actual, expected); }, assertMessage: "${2}${0} expected to be equal to ${1}", refuteMessage: "${2}${0} expected not to be equal to ${1}", expectation: "toEqual", values: actualAndExpectedMessageValues }); assert.equals.multiLineStringHeading = "${0}Expected multi-line strings " + "to be equal:\n"; referee.add("greater", { assert: function (actual, expected) { return actual > expected; }, assertMessage: "${2}Expected ${0} to be greater than ${1}", refuteMessage: "${2}Expected ${0} to be less than or equal to ${1}", expectation: "toBeGreaterThan", values: actualAndExpectedMessageValues }); referee.add("less", { assert: function (actual, expected) { return actual < expected; }, assertMessage: "${2}Expected ${0} to be less than ${1}", refuteMessage: "${2}Expected ${0} to be greater than or equal to ${1}", expectation: "toBeLessThan", values: actualAndExpectedMessageValues }); referee.add("defined", { assert: function (actual) { return typeof actual !== "undefined"; }, assertMessage: "${2}Expected to be defined", refuteMessage: "${2}Expected ${0} (${1}) not to be defined", expectation: "toBeDefined", values: actualAndTypeOfMessageValues }); referee.add("isNull", { assert: function (actual) { return actual === null; }, assertMessage: "${1}Expected ${0} to be null", refuteMessage: "${1}Expected not to be null", expectation: "toBeNull", values: actualMessageValues }); referee.match = function (actual, matcher) { try { return samsam.match(actual, matcher); } catch (e) { throw new Error("Matcher (" + 
referee.format(matcher) + ") was not a string, a number, a function, " + "a boolean or an object"); } }; referee.add("match", { assert: function (actual, matcher) { var passed; try { passed = referee.match(actual, matcher); } catch (e) { // Uses arguments[2] because the function's .length is used // to determine the minimum required number of arguments. var message = msg(arguments[2]); return this.fail("exceptionMessage", e.message, message); } return passed; }, refute: function (actual, matcher) { var passed; try { passed = referee.match(actual, matcher); } catch (e) { return this.fail("exceptionMessage", e.message); } return !passed; }, assertMessage: "${2}${0} expected to match ${1}", refuteMessage: "${2}${0} expected not to match ${1}", expectation: "toMatch", values: actualAndExpectedMessageValues }); assert.match.exceptionMessage = "${1}${0}"; refute.match.exceptionMessage = "${1}${0}"; referee.add("isObject", { assert: function (actual) { return typeof actual === "object" && !!actual; }, assertMessage: "${2}${0} (${1}) expected to be object and not null", refuteMessage: "${2}${0} expected to be null or not an object", expectation: "toBeObject", values: actualAndTypeOfMessageValues }); referee.add("isFunction", { assert: function (actual) { return typeof actual === "function"; }, assertMessage: "${2}${0} (${1}) expected to be function", refuteMessage: "${2}${0} expected not to be function", expectation: "toBeFunction", values: function (actual) { // Uses arguments[1] because the function's .length is used to // determine the minimum required number of arguments. 
var message = msg(arguments[1]); return [String(actual).replace("\n", ""), typeof actual, message]; } }); referee.add("isTrue", { assert: function (actual) { return actual === true; }, assertMessage: "${1}Expected ${0} to be true", refuteMessage: "${1}Expected ${0} to not be true", expectation: "toBeTrue", values: actualMessageValues }); referee.add("isFalse", { assert: function (actual) { return actual === false; }, assertMessage: "${1}Expected ${0} to be false", refuteMessage: "${1}Expected ${0} to not be false", expectation: "toBeFalse", values: actualMessageValues }); referee.add("isString", { assert: function (actual) { return typeof actual === "string"; }, assertMessage: "${2}Expected ${0} (${1}) to be string", refuteMessage: "${2}Expected ${0} not to be string", expectation: "toBeString", values: actualAndTypeOfMessageValues }); referee.add("isBoolean", { assert: function (actual) { return typeof actual === "boolean"; }, assertMessage: "${2}Expected ${0} (${1}) to be boolean", refuteMessage: "${2}Expected ${0} not to be boolean", expectation: "toBeBoolean", values: actualAndTypeOfMessageValues }); referee.add("isNumber", { assert: function (actual) { return typeof actual === "number" && !isNaN(actual); }, assertMessage: "${2}Expected ${0} (${1}) to be a non-NaN number", refuteMessage: "${2}Expected ${0} to be NaN or a non-number value", expectation: "toBeNumber", values: actualAndTypeOfMessageValues }); referee.add("isNaN", { assert: function (actual) { return typeof actual === "number" && isNaN(actual); }, assertMessage: "${2}Expected ${0} to be NaN", refuteMessage: "${2}Expected not to be NaN", expectation: "toBeNaN", values: actualAndTypeOfMessageValues }); referee.add("isArray", { assert: function (actual) { return toString.call(actual) === "[object Array]"; }, assertMessage: "${2}Expected ${0} to be array", refuteMessage: "${2}Expected ${0} not to be array", expectation: "toBeArray", values: actualAndTypeOfMessageValues }); function isArrayLike(object) 
{ return _.isArray(object) || (!!object && typeof object.length === "number" && typeof object.splice === "function") || _.isArguments(object); } referee.isArrayLike = isArrayLike; referee.add("isArrayLike", { assert: function (actual) { return isArrayLike(actual); }, assertMessage: "${2}Expected ${0} to be array like", refuteMessage: "${2}Expected ${0} not to be array like", expectation: "toBeArrayLike", values: actualAndTypeOfMessageValues }); function exactKeys(object, keys) { var keyMap = {}; var keyCnt = 0; for (var i=0; i < keys.length; i++) { keyMap[keys[i]] = true; keyCnt += 1; } for (var key in object) { if (object.hasOwnProperty(key)) { if (! keyMap[key]) { return false; } keyCnt -= 1; } } return keyCnt === 0; } referee.add('keys', { assert: function (actual, keys) { return exactKeys(actual, keys); }, assertMessage: "Expected ${0} to have exact keys ${1}!", refuteMessage: "Expected ${0} not to have exact keys ${1}!", expectation: "toHaveKeys" }); function captureException(callback) { try { callback(); } catch (e) { return e; } return null; } referee.captureException = captureException; assert.exception = function (callback, matcher, message) { referee.countAssertion(); if (!assertArgNum("assert.exception", arguments, 1)) { return; } if (!callback) { return; } if (typeof matcher === "string") { message = matcher; matcher = undefined; } var err = captureException(callback); message = msg(message); if (!err) { if (typeof matcher === "object") { return fail.call( {}, "assert", "exception", "typeNoExceptionMessage", message, referee.format(matcher) ); } else { return fail.call({}, "assert", "exception", "message", message); } } if (typeof matcher === "object" && !referee.match(err, matcher)) { return fail.call( {}, "assert", "exception", "typeFailMessage", message, referee.format(matcher), err.name, err.message, err.stack ); } if (typeof matcher === "function" && matcher(err) !== true) { return fail.call({}, "assert", "exception", "matchFailMessage", message, 
err.name, err.message); } referee.emit("pass", "assert.exception", message, callback, matcher); }; assert.exception.typeNoExceptionMessage = "${0}Expected ${1} but no " + "exception was thrown"; assert.exception.message = "${0}Expected exception"; assert.exception.typeFailMessage = "${0}Expected ${1} but threw ${2} " + "(${3})\n${4}"; assert.exception.matchFailMessage = "${0}Expected thrown ${1} (${2}) to " + "pass matcher function"; assert.exception.expectationName = "toThrow"; refute.exception = function (callback) { referee.countAssertion(); if (!assertArgNum("refute.exception", arguments, 1)) { return; } var err = captureException(callback); if (err) { // Uses arguments[1] because the function's .length is used to // determine the minimum required number of arguments. fail.call({}, "refute", "exception", "message", msg(arguments[1]), err.name, err.message, callback); } else { referee.emit("pass", "refute.exception", callback); } }; refute.exception.message = "${0}Expected not to throw but " + "threw ${1} (${2})"; refute.exception.expectationName = "toThrow"; referee.add("near", { assert: function (actual, expected, delta) { return Math.abs(actual - expected) <= delta; }, assertMessage: "${3}Expected ${0} to be equal to ${1} +/- ${2}", refuteMessage: "${3}Expected ${0} not to be equal to ${1} +/- ${2}", expectation: "toBeNear", values: function (actual, expected, delta, message) { return [actual, expected, delta, msg(message)]; } }); referee.add("hasPrototype", { assert: function (actual, protoObj) { return protoObj.isPrototypeOf(actual); }, assertMessage: "${2}Expected ${0} to have ${1} on its prototype chain", refuteMessage: "${2}Expected ${0} not to have ${1} on its " + "prototype chain", expectation: "toHavePrototype", values: actualAndExpectedMessageValues }); referee.add("contains", { assert: function (haystack, needle) { return _.include(haystack, needle); }, assertMessage: "${2}Expected [${0}] to contain ${1}", refuteMessage: "${2}Expected [${0}] not to 
contain ${1}", expectation: "toContain", values: actualAndExpectedMessageValues }); referee.add("tagName", { assert: function (element, tagName) { // Uses arguments[2] because the function's .length is used to // determine the minimum required number of arguments. if (!element.tagName) { return this.fail( "noTagNameMessage", tagName, element, msg(arguments[2]) ); } return tagName.toLowerCase && tagName.toLowerCase() === element.tagName.toLowerCase(); }, assertMessage: "${2}Expected tagName to be ${0} but was ${1}", refuteMessage: "${2}Expected tagName not to be ${0}", expectation: "toHaveTagName", values: function (element, tagName, message) { return [tagName, element.tagName, msg(message)]; } }); assert.tagName.noTagNameMessage = "${2}Expected ${1} to have tagName " + "property"; refute.tagName.noTagNameMessage = "${2}Expected ${1} to have tagName " + "property"; referee.add("className", { assert: function (element, name) { if (typeof element.className === "undefined") { // Uses arguments[2] because the function's .length is used to // determine the minimum required number of arguments. return this.fail( "noClassNameMessage", name, element, msg(arguments[2]) ); } var expected = typeof name === "string" ? 
name.split(" ") : name; var actual = element.className.split(" "); var i, l; for (i = 0, l = expected.length; i < l; i++) { if (!_.include(actual, expected[i])) { return false; } } return true; }, assertMessage: "${2}Expected object's className to include ${0} " + "but was ${1}", refuteMessage: "${2}Expected object's className not to include ${0}", expectation: "toHaveClassName", values: function (element, className, message) { return [className, element.className, msg(message)]; } }); assert.className.noClassNameMessage = "${2}Expected object to have " + "className property"; refute.className.noClassNameMessage = "${2}Expected object to have " + "className property"; if (typeof module !== "undefined" && typeof require === "function") { referee.expect = function () { referee.expect = require("./expect"); return referee.expect.apply(referee, arguments); }; } return referee; }); ((typeof define === "function" && define.amd && function (m) { define("referee/expect", ["lodash", "referee"], m); }) || (typeof module === "object" && function (m) { module.exports = m(require("lodash"), require("./referee")); }) || function (m) { this.referee.expect = m(this._, this.referee); } )(function (_, referee) { var expectation = {}; function F() {} var create = function (object) { F.prototype = object; return new F(); }; var expect = function (actual) { var expectation = _.extend(create(expect.expectation), { actual: actual, assertMode: true }); expectation.not = create(expectation); expectation.not.assertMode = false; return expectation; }; expect.expectation = expectation; expect.wrapAssertion = function (assertion, expectation) { expect.expectation[expectation] = function () { var args = [this.actual].concat(_.toArray(arguments)); var type = this.assertMode ? "assert" : "refute"; var callFunc; if (assertion === "assert") { callFunc = this.assertMode ? referee.assert : referee.refute; } else if (assertion === "refute") { callFunc = this.assertMode ? 
referee.refute : referee.assert; } else { callFunc = referee[type][assertion]; } try { return callFunc.apply(referee.expect, args); } catch (e) { e.message = (e.message || "").replace( "[" + type + "." + assertion + "]", "[expect." + (this.assertMode ? "" : "not.") + expectation + "]" ); throw e; } }; }; _.each(_.keys(referee.assert), function (name) { var expectationName = referee.assert[name].expectationName; if (expectationName) { expect.wrapAssertion(name, expectationName); } }); expect.wrapAssertion("assert", "toBeTruthy"); expect.wrapAssertion("refute", "toBeFalsy"); if (expect.expectation.toBeNear) { expect.expectation.toBeCloseTo = expect.expectation.toBeNear; } return expect; }); ((typeof define === "function" && define.amd && function (m) { define("formatio", ["samsam", "lodash"], m); }) || (typeof module === "object" && function (m) { module.exports = m(require("samsam"), require("lodash")); }) || function (m) { this.formatio = m(this.samsam, this._); } )(function (samsam, _) { "use strict"; var formatio = { excludeConstructors: ["Object", /^.$/], quoteStrings: true }; var hasOwn = Object.prototype.hasOwnProperty; var specialObjects = []; if (typeof global !== "undefined") { specialObjects.push({ object: global, value: "[object global]" }); } if (typeof document !== "undefined") { specialObjects.push({ object: document, value: "[object HTMLDocument]" }); } if (typeof window !== "undefined") { specialObjects.push({ object: window, value: "[object Window]" }); } function functionName(func) { if (!func) { return ""; } if (func.displayName) { return func.displayName; } if (func.name) { return func.name; } var matches = func.toString().match(/function\s+([^\(]+)/m); return (matches && matches[1]) || ""; } function constructorName(f, object) { var name = functionName(object && object.constructor); var excludes = f.excludeConstructors || formatio.excludeConstructors || []; var i, l; for (i = 0, l = excludes.length; i < l; ++i) { if (typeof excludes[i] === 
"string" && excludes[i] === name) { return ""; } else if (excludes[i].test && excludes[i].test(name)) { return ""; } } return name; } function isCircular(object, objects) { if (typeof object !== "object") { return false; } var i, l; for (i = 0, l = objects.length; i < l; ++i) { if (objects[i] === object) { return true; } } return false; } function ascii(f, object, processed, indent) { if (typeof object === "string") { var qs = f.quoteStrings; var quote = typeof qs !== "boolean" || qs; return processed || quote ? '"' + object + '"' : object; } if (typeof object === "function" && !(object instanceof RegExp)) { return ascii.func(object); } processed = processed || []; if (isCircular(object, processed)) { return "[Circular]"; } if (_.isArray(object)) { return ascii.array.call(f, object, processed); } if (!object) { return String(object === -0 ? "-0" : object); } if (samsam.isElement(object)) { return ascii.element(object); } if (typeof object.toString === "function" && object.toString !== Object.prototype.toString) { return object.toString(); } var i, l; for (i = 0, l = specialObjects.length; i < l; i++) { if (object === specialObjects[i].object) { return specialObjects[i].value; } } return ascii.object.call(f, object, processed, indent); } ascii.func = function (func) { return "function " + functionName(func) + "() {}"; }; ascii.array = function (array, processed) { processed = processed || []; processed.push(array); var i, l, pieces = []; for (i = 0, l = array.length; i < l; ++i) { pieces.push(ascii(this, array[i], processed)); } return "[" + pieces.join(", ") + "]"; }; ascii.object = function (object, processed, indent) { processed = processed || []; processed.push(object); indent = indent || 0; var pieces = [], properties = _.keys(object).sort(); var length = 3; var prop, str, obj, i, l; for (i = 0, l = properties.length; i < l; ++i) { prop = properties[i]; obj = object[prop]; if (isCircular(obj, processed)) { str = "[Circular]"; } else { str = ascii(this, obj, 
processed, indent + 2); } str = (/\s/.test(prop) ? '"' + prop + '"' : prop) + ": " + str; length += str.length; pieces.push(str); } var cons = constructorName(this, object); var prefix = cons ? "[" + cons + "] " : ""; var is = ""; for (i = 0, l = indent; i < l; ++i) { is += " "; } if (length + indent > 80) { return prefix + "{\n " + is + pieces.join(",\n " + is) + "\n" + is + "}"; } return prefix + "{ " + pieces.join(", ") + " }"; }; ascii.element = function (element) { var tagName = element.tagName.toLowerCase(); var attrs = element.attributes, attr, pairs = [], attrName, i, l, val; for (i = 0, l = attrs.length; i < l; ++i) { attr = attrs.item(i); attrName = attr.nodeName.toLowerCase().replace("html:", ""); val = attr.nodeValue; if (attrName !== "contenteditable" || val !== "inherit") { if (!!val) { pairs.push(attrName + "=\"" + val + "\""); } } } var formatted = "<" + tagName + (pairs.length > 0 ? " " : ""); var content = element.innerHTML; if (content.length > 20) { content = content.substr(0, 20) + "[...]"; } var res = formatted + pairs.join(" ") + ">" + content + "</" + tagName + ">"; return res.replace(/ contentEditable="inherit"/, ""); }; function Formatio(options) { for (var opt in options) { this[opt] = options[opt]; } } Formatio.prototype = { functionName: functionName, configure: function (options) { return new Formatio(options); }, constructorName: function (object) { return constructorName(this, object); }, ascii: function (object, processed, indent) { return ascii(this, object, processed, indent); } }; return Formatio.prototype; }); ((typeof define === "function" && define.amd && function (m) { define("stack-filter", m); }) || (typeof module === "object" && function (m) { module.exports = m(); }) || function (m) { this.stackFilter = m(); } )(function () { "use strict"; var regexpes = {}; return { filters: [], configure: function (opt) { opt = opt || {}; var instance = Object.create(this); instance.filters = opt.filters || []; instance.cwd = opt.cwd; 
return instance; }, /** * Return true if the stack trace line matches any filter */ match: function (line) { var i, l, filters = this.filters; for (i = 0, l = filters.length; i < l; ++i) { if (!regexpes[filters[i]]) { // Backslashes must be double-escaped: // new RegExp("\\") is equivalent to /\/ - which is an invalid pattern // new RegExp("\\\\") is equivalent to /\\/ - an escaped backslash // This must be done for Windows paths to work properly regexpes[filters[i]] = new RegExp(filters[i].replace(/\\/g, "\\\\")); } if (regexpes[filters[i]].test(line)) { return true; } } return false; }, /** * Filter a stack trace and optionally trim off the current * working directory. Accepts a stack trace as a string, and * an optional cwd (also a string). The cwd can also be * configured directly on the instance. * * Returns an array of lines - a pruned stack trace. The * result only includes lines that point to a file and a * location - the initial error message is stripped off. If a * cwd is available, all paths will be stripped of it. Any * line matching any filter will not be included in the * result. */ filter: function (stack, cwd) { var lines = (stack || "").split("\n"); var i, l, line, stackLines = [], replacer = "./"; cwd = cwd || this.cwd; if (typeof cwd === "string") { cwd = cwd.replace(/\/?$/, "/"); } if (cwd instanceof RegExp && !/\/\/$/.test(cwd)) { replacer = "."; } for (i = 0, l = lines.length; i < l; ++i) { if (/(\d+)?:\d+\)?$/.test(lines[i])) { if (!this.match(lines[i])) { line = lines[i].replace(/^\s+|\s+$/g, ""); if (cwd) { line = line.replace(cwd, replacer); } stackLines.push(line); } } } return stackLines; } }; }); /*jslint eqeqeq: false, onevar: false, forin: true, nomen: false, regexp: false, plusplus: false*/ /*global module, require, __dirname, document*/ /** * Sinon core utilities. For internal use only. 
* * @author Christian Johansen ([email protected]) * @license BSD * * Copyright (c) 2010-2013 Christian Johansen */ "use strict"; var sinon = (function (buster) { var div = typeof document != "undefined" && document.createElement("div"); var hasOwn = Object.prototype.hasOwnProperty; function isDOMNode(obj) { var success = false; try { obj.appendChild(div); success = div.parentNode == obj; } catch (e) { return false; } finally { try { obj.removeChild(div); } catch (e) { // Remove failed, not much we can do about that } } return success; } function isElement(obj) { return div && obj && obj.nodeType === 1 && isDOMNode(obj); } function isFunction(obj) { return typeof obj === "function" || !!(obj && obj.constructor && obj.call && obj.apply); } function mirrorProperties(target, source) { for (var prop in source) { if (!hasOwn.call(target, prop)) { target[prop] = source[prop]; } } } function isRestorable (obj) { return typeof obj === "function" && typeof obj.restore === "function" && obj.restore.sinon; } var sinon = { wrapMethod: function wrapMethod(object, property, method) { if (!object) { throw new TypeError("Should wrap property of object"); } if (typeof method != "function") { throw new TypeError("Method wrapper should be function"); } var wrappedMethod = object[property]; if (!isFunction(wrappedMethod)) { throw new TypeError("Attempted to wrap " + (typeof wrappedMethod) + " property " + property + " as function"); } if (wrappedMethod.restore && wrappedMethod.restore.sinon) { throw new TypeError("Attempted to wrap " + property + " which is already wrapped"); } if (wrappedMethod.calledBefore) { var verb = !!wrappedMethod.returns ? "stubbed" : "spied on"; throw new TypeError("Attempted to wrap " + property + " which is already " + verb); } // IE 8 does not support hasOwnProperty on the window object. 
var owned = hasOwn.call(object, property); object[property] = method; method.displayName = property; method.restore = function () { // For prototype properties try to reset by delete first. // If this fails (ex: localStorage on mobile safari) then force a reset // via direct assignment. if (!owned) { delete object[property]; } if (object[property] === method) { object[property] = wrappedMethod; } }; method.restore.sinon = true; mirrorProperties(method, wrappedMethod); return method; }, extend: function extend(target) { for (var i = 1, l = arguments.length; i < l; i += 1) { for (var prop in arguments[i]) { if (arguments[i].hasOwnProperty(prop)) { target[prop] = arguments[i][prop]; } // DONT ENUM bug, only care about toString if (arguments[i].hasOwnProperty("toString") && arguments[i].toString != target.toString) { target.toString = arguments[i].toString; } } } return target; }, create: function create(proto) { var F = function () {}; F.prototype = proto; return new F(); }, deepEqual: function deepEqual(a, b) { if (sinon.match && sinon.match.isMatcher(a)) { return a.test(b); } if (typeof a != "object" || typeof b != "object") { return a === b; } if (isElement(a) || isElement(b)) { return a === b; } if (a === b) { return true; } if ((a === null && b !== null) || (a !== null && b === null)) { return false; } var aString = Object.prototype.toString.call(a); if (aString != Object.prototype.toString.call(b)) { return false; } if (aString == "[object Array]") { if (a.length !== b.length) { return false; } for (var i = 0, l = a.length; i < l; i += 1) { if (!deepEqual(a[i], b[i])) { return false; } } return true; } if (aString == "[object Date]") { return a.valueOf() === b.valueOf(); } var prop, aLength = 0, bLength = 0; for (prop in a) { aLength += 1; if (!deepEqual(a[prop], b[prop])) { return false; } } for (prop in b) { bLength += 1; } return aLength == bLength; }, functionName: function functionName(func) { var name = func.displayName || func.name; // Use function 
decomposition as a last resort to get function // name. Does not rely on function decomposition to work - if it // doesn't debugging will be slightly less informative // (i.e. toString will say 'spy' rather than 'myFunc'). if (!name) { var matches = func.toString().match(/function ([^\s\(]+)/); name = matches && matches[1]; } return name; }, functionToString: function toString() { if (this.getCall && this.callCount) { var thisValue, prop, i = this.callCount; while (i--) { thisValue = this.getCall(i).thisValue; for (prop in thisValue) { if (thisValue[prop] === this) { return prop; } } } } return this.displayName || "sinon fake"; }, getConfig: function (custom) { var config = {}; custom = custom || {}; var defaults = sinon.defaultConfig; for (var prop in defaults) { if (defaults.hasOwnProperty(prop)) { config[prop] = custom.hasOwnProperty(prop) ? custom[prop] : defaults[prop]; } } return config; }, format: function (val) { return "" + val; }, defaultConfig: { injectIntoThis: true, injectInto: null, properties: ["spy", "stub", "mock", "clock", "server", "requests"], useFakeTimers: true, useFakeServer: true }, timesInWords: function timesInWords(count) { return count == 1 && "once" || count == 2 && "twice" || count == 3 && "thrice" || (count || 0) + " times"; }, calledInOrder: function (spies) { for (var i = 1, l = spies.length; i < l; i++) { if (!spies[i - 1].calledBefore(spies[i]) || !spies[i].called) { return false; } } return true; }, orderByFirstCall: function (spies) { return spies.sort(function (a, b) { // uuid, won't ever be equal var aCall = a.getCall(0); var bCall = b.getCall(0); var aId = aCall && aCall.callId || -1; var bId = bCall && bCall.callId || -1; return aId < bId ? 
-1 : 1; }); }, log: function () {}, logError: function (label, err) { var msg = label + " threw exception: " sinon.log(msg + "[" + err.name + "] " + err.message); if (err.stack) { sinon.log(err.stack); } setTimeout(function () { err.message = msg + err.message; throw err; }, 0); }, typeOf: function (value) { if (value === null) { return "null"; } else if (value === undefined) { return "undefined"; } var string = Object.prototype.toString.call(value); return string.substring(8, string.length - 1).toLowerCase(); }, createStubInstance: function (constructor) { if (typeof constructor !== "function") { throw new TypeError("The constructor should be a function."); } return sinon.stub(sinon.create(constructor.prototype)); }, restore: function (object) { if (object !== null && typeof object === "object") { for (var prop in object) { if (isRestorable(object[prop])) { object[prop].restore(); } } } else if (isRestorable(object)) { object.restore(); } } }; var isNode = typeof module == "object" && typeof require == "function"; if (isNode) { try { buster = { format: require("buster-format") }; } catch (e) {} module.exports = sinon; module.exports.spy = require("./sinon/spy"); module.exports.stub = require("./sinon/stub"); module.exports.mock = require("./sinon/mock"); module.exports.collection = require("./sinon/collection"); module.exports.assert = require("./sinon/assert"); module.exports.sandbox = require("./sinon/sandbox"); module.exports.test = require("./sinon/test"); module.exports.testCase = require("./sinon/test_case"); module.exports.assert = require("./sinon/assert"); module.exports.match = require("./sinon/match"); } if (buster) { var formatter = sinon.create(buster.format); formatter.quoteStrings = false; sinon.format = function () { return formatter.ascii.apply(formatter, arguments); }; } else if (isNode) { try { var util = require("util"); sinon.format = function (value) { return typeof value == "object" && value.toString === Object.prototype.toString ? 
util.inspect(value) : value; }; } catch (e) { /* Node, but no util module - would be very old, but better safe than sorry */ } } return sinon; }(typeof buster == "object" && buster)); /** * @depend ../sinon.js * @depend match.js */ /*jslint eqeqeq: false, onevar: false, plusplus: false*/ /*global module, require, sinon*/ /** * Spy calls * * @author Christian Johansen ([email protected]) * @author Maximilian Antoni ([email protected]) * @license BSD * * Copyright (c) 2010-2013 Christian Johansen * Copyright (c) 2013 Maximilian Antoni */ "use strict"; var commonJSModule = typeof module == "object" && typeof require == "function"; if (!this.sinon && commonJSModule) { var sinon = require("../sinon"); } (function (sinon) { function throwYieldError(proxy, text, args) { var msg = sinon.functionName(proxy) + text; if (args.length) { msg += " Received [" + slice.call(args).join(", ") + "]"; } throw new Error(msg); } var slice = Array.prototype.slice; var callProto = { calledOn: function calledOn(thisValue) { if (sinon.match && sinon.match.isMatcher(thisValue)) { return thisValue.test(this.thisValue); } return this.thisValue === thisValue; }, calledWith: function calledWith() { for (var i = 0, l = arguments.length; i < l; i += 1) { if (!sinon.deepEqual(arguments[i], this.args[i])) { return false; } } return true; }, calledWithMatch: function calledWithMatch() { for (var i = 0, l = arguments.length; i < l; i += 1) { var actual = this.args[i]; var expectation = arguments[i]; if (!sinon.match || !sinon.match(expectation).test(actual)) { return false; } } return true; }, calledWithExactly: function calledWithExactly() { return arguments.length == this.args.length && this.calledWith.apply(this, arguments); }, notCalledWith: function notCalledWith() { return !this.calledWith.apply(this, arguments); }, notCalledWithMatch: function notCalledWithMatch() { return !this.calledWithMatch.apply(this, arguments); }, returned: function returned(value) { return sinon.deepEqual(value, 
this.returnValue); }, threw: function threw(error) { if (typeof error === "undefined" || !this.exception) { return !!this.exception; } return this.exception === error || this.exception.name === error; }, calledWithNew: function calledWithNew(thisValue) { return this.thisValue instanceof this.proxy; }, calledBefore: function (other) { return this.callId < other.callId; }, calledAfter: function (other) { return this.callId > other.callId; }, callArg: function (pos) { this.args[pos](); }, callArgOn: function (pos, thisValue) { this.args[pos].apply(thisValue); }, callArgWith: function (pos) { this.callArgOnWith.apply(this, [pos, null].concat(slice.call(arguments, 1))); }, callArgOnWith: function (pos, thisValue) { var args = slice.call(arguments, 2); this.args[pos].apply(thisValue, args); }, "yield": function () { this.yieldOn.apply(this, [null].concat(slice.call(arguments, 0))); }, yieldOn: function (thisValue) { var args = this.args; for (var i = 0, l = args.length; i < l; ++i) { if (typeof args[i] === "function") { args[i].apply(thisValue, slice.call(arguments, 1)); return; } } throwYieldError(this.proxy, " cannot yield since no callback was passed.", args); }, yieldTo: function (prop) { this.yieldToOn.apply(this, [prop, null].concat(slice.call(arguments, 1))); }, yieldToOn: function (prop, thisValue) { var args = this.args; for (var i = 0, l = args.length; i < l; ++i) { if (args[i] && typeof args[i][prop] === "function") { args[i][prop].apply(thisValue, slice.call(arguments, 2)); return; } } throwYieldError(this.proxy, " cannot yield to '" + prop + "' since no callback was passed.", args); }, toString: function () { var callStr = this.proxy.toString() + "("; var args = []; for (var i = 0, l = this.args.length; i < l; ++i) { args.push(sinon.format(this.args[i])); } callStr = callStr + args.join(", ") + ")"; if (typeof this.returnValue != "undefined") { callStr += " => " + sinon.format(this.returnValue); } if (this.exception) { callStr += " !" 
+ this.exception.name; if (this.exception.message) { callStr += "(" + this.exception.message + ")"; } } return callStr; } }; callProto.invokeCallback = callProto.yield; function createSpyCall(spy, thisValue, args, returnValue, exception, id) { if (typeof id !== "number") { throw new TypeError("Call id is not a number"); } var proxyCall = sinon.create(callProto); proxyCall.proxy = spy; proxyCall.thisValue = thisValue; proxyCall.args = args; proxyCall.returnValue = returnValue; proxyCall.exception = exception; proxyCall.callId = id; return proxyCall; }; createSpyCall.toString = callProto.toString; // used by mocks sinon.spyCall = createSpyCall; }(typeof sinon == "object" && sinon || null)); /** * @depend ../sinon.js */ /*jslint eqeqeq: false, onevar: false, plusplus: false*/ /*global module, require, sinon*/ /** * Spy functions * * @author Christian Johansen ([email protected]) * @license BSD * * Copyright (c) 2010-2013 Christian Johansen */ "use strict"; (function (sinon) { var commonJSModule = typeof module == "object" && typeof require == "function"; var push = Array.prototype.push; var slice = Array.prototype.slice; var callId = 0; function spy(object, property) { if (!property && typeof object == "function") { return spy.create(object); } if (!object && !property) { return spy.create(function () { }); } var method = object[property]; return sinon.wrapMethod(object, property, spy.create(method)); } function matchingFake(fakes, args, strict) { if (!fakes) { return; } var alen = args.length; for (var i = 0, l = fakes.length; i < l; i++) { if (fakes[i].matches(args, strict)) { return fakes[i]; } } } function incrementCallCount() { this.called = true; this.callCount += 1; this.notCalled = false; this.calledOnce = this.callCount == 1; this.calledTwice = this.callCount == 2; this.calledThrice = this.callCount == 3; } function createCallProperties() { this.firstCall = this.getCall(0); this.secondCall = this.getCall(1); this.thirdCall = this.getCall(2); this.lastCall = 
this.getCall(this.callCount - 1); } var vars = "a,b,c,d,e,f,g,h,i,j,k,l"; function createProxy(func) { // Retain the function length: var p; if (func.length) { eval("p = (function proxy(" + vars.substring(0, func.length * 2 - 1) + ") { return p.invoke(func, this, slice.call(arguments)); });"); } else { p = function proxy() { return p.invoke(func, this, slice.call(arguments)); }; } return p; } var uuid = 0; // Public API var spyApi = { reset: function () { this.called = false; this.notCalled = true; this.calledOnce = false; this.calledTwice = false; this.calledThrice = false; this.callCount = 0; this.firstCall = null; this.secondCall = null; this.thirdCall = null; this.lastCall = null; this.args = []; this.returnValues = []; this.thisValues = []; this.exceptions = []; this.callIds = []; if (this.fakes) { for (var i = 0; i < this.fakes.length; i++) { this.fakes[i].reset(); } } }, create: function create(func) { var name; if (typeof func != "function") { func = function () { }; } else { name = sinon.functionName(func); } var proxy = createProxy(func); sinon.extend(proxy, spy); delete proxy.create; sinon.extend(proxy, func); proxy.reset(); proxy.prototype = func.prototype; proxy.displayName = name || "spy"; proxy.toString = sinon.functionToString; proxy._create = sinon.spy.create; proxy.id = "spy#" + uuid++; return proxy; }, invoke: function invoke(func, thisValue, args) { var matching = matchingFake(this.fakes, args); var exception, returnValue; incrementCallCount.call(this); push.call(this.thisValues, thisValue); push.call(this.args, args); push.call(this.callIds, callId++); try { if (matching) { returnValue = matching.invoke(func, thisValue, args); } else { returnValue = (this.func || func).apply(thisValue, args); } } catch (e) { push.call(this.returnValues, undefined); exception = e; throw e; } finally { push.call(this.exceptions, exception); } push.call(this.returnValues, returnValue); createCallProperties.call(this); return returnValue; }, getCall: function 
getCall(i) { if (i < 0 || i >= this.callCount) { return null; } return sinon.spyCall(this, this.thisValues[i], this.args[i], this.returnValues[i], this.exceptions[i], this.callIds[i]); }, calledBefore: function calledBefore(spyFn) { if (!this.called) { return false; } if (!spyFn.called) { return true; } return this.callIds[0] < spyFn.callIds[spyFn.callIds.length - 1]; }, calledAfter: function calledAfter(spyFn) { if (!this.called || !spyFn.called) { return false; } return this.callIds[this.callCount - 1] > spyFn.callIds[spyFn.callCount - 1]; }, withArgs: function () { var args = slice.call(arguments); if (this.fakes) { var match = matchingFake(this.fakes, args, true); if (match) { return match; } } else { this.fakes = []; } var original = this; var fake = this._create(); fake.matchingAguments = args; push.call(this.fakes, fake); fake.withArgs = function () { return original.withArgs.apply(original, arguments); }; for (var i = 0; i < this.args.length; i++) { if (fake.matches(this.args[i])) { incrementCallCount.call(fake); push.call(fake.thisValues, this.thisValues[i]); push.call(fake.args, this.args[i]); push.call(fake.returnValues, this.returnValues[i]); push.call(fake.exceptions, this.exceptions[i]); push.call(fake.callIds, this.callIds[i]); } } createCallProperties.call(fake); return fake; }, matches: function (args, strict) { var margs = this.matchingAguments; if (margs.length <= args.length && sinon.deepEqual(margs, args.slice(0, margs.length))) { return !strict || margs.length == args.length; } }, printf: function (format) { var spy = this; var args = slice.call(arguments, 1); var formatter; return (format || "").replace(/%(.)/g, function (match, specifyer) { formatter = spyApi.formatters[specifyer]; if (typeof formatter == "function") { return formatter.call(null, spy, args); } else if (!isNaN(parseInt(specifyer), 10)) { return sinon.format(args[specifyer - 1]); } return "%" + specifyer; }); } }; function delegateToCalls(method, matchAny, actual, notCalled) { 
spyApi[method] = function () { if (!this.called) { if (notCalled) { return notCalled.apply(this, arguments); } return false; } var currentCall; var matches = 0; for (var i = 0, l = this.callCount; i < l; i += 1) { currentCall = this.getCall(i); if (currentCall[actual || method].apply(currentCall, arguments)) { matches += 1; if (matchAny) { return true; } } } return matches === this.callCount; }; } delegateToCalls("calledOn", true); delegateToCalls("alwaysCalledOn", false, "calledOn"); delegateToCalls("calledWith", true); delegateToCalls("calledWithMatch", true); delegateToCalls("alwaysCalledWith", false, "calledWith"); delegateToCalls("alwaysCalledWithMatch", false, "calledWithMatch"); delegateToCalls("calledWithExactly", true); delegateToCalls("alwaysCalledWithExactly", false, "calledWithExactly"); delegateToCalls("neverCalledWith", false, "notCalledWith", function () { return true; }); delegateToCalls("neverCalledWithMatch", false, "notCalledWithMatch", function () { return true; }); delegateToCalls("threw", true); delegateToCalls("alwaysThrew", false, "threw"); delegateToCalls("returned", true); delegateToCalls("alwaysReturned", false, "returned"); delegateToCalls("calledWithNew", true); delegateToCalls("alwaysCalledWithNew", false, "calledWithNew"); delegateToCalls("callArg", false, "callArgWith", function () { throw new Error(this.toString() + " cannot call arg since it was not yet invoked."); }); spyApi.callArgWith = spyApi.callArg; delegateToCalls("callArgOn", false, "callArgOnWith", function () { throw new Error(this.toString() + " cannot call arg since it was not yet invoked."); }); spyApi.callArgOnWith = spyApi.callArgOn; delegateToCalls("yield", false, "yield", function () { throw new Error(this.toString() + " cannot yield since it was not yet invoked."); }); // "invokeCallback" is an alias for "yield" since "yield" is invalid in strict mode. 
spyApi.invokeCallback = spyApi.yield; delegateToCalls("yieldOn", false, "yieldOn", function () { throw new Error(this.toString() + " cannot yield since it was not yet invoked."); }); delegateToCalls("yieldTo", false, "yieldTo", function (property) { throw new Error(this.toString() + " cannot yield to '" + property + "' since it was not yet invoked."); }); delegateToCalls("yieldToOn", false, "yieldToOn", function (property) { throw new Error(this.toString() + " cannot yield to '" + property + "' since it was not yet invoked."); }); spyApi.formatters = { "c": function (spy) { return sinon.timesInWords(spy.callCount); }, "n": function (spy) { return spy.toString(); }, "C": function (spy) { var calls = []; for (var i = 0, l = spy.callCount; i < l; ++i) { var stringifiedCall = " " + spy.getCall(i).toString(); if (/\n/.test(calls[i - 1])) { stringifiedCall = "\n" + stringifiedCall; } push.call(calls, stringifiedCall); } return calls.length > 0 ? "\n" + calls.join("\n") : ""; }, "t": function (spy) { var objects = []; for (var i = 0, l = spy.callCount; i < l; ++i) { push.call(objects, sinon.format(spy.thisValues[i])); } return objects.join(", "); }, "*": function (spy, args) { var formatted = []; for (var i = 0, l = args.length; i < l; ++i) { push.call(formatted, sinon.format(args[i])); } return formatted.join(", "); } }; sinon.extend(spy, spyApi); spy.spyCall = sinon.spyCall; if (commonJSModule) { module.exports = spy; } else { sinon.spy = spy; } }(typeof sinon == "object" && sinon || null)); /** * @depend ../sinon.js * @depend match.js */ /*jslint eqeqeq: false, onevar: false, plusplus: false*/ /*global module, require, sinon*/ /** * Spy calls * * @author Christian Johansen ([email protected]) * @author Maximilian Antoni ([email protected]) * @license BSD * * Copyright (c) 2010-2013 Christian Johansen * Copyright (c) 2013 Maximilian Antoni */ "use strict"; (function (sinon) { var commonJSModule = typeof module == "object" && typeof require == "function"; if (!sinon && 
commonJSModule) { sinon = require("../sinon"); } if (!sinon) { return; } function throwYieldError(proxy, text, args) { var msg = sinon.functionName(proxy) + text; if (args.length) { msg += " Received [" + slice.call(args).join(", ") + "]"; } throw new Error(msg); } var slice = Array.prototype.slice; var callProto = { calledOn: function calledOn(thisValue) { if (sinon.match && sinon.match.isMatcher(thisValue)) { return thisValue.test(this.thisValue); } return this.thisValue === thisValue; }, calledWith: function calledWith() { for (var i = 0, l = arguments.length; i < l; i += 1) { if (!sinon.deepEqual(arguments[i], this.args[i])) { return false; } } return true; }, calledWithMatch: function calledWithMatch() { for (var i = 0, l = arguments.length; i < l; i += 1) { var actual = this.args[i]; var expectation = arguments[i]; if (!sinon.match || !sinon.match(expectation).test(actual)) { return false; } } return true; }, calledWithExactly: function calledWithExactly() { return arguments.length == this.args.length && this.calledWith.apply(this, arguments); }, notCalledWith: function notCalledWith() { return !this.calledWith.apply(this, arguments); }, notCalledWithMatch: function notCalledWithMatch() { return !this.calledWithMatch.apply(this, arguments); }, returned: function returned(value) { return sinon.deepEqual(value, this.returnValue); }, threw: function threw(error) { if (typeof error === "undefined" || !this.exception) { return !!this.exception; } return this.exception === error || this.exception.name === error; }, calledWithNew: function calledWithNew(thisValue) { return this.thisValue instanceof this.proxy; }, calledBefore: function (other) { return this.callId < other.callId; }, calledAfter: function (other) { return this.callId > other.callId; }, callArg: function (pos) { this.args[pos](); }, callArgOn: function (pos, thisValue) { this.args[pos].apply(thisValue); }, callArgWith: function (pos) { this.callArgOnWith.apply(this, [pos, 
null].concat(slice.call(arguments, 1))); }, callArgOnWith: function (pos, thisValue) { var args = slice.call(arguments, 2); this.args[pos].apply(thisValue, args); }, "yield": function () { this.yieldOn.apply(this, [null].concat(slice.call(arguments, 0))); }, yieldOn: function (thisValue) { var args = this.args; for (var i = 0, l = args.length; i < l; ++i) { if (typeof args[i] === "function") { args[i].apply(thisValue, slice.call(arguments, 1)); return; } } throwYieldError(this.proxy, " cannot yield since no callback was passed.", args); }, yieldTo: function (prop) { this.yieldToOn.apply(this, [prop, null].concat(slice.call(arguments, 1))); }, yieldToOn: function (prop, thisValue) { var args = this.args; for (var i = 0, l = args.length; i < l; ++i) { if (args[i] && typeof args[i][prop] === "function") { args[i][prop].apply(thisValue, slice.call(arguments, 2)); return; } } throwYieldError(this.proxy, " cannot yield to '" + prop + "' since no callback was passed.", args); }, toString: function () { var callStr = this.proxy.toString() + "("; var args = []; for (var i = 0, l = this.args.length; i < l; ++i) { args.push(sinon.format(this.args[i])); } callStr = callStr + args.join(", ") + ")"; if (typeof this.returnValue != "undefined") { callStr += " => " + sinon.format(this.returnValue); } if (this.exception) { callStr += " !" 
+ this.exception.name; if (this.exception.message) { callStr += "(" + this.exception.message + ")"; } } return callStr; } }; callProto.invokeCallback = callProto.yield; function createSpyCall(spy, thisValue, args, returnValue, exception, id) { if (typeof id !== "number") { throw new TypeError("Call id is not a number"); } var proxyCall = sinon.create(callProto); proxyCall.proxy = spy; proxyCall.thisValue = thisValue; proxyCall.args = args; proxyCall.returnValue = returnValue; proxyCall.exception = exception;<|fim▁hole|> return proxyCall; }; createSpyCall.toString = callProto.toString; // used by mocks if (commonJSModule) { module.exports = createSpyCall; } else { sinon.spyCall = createSpyCall; } }(typeof sinon == "object" && sinon || null)); /** * @depend ../sinon.js * @depend spy.js */ /*jslint eqeqeq: false, onevar: false*/ /*global module, require, sinon*/ /** * Stub functions * * @author Christian Johansen ([email protected]) * @license BSD * * Copyright (c) 2010-2013 Christian Johansen */ "use strict"; (function (sinon) { var commonJSModule = typeof module == "object" && typeof require == "function"; if (!sinon && commonJSModule) { sinon = require("../sinon"); } if (!sinon) { return; } function stub(object, property, func) { if (!!func && typeof func != "function") { throw new TypeError("Custom stub should be function"); } var wrapper; if (func) { wrapper = sinon.spy && sinon.spy.create ? sinon.spy.create(func) : func; } else { wrapper = stub.create(); } if (!object && !property) { return sinon.stub.create(); } if (!property && !!object && typeof object == "object") { for (var prop in object) { if (typeof object[prop] === "function") { stub(object, prop); } } return object; } return sinon.wrapMethod(object, property, wrapper); } function getChangingValue(stub, property) { var index = stub.callCount - 1; var values = stub[property]; var prop = index in values ? 
values[index] : values[values.length - 1]; stub[property + "Last"] = prop; return prop; } function getCallback(stub, args) { var callArgAt = getChangingValue(stub, "callArgAts"); if (callArgAt < 0) { var callArgProp = getChangingValue(stub, "callArgProps"); for (var i = 0, l = args.length; i < l; ++i) { if (!callArgProp && typeof args[i] == "function") { return args[i]; } if (callArgProp && args[i] && typeof args[i][callArgProp] == "function") { return args[i][callArgProp]; } } return null; } return args[callArgAt]; } var join = Array.prototype.join; function getCallbackError(stub, func, args) { if (stub.callArgAtsLast < 0) { var msg; if (stub.callArgPropsLast) { msg = sinon.functionName(stub) + " expected to yield to '" + stub.callArgPropsLast + "', but no object with such a property was passed." } else { msg = sinon.functionName(stub) + " expected to yield, but no callback was passed." } if (args.length > 0) { msg += " Received [" + join.call(args, ", ") + "]"; } return msg; } return "argument at index " + stub.callArgAtsLast + " is not a function: " + func; } var nextTick = (function () { if (typeof process === "object" && typeof process.nextTick === "function") { return process.nextTick; } else if (typeof setImmediate === "function") { return setImmediate; } else { return function (callback) { setTimeout(callback, 0); }; } })(); function callCallback(stub, args) { if (stub.callArgAts.length > 0) { var func = getCallback(stub, args); if (typeof func != "function") { throw new TypeError(getCallbackError(stub, func, args)); } var callbackArguments = getChangingValue(stub, "callbackArguments"); var callbackContext = getChangingValue(stub, "callbackContexts"); if (stub.callbackAsync) { nextTick(function() { func.apply(callbackContext, callbackArguments); }); } else { func.apply(callbackContext, callbackArguments); } } } var uuid = 0; sinon.extend(stub, (function () { var slice = Array.prototype.slice, proto; function throwsException(error, message) { if (typeof 
error == "string") { this.exception = new Error(message || ""); this.exception.name = error; } else if (!error) { this.exception = new Error("Error"); } else { this.exception = error; } return this; } proto = { create: function create() { var functionStub = function () { callCallback(functionStub, arguments); if (functionStub.exception) { throw functionStub.exception; } else if (typeof functionStub.returnArgAt == 'number') { return arguments[functionStub.returnArgAt]; } else if (functionStub.returnThis) { return this; } return functionStub.returnValue; }; functionStub.id = "stub#" + uuid++; var orig = functionStub; functionStub = sinon.spy.create(functionStub); functionStub.func = orig; functionStub.callArgAts = []; functionStub.callbackArguments = []; functionStub.callbackContexts = []; functionStub.callArgProps = []; sinon.extend(functionStub, stub); functionStub._create = sinon.stub.create; functionStub.displayName = "stub"; functionStub.toString = sinon.functionToString; return functionStub; }, resetBehavior: function () { var i; this.callArgAts = []; this.callbackArguments = []; this.callbackContexts = []; this.callArgProps = []; delete this.returnValue; delete this.returnArgAt; this.returnThis = false; if (this.fakes) { for (i = 0; i < this.fakes.length; i++) { this.fakes[i].resetBehavior(); } } }, returns: function returns(value) { this.returnValue = value; return this; }, returnsArg: function returnsArg(pos) { if (typeof pos != "number") { throw new TypeError("argument index is not number"); } this.returnArgAt = pos; return this; }, returnsThis: function returnsThis() { this.returnThis = true; return this; }, "throws": throwsException, throwsException: throwsException, callsArg: function callsArg(pos) { if (typeof pos != "number") { throw new TypeError("argument index is not number"); } this.callArgAts.push(pos); this.callbackArguments.push([]); this.callbackContexts.push(undefined); this.callArgProps.push(undefined); return this; }, callsArgOn: function 
callsArgOn(pos, context) { if (typeof pos != "number") { throw new TypeError("argument index is not number"); } if (typeof context != "object") { throw new TypeError("argument context is not an object"); } this.callArgAts.push(pos); this.callbackArguments.push([]); this.callbackContexts.push(context); this.callArgProps.push(undefined); return this; }, callsArgWith: function callsArgWith(pos) { if (typeof pos != "number") { throw new TypeError("argument index is not number"); } this.callArgAts.push(pos); this.callbackArguments.push(slice.call(arguments, 1)); this.callbackContexts.push(undefined); this.callArgProps.push(undefined); return this; }, callsArgOnWith: function callsArgWith(pos, context) { if (typeof pos != "number") { throw new TypeError("argument index is not number"); } if (typeof context != "object") { throw new TypeError("argument context is not an object"); } this.callArgAts.push(pos); this.callbackArguments.push(slice.call(arguments, 2)); this.callbackContexts.push(context); this.callArgProps.push(undefined); return this; }, yields: function () { this.callArgAts.push(-1); this.callbackArguments.push(slice.call(arguments, 0)); this.callbackContexts.push(undefined); this.callArgProps.push(undefined); return this; }, yieldsOn: function (context) { if (typeof context != "object") { throw new TypeError("argument context is not an object"); } this.callArgAts.push(-1); this.callbackArguments.push(slice.call(arguments, 1)); this.callbackContexts.push(context); this.callArgProps.push(undefined); return this; }, yieldsTo: function (prop) { this.callArgAts.push(-1); this.callbackArguments.push(slice.call(arguments, 1)); this.callbackContexts.push(undefined); this.callArgProps.push(prop); return this; }, yieldsToOn: function (prop, context) { if (typeof context != "object") { throw new TypeError("argument context is not an object"); } this.callArgAts.push(-1); this.callbackArguments.push(slice.call(arguments, 2)); this.callbackContexts.push(context); 
this.callArgProps.push(prop); return this; } }; // create asynchronous versions of callsArg* and yields* methods for (var method in proto) { // need to avoid creating anotherasync versions of the newly added async methods if (proto.hasOwnProperty(method) && method.match(/^(callsArg|yields|thenYields$)/) && !method.match(/Async/)) { proto[method + 'Async'] = (function (syncFnName) { return function () { this.callbackAsync = true; return this[syncFnName].apply(this, arguments); }; })(method); } } return proto; }())); if (commonJSModule) { module.exports = stub; } else { sinon.stub = stub; } }(typeof sinon == "object" && sinon || null)); /** * @depend ../sinon.js * @depend stub.js */ /*jslint eqeqeq: false, onevar: false, nomen: false*/ /*global module, require, sinon*/ /** * Mock functions. * * @author Christian Johansen ([email protected]) * @license BSD * * Copyright (c) 2010-2013 Christian Johansen */ "use strict"; (function (sinon) { var commonJSModule = typeof module == "object" && typeof require == "function"; var push = [].push; if (!sinon && commonJSModule) { sinon = require("../sinon"); } if (!sinon) { return; } function mock(object) { if (!object) { return sinon.expectation.create("Anonymous mock"); } return mock.create(object); } sinon.mock = mock; sinon.extend(mock, (function () { function each(collection, callback) { if (!collection) { return; } for (var i = 0, l = collection.length; i < l; i += 1) { callback(collection[i]); } } return { create: function create(object) { if (!object) { throw new TypeError("object is null"); } var mockObject = sinon.extend({}, mock); mockObject.object = object; delete mockObject.create; return mockObject; }, expects: function expects(method) { if (!method) { throw new TypeError("method is falsy"); } if (!this.expectations) { this.expectations = {}; this.proxies = []; } if (!this.expectations[method]) { this.expectations[method] = []; var mockObject = this; sinon.wrapMethod(this.object, method, function () { return 
mockObject.invokeMethod(method, this, arguments); }); push.call(this.proxies, method); } var expectation = sinon.expectation.create(method); push.call(this.expectations[method], expectation); return expectation; }, restore: function restore() { var object = this.object; each(this.proxies, function (proxy) { if (typeof object[proxy].restore == "function") { object[proxy].restore(); } }); }, verify: function verify() { var expectations = this.expectations || {}; var messages = [], met = []; each(this.proxies, function (proxy) { each(expectations[proxy], function (expectation) { if (!expectation.met()) { push.call(messages, expectation.toString()); } else { push.call(met, expectation.toString()); } }); }); this.restore(); if (messages.length > 0) { sinon.expectation.fail(messages.concat(met).join("\n")); } else { sinon.expectation.pass(messages.concat(met).join("\n")); } return true; }, invokeMethod: function invokeMethod(method, thisValue, args) { var expectations = this.expectations && this.expectations[method]; var length = expectations && expectations.length || 0, i; for (i = 0; i < length; i += 1) { if (!expectations[i].met() && expectations[i].allowsCall(thisValue, args)) { return expectations[i].apply(thisValue, args); } } var messages = [], available, exhausted = 0; for (i = 0; i < length; i += 1) { if (expectations[i].allowsCall(thisValue, args)) { available = available || expectations[i]; } else { exhausted += 1; } push.call(messages, " " + expectations[i].toString()); } if (exhausted === 0) { return available.apply(thisValue, args); } messages.unshift("Unexpected call: " + sinon.spyCall.toString.call({ proxy: method, args: args })); sinon.expectation.fail(messages.join("\n")); } }; }())); var times = sinon.timesInWords; sinon.expectation = (function () { var slice = Array.prototype.slice; var _invoke = sinon.spy.invoke; function callCountInWords(callCount) { if (callCount == 0) { return "never called"; } else { return "called " + times(callCount); } } 
function expectedCallCountInWords(expectation) { var min = expectation.minCalls; var max = expectation.maxCalls; if (typeof min == "number" && typeof max == "number") { var str = times(min); if (min != max) { str = "at least " + str + " and at most " + times(max); } return str; } if (typeof min == "number") { return "at least " + times(min); } return "at most " + times(max); } function receivedMinCalls(expectation) { var hasMinLimit = typeof expectation.minCalls == "number"; return !hasMinLimit || expectation.callCount >= expectation.minCalls; } function receivedMaxCalls(expectation) { if (typeof expectation.maxCalls != "number") { return false; } return expectation.callCount == expectation.maxCalls; } return { minCalls: 1, maxCalls: 1, create: function create(methodName) { var expectation = sinon.extend(sinon.stub.create(), sinon.expectation); delete expectation.create; expectation.method = methodName; return expectation; }, invoke: function invoke(func, thisValue, args) { this.verifyCallAllowed(thisValue, args); return _invoke.apply(this, arguments); }, atLeast: function atLeast(num) { if (typeof num != "number") { throw new TypeError("'" + num + "' is not number"); } if (!this.limitsSet) { this.maxCalls = null; this.limitsSet = true; } this.minCalls = num; return this; }, atMost: function atMost(num) { if (typeof num != "number") { throw new TypeError("'" + num + "' is not number"); } if (!this.limitsSet) { this.minCalls = null; this.limitsSet = true; } this.maxCalls = num; return this; }, never: function never() { return this.exactly(0); }, once: function once() { return this.exactly(1); }, twice: function twice() { return this.exactly(2); }, thrice: function thrice() { return this.exactly(3); }, exactly: function exactly(num) { if (typeof num != "number") { throw new TypeError("'" + num + "' is not a number"); } this.atLeast(num); return this.atMost(num); }, met: function met() { return !this.failed && receivedMinCalls(this); }, verifyCallAllowed: function 
verifyCallAllowed(thisValue, args) { if (receivedMaxCalls(this)) { this.failed = true; sinon.expectation.fail(this.method + " already called " + times(this.maxCalls)); } if ("expectedThis" in this && this.expectedThis !== thisValue) { sinon.expectation.fail(this.method + " called with " + thisValue + " as thisValue, expected " + this.expectedThis); } if (!("expectedArguments" in this)) { return; } if (!args) { sinon.expectation.fail(this.method + " received no arguments, expected " + sinon.format(this.expectedArguments)); } if (args.length < this.expectedArguments.length) { sinon.expectation.fail(this.method + " received too few arguments (" + sinon.format(args) + "), expected " + sinon.format(this.expectedArguments)); } if (this.expectsExactArgCount && args.length != this.expectedArguments.length) { sinon.expectation.fail(this.method + " received too many arguments (" + sinon.format(args) + "), expected " + sinon.format(this.expectedArguments)); } for (var i = 0, l = this.expectedArguments.length; i < l; i += 1) { if (!sinon.deepEqual(this.expectedArguments[i], args[i])) { sinon.expectation.fail(this.method + " received wrong arguments " + sinon.format(args) + ", expected " + sinon.format(this.expectedArguments)); } } }, allowsCall: function allowsCall(thisValue, args) { if (this.met() && receivedMaxCalls(this)) { return false; } if ("expectedThis" in this && this.expectedThis !== thisValue) { return false; } if (!("expectedArguments" in this)) { return true; } args = args || []; if (args.length < this.expectedArguments.length) { return false; } if (this.expectsExactArgCount && args.length != this.expectedArguments.length) { return false; } for (var i = 0, l = this.expectedArguments.length; i < l; i += 1) { if (!sinon.deepEqual(this.expectedArguments[i], args[i])) { return false; } } return true; }, withArgs: function withArgs() { this.expectedArguments = slice.call(arguments); return this; }, withExactArgs: function withExactArgs() { this.withArgs.apply(this, 
arguments); this.expectsExactArgCount = true; return this; }, on: function on(thisValue) { this.expectedThis = thisValue; return this; }, toString: function () { var args = (this.expectedArguments || []).slice(); if (!this.expectsExactArgCount) { push.call(args, "[...]"); } var callStr = sinon.spyCall.toString.call({ proxy: this.method || "anonymous mock expectation", args: args }); var message = callStr.replace(", [...", "[, ...") + " " + expectedCallCountInWords(this); if (this.met()) { return "Expectation met: " + message; } return "Expected " + message + " (" + callCountInWords(this.callCount) + ")"; }, verify: function verify() { if (!this.met()) { sinon.expectation.fail(this.toString()); } else { sinon.expectation.pass(this.toString()); } return true; }, pass: function(message) { sinon.assert.pass(message); }, fail: function (message) { var exception = new Error(message); exception.name = "ExpectationError"; throw exception; } }; }()); if (commonJSModule) { module.exports = mock; } else { sinon.mock = mock; } }(typeof sinon == "object" && sinon || null)); /** * @depend ../sinon.js * @depend stub.js * @depend mock.js */ /*jslint eqeqeq: false, onevar: false, forin: true*/ /*global module, require, sinon*/ /** * Collections of stubs, spies and mocks. 
* * @author Christian Johansen ([email protected]) * @license BSD * * Copyright (c) 2010-2013 Christian Johansen */ "use strict"; (function (sinon) { var commonJSModule = typeof module == "object" && typeof require == "function"; var push = [].push; var hasOwnProperty = Object.prototype.hasOwnProperty; if (!sinon && commonJSModule) { sinon = require("../sinon"); } if (!sinon) { return; } function getFakes(fakeCollection) { if (!fakeCollection.fakes) { fakeCollection.fakes = []; } return fakeCollection.fakes; } function each(fakeCollection, method) { var fakes = getFakes(fakeCollection); for (var i = 0, l = fakes.length; i < l; i += 1) { if (typeof fakes[i][method] == "function") { fakes[i][method](); } } } function compact(fakeCollection) { var fakes = getFakes(fakeCollection); var i = 0; while (i < fakes.length) { fakes.splice(i, 1); } } var collection = { verify: function resolve() { each(this, "verify"); }, restore: function restore() { each(this, "restore"); compact(this); }, verifyAndRestore: function verifyAndRestore() { var exception; try { this.verify(); } catch (e) { exception = e; } this.restore(); if (exception) { throw exception; } }, add: function add(fake) { push.call(getFakes(this), fake); return fake; }, spy: function spy() { return this.add(sinon.spy.apply(sinon, arguments)); }, stub: function stub(object, property, value) { if (property) { var original = object[property]; if (typeof original != "function") { if (!hasOwnProperty.call(object, property)) { throw new TypeError("Cannot stub non-existent own property " + property); } object[property] = value; return this.add({ restore: function () { object[property] = original; } }); } } if (!property && !!object && typeof object == "object") { var stubbedObj = sinon.stub.apply(sinon, arguments); for (var prop in stubbedObj) { if (typeof stubbedObj[prop] === "function") { this.add(stubbedObj[prop]); } } return stubbedObj; } return this.add(sinon.stub.apply(sinon, arguments)); }, mock: function mock() { 
return this.add(sinon.mock.apply(sinon, arguments)); }, inject: function inject(obj) { var col = this; obj.spy = function () { return col.spy.apply(col, arguments); }; obj.stub = function () { return col.stub.apply(col, arguments); }; obj.mock = function () { return col.mock.apply(col, arguments); }; return obj; } }; if (commonJSModule) { module.exports = collection; } else { sinon.collection = collection; } }(typeof sinon == "object" && sinon || null)); /** * @depend ../sinon.js * @depend collection.js * @depend util/fake_timers.js * @depend util/fake_server_with_clock.js */ /*jslint eqeqeq: false, onevar: false, plusplus: false*/ /*global require, module*/ /** * Manages fake collections as well as fake utilities such as Sinon's * timers and fake XHR implementation in one convenient object. * * @author Christian Johansen ([email protected]) * @license BSD * * Copyright (c) 2010-2013 Christian Johansen */ "use strict"; if (typeof module == "object" && typeof require == "function") { var sinon = require("../sinon"); sinon.extend(sinon, require("./util/fake_timers")); } (function () { var push = [].push; function exposeValue(sandbox, config, key, value) { if (!value) { return; } if (config.injectInto) { config.injectInto[key] = value; } else { push.call(sandbox.args, value); } } function prepareSandboxFromConfig(config) { var sandbox = sinon.create(sinon.sandbox); if (config.useFakeServer) { if (typeof config.useFakeServer == "object") { sandbox.serverPrototype = config.useFakeServer; } sandbox.useFakeServer(); } if (config.useFakeTimers) { if (typeof config.useFakeTimers == "object") { sandbox.useFakeTimers.apply(sandbox, config.useFakeTimers); } else { sandbox.useFakeTimers(); } } return sandbox; } sinon.sandbox = sinon.extend(sinon.create(sinon.collection), { useFakeTimers: function useFakeTimers() { this.clock = sinon.useFakeTimers.apply(sinon, arguments); return this.add(this.clock); }, serverPrototype: sinon.fakeServer, useFakeServer: function useFakeServer() { 
var proto = this.serverPrototype || sinon.fakeServer; if (!proto || !proto.create) { return null; } this.server = proto.create(); return this.add(this.server); }, inject: function (obj) { sinon.collection.inject.call(this, obj); if (this.clock) { obj.clock = this.clock; } if (this.server) { obj.server = this.server; obj.requests = this.server.requests; } return obj; }, create: function (config) { if (!config) { return sinon.create(sinon.sandbox); } var sandbox = prepareSandboxFromConfig(config); sandbox.args = sandbox.args || []; var prop, value, exposed = sandbox.inject({}); if (config.properties) { for (var i = 0, l = config.properties.length; i < l; i++) { prop = config.properties[i]; value = exposed[prop] || prop == "sandbox" && sandbox; exposeValue(sandbox, config, prop, value); } } else { exposeValue(sandbox, config, "sandbox", value); } return sandbox; } }); sinon.sandbox.useFakeXMLHttpRequest = sinon.sandbox.useFakeServer; if (typeof module == "object" && typeof require == "function") { module.exports = sinon.sandbox; } }()); /* @depend ../sinon.js */ /*jslint eqeqeq: false, onevar: false, plusplus: false*/ /*global module, require, sinon*/ /** * Match functions * * @author Maximilian Antoni ([email protected]) * @license BSD * * Copyright (c) 2012 Maximilian Antoni */ "use strict"; (function (sinon) { var commonJSModule = typeof module == "object" && typeof require == "function"; if (!sinon && commonJSModule) { sinon = require("../sinon"); } if (!sinon) { return; } function assertType(value, type, name) { var actual = sinon.typeOf(value); if (actual !== type) { throw new TypeError("Expected type of " + name + " to be " + type + ", but was " + actual); } } var matcher = { toString: function () { return this.message; } }; function isMatcher(object) { return matcher.isPrototypeOf(object); } function matchObject(expectation, actual) { if (actual === null || actual === undefined) { return false; } for (var key in expectation) { if 
(expectation.hasOwnProperty(key)) { var exp = expectation[key]; var act = actual[key]; if (match.isMatcher(exp)) { if (!exp.test(act)) { return false; } } else if (sinon.typeOf(exp) === "object") { if (!matchObject(exp, act)) { return false; } } else if (!sinon.deepEqual(exp, act)) { return false; } } } return true; } matcher.or = function (m2) { if (!isMatcher(m2)) { throw new TypeError("Matcher expected"); } var m1 = this; var or = sinon.create(matcher); or.test = function (actual) { return m1.test(actual) || m2.test(actual); }; or.message = m1.message + ".or(" + m2.message + ")"; return or; }; matcher.and = function (m2) { if (!isMatcher(m2)) { throw new TypeError("Matcher expected"); } var m1 = this; var and = sinon.create(matcher); and.test = function (actual) { return m1.test(actual) && m2.test(actual); }; and.message = m1.message + ".and(" + m2.message + ")"; return and; }; var match = function (expectation, message) { var m = sinon.create(matcher); var type = sinon.typeOf(expectation); switch (type) { case "object": if (typeof expectation.test === "function") { m.test = function (actual) { return expectation.test(actual) === true; }; m.message = "match(" + sinon.functionName(expectation.test) + ")"; return m; } var str = []; for (var key in expectation) { if (expectation.hasOwnProperty(key)) { str.push(key + ": " + expectation[key]); } } m.test = function (actual) { return matchObject(expectation, actual); }; m.message = "match(" + str.join(", ") + ")"; break; case "number": m.test = function (actual) { return expectation == actual; }; break; case "string": m.test = function (actual) { if (typeof actual !== "string") { return false; } return actual.indexOf(expectation) !== -1; }; m.message = "match(\"" + expectation + "\")"; break; case "regexp": m.test = function (actual) { if (typeof actual !== "string") { return false; } return expectation.test(actual); }; break; case "function": m.test = expectation; if (message) { m.message = message; } else { 
m.message = "match(" + sinon.functionName(expectation) + ")"; } break; default: m.test = function (actual) { return sinon.deepEqual(expectation, actual); }; } if (!m.message) { m.message = "match(" + expectation + ")"; } return m; }; match.isMatcher = isMatcher; match.any = match(function () { return true; }, "any"); match.defined = match(function (actual) { return actual !== null && actual !== undefined; }, "defined"); match.truthy = match(function (actual) { return !!actual; }, "truthy"); match.falsy = match(function (actual) { return !actual; }, "falsy"); match.same = function (expectation) { return match(function (actual) { return expectation === actual; }, "same(" + expectation + ")"); }; match.typeOf = function (type) { assertType(type, "string", "type"); return match(function (actual) { return sinon.typeOf(actual) === type; }, "typeOf(\"" + type + "\")"); }; match.instanceOf = function (type) { assertType(type, "function", "type"); return match(function (actual) { return actual instanceof type; }, "instanceOf(" + sinon.functionName(type) + ")"); }; function createPropertyMatcher(propertyTest, messagePrefix) { return function (property, value) { assertType(property, "string", "property"); var onlyProperty = arguments.length === 1; var message = messagePrefix + "(\"" + property + "\""; if (!onlyProperty) { message += ", " + value; } message += ")"; return match(function (actual) { if (actual === undefined || actual === null || !propertyTest(actual, property)) { return false; } return onlyProperty || sinon.deepEqual(value, actual[property]); }, message); }; } match.has = createPropertyMatcher(function (actual, property) { if (typeof actual === "object") { return property in actual; } return actual[property] !== undefined; }, "has"); match.hasOwn = createPropertyMatcher(function (actual, property) { return actual.hasOwnProperty(property); }, "hasOwn"); match.bool = match.typeOf("boolean"); match.number = match.typeOf("number"); match.string = 
match.typeOf("string"); match.object = match.typeOf("object"); match.func = match.typeOf("function"); match.array = match.typeOf("array"); match.regexp = match.typeOf("regexp"); match.date = match.typeOf("date"); if (commonJSModule) { module.exports = match; } else { sinon.match = match; } }(typeof sinon == "object" && sinon || null)); /*jslint eqeqeq: false, onevar: false*/ /*global sinon, module, require, ActiveXObject, XMLHttpRequest, DOMParser*/ /** * Minimal Event interface implementation * * Original implementation by Sven Fuchs: https://gist.github.com/995028 * Modifications and tests by Christian Johansen. * * @author Sven Fuchs ([email protected]) * @author Christian Johansen ([email protected]) * @license BSD * * Copyright (c) 2011 Sven Fuchs, Christian Johansen */ "use strict"; if (typeof sinon == "undefined") { this.sinon = {}; } (function () { var push = [].push; sinon.Event = function Event(type, bubbles, cancelable, target) { this.initEvent(type, bubbles, cancelable, target); }; sinon.Event.prototype = { initEvent: function(type, bubbles, cancelable, target) { this.type = type; this.bubbles = bubbles; this.cancelable = cancelable; this.target = target; }, stopPropagation: function () {}, preventDefault: function () { this.defaultPrevented = true; } }; sinon.EventTarget = { addEventListener: function addEventListener(event, listener, useCapture) { this.eventListeners = this.eventListeners || {}; this.eventListeners[event] = this.eventListeners[event] || []; push.call(this.eventListeners[event], listener); }, removeEventListener: function removeEventListener(event, listener, useCapture) { var listeners = this.eventListeners && this.eventListeners[event] || []; for (var i = 0, l = listeners.length; i < l; ++i) { if (listeners[i] == listener) { return listeners.splice(i, 1); } } }, dispatchEvent: function dispatchEvent(event) { var type = event.type; var listeners = this.eventListeners && this.eventListeners[type] || []; for (var i = 0; i < 
listeners.length; i++) { if (typeof listeners[i] == "function") { listeners[i].call(this, event); } else { listeners[i].handleEvent(event); } } return !!event.defaultPrevented; } }; }()); /** * @depend ../../sinon.js * @depend event.js */ /*jslint eqeqeq: false, onevar: false*/ /*global sinon, module, require, ActiveXObject, XMLHttpRequest, DOMParser*/ /** * Fake XMLHttpRequest object * * @author Christian Johansen ([email protected]) * @license BSD * * Copyright (c) 2010-2013 Christian Johansen */ "use strict"; if (typeof sinon == "undefined") { this.sinon = {}; } sinon.xhr = { XMLHttpRequest: this.XMLHttpRequest }; // wrapper for global (function(global) { var xhr = sinon.xhr; xhr.GlobalXMLHttpRequest = global.XMLHttpRequest; xhr.GlobalActiveXObject = global.ActiveXObject; xhr.supportsActiveX = typeof xhr.GlobalActiveXObject != "undefined"; xhr.supportsXHR = typeof xhr.GlobalXMLHttpRequest != "undefined"; xhr.workingXHR = xhr.supportsXHR ? xhr.GlobalXMLHttpRequest : xhr.supportsActiveX ? 
function() { return new xhr.GlobalActiveXObject("MSXML2.XMLHTTP.3.0") } : false; /*jsl:ignore*/ var unsafeHeaders = { "Accept-Charset": true, "Accept-Encoding": true, "Connection": true, "Content-Length": true, "Cookie": true, "Cookie2": true, "Content-Transfer-Encoding": true, "Date": true, "Expect": true, "Host": true, "Keep-Alive": true, "Referer": true, "TE": true, "Trailer": true, "Transfer-Encoding": true, "Upgrade": true, "User-Agent": true, "Via": true }; /*jsl:end*/ function FakeXMLHttpRequest() { this.readyState = FakeXMLHttpRequest.UNSENT; this.requestHeaders = {}; this.requestBody = null; this.status = 0; this.statusText = ""; var xhr = this; var events = ["loadstart", "load", "abort", "loadend"]; function addEventListener(eventName) { xhr.addEventListener(eventName, function (event) { var listener = xhr["on" + eventName]; if (listener && typeof listener == "function") { listener(event); } }); } for (var i = events.length - 1; i >= 0; i--) { addEventListener(events[i]); } if (typeof FakeXMLHttpRequest.onCreate == "function") { FakeXMLHttpRequest.onCreate(this); } } function verifyState(xhr) { if (xhr.readyState !== FakeXMLHttpRequest.OPENED) { throw new Error("INVALID_STATE_ERR"); } if (xhr.sendFlag) { throw new Error("INVALID_STATE_ERR"); } } // filtering to enable a white-list version of Sinon FakeXhr, // where whitelisted requests are passed through to real XHR function each(collection, callback) { if (!collection) return; for (var i = 0, l = collection.length; i < l; i += 1) { callback(collection[i]); } } function some(collection, callback) { for (var index = 0; index < collection.length; index++) { if(callback(collection[index]) === true) return true; }; return false; } // largest arity in XHR is 5 - XHR#open var apply = function(obj,method,args) { switch(args.length) { case 0: return obj[method](); case 1: return obj[method](args[0]); case 2: return obj[method](args[0],args[1]); case 3: return obj[method](args[0],args[1],args[2]); case 4: return 
obj[method](args[0],args[1],args[2],args[3]); case 5: return obj[method](args[0],args[1],args[2],args[3],args[4]); }; }; FakeXMLHttpRequest.filters = []; FakeXMLHttpRequest.addFilter = function(fn) { this.filters.push(fn) }; var IE6Re = /MSIE 6/; FakeXMLHttpRequest.defake = function(fakeXhr,xhrArgs) { var xhr = new sinon.xhr.workingXHR(); each(["open","setRequestHeader","send","abort","getResponseHeader", "getAllResponseHeaders","addEventListener","overrideMimeType","removeEventListener"], function(method) { fakeXhr[method] = function() { return apply(xhr,method,arguments); }; }); var copyAttrs = function(args) { each(args, function(attr) { try { fakeXhr[attr] = xhr[attr] } catch(e) { if(!IE6Re.test(navigator.userAgent)) throw e; } }); }; var stateChange = function() { fakeXhr.readyState = xhr.readyState; if(xhr.readyState >= FakeXMLHttpRequest.HEADERS_RECEIVED) { copyAttrs(["status","statusText"]); } if(xhr.readyState >= FakeXMLHttpRequest.LOADING) { copyAttrs(["responseText"]); } if(xhr.readyState === FakeXMLHttpRequest.DONE) { copyAttrs(["responseXML"]); } if(fakeXhr.onreadystatechange) fakeXhr.onreadystatechange.call(fakeXhr); }; if(xhr.addEventListener) { for(var event in fakeXhr.eventListeners) { if(fakeXhr.eventListeners.hasOwnProperty(event)) { each(fakeXhr.eventListeners[event],function(handler) { xhr.addEventListener(event, handler); }); } } xhr.addEventListener("readystatechange",stateChange); } else { xhr.onreadystatechange = stateChange; } apply(xhr,"open",xhrArgs); }; FakeXMLHttpRequest.useFilters = false; function verifyRequestSent(xhr) { if (xhr.readyState == FakeXMLHttpRequest.DONE) { throw new Error("Request done"); } } function verifyHeadersReceived(xhr) { if (xhr.async && xhr.readyState != FakeXMLHttpRequest.HEADERS_RECEIVED) { throw new Error("No headers received"); } } function verifyResponseBodyType(body) { if (typeof body != "string") { var error = new Error("Attempted to respond to fake XMLHttpRequest with " + body + ", which is not a 
string."); error.name = "InvalidBodyException"; throw error; } } sinon.extend(FakeXMLHttpRequest.prototype, sinon.EventTarget, { async: true, open: function open(method, url, async, username, password) { this.method = method; this.url = url; this.async = typeof async == "boolean" ? async : true; this.username = username; this.password = password; this.responseText = null; this.responseXML = null; this.requestHeaders = {}; this.sendFlag = false; if(sinon.FakeXMLHttpRequest.useFilters === true) { var xhrArgs = arguments; var defake = some(FakeXMLHttpRequest.filters,function(filter) { return filter.apply(this,xhrArgs) }); if (defake) { return sinon.FakeXMLHttpRequest.defake(this,arguments); } } this.readyStateChange(FakeXMLHttpRequest.OPENED); }, readyStateChange: function readyStateChange(state) { this.readyState = state; if (typeof this.onreadystatechange == "function") { try { this.onreadystatechange(); } catch (e) { sinon.logError("Fake XHR onreadystatechange handler", e); } } this.dispatchEvent(new sinon.Event("readystatechange")); switch (this.readyState) { case FakeXMLHttpRequest.DONE: this.dispatchEvent(new sinon.Event("load", false, false, this)); this.dispatchEvent(new sinon.Event("loadend", false, false, this)); break; } }, setRequestHeader: function setRequestHeader(header, value) { verifyState(this); if (unsafeHeaders[header] || /^(Sec-|Proxy-)/.test(header)) { throw new Error("Refused to set unsafe header \"" + header + "\""); } if (this.requestHeaders[header]) { this.requestHeaders[header] += "," + value; } else { this.requestHeaders[header] = value; } }, // Helps testing setResponseHeaders: function setResponseHeaders(headers) { this.responseHeaders = {}; for (var header in headers) { if (headers.hasOwnProperty(header)) { this.responseHeaders[header] = headers[header]; } } if (this.async) { this.readyStateChange(FakeXMLHttpRequest.HEADERS_RECEIVED); } else { this.readyState = FakeXMLHttpRequest.HEADERS_RECEIVED; } }, // Currently treats ALL data as a 
DOMString (i.e. no Document) send: function send(data) { verifyState(this); if (!/^(get|head)$/i.test(this.method)) { if (this.requestHeaders["Content-Type"]) { var value = this.requestHeaders["Content-Type"].split(";"); this.requestHeaders["Content-Type"] = value[0] + ";charset=utf-8"; } else { this.requestHeaders["Content-Type"] = "text/plain;charset=utf-8"; } this.requestBody = data; } this.errorFlag = false; this.sendFlag = this.async; this.readyStateChange(FakeXMLHttpRequest.OPENED); if (typeof this.onSend == "function") { this.onSend(this); } this.dispatchEvent(new sinon.Event("loadstart", false, false, this)); }, abort: function abort() { this.aborted = true; this.responseText = null; this.errorFlag = true; this.requestHeaders = {}; if (this.readyState > sinon.FakeXMLHttpRequest.UNSENT && this.sendFlag) { this.readyStateChange(sinon.FakeXMLHttpRequest.DONE); this.sendFlag = false; } this.readyState = sinon.FakeXMLHttpRequest.UNSENT; this.dispatchEvent(new sinon.Event("abort", false, false, this)); if (typeof this.onerror === "function") { this.onerror(); } }, getResponseHeader: function getResponseHeader(header) { if (this.readyState < FakeXMLHttpRequest.HEADERS_RECEIVED) { return null; } if (/^Set-Cookie2?$/i.test(header)) { return null; } header = header.toLowerCase(); for (var h in this.responseHeaders) { if (h.toLowerCase() == header) { return this.responseHeaders[h]; } } return null; }, getAllResponseHeaders: function getAllResponseHeaders() { if (this.readyState < FakeXMLHttpRequest.HEADERS_RECEIVED) { return ""; } var headers = ""; for (var header in this.responseHeaders) { if (this.responseHeaders.hasOwnProperty(header) && !/^Set-Cookie2?$/i.test(header)) { headers += header + ": " + this.responseHeaders[header] + "\r\n"; } } return headers; }, setResponseBody: function setResponseBody(body) { verifyRequestSent(this); verifyHeadersReceived(this); verifyResponseBodyType(body); var chunkSize = this.chunkSize || 10; var index = 0; this.responseText = 
""; do { if (this.async) { this.readyStateChange(FakeXMLHttpRequest.LOADING); } this.responseText += body.substring(index, index + chunkSize); index += chunkSize; } while (index < body.length); var type = this.getResponseHeader("Content-Type"); if (this.responseText && (!type || /(text\/xml)|(application\/xml)|(\+xml)/.test(type))) { try { this.responseXML = FakeXMLHttpRequest.parseXML(this.responseText); } catch (e) { // Unable to parse XML - no biggie } } if (this.async) { this.readyStateChange(FakeXMLHttpRequest.DONE); } else { this.readyState = FakeXMLHttpRequest.DONE; } }, respond: function respond(status, headers, body) { this.setResponseHeaders(headers || {}); this.status = typeof status == "number" ? status : 200; this.statusText = FakeXMLHttpRequest.statusCodes[this.status]; this.setResponseBody(body || ""); if (typeof this.onload === "function"){ this.onload(); } } }); sinon.extend(FakeXMLHttpRequest, { UNSENT: 0, OPENED: 1, HEADERS_RECEIVED: 2, LOADING: 3, DONE: 4 }); // Borrowed from JSpec FakeXMLHttpRequest.parseXML = function parseXML(text) { var xmlDoc; if (typeof DOMParser != "undefined") { var parser = new DOMParser(); xmlDoc = parser.parseFromString(text, "text/xml"); } else { xmlDoc = new ActiveXObject("Microsoft.XMLDOM"); xmlDoc.async = "false"; xmlDoc.loadXML(text); } return xmlDoc; }; FakeXMLHttpRequest.statusCodes = { 100: "Continue", 101: "Switching Protocols", 200: "OK", 201: "Created", 202: "Accepted", 203: "Non-Authoritative Information", 204: "No Content", 205: "Reset Content", 206: "Partial Content", 300: "Multiple Choice", 301: "Moved Permanently", 302: "Found", 303: "See Other", 304: "Not Modified", 305: "Use Proxy", 307: "Temporary Redirect", 400: "Bad Request", 401: "Unauthorized", 402: "Payment Required", 403: "Forbidden", 404: "Not Found", 405: "Method Not Allowed", 406: "Not Acceptable", 407: "Proxy Authentication Required", 408: "Request Timeout", 409: "Conflict", 410: "Gone", 411: "Length Required", 412: "Precondition Failed", 
413: "Request Entity Too Large", 414: "Request-URI Too Long", 415: "Unsupported Media Type", 416: "Requested Range Not Satisfiable", 417: "Expectation Failed", 422: "Unprocessable Entity", 500: "Internal Server Error", 501: "Not Implemented", 502: "Bad Gateway", 503: "Service Unavailable", 504: "Gateway Timeout", 505: "HTTP Version Not Supported" }; sinon.useFakeXMLHttpRequest = function () { sinon.FakeXMLHttpRequest.restore = function restore(keepOnCreate) { if (xhr.supportsXHR) { global.XMLHttpRequest = xhr.GlobalXMLHttpRequest; } if (xhr.supportsActiveX) { global.ActiveXObject = xhr.GlobalActiveXObject; } delete sinon.FakeXMLHttpRequest.restore; if (keepOnCreate !== true) { delete sinon.FakeXMLHttpRequest.onCreate; } }; if (xhr.supportsXHR) { global.XMLHttpRequest = sinon.FakeXMLHttpRequest; } if (xhr.supportsActiveX) { global.ActiveXObject = function ActiveXObject(objId) { if (objId == "Microsoft.XMLHTTP" || /^Msxml2\.XMLHTTP/i.test(objId)) { return new sinon.FakeXMLHttpRequest(); } return new xhr.GlobalActiveXObject(objId); }; } return sinon.FakeXMLHttpRequest; }; sinon.FakeXMLHttpRequest = FakeXMLHttpRequest; })(this); if (typeof module == "object" && typeof require == "function") { module.exports = sinon; } /*jslint eqeqeq: false, plusplus: false, evil: true, onevar: false, browser: true, forin: false*/ /*global module, require, window*/ /** * Fake timer API * setTimeout * setInterval * clearTimeout * clearInterval * tick * reset * Date * * Inspired by jsUnitMockTimeOut from JsUnit * * @author Christian Johansen ([email protected]) * @license BSD * * Copyright (c) 2010-2013 Christian Johansen */ "use strict"; if (typeof sinon == "undefined") { var sinon = {}; } (function (global) { var id = 1; function addTimer(args, recurring) { if (args.length === 0) { throw new Error("Function requires at least 1 parameter"); } var toId = id++; var delay = args[1] || 0; if (!this.timeouts) { this.timeouts = {}; } this.timeouts[toId] = { id: toId, func: args[0], callAt: 
this.now + delay, invokeArgs: Array.prototype.slice.call(args, 2) }; if (recurring === true) { this.timeouts[toId].interval = delay; } return toId; } function parseTime(str) { if (!str) { return 0; } var strings = str.split(":"); var l = strings.length, i = l; var ms = 0, parsed; if (l > 3 || !/^(\d\d:){0,2}\d\d?$/.test(str)) { throw new Error("tick only understands numbers and 'h:m:s'"); } while (i--) { parsed = parseInt(strings[i], 10); if (parsed >= 60) { throw new Error("Invalid time " + str); } ms += parsed * Math.pow(60, (l - i - 1)); } return ms * 1000; } function createObject(object) { var newObject; if (Object.create) { newObject = Object.create(object); } else { var F = function () {}; F.prototype = object; newObject = new F(); } newObject.Date.clock = newObject; return newObject; } sinon.clock = { now: 0, create: function create(now) { var clock = createObject(this); if (typeof now == "number") { clock.now = now; } if (!!now && typeof now == "object") { throw new TypeError("now should be milliseconds since UNIX epoch"); } return clock; }, setTimeout: function setTimeout(callback, timeout) { return addTimer.call(this, arguments, false); }, clearTimeout: function clearTimeout(timerId) { if (!this.timeouts) { this.timeouts = []; } if (timerId in this.timeouts) { delete this.timeouts[timerId]; } }, setInterval: function setInterval(callback, timeout) { return addTimer.call(this, arguments, true); }, clearInterval: function clearInterval(timerId) { this.clearTimeout(timerId); }, tick: function tick(ms) { ms = typeof ms == "number" ? 
ms : parseTime(ms); var tickFrom = this.now, tickTo = this.now + ms, previous = this.now; var timer = this.firstTimerInRange(tickFrom, tickTo); var firstException; while (timer && tickFrom <= tickTo) { if (this.timeouts[timer.id]) { tickFrom = this.now = timer.callAt; try { this.callTimer(timer); } catch (e) { firstException = firstException || e; } } timer = this.firstTimerInRange(previous, tickTo); previous = tickFrom; } this.now = tickTo; if (firstException) { throw firstException; } return this.now; }, firstTimerInRange: function (from, to) { var timer, smallest, originalTimer; for (var id in this.timeouts) { if (this.timeouts.hasOwnProperty(id)) { if (this.timeouts[id].callAt < from || this.timeouts[id].callAt > to) { continue; } if (!smallest || this.timeouts[id].callAt < smallest) { originalTimer = this.timeouts[id]; smallest = this.timeouts[id].callAt; timer = { func: this.timeouts[id].func, callAt: this.timeouts[id].callAt, interval: this.timeouts[id].interval, id: this.timeouts[id].id, invokeArgs: this.timeouts[id].invokeArgs }; } } } return timer || null; }, callTimer: function (timer) { if (typeof timer.interval == "number") { this.timeouts[timer.id].callAt += timer.interval; } else { delete this.timeouts[timer.id]; } try { if (typeof timer.func == "function") { timer.func.apply(null, timer.invokeArgs); } else { eval(timer.func); } } catch (e) { var exception = e; } if (!this.timeouts[timer.id]) { if (exception) { throw exception; } return; } if (exception) { throw exception; } }, reset: function reset() { this.timeouts = {}; }, Date: (function () { var NativeDate = Date; function ClockDate(year, month, date, hour, minute, second, ms) { // Defensive and verbose to avoid potential harm in passing // explicit undefined when user does not pass argument switch (arguments.length) { case 0: return new NativeDate(ClockDate.clock.now); case 1: return new NativeDate(year); case 2: return new NativeDate(year, month); case 3: return new NativeDate(year, month, 
date); case 4: return new NativeDate(year, month, date, hour); case 5: return new NativeDate(year, month, date, hour, minute); case 6: return new NativeDate(year, month, date, hour, minute, second); default: return new NativeDate(year, month, date, hour, minute, second, ms); } } return mirrorDateProperties(ClockDate, NativeDate); }()) }; function mirrorDateProperties(target, source) { if (source.now) { target.now = function now() { return target.clock.now; }; } else { delete target.now; } if (source.toSource) { target.toSource = function toSource() { return source.toSource(); }; } else { delete target.toSource; } target.toString = function toString() { return source.toString(); }; target.prototype = source.prototype; target.parse = source.parse; target.UTC = source.UTC; target.prototype.toUTCString = source.prototype.toUTCString; return target; } var methods = ["Date", "setTimeout", "setInterval", "clearTimeout", "clearInterval"]; function restore() { var method; for (var i = 0, l = this.methods.length; i < l; i++) { method = this.methods[i]; if (global[method].hadOwnProperty) { global[method] = this["_" + method]; } else { delete global[method]; } } // Prevent multiple executions which will completely remove these props this.methods = []; } function stubGlobal(method, clock) { clock[method].hadOwnProperty = Object.prototype.hasOwnProperty.call(global, method); clock["_" + method] = global[method]; if (method == "Date") { var date = mirrorDateProperties(clock[method], global[method]); global[method] = date; } else { global[method] = function () { return clock[method].apply(clock, arguments); }; for (var prop in clock[method]) { if (clock[method].hasOwnProperty(prop)) { global[method][prop] = clock[method][prop]; } } } global[method].clock = clock; } sinon.useFakeTimers = function useFakeTimers(now) { var clock = sinon.clock.create(now); clock.restore = restore; clock.methods = Array.prototype.slice.call(arguments, typeof now == "number" ? 
1 : 0);

    if (clock.methods.length === 0) {
        clock.methods = methods;
    }

    for (var i = 0, l = clock.methods.length; i < l; i++) {
        stubGlobal(clock.methods[i], clock);
    }

    return clock;
};
}(typeof global != "undefined" && typeof global !== "function" ? global : this));

// Handles to the native timer functions / Date, captured before any faking,
// so fake timers can always delegate to the real implementations.
sinon.timers = {
    setTimeout: setTimeout,
    clearTimeout: clearTimeout,
    setInterval: setInterval,
    clearInterval: clearInterval,
    Date: Date
};

if (typeof module == "object" && typeof require == "function") {
    module.exports = sinon;
}

/**
 * @depend fake_xml_http_request.js
 */
/*jslint eqeqeq: false, onevar: false, regexp: false, plusplus: false*/
/*global module, require, window*/
/**
 * The Sinon "server" mimics a web server that receives requests from
 * sinon.FakeXMLHttpRequest and provides an API to respond to those requests,
 * both synchronously and asynchronously. To respond synchronously, canned
 * answers have to be provided upfront.
 *
 * @author Christian Johansen ([email protected])
 * @license BSD
 *
 * Copyright (c) 2010-2013 Christian Johansen
 */
"use strict";

if (typeof sinon == "undefined") {
    var sinon = {};
}

sinon.fakeServer = (function () {
    var push = [].push;

    function F() {}

    // Cheap Object.create substitute for pre-ES5 environments.
    function create(proto) {
        F.prototype = proto;
        return new F();
    }

    // Normalizes a canned response into the [status, headers, body] triplet.
    // A bare value is treated as the body of a 200 response.
    function responseArray(handler) {
        var response = handler;

        if (Object.prototype.toString.call(handler) != "[object Array]") {
            response = [200, {}, handler];
        }

        if (typeof response[2] != "string") {
            throw new TypeError("Fake server response body should be string, but was " + typeof response[2]);
        }

        return response;
    }

    // Current location, used to strip the origin off same-origin absolute URLs
    // so they can match relative response URLs. Empty object outside browsers.
    var wloc = typeof window !== "undefined" ? window.location : {};
    var rCurrLoc = new RegExp("^" + wloc.protocol + "//" + wloc.host);

    // True when `response` matches the given method and URL. A missing method
    // or URL on the response acts as a wildcard; URL may also be a RegExp
    // (anything with a `test` function).
    function matchOne(response, reqMethod, reqUrl) {
        var rmeth = response.method;
        var matchMethod = !rmeth || rmeth.toLowerCase() == reqMethod.toLowerCase();
        var url = response.url;
        var matchUrl = !url || url == reqUrl || (typeof url.test == "function" && url.test(reqUrl));

        return matchMethod && matchUrl;
    }

    // Matches a queued request against a registered response. When the
    // response handler is a function it is invoked with the request plus any
    // regex capture groups from the URL; otherwise returns a boolean.
    // Called with the server as `this`.
    function match(response, request) {
        var requestMethod = this.getHTTPMethod(request);
        var requestUrl = request.url;

        // Make same-origin absolute URLs comparable to relative ones.
        if (!/^https?:\/\//.test(requestUrl) || rCurrLoc.test(requestUrl)) {
            requestUrl = requestUrl.replace(rCurrLoc, "");
        }

        if (matchOne(response, this.getHTTPMethod(request), requestUrl)) {
            if (typeof response.response == "function") {
                var ru = response.url;
                var args = [request].concat(!ru ? [] : requestUrl.match(ru).slice(1));
                return response.response.apply(response, args);
            }

            return true;
        }

        return false;
    }

    // Logs a formatted request/response pair through sinon.log.
    function log(response, request) {
        var str;

        str = "Request:\n" + sinon.format(request) + "\n\n";
        str += "Response:\n" + sinon.format(response) + "\n\n";

        sinon.log(str);
    }

    return {
        // Creates a fake server and installs the fake XHR constructor so
        // every XHR created from now on is captured in server.requests.
        create: function () {
            var server = create(this);
            this.xhr = sinon.useFakeXMLHttpRequest();
            server.requests = [];

            this.xhr.onCreate = function (xhrObj) {
                server.addRequest(xhrObj);
            };

            return server;
        },

        // Registers a newly created fake XHR. If autoRespond is set, a real
        // timer is scheduled (default 10ms) to flush queued requests.
        addRequest: function addRequest(xhrObj) {
            var server = this;
            push.call(this.requests, xhrObj);

            xhrObj.onSend = function () {
                server.handleRequest(this);
            };

            if (this.autoRespond && !this.responding) {
                setTimeout(function () {
                    server.responding = false;
                    server.respond();
                }, this.autoRespondAfter || 10);

                this.responding = true;
            }
        },

        // Effective HTTP method for a request. With fakeHTTPMethods enabled,
        // a POST body parameter `_method=X` overrides the method (Rails-style
        // method tunneling).
        getHTTPMethod: function getHTTPMethod(request) {
            if (this.fakeHTTPMethods && /post/i.test(request.method)) {
                var matches = (request.requestBody || "").match(/_method=([^\b;]+)/);
                return !!matches ? matches[1] : request.method;
            }

            return request.method;
        },

        // Async requests are queued until respond() is called; sync requests
        // are answered immediately.
        handleRequest: function handleRequest(xhr) {
            if (xhr.async) {
                if (!this.queue) {
                    this.queue = [];
                }

                push.call(this.queue, xhr);
            } else {
                this.processRequest(xhr);
            }
        },

        // Registers a canned response. Accepts (body), (url, body) or
        // (method, url, body); body may be a string, a [status, headers, body]
        // array, or a function invoked with the matching request.
        respondWith: function respondWith(method, url, body) {
            // Single non-function argument: catch-all default response.
            if (arguments.length == 1 && typeof method != "function") {
                this.response = responseArray(method);
                return;
            }

            if (!this.responses) { this.responses = []; }

            if (arguments.length == 1) {
                body = method;
                url = method = null;
            }

            if (arguments.length == 2) {
                body = url;
                url = method;
                method = null;
            }

            push.call(this.responses, {
                method: method,
                url: url,
                response: typeof body == "function" ? body : responseArray(body)
            });
        },

        // Flushes the queue of pending async requests. Any arguments are
        // forwarded to respondWith first.
        respond: function respond() {
            if (arguments.length > 0) this.respondWith.apply(this, arguments);
            var queue = this.queue || [];
            var request;

            // Intentional assignment-in-condition: drain until shift() yields
            // a falsy value (empty queue).
            while(request = queue.shift()) {
                this.processRequest(request);
            }
        },

        // Answers a single request with the first matching registered
        // response, falling back to 404. Errors are logged, never thrown.
        processRequest: function processRequest(request) {
            try {
                if (request.aborted) {
                    return;
                }

                var response = this.response || [404, {}, ""];

                if (this.responses) {
                    for (var i = 0, l = this.responses.length; i < l; i++) {
                        if (match.call(this, this.responses[i], request)) {
                            response = this.responses[i].response;
                            break;
                        }
                    }
                }

                // Skip requests that already completed (readyState 4).
                if (request.readyState != 4) {
                    log(response, request);

                    request.respond(response[0], response[1], response[2]);
                }
            } catch (e) {
                sinon.logError("Fake server request processing", e);
            }
        },

        // Uninstalls the fake XHR constructor.
        restore: function restore() {
            return this.xhr.restore && this.xhr.restore.apply(this.xhr, arguments);
        }
    };
}());

if (typeof module == "object" && typeof require == "function") {
    module.exports = sinon;
}

/**
 * @depend fake_server.js
 * @depend fake_timers.js
 */
/*jslint browser: true, eqeqeq: false, onevar: false*/
/*global sinon*/
/**
 * Add-on for sinon.fakeServer that automatically handles a fake timer along with
 * the FakeXMLHttpRequest.
The direct inspiration for this add-on is jQuery
 * 1.3.x, which does not use xhr object's onreadystatehandler at all - instead,
 * it polls the object for completion with setInterval. Despite the direct
 * motivation, there is nothing jQuery-specific in this file, so it can be used
 * in any environment where the ajax implementation depends on setInterval or
 * setTimeout.
 *
 * @author Christian Johansen ([email protected])
 * @license BSD
 *
 * Copyright (c) 2010-2013 Christian Johansen
 */
"use strict";

(function () {
    function Server() {}
    Server.prototype = sinon.fakeServer;

    // A fake server that also manages fake timers, so ajax implementations
    // that poll for completion with setInterval/setTimeout still complete.
    sinon.fakeServerWithClock = new Server();

    // On the first async request, install (or reuse) a fake clock and wrap
    // its setTimeout/setInterval so the longest scheduled timeout is tracked
    // while requests are in flight.
    sinon.fakeServerWithClock.addRequest = function addRequest(xhr) {
        if (xhr.async) {
            if (typeof setTimeout.clock == "object") {
                // Timers are already faked elsewhere - reuse that clock and
                // leave its lifecycle alone.
                this.clock = setTimeout.clock;
            } else {
                this.clock = sinon.useFakeTimers();
                this.resetClock = true;
            }

            // Wrap only once (guarded by longestTimeout being unset).
            if (!this.longestTimeout) {
                var clockSetTimeout = this.clock.setTimeout;
                var clockSetInterval = this.clock.setInterval;
                var server = this;

                this.clock.setTimeout = function (fn, timeout) {
                    server.longestTimeout = Math.max(timeout, server.longestTimeout || 0);

                    return clockSetTimeout.apply(this, arguments);
                };

                this.clock.setInterval = function (fn, timeout) {
                    server.longestTimeout = Math.max(timeout, server.longestTimeout || 0);

                    return clockSetInterval.apply(this, arguments);
                };
            }
        }

        return sinon.fakeServer.addRequest.call(this, xhr);
    };

    // Respond to queued requests, then tick the fake clock far enough to fire
    // the longest timer scheduled meanwhile (e.g. a completion-polling
    // setInterval). Restores the clock if this object installed it.
    sinon.fakeServerWithClock.respond = function respond() {
        var returnVal = sinon.fakeServer.respond.apply(this, arguments);

        if (this.clock) {
            this.clock.tick(this.longestTimeout || 0);
            this.longestTimeout = 0;

            if (this.resetClock) {
                this.clock.restore();
                this.resetClock = false;
            }
        }

        return returnVal;
    };

    sinon.fakeServerWithClock.restore = function restore() {
        if (this.clock) {
            this.clock.restore();
        }

        return sinon.fakeServer.restore.apply(this, arguments);
    };
}());

// Based on seedrandom.js version 2.2.
// Original author: David Bau // Date: 2013 Jun 15 // // LICENSE (BSD): // // Copyright 2013 David Bau, all rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are met: // // 1. Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // // 2. Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // // 3. Neither the name of this module nor the names of its contributors may // be used to endorse or promote products derived from this software // without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // (typeof global === "object" ? 
global : this).seedRandom = (function (pool, math, width, chunks, digits) { var startdenom = math.pow(width, chunks), significance = math.pow(2, digits), overflow = significance * 2, mask = width - 1; function ARC4(key) { var t, keylen = key.length, me = this, i = 0, j = me.i = me.j = 0, s = me.S = []; if (!keylen) { key = [keylen++]; } while (i < width) { s[i] = i++; } for (i = 0; i < width; i++) { s[i] = s[j = mask & (j + key[i % keylen] + (t = s[i]))]; s[j] = t; } (me.g = function(count) { var t, r = 0, i = me.i, j = me.j, s = me.S; while (count--) { t = s[i = mask & (i + 1)]; r = r * width + s[mask & ((s[i] = s[j = mask & (j + t)]) + (s[j] = t))]; } me.i = i; me.j = j; return r; })(width); } function flatten(obj, depth) { var result = [], typ = (typeof obj)[0], prop; if (depth && typ == 'o') { for (prop in obj) { try { result.push(flatten(obj[prop], depth - 1)); } catch (e) {} } } return (result.length ? result : typ == 's' ? obj : obj + '\0'); } function mixkey(seed, key) { var stringseed = seed + '', smear, j = 0; while (j < stringseed.length) { key[mask & j] = mask & ((smear ^= key[mask & j] * 19) + stringseed.charCodeAt(j++)); } return tostring(key); } function tostring(a) { return String.fromCharCode.apply(0, a); } mixkey(math.random(), pool); return function(seed, use_entropy) { var key = []; var shortseed = mixkey(flatten( use_entropy ? [seed, tostring(pool)] : typeof seed !== "undefined" ? 
seed : [new Date().getTime(), pool], 3), key);
    var arc4 = new ARC4(key);
    mixkey(tostring(arc4.S), pool);

    // The returned generator: accumulate enough base-width digits for the
    // required float precision, then scale down into [0, 1).
    var random = function() {
        var n = arc4.g(chunks), d = startdenom, x = 0;
        while (n < significance) {
            n = (n + x) * width;
            d *= width;
            x = arc4.g(1);
        }
        while (n >= overflow) {
            n /= 2;
            d /= 2;
            x >>>= 1;
        }
        return (n + x) / d;
    };
    random.seed = shortseed;
    return random;
};
}([], Math, 256, 6, 52));

// buster-test/browser-env: resets a DOM root element between tests so a test
// cannot leak markup into the next one. UMD wrapper (AMD / CommonJS / globals).
((typeof define === "function" && define.amd && function (m) {
    define("buster-test/browser-env", ["lodash"], m);
}) || (typeof module === "object" && typeof require === "function" && function (m) {
    module.exports = m(require("lodash"));
}) || function (m) {
    this.buster = this.buster || {};
    this.buster.browserEnv = m(this._);
})(function (_) {
    "use strict";

    function BrowserEnv(rootElement) {
        this.element = rootElement;
        this.originalContent = "";
    }

    BrowserEnv.prototype = {
        create: function (rootElement) {
            return new BrowserEnv(rootElement);
        },

        // Snapshot the root element's markup when the suite starts, and
        // restore it after every test outcome (success/failure/error/timeout).
        listen: function (runner) {
            var clear = _.bind(this.clear, this);
            runner.on("suite:start", _.bind(function () {
                this.originalContent = this.element.innerHTML;
            }, this));
            runner.on("test:success", clear);
            runner.on("test:failure", clear);
            runner.on("test:error", clear);
            runner.on("test:timeout", clear);
        },

        clear: function () {
            this.element.innerHTML = this.originalContent;
        }
    };

    return BrowserEnv.prototype;
});

// buster-test/test-context: compiles and filters test contexts (possibly
// promises) into a flat, runnable structure. UMD wrapper.
((typeof define === "function" && define.amd && function (m) {
    define("buster-test/test-context", ["bane", "when", "lodash"], m);
}) || (typeof module === "object" && typeof require === "function" && function (m) {
    module.exports = m(require("bane"), require("when"), require("lodash"));
}) || function (m) {
    this.buster = this.buster || {};
    this.buster.testContext = m(this.bane, this.when, this._);
})(function (bane, when, _) {
    "use strict";

    // The module object is itself an event emitter.
    var bctx = bane.createEventEmitter();

    // True when a context has neither tests nor sub-contexts.
    function empty(context) {
        return context.tests.length === 0 && context.contexts.length === 0;
    }

    // Recursively filters sub-contexts, keeping only those that still contain
    // something after filtering.
    function filterContexts(contexts, filter, prefix) {
        return _.reduce(contexts, function (filtered, context) {
            var ctx = bctx.filter(context, filter, prefix);
            if (ctx.tests.length > 0 || ctx.contexts.length > 0) {
                filtered.push(ctx);
            }
            return filtered;
        }, []);
    }

    // Keeps tests whose fully-qualified name (prefix + name) matches `filter`.
    // A missing filter keeps everything.
    function filterTests(tests, filter, prefix) {
        return _.reduce(tests, function (filtered, test) {
            if (!filter || filter.test(prefix + test.name)) {
                filtered.push(test);
            }
            return filtered;
        }, []);
    }

    // Normalizes a filter (string, array of patterns, or RegExp-like) into an
    // object with a `test(string)` method. An empty array matches everything.
    function makeFilter(filter) {
        if (typeof filter === "string") {
            return new RegExp(filter, "i");
        }
        if (Object.prototype.toString.call(filter) !== "[object Array]") {
            return filter;
        }
        return {
            test: function (string) {
                return filter.length === 0 || _.some(filter, function (f) {
                    return new RegExp(f).test(string);
                });
            }
        };
    }

    // Lazily parses a context if it still exposes a parse() method.
    function parse(context) {
        if (!context.tests && typeof context.parse === "function") {
            return context.parse();
        }
        return context;
    }

    // Parses and filters a list of contexts. Promise contexts are resolved
    // asynchronously and pushed as promises; empty plain contexts are dropped.
    function compile(contexts, filter) {
        return _.reduce(contexts, function (compiled, ctx) {
            if (when.isPromise(ctx)) {
                var deferred = when.defer();
                ctx.then(function (context) {
                    deferred.resolve(bctx.filter(parse(context), filter));
                });
                deferred.promise.name = ctx.name;
                compiled.push(deferred.promise);
            } else {
                ctx = bctx.filter(parse(ctx), filter);
                if (!empty(ctx)) {
                    compiled.push(ctx);
                }
            }
            return compiled;
        }, []);
    }

    // Returns a shallow copy of `ctx` with its tests and sub-contexts reduced
    // to those matching `filterContent`. `name` accumulates the ancestor path.
    function filter(ctx, filterContent, name) {
        filterContent = makeFilter(filterContent);
        name = (name || "") + ctx.name + " ";
        return _.extend({}, ctx, {
            tests: filterTests(ctx.tests || [], filterContent, name),
            contexts: filterContexts(ctx.contexts || [], filterContent, name)
        });
    }

    bctx.compile = compile;
    bctx.filter = filter;
    return bctx;
});

// buster-test/spec: BDD-style describe/it front-end. UMD wrapper.
((typeof define === "function" && define.amd && function (m) {
    define("buster-test/spec", ["lodash", "when", "buster-test/test-context"], m);
}) || (typeof module === "object" && typeof require === "function" && function (m) {
    module.exports = m(require("lodash"), require("when"), require("./test-context"));
}) || function (m) {
    this.buster.spec = m(this._, this.when,
this.buster.testContext); })(function (_, when, testContext) { "use strict"; var current = []; var bspec = {}; var bddNames = { contextSetUp: "beforeAll", contextTearDown: "afterAll" }; function supportRequirement(property) { return function (requirements) { return { describe: function () { var context = bspec.describe.apply(bspec, arguments); context[property] = requirements; return context; } }; }; } bspec.ifAllSupported = supportRequirement("requiresSupportForAll"); bspec.ifAnySupported = supportRequirement("requiresSupportForAny"); bspec.ifSupported = bspec.ifAllSupported; function addContext(parent, name, spec) { var context = bspec.describe.context.create(name, spec, parent).parse(); parent.contexts.push(context); return context; } function createContext(name, spec) { return bspec.describe.context.create(name, spec).parse(); } function asyncContext(name, callback) { var d = when.defer(); callback(function (spec) { d.resolver.resolve(createContext(name, spec)); }); d.promise.name = name; testContext.emit("create", d.promise); return d.promise; } var FOCUS_ROCKET = /^\s*=>\s*/; function markFocused(block, parent) { var focused = block.focused || (parent && parent.forceFocus); block.focused = focused || FOCUS_ROCKET.test(block.name); block.name = block.name.replace(FOCUS_ROCKET, ""); while (parent) { parent.focused = parent.focused || block.focused; parent = parent.parent; } } bspec.describe = function (name, spec) { if (current.length > 0) { return addContext(current[current.length - 1], name, spec); } if (spec && spec.length > 0) { return asyncContext(name, spec); } var context = createContext(name, spec); testContext.emit("create", context); return context; }; function markDeferred(spec, func) { spec.deferred = typeof func !== "function"; if (!spec.deferred && /^\/\//.test(spec.name)) { spec.deferred = true; spec.name = spec.name.replace(/^\/\/\s*/, ""); } spec.comment = spec.deferred ? 
func : ""; } bspec.it = function (name, func, extra) { var context = current[current.length - 1]; var spec = { name: name, func: arguments.length === 3 ? extra : func, context: context }; markDeferred(spec, func); spec.deferred = spec.deferred || context.deferred; markFocused(spec, context); context.tests.push(spec); return spec; }; bspec.itEventually = function (name, comment, func) { if (typeof comment === "function") { func = comment; comment = ""; } return bspec.it(name, comment, func); }; bspec.before = bspec.beforeEach = function (func) { var context = current[current.length - 1]; context.setUp = func; }; bspec.after = bspec.afterEach = function (func) { var context = current[current.length - 1]; context.tearDown = func; }; bspec.beforeAll = function (func) { var context = current[current.length - 1]; context.contextSetUp = func; }; bspec.afterAll = function (func) { var context = current[current.length - 1]; context.contextTearDown = func; }; function F() {} function create(object) { F.prototype = object; return new F(); } bspec.describe.context = { create: function (name, spec, parent) { if (!name || typeof name !== "string") { throw new Error("Spec name required"); } if (!spec || typeof spec !== "function") { throw new Error("spec should be a function"); } var context = create(this); context.name = name; context.parent = parent; context.spec = spec; markDeferred(context, spec); if (parent) { context.deferred = context.deferred || parent.deferred; } markFocused(context, parent); context.forceFocus = context.focused; return context; }, parse: function () { if (!this.spec) { return this; } this.testCase = { before: bspec.before, beforeEach: bspec.beforeEach, beforeAll: bspec.beforeAll, after: bspec.after, afterEach: bspec.afterEach, afterAll: bspec.afterAll, it: bspec.it, itEventually: bspec.itEventually, describe: bspec.describe, name: function (thing) { return bddNames[thing] || thing; } }; this.tests = []; current.push(this); this.contexts = []; 
this.spec.call(this.testCase); current.pop(); delete this.spec; return this; } }; var g = (typeof global !== "undefined" && global) || this; bspec.expose = function (env) { env = env || g; env.describe = bspec.describe; env.it = bspec.it; env.itEventually = bspec.itEventually; env.beforeAll = bspec.beforeAll; env.before = bspec.before; env.beforeEach = bspec.beforeEach; env.afterAll = bspec.afterAll; env.after = bspec.after; env.afterEach = bspec.afterEach; }; return bspec; }); ((typeof define === "function" && define.amd && function (m) { define("buster-test/test-case", ["bane", "when", "buster-test/test-context"], m); }) || (typeof module === "object" && typeof require === "function" && function (m) { module.exports = m( require("bane"), require("when"), require("./test-context") ); }) || function (m) { this.buster.testCase = m(this.bane, this.when, this.buster.testContext); })(function (bane, when, testContext) { "use strict"; var xUnitNames = { contextSetUp: "prepare", contextTearDown: "conclude" }; var testCase = function (name, tests) { if (!name || typeof name !== "string") { throw new Error("Test case name required"); } if (!tests || (typeof tests !== "object" && typeof tests !== "function")) { throw new Error("Tests should be an object or a function"); } var context = testCase.context.create(name, tests); var d = when.defer(); when(context).then(function (ctx) { d.resolver.resolve(ctx.parse()); }); var promise = context.then ? 
d.promise : context; promise.name = name; testContext.emit("create", promise); return promise; }; bane.createEventEmitter(testCase); function nonTestNames(context) { return { prepare: true, conclude: true, setUp: true, tearDown: true, requiresSupportFor: true, requiresSupportForAll: true }; } var DEFERRED_PREFIX = /^\s*\/\/\s*/; var FOCUSED_PREFIX = /^\s*=>\s*/; function createContext(context, name, tests, parent) { context.name = name; context.content = tests; context.parent = parent; context.testCase = { name: function (thing) { return xUnitNames[thing] || thing; } }; return context; } function asyncContext(context, name, callback, parent) { var d = when.defer(); callback(function (tests) { d.resolver.resolve(createContext(context, name, tests, parent)); }); return d.promise; } function F() {} function create(obj) { F.prototype = obj; return new F(); } testCase.context = { create: function (name, tests, parent) { var context = create(this); if (typeof tests === "function") { return asyncContext(context, name, tests, parent); } return createContext(context, name, tests, parent); }, parse: function (forceFocus) { this.getSupportRequirements(); this.deferred = DEFERRED_PREFIX.test(this.name); if (this.parent) { this.deferred = this.deferred || this.parent.deferred; } this.focused = forceFocus || FOCUSED_PREFIX.test(this.name); this.name = this.name. replace(DEFERRED_PREFIX, ""). 
replace(FOCUSED_PREFIX, ""); this.tests = this.getTests(this.focused); this.contexts = this.getContexts(this.focused); this.focused = this.focused || this.contexts.focused || this.tests.focused; delete this.tests.focused; delete this.contexts.focused; this.contextSetUp = this.getContextSetUp(); this.contextTearDown = this.getContextTearDown(); this.setUp = this.getSetUp(); this.tearDown = this.getTearDown(); return this; }, getSupportRequirements: function () { this.requiresSupportForAll = this.content.requiresSupportForAll || this.content.requiresSupportFor; delete this.content.requiresSupportForAll; delete this.content.requiresSupportFor; this.requiresSupportForAny = this.content.requiresSupportForAny; delete this.content.requiresSupportForAny; }, getTests: function (focused) { var prop, isFunc, tests = []; for (prop in this.content) { isFunc = typeof this.content[prop] === "function"; if (this.isTest(prop)) { var testFocused = focused || FOCUSED_PREFIX.test(prop); tests.focused = tests.focused || testFocused; tests.push({ name: prop.replace(DEFERRED_PREFIX, ""). replace(FOCUSED_PREFIX, ""), func: this.content[prop], context: this, deferred: this.deferred || DEFERRED_PREFIX.test(prop) || !isFunc, focused: testFocused, comment: !isFunc ? 
this.content[prop] : "" }); } } return tests; }, getContexts: function (focused) { var ctx, prop, contexts = []; contexts.focused = focused; for (prop in this.content) { if (this.isContext(prop)) { ctx = testCase.context.create( prop, this.content[prop], this ); ctx = ctx.parse(focused); contexts.focused = contexts.focused || ctx.focused; contexts.push(ctx); } } return contexts; }, getContextSetUp: function () { return this.content.prepare; }, getContextTearDown: function () { return this.content.conclude; }, getSetUp: function () { return this.content.setUp; }, getTearDown: function () { return this.content.tearDown; }, isTest: function (prop) { var type = typeof this.content[prop]; return this.content.hasOwnProperty(prop) && (type === "function" || type === "string") && !nonTestNames(this)[prop]; }, isContext: function (prop) { return this.content.hasOwnProperty(prop) && typeof this.content[prop] === "object" && !!this.content[prop]; } }; return testCase; }); ((typeof define === "function" && define.amd && function (m) { define("buster-test/test-runner", ["bane", "when", "lodash", "async", "platform"], m); }) || (typeof module === "object" && typeof require === "function" && function (m) { require("./seedrandom"); module.exports = m( require("bane"), require("when"), require("lodash"), require("async"), require("platform"), function (cb) { process.nextTick(cb); }, true ); }) || function (m) { // In case someone overwrites/mocks out timers later on var setTimeout = window.setTimeout; this.buster = this.buster || {}; this.buster.test = this.buster.test || {}; this.buster.test.runner = m( this.bane, this.when, this._, this.async, this.platform ); })(function (bane, when, _, async, platform, nextTick, isNode) { "use strict"; var onUncaught = function () {}; var partial = function (fn) { var args = [].slice.call(arguments, 1); return function () { return fn.apply(this, args.concat([].slice.call(arguments))); }; }; function F() {} function create(obj) { F.prototype = 
obj; return new F(); } // Events var errorEvents = { "TimeoutError": "test:timeout", "AssertionError": "test:failure", "DeferredTestError": "test:deferred" }; function emit(runner, event, test, err, thisp) { var data = { name: test.name, runtime: runner.runtime }; if (err) { data.error = err; } if (typeof test.func === "string") { data.comment = test.func; } if (thisp) { data.testCase = thisp; } if (event === "test:success") { data.assertions = runner.assertionCount; } runner.emit(event, data); } function emitTestAsync(runner, test) { if (test && !test.async && !test.deferred) { test.async = true; emit(runner, "test:async", test); } } function testResult(runner, test, err) { if (!test) { err.runtime = runner.runtime; return runner.emit("uncaughtException", err); } if (test.complete) { return; } test.complete = true; var event = "test:success"; if (err) { event = errorEvents[err.name] || "test:error"; if (err.name === "TimeoutError") { emitTestAsync(runner, test); } } emit(runner, event, test, err); if (event === "test:error") { runner.results.errors += 1; } if (event === "test:failure") { runner.results.failures += 1; } if (event === "test:timeout") { runner.results.timeouts += 1; } if (event === "test:deferred") { runner.results.deferred += 1; } else { runner.results.assertions += runner.assertionCount; runner.results.tests += 1; } } function emitIfAsync(runner, test, isAsync) { if (isAsync) { emitTestAsync(runner, test); } } function emitUnsupported(runner, context, requirements) { runner.emit("context:unsupported", { runtime: runner.runtime, context: context, unsupported: requirements }); } // Data helper functions function setUps(context) { var setUpFns = []; while (context) { if (context.setUp) { setUpFns.unshift(context.setUp); } context = context.parent; } return setUpFns; } function tearDowns(context) { var tearDownFns = []; while (context) { if (context.tearDown) { tearDownFns.push(context.tearDown); } context = context.parent; } return tearDownFns; } 
function satiesfiesRequirement(requirement) { if (typeof requirement === "function") { return !!requirement(); } return !!requirement; } function unsatiesfiedRequirements(context) { var name, requirements = context.requiresSupportForAll; for (name in requirements) { if (!satiesfiesRequirement(requirements[name])) { return [name]; } } var unsatiesfied = []; requirements = context.requiresSupportForAny; for (name in requirements) { if (satiesfiesRequirement(requirements[name])) { return []; } else { unsatiesfied.push(name); } } return unsatiesfied; } function isAssertionError(err) { return err && err.name === "AssertionError"; } function prepareResults(results) { return _.extend({}, results, { ok: results.failures + results.errors + results.timeouts === 0 }); } function propWithDefault(obj, prop, defaultValue) { return obj && obj.hasOwnProperty(prop) ? obj[prop] : defaultValue; } // Async flow function promiseSeries(objects, fn) { var deferred = when.defer(); async.series(_.map(objects, function (obj) { return function (next) { var value = fn(obj); value.then(partial(next, null), next); return value; }; }), function (err) { if (err) { return deferred.reject(err); } deferred.resolve(); }); return deferred.promise; } function asyncDone(resolver) { function resolve(method, err) { try { resolver[method](err); } catch (e) { throw new Error("done() was already called"); } } return function (fn) { if (typeof fn !== "function") { return resolve("resolve"); } return function () { try { var retVal = fn.apply(this, arguments); resolve("resolve"); return retVal; } catch (up) { resolve("reject", up); } }; }; } function asyncFunction(fn, thisp) { if (fn.length > 0) { var deferred = when.defer(); fn.call(thisp, asyncDone(deferred.resolver)); return deferred.promise; } return fn.call(thisp); } function timeoutError(ms) { return { name: "TimeoutError", message: "Timed out after " + ms + "ms" }; } function timebox(promise, timeout, callbacks) { var timedout, complete, timer; function 
handler(method) { return function () { complete = true; clearTimeout(timer); if (!timedout) { callbacks[method].apply(this, arguments); } }; } when(promise).then(handler("resolve"), handler("reject")); var ms = typeof timeout === "function" ? timeout() : timeout; timer = setTimeout(function () { timedout = true; if (!complete) { callbacks.timeout(timeoutError(ms)); } }, ms); } function callAndWait(func, thisp, timeout, next) { var reject = function (err) { next(err || {}); }; var promise = asyncFunction(func, thisp); timebox(promise, timeout, { resolve: partial(next, null), reject: reject, timeout: reject }); return promise; } function callSerially(functions, thisp, timeout, source) { var d = when.defer(); var fns = functions.slice(); var isAsync = false; function next(err) { if (err) { err.source = source; return d.reject(err); } if (fns.length === 0) { return d.resolve(isAsync); } try { var promise = callAndWait(fns.shift(), thisp, timeout, next); isAsync = isAsync || when.isPromise(promise); } catch (e) { return d.reject(e); } } next(); return d.promise; } function asyncWhen(value) { if (when.isPromise(value)) { return value; } else { var d = when.defer(); TestRunner.prototype.nextTick(partial(d.resolve, value)); return d.promise; } } function chainPromises(fn, resolution) { var r = typeof resolution === "function" ? 
[resolution, resolution] : resolution; return function () { fn().then(partial(resolution, null), r[0], r[1]); }; } function rejected(deferred) { if (!deferred) { deferred = when.defer(); } deferred.reject(); return deferred.promise; } function listenForUncaughtExceptions() { var listener, listening = false; onUncaught = function (l) { listener = l; if (!listening) { listening = true; process.on("uncaughtException", function (e) { if (listener) { listener(e); } }); } }; } if (typeof process === "object") { listenForUncaughtExceptions(); } // Private runner functions function callTestFn(runner, test, thisp, next) { emit(runner, "test:start", test, null, thisp); if (test.deferred) { return next({ name: "DeferredTestError" }); } try { var promise = asyncFunction(test.func, thisp); if (when.isPromise(promise)) { emitTestAsync(runner, test); } timebox(promise, thisp.timeout || runner.timeout, { resolve: function () { // When the promise resolves, it's a success so we don't // want to propagate the resolution value. If we do, Buster // will think the value represents an error, and will fail // the test. return next.apply(this); }, reject: next, timeout: function (err) { err.source = "test function"; next(err); } }); } catch (e) { next(e); } } function checkAssertions(runner, expected) { if (runner.failOnNoAssertions && runner.assertionCount === 0) { return { name: "AssertionError", message: "No assertions!" 
}; } var actual = runner.assertionCount; if (typeof expected === "number" && actual !== expected) { return { name: "AssertionError", message: "Expected " + expected + " assertions, ran " + actual }; } } function triggerOnCreate(listeners, runner) { _.each(listeners, function (listener) { listener(runner); }); } function initializeResults() { return { contexts: 0, tests: 0, errors: 0, failures: 0, assertions: 0, timeouts: 0, deferred: 0 }; } function focused(items) { return _.filter(items, function (item) { return item.focused; }); } function dynamicTimeout(testCase, runner) { return function () { return testCase.timeout || runner.timeout; }; } // Craaaazy stuff // https://gist.github.com/982883 function uuid(a) { if (a) { return (a ^ Math.random() * 16 >> a/4).toString(16); } return ([1e7]+-1e3+-4e3+-8e3+-1e11).replace(/[018]/g, uuid); } function parseRuntime(env) { if (!env) { return null; } var runtime = platform.parse(env); runtime.uuid = uuid(); return runtime; } function countTests(context) { if (!context) { return 0; } if (!_.isArray(context)) { return (context.tests || []).length + countTests(context.contexts); } return _.reduce(context, function (num, ctx) { return num + countTests(ctx); }, 0); } function emitConfiguration(runner, ctxs) { runner.emit("suite:configuration", { runtime: runner.runtime, name: runner.configuration, tests: countTests(ctxs), seed: runner.seed }); } function TestRunner(opt) { triggerOnCreate(TestRunner.prototype.onCreateListeners, this); this.results = initializeResults(); this.runtime = parseRuntime(opt.runtime); this.configuration = opt.configuration; this.clients = 1; this.concurrent = false; if (opt.random === false) { this.randomize = function (coll) { return coll; }; } else { var random = seedRandom(opt.randomSeed); this.seed = random.seed; this.randomize = function (coll) { return coll.sort(function () { return Math.round(random() * 2) - 1; }); }; } this.failOnNoAssertions = propWithDefault( opt, "failOnNoAssertions", false 
); if (typeof opt.timeout === "number") { this.timeout = opt.timeout; } } TestRunner.prototype = bane.createEventEmitter({ timeout: 250, onCreateListeners: [], create: function (opt) { return new TestRunner(opt || {}); }, onCreate: function (listener) { this.onCreateListeners.push(listener); }, runSuite: function (ctxs) { this.focusMode = _.some(ctxs, function (c) { return c.focused; }); this.results = initializeResults(); onUncaught(_.bind(function (err) { testResult(this, this.currentTest, err); }, this)); var d = when.defer(); this.emit("suite:start", { runtime: this.runtime }); if (this.runtime) { emitConfiguration(this, ctxs); } if (this.focusMode) { this.emit("runner:focus", { runtime: this.runtime }); } this.results.contexts = ctxs.length; this.runContexts(ctxs).then(_.bind(function () { var res = prepareResults(this.results); res.runtime = this.runtime; this.emit("suite:end", res); d.resolve(res); }, this), d.reject); return d.promise; }, runContexts: function (contexts, thisProto) { var self = this; if (this.focusMode) { contexts = focused(contexts); } return promiseSeries( this.randomize(contexts || []), function (context) { return self.runContext(context, thisProto); } ); }, runContext: function (context, thisProto) { if (!context) { return rejected(); } var reqs = unsatiesfiedRequirements(context); if (reqs.length > 0) { return when(emitUnsupported(this, context, reqs)); } var d = when.defer(), s = this, thisp, ctx; var emitAndResolve = function () { s.emit("context:end", _.extend(context, { runtime: s.runtime })); d.resolve(); }; var end = function (err) { s.runContextUpDown(ctx, "contextTearDown", thisp).then( emitAndResolve, emitAndResolve ); }; this.emit("context:start", _.extend(context, { runtime: this.runtime })); asyncWhen(context).then(function (c) { ctx = c; thisp = create(thisProto || c.testCase); var fns = s.randomize(c.tests); var runTests = chainPromises( _.bind(s.runTests, s, fns, setUps(c), tearDowns(c), thisp), end ); 
s.runContextUpDown(ctx, "contextSetUp", thisp).then( function () { s.runContexts(c.contexts, thisp).then(runTests); }, end ); }); return d; }, runContextUpDown: function (context, prop, thisp) { var fn = context[prop]; if (!fn) { return when(); } var d = when.defer(); var s = this; var reject = function (err) { err = err || new Error(); err.message = context.name + " " + thisp.name(prop) + "(n) " + (/Timeout/.test(err.name) ? "timed out" : "failed") + ": " + err.message; err.runtime = s.runtime; s.emit("uncaughtException", err); d.reject(err); }; try { var timeout = dynamicTimeout(thisp, this); timebox(asyncFunction(fn, thisp), timeout, { resolve: d.resolve, reject: reject, timeout: reject }); } catch (e) { reject(e); } return d.promise; }, callSetUps: function (test, setUps, thisp) { if (test.deferred) { return when(); } emit(this, "test:setUp", test, null, thisp); var timeout = dynamicTimeout(thisp, this); var emitAsync = partial(emitIfAsync, this, test); return callSerially(setUps, thisp, timeout, "setUp").then( emitAsync ); }, callTearDowns: function (test, tearDowns, thisp) { if (test.deferred) { return when(); } emit(this, "test:tearDown", test, null, thisp); var timeout = dynamicTimeout(thisp, this); var emitAsync = partial(emitIfAsync, this, test); return callSerially(tearDowns, thisp, timeout, "tearDown").then( emitAsync ); }, runTests: function (tests, setUps, tearDowns, thisp) { if (this.focusMode) { tests = focused(tests); } return promiseSeries(tests, _.bind(function (test) { return this.runTest(test, setUps, tearDowns, create(thisp)); }, this)); }, runTest: function (test, setUps, tearDowns, thisp) { this.running = true; var d = when.defer(); test = create(test); this.assertionCount = 0; this.currentTest = test; var callSetUps = _.bind(this.callSetUps, this, test, setUps, thisp); var callTearDowns = _.bind( this.callTearDowns, this, test, tearDowns, thisp ); var callTest = partial(callTestFn, this, test, thisp); var tearDownEmitResolve = 
_.bind(function (err) { var resolution = _.bind(function (err2) { var e = err || err2 || this.queued; this.running = false; this.queued = null; e = e || checkAssertions(this, thisp.expectedAssertions); testResult(this, test, e); delete this.currentTest; d.resolve(); }, this); callTearDowns().then(partial(resolution, null), resolution); }, this); var callTestAndTearDowns = partial(callTest, tearDownEmitResolve); callSetUps().then(callTestAndTearDowns, tearDownEmitResolve); return d.promise; }, assertionPass: function () { this.assertionCount += 1; }, error: function (error, test) { if (this.running) { if (!this.queued) { this.queued = error; } return; } testResult(this, test || this.currentTest, error); }, // To be removed assertionFailure: function (error) { this.error(error); } }); TestRunner.prototype.nextTick = nextTick || function (cb) { setTimeout(cb, 0); }; return TestRunner.prototype; }); ((typeof define === "function" && define.amd && function (m) { define("buster-test/reporters/runtime-throttler", ["bane", "lodash"], m); }) || (typeof module === "object" && typeof require === "function" && function (m) { module.exports = m(require("bane"), require("lodash")); }) || function (m) { this.buster = this.buster || {}; this.buster.reporters = this.buster.reporters || {}; this.buster.reporters.runtimeThrottler = m(this.bane, this._); } )(function (bane, _) { "use strict"; function runtime(env) { return { uuid: env.uuid, contexts: 0, events: [], queue: function (name, data) { this.events.push({ name: name, data: data }); }, flush: function (emitter) { _.forEach(this.events, function (event) { emitter.emit(event.name, event.data); }); } }; } function getRuntime(runtimes, env) { return runtimes.filter(function (r) { return r.uuid === env.uuid; })[0]; } function proxy(name) { return function (e) { var rt = getRuntime(this.runtimes, e.runtime); if (rt && rt.contexts > 0) { rt.queue(name, e); } else { this.emit(name, e); } }; } function RuntimeThrottler() { 
    // (constructor body, opened on the previous chunk line)
    this.runtimes = [];
    this.results = [];
}

// The throttler serializes events arriving from multiple concurrent runtimes
// (e.g. several browsers) into one coherent stream for reporters: events from
// a runtime that is currently "inside" a context are queued on that runtime
// and flushed once its outermost context ends.
RuntimeThrottler.prototype = bane.createEventEmitter({
    create: function () {
        return new RuntimeThrottler();
    },

    // Subscribes to all runner events; also forwards console log events.
    listen: function (runner) {
        runner.bind(this);
        if (runner.console) {
            runner.console.on("log", this.log, this);
        }
        return this;
    },

    // Emits a single suite:start regardless of how many runtimes report in;
    // each runtime gets its own queueing record.
    "suite:start": function (e) {
        if (this.runtimes.length === 0) {
            this.emit("suite:start", {});
        }
        this.runtimes.push(runtime(e.runtime));
    },

    "suite:configuration": function (e) {
        this.emit("suite:configuration", e);
    },

    "context:unsupported": function (e) {
        var rt = getRuntime(this.runtimes, e.runtime);
        if (rt.contexts === 0) {
            this.emit("context:unsupported", e);
        } else {
            rt.queue("context:unsupported", e);
        }
    },

    // With more than one runtime, context events are queued per runtime and
    // the context-nesting counter is maintained; with a single runtime they
    // pass straight through (no interleaving is possible).
    "context:start": function (e) {
        var rt = getRuntime(this.runtimes, e.runtime);
        if (this.runtimes.length > 1) {
            rt.queue("context:start", e);
            rt.contexts += 1;
        } else {
            this.emit("context:start", e);
        }
    },

    // Test-level events are queued while their runtime is inside a context
    // (see proxy()); otherwise they are emitted immediately.
    "test:setUp": proxy("test:setUp"),
    "test:tearDown": proxy("test:tearDown"),
    "test:start": proxy("test:start"),
    "test:error": proxy("test:error"),
    "test:failure": proxy("test:failure"),
    "test:timeout": proxy("test:timeout"),
    "test:success": proxy("test:success"),
    "test:async": proxy("test:async"),
    "test:deferred": proxy("test:deferred"),

    // Leaving the outermost context (counter back to 0) flushes the queue.
    "context:end": function (e) {
        var rt = getRuntime(this.runtimes, e.runtime);
        if (rt) {
            rt.queue("context:end", e);
            rt.contexts -= 1;
            if (rt.contexts <= 0) {
                rt.contexts = 0;
                rt.flush(this);
            }
        } else {
            this.emit("context:end", e);
        }
    },

    // Aggregates per-runtime result summaries into one combined summary once
    // every known runtime has reported. (Reduction continues on the next
    // chunk line.)
    "suite:end": function (e) {
        this.results.push(e);
        if (this.results.length === this.runtimes.length ||
                this.runtimes.length === 0) {
            this.emit("suite:end", _.reduce(this.results, function (res, r) {
                return {
                    contexts: (res.contexts || 0) + r.contexts,
                    tests: (res.tests || 0) + r.tests,
                    errors: (res.errors || 0) + r.errors,
                    failures: (res.failures || 0) + r.failures,
                    assertions: (res.assertions || 0) + r.assertions,
                    timeouts: (res.timeouts || 0) + r.timeouts,
                    deferred: (res.deferred || 0) + r.deferred,
                    ok: res.ok && r.ok
}; }, { ok: true })); } }, "runner:focus": function () { if (!this.runnerFocus) { this.emit("runner:focus"); this.runnerFocus = true; } }, uncaughtException: function (e) { this.emit("uncaughtException", e); }, log: function (e) { this.emit("log", e); } }); return RuntimeThrottler.prototype; }); ((typeof define === "function" && define.amd && function (m) { define("buster-test/reporters/html", ["buster-test/reporters/runtime-throttler"], m); }) || (typeof module === "object" && typeof require === "function" && function (m) { try { var jsdom = require("jsdom").jsdom; } catch (e) { // Is handled when someone actually tries using the HTML reporter // on node without jsdom } module.exports = m(require("./runtime-throttler"), jsdom, true); }) || function (m) { this.buster = this.buster || {}; this.buster.reporters = this.buster.reporters || {}; this.buster.reporters.html = m(this.buster.reporters.runtimeThrottler); } )(function (runtimeThrottler, jsdom, isNodeJS) { "use strict"; function filterStack(reporter, stack) { if (!stack) { return []; } if (reporter.stackFilter) { return reporter.stackFilter.filter(stack); } return stack.split("\n"); } function getDoc(options) { return options && options.document || (typeof document != "undefined" ? 
document : createDocument()); } function addCSS(head, cssPath) { if (isNodeJS) { var fs = require("fs"); var path = require("path"); head.appendChild(el(head.ownerDocument, "style", { type: "text/css", innerHTML: fs.readFileSync( path.join(__dirname, "../../resources/buster-test.css") ) })); } else { head.appendChild(el(document, "link", { rel: "stylesheet", type: "text/css", media: "all", href: cssPath })); } } function insertTitle(doc, body, title) { if (doc.getElementsByTagName("h1").length == 0) { body.insertBefore(el(doc, "h1", { innerHTML: "<span class=\"title\">" + title + "</span>" }), body.firstChild); } } function insertLogo(h1) { h1.innerHTML = "<span class=\"buster-logo\"></span>" + h1.innerHTML; } function createDocument() { if (!jsdom) { util.puts("Unable to load jsdom, html reporter will not work " + "for node runs. Spectacular fail coming up."); } var dom = jsdom("<!DOCTYPE html><html><head></head><body></body></html>"); return dom.createWindow().document; } function pluralize(num, phrase) { num = typeof num == "undefined" ? 0 : num; return num + " " + (num == 1 ? phrase : phrase + "s"); } function el(doc, tagName, properties) { var el = doc.createElement(tagName), value; for (var prop in properties) { value = properties[prop]; if (prop == "http-equiv") { el.setAttribute(prop, value); } if (prop == "text") { prop = "innerHTML"; } el[prop] = value; } return el; } function addListItem(tagName, test, className) { var prefix = tagName ? "<" + tagName + ">" : ""; var suffix = tagName ? "</" + tagName + ">" : ""; var item = el(this.doc, "li", { className: className, text: prefix + test.name + suffix }); this.list().appendChild(item); return item; } function addException(reporter, li, error) { if (!error) { return; } var name = error.name == "AssertionError" ? 
"" : error.name + ": "; li.appendChild(el(li.ownerDocument || document, "p", { innerHTML: name + error.message, className: "error-message" })); var stack = filterStack(reporter, error.stack); if (stack.length > 0) { if (stack[0].indexOf(error.message) >= 0) { stack.shift(); } li.appendChild(el(li.ownerDocument || document, "ul", { className: "stack", innerHTML: "<li>" + stack.join("</li><li>") + "</li>" })); } } function busterTestPath(document) { var scripts = document.getElementsByTagName("script"); for (var i = 0, l = scripts.length; i < l; ++i) { if (/buster-test\.js$/.test(scripts[i].src)) { return scripts[i].src.replace("buster-test.js", ""); } } return ""; } function getOutputStream(opt) { if (opt.outputStream) { return opt.outputStream; } if (isNodeJS) { var util = require("util"); return { write: function (bytes) { util.print(bytes); } }; } } function HtmlReporter(opt) { opt = opt || {}; this._listStack = []; this.doc = getDoc(opt); var cssPath = opt.cssPath; if (!cssPath && opt.detectCssPath !== false) { cssPath = busterTestPath(this.doc) + "buster-test.css"; } this.setRoot(opt.root || this.doc.body, cssPath); this.out = getOutputStream(opt); this.stackFilter = opt.stackFilter; } HtmlReporter.prototype = { create: function (opt) { return new HtmlReporter(opt); }, setRoot: function (root, cssPath) { this.root = root; this.root.className += " buster-test"; var body = this.doc.body; if (this.root == body) { var head = this.doc.getElementsByTagName("head")[0]; head.parentNode.className += " buster-test"; head.appendChild(el(this.doc, "meta", { "name": "viewport", "content": "width=device-width, initial-scale=1.0" })); head.appendChild(el(this.doc, "meta", { "http-equiv": "Content-Type", "content": "text/html; charset=utf-8" })); if (cssPath) addCSS(head, cssPath); insertTitle(this.doc, body, this.doc.title || "Buster.JS Test case"); insertLogo(this.doc.getElementsByTagName("h1")[0]); } }, listen: function (runner) { var proxy = runtimeThrottler.create(); 
proxy.listen(runner).bind(this); if (runner.console) { runner.console.on("log", this.log, this); } return this; }, "context:start": function (context) { var container = this.root; if (this._list) { container = el(this.doc, "li"); this._list.appendChild(container); } container.appendChild(el(this.doc, "h2", { text: context.name })); this._list = el(this.doc, "ul"); container.appendChild(this._list); this._listStack.unshift(this._list); }, "context:end": function (context) { this._listStack.shift(); this._list = this._listStack[0]; }, "test:success": function (test) { var li = addListItem.call(this, "h3", test, "success"); this.addMessages(li); }, "test:failure": function (test) { var li = addListItem.call(this, "h3", test, "failure"); this.addMessages(li); addException(this, li, test.error); }, "test:error": function (test) { var li = addListItem.call(this, "h3", test, "error"); this.addMessages(li); addException(this, li, test.error); }, "test:deferred": function (test) { var li = addListItem.call(this, "h3", test, "deferred"); }, "test:timeout": function (test) { var li = addListItem.call(this, "h3", test, "timeout"); var source = test.error && test.error.source; if (source) { li.firstChild.innerHTML += " (" + source + " timed out)"; } this.addMessages(li); }, log: function (msg) { this.messages = this.messages || []; this.messages.push(msg); }, addMessages: function (li) { var messages = this.messages || []; var html = ""; if (messages.length == 0) { return; } for (var i = 0, l = messages.length; i < l; ++i) { html += "<li class=\"" + messages[i].level + "\">"; html += messages[i].message + "</li>"; } li.appendChild(el(this.doc, "ul", { className: "messages", innerHTML: html })); this.messages = []; }, success: function (stats) { return stats.failures == 0 && stats.errors == 0 && stats.tests > 0 && stats.assertions > 0; }, startTimer: function () { this.startedAt = new Date(); }, "suite:end": function (stats) { var diff = (new Date() - this.startedAt) / 1000; var 
className = "stats " + (this.success(stats) ? "success" : "failure"); var statsEl = el(this.doc, "div", { className: className }); var h1 = this.doc.getElementsByTagName("h1")[0]; this.root.insertBefore(statsEl, h1.nextSibling); statsEl.appendChild(el(this.doc, "h2", { text: this.success(stats) ? "Tests OK" : "Test failures!" })); var html = ""; html += "<li>" + pluralize(stats.contexts, "test case") + "</li>"; html += "<li>" + pluralize(stats.tests, "test") + "</li>"; html += "<li>" + pluralize(stats.assertions, "assertion") + "</li>"; html += "<li>" + pluralize(stats.failures, "failure") + "</li>"; html += "<li>" + pluralize(stats.errors, "error") + "</li>"; html += "<li>" + pluralize(stats.timeouts, "timeout") + "</li>"; if (stats.deferred > 0) { html += "<li>" + stats.deferred + " deferred</li>"; } statsEl.appendChild(el(this.doc, "ul", { innerHTML: html })); statsEl.appendChild(el(this.doc, "p", { className: "time", innerHTML: "Finished in " + diff + "s" })); this.writeIO(); }, list: function () { if (!this._list) { this._list = el(this.doc, "ul", { className: "test-results" }); this._listStack.unshift(this._list); this.root.appendChild(this._list); } return this._list; }, writeIO: function () { if (!this.out) { return; } this.out.write(this.doc.doctype.toString()); this.out.write(this.doc.innerHTML); } }; return HtmlReporter.prototype; }); if (typeof module === "object" && typeof require === "function") { module.exports = { specification: require("./reporters/specification"), jsonProxy: require("./reporters/json-proxy"), xml: require("./reporters/xml"), tap: require("./reporters/tap"), brief: require("./reporters/brief"), html: require("./reporters/html"), teamcity: require("./reporters/teamcity"), load: function (reporter) { if (module.exports[reporter]) { return module.exports[reporter]; } return require(reporter); } }; module.exports.defaultReporter = module.exports.brief; } else if (typeof define === "function") { define("buster-test/reporters", 
["buster-test/reporters/html"], function (html) { var reporters = { html: html, load: function (reporter) { return reporters[reporter]; } }; reporters.defaultReporter = reporters.brief; return reporters; }); } else { buster.reporters = buster.reporters || {}; buster.reporters.defaultReporter = buster.reporters.brief; buster.reporters.load = function (reporter) { return buster.reporters[reporter]; }; } ((typeof define === "function" && define.amd && function (m) { define("buster-test/auto-run", ["lodash", "buster-test/test-context", "buster-test/test-runner", "buster-test/reporters"], m); }) || (typeof module === "object" && typeof require === "function" && function (m) { module.exports = m( require("lodash"), require("./test-context"), require("./test-runner"), require("./reporters") ); }) || function (m) { this.buster.autoRun = m( this._, this.buster.testContext, this.buster.testRunner, this.buster.reporters ); })(function (_, testContext, testRunner, reporters) { "use strict"; function browserEnv() { var env = {}; var key, value, pieces, params = window.location.search.slice(1).split("&"); for (var i = 0, l = params.length; i < l; ++i) { pieces = params[i].split("="); key = pieces.shift(); value = pieces.join("=") || "1"; if (key) { key = "BUSTER_" + key.match(/(^|[A-Z])[a-z]+/g).join("_").toUpperCase(); env[key] = value; } } return env; } function env() { if (typeof process !== "undefined") { return process.env; } if (typeof window === "undefined") { return {}; } return browserEnv(); } function autoRun(opt, callbacks) { var runners = 0, contexts = [], timer; testRunner.onCreate(function (runner) { runners += 1; }); if (typeof opt === "function") { callbacks = opt; opt = {}; } if (typeof callbacks !== "object") { callbacks = { end: callbacks }; } return function (tc) { contexts.push(tc); clearTimeout(timer); timer = setTimeout(function () { if (runners === 0) { opt = _.extend(autoRun.envOptions(env()), opt); autoRun.run(contexts, opt, callbacks); } }, 10); }; } 
autoRun.envOptions = function (env) { return { reporter: env.BUSTER_REPORTER, filters: (env.BUSTER_FILTERS || "").split(","), color: env.BUSTER_COLOR === "false" ? false : true, bright: env.BUSTER_BRIGHT === "false" ? false : true, timeout: env.BUSTER_TIMEOUT && parseInt(env.BUSTER_TIMEOUT, 10), failOnNoAssertions: env.BUSTER_FAIL_ON_NO_ASSERTIONS === "false" ? false : true, random: env.BUSTER_RANDOM === "0" || env.BUSTER_RANDOM === "false" ? false : true, randomSeed: env.BUSTER_RANDOM_SEED }; }; function initializeReporter(runner, opt) { var reporter; if (typeof document !== "undefined" && document.getElementById) { reporter = "html"; opt.root = document.getElementById("buster") || document.body; } else { reporter = opt.reporter || "brief"; } reporter = reporters.load(reporter).create(opt); reporter.listen(runner); if (typeof reporter.log === "function" && typeof buster === "object" && typeof buster.console === "function") { buster.console.on("log", reporter.log, reporter); } } function ua() { if (typeof navigator !== "undefined") { return navigator.userAgent; } return [process.title, process.version + ",", process.platform, process.arch].join(" "); } autoRun.run = function (contexts, opt, callbacks) { callbacks = callbacks || {}; if (contexts.length === 0) { return; } opt = _.extend({ color: true, bright: true }, opt); var runner = testRunner.create(_.extend({ timeout: 750, failOnNoAssertions: false, runtime: ua(), random: typeof opt.random === "boolean" ? 
opt.random : true, randomSeed: opt.randomSeed }, opt)); if (typeof callbacks.start === "function") { callbacks.start(runner); } initializeReporter(runner, opt); if (typeof callbacks.end === "function") { runner.on("suite:end", callbacks.end); } runner.runSuite(testContext.compile(contexts, opt.filters)); }; return autoRun; }); ((typeof define === "function" && define.amd && function (m) { define("referee-sinon", m); }) || (typeof module === "object" && typeof require === "function" && function (m) { module.exports = m(); }) || function (m) { this.refereeSinon = m(); } )(function () { return function (referee, sinon) { sinon.expectation.pass = function (assertion) { referee.emit("pass", assertion); }; sinon.expectation.fail = function (message) { referee.fail(message); }; // Lazy bind the format method to referee's. This way, Sinon will // always format objects like referee does, even if referee is configured // after referee-sinon is loaded sinon.format = function () { return referee.format.apply(referee, arguments); }; function verifyFakes() { var method, isNot, i, l; for (i = 0, l = arguments.length; i < l; ++i) { method = arguments[i]; isNot = (method || "fake") + " is not "; if (!method) { this.fail(isNot + "a spy"); } if (typeof method !== "function") { this.fail(isNot + "a function"); } if (typeof method.getCall !== "function") { this.fail(isNot + "stubbed"); } } return true; } var sf = sinon.spy.formatters; var spyValues = function (spy) { return [spy, sf.c(spy), sf.C(spy)]; }; referee.add("called", { assert: function (spy) { verifyFakes.call(this, spy); return spy.called; }, assertMessage: "Expected ${0} to be called at least once but was " + "never called", refuteMessage: "Expected ${0} to not be called but was called ${1}${2}", expectation: "toHaveBeenCalled", values: spyValues }); function slice(arr, from, to) { return [].slice.call(arr, from, to); } referee.add("callOrder", { assert: function (spy) { var type = Object.prototype.toString.call(spy); var 
isArray = type === "[object Array]"; var args = isArray ? spy : arguments; verifyFakes.apply(this, args); if (sinon.calledInOrder(args)) { return true; } this.expected = [].join.call(args, ", "); this.actual = sinon.orderByFirstCall(slice(args)).join(", "); }, assertMessage: "Expected ${expected} to be called in order but " + "were called as ${actual}", refuteMessage: "Expected ${expected} not to be called in order" }); function addCallCountAssertion(count) { var c = count.toLowerCase(); referee.add("called" + count, { assert: function (spy) { verifyFakes.call(this, spy); return spy["called" + count]; }, assertMessage: "Expected ${0} to be called " + c + " but was called ${1}${2}", refuteMessage: "Expected ${0} to not be called exactly " + c + "${2}", expectation: "toHaveBeenCalled" + count, values: spyValues }); } addCallCountAssertion("Once"); addCallCountAssertion("Twice"); addCallCountAssertion("Thrice"); function valuesWithThis(spy, thisObj) { return [spy, thisObj, (spy.printf && spy.printf("%t")) || ""]; } referee.add("calledOn", { assert: function (spy, thisObj) { verifyFakes.call(this, spy); return spy.calledOn(thisObj); }, assertMessage: "Expected ${0} to be called with ${1} as this but was " + "called on ${2}", refuteMessage: "Expected ${0} not to be called with ${1} as this", expectation: "toHaveBeenCalledOn", values: valuesWithThis }); referee.add("alwaysCalledOn", { assert: function (spy, thisObj) { verifyFakes.call(this, spy); return spy.alwaysCalledOn(thisObj); }, assertMessage: "Expected ${0} to always be called with ${1} as this " + "but was called on ${2}", refuteMessage: "Expected ${0} not to always be called with ${1} " + "as this", expectation: "toHaveAlwaysBeenCalledOn", values: valuesWithThis }); function formattedArgs(args, i) { var l, result; for (l = args.length, result = []; i < l; ++i) { result.push(sinon.format(args[i])); } return result.join(", "); } function spyAndCalls(spy) { return [ spy, formattedArgs(arguments, 1), spy.printf && 
spy.printf("%C") ]; } referee.add("calledWith", { assert: function (spy) { verifyFakes.call(this, spy); return spy.calledWith.apply(spy, slice(arguments, 1)); }, assertMessage: "Expected ${0} to be called with arguments ${1}${2}", refuteMessage: "Expected ${0} not to be called with arguments ${1}${2}", expectation: "toHaveBeenCalledWith", values: spyAndCalls }); referee.add("alwaysCalledWith", { assert: function (spy) { verifyFakes.call(this, spy); return spy.alwaysCalledWith.apply(spy, slice(arguments, 1)); }, assertMessage: "Expected ${0} to always be called with " + "arguments ${1}${2}", refuteMessage: "Expected ${0} not to always be called with " + "arguments${1}${2}", expectation: "toHaveAlwaysBeenCalledWith", values: spyAndCalls }); referee.add("calledOnceWith", { assert: function (spy) { verifyFakes.call(this, spy); return spy.calledOnce && spy.calledWith.apply(spy, slice(arguments, 1)); }, assertMessage: "Expected ${0} to be called once with " + "arguments ${1}${2}", refuteMessage: "Expected ${0} not to be called once with " + "arguments ${1}${2}", expectation: "toHaveBeenCalledOnceWith", values: spyAndCalls }); referee.add("calledWithExactly", { assert: function (spy) { verifyFakes.call(this, spy); return spy.calledWithExactly.apply(spy, slice(arguments, 1)); }, assertMessage: "Expected ${0} to be called with exact " + "arguments ${1}${2}", refuteMessage: "Expected ${0} not to be called with exact " + "arguments${1}${2}", expectation: "toHaveBeenCalledWithExactly", values: spyAndCalls }); referee.add("alwaysCalledWithExactly", { assert: function (spy) { verifyFakes.call(this, spy); return spy.alwaysCalledWithExactly.apply(spy, slice(arguments, 1)); }, assertMessage: "Expected ${0} to always be called with exact " + "arguments ${1}${2}", refuteMessage: "Expected ${0} not to always be called with exact " + "arguments${1}${2}", expectation: "toHaveAlwaysBeenCalledWithExactly", values: spyAndCalls }); referee.add("calledWithMatch", { assert: function (spy) { 
verifyFakes.call(this, spy); return spy.calledWithMatch.apply(spy, slice(arguments, 1)); }, assertMessage: "Expected ${0} to be called with matching " + "arguments ${1}${2}", refuteMessage: "Expected ${0} not to be called with matching " + "arguments${1}${2}", expectation: "toHaveAlwaysBeenCalledWithExactly", values: spyAndCalls }); referee.add("alwaysCalledWithMatch", { assert: function (spy) { verifyFakes.call(this, spy); return spy.alwaysCalledWithMatch.apply(spy, slice(arguments, 1)); }, assertMessage: "Expected ${0} to always be called with matching " + "arguments ${1}${2}", refuteMessage: "Expected ${0} not to always be called with matching " + "arguments${1}${2}", expectation: "toHaveAlwaysBeenCalledWithExactly", values: spyAndCalls }); function spyAndException(spy, exception) { return [spy, spy.printf && spy.printf("%C")]; } referee.add("threw", { assert: function (spy) { verifyFakes.call(this, spy); return spy.threw(arguments[1]); }, assertMessage: "Expected ${0} to throw an exception${1}", refuteMessage: "Expected ${0} not to throw an exception${1}", expectation: "toHaveThrown", values: spyAndException }); referee.add("alwaysThrew", { assert: function (spy) { verifyFakes.call(this, spy); return spy.alwaysThrew(arguments[1]); }, assertMessage: "Expected ${0} to always throw an exception${1}", refuteMessage: "Expected ${0} not to always throw an exception${1}", expectation: "toAlwaysHaveThrown", values: spyAndException }); }; }); ((typeof define === "function" && define.amd && function (m) { define("buster-sinon", m); }) || (typeof module === "object" && typeof require === "function" && function (m) { module.exports = m(); }) || function (m) { this.busterSinon = m(); } )(function () { return function (sinon, bt, stackFilter, formatio) { if (stackFilter) { stackFilter.filters.push("lib/sinon"); } bt.testRunner.onCreate(function (runner) { runner.on("test:setUp", function (test) { var config = sinon.getConfig(sinon.config); config.useFakeServer = false; var 
sandbox = sinon.sandbox.create(); sandbox.inject(test.testCase); test.testCase.useFakeTimers = function () { return sandbox.useFakeTimers.apply(sandbox, arguments); }; test.testCase.useFakeServer = function () { return sandbox.useFakeServer.apply(sandbox, arguments); }; test.testCase.sandbox = sandbox; var testFunc = test.func; }); runner.on("test:tearDown", function (test) { try { test.testCase.sandbox.verifyAndRestore(); } catch (e) { runner.assertionFailure(e); } }); sinon.expectation.pass = function () { runner.assertionPass(); }; }); }; }); (function (glbl, buster, sinon) { if (typeof require == "function" && typeof module == "object") { var busterTest = require("buster-test"); var path = require("path"); var fs = require("fs"); var referee = require("referee"); var stackFilter = require("stack-filter"); sinon = require("sinon"); buster = module.exports = { testCase: busterTest.testCase, spec: busterTest.spec, testRunner: busterTest.testRunner, testContext: busterTest.testContext, reporters: busterTest.reporters, autoRun: busterTest.autoRun, referee: referee, assertions: referee, formatio: require("formatio"), eventedLogger: require("evented-logger"), frameworkExtension: require("./framework-extension"), wiringExtension: require("./wiring-extension"), sinon: require("buster-sinon"), refereeSinon: require("referee-sinon") }; Object.defineProperty(buster, "VERSION", { get: function () { if (!this.version) { var pkgJSON = path.resolve(__dirname, "..", "package.json"); var pkg = JSON.parse(fs.readFileSync(pkgJSON, "utf8")); this.version = pkg.version; } return this.version; } }); } var logFormatter = buster.formatio.configure({ quoteStrings: false }); var asciiFormat = function () { return logFormatter.ascii.apply(logFormatter, arguments); }; if (asciiFormat) { buster.console = buster.eventedLogger.create({ formatter: asciiFormat, logFunctions: true }); } buster.log = function () { return buster.console.log.apply(buster.console, arguments); }; 
buster.captureConsole = function () { glbl.console = buster.console; if (glbl.console !== buster.console) { glbl.console.log = buster.log; } }; if (asciiFormat) { buster.referee.format = asciiFormat; } buster.assert = buster.referee.assert; buster.refute = buster.referee.refute; buster.expect = buster.referee.expect; if (Object.defineProperty) { Object.defineProperty(buster, "assertions", { get: function () { console.log("buster.assertions is provided for backwards compatibility. Please update your code to use buster.referee"); return buster.referee; } }); Object.defineProperty(buster, "format", { get: function () { console.log("buster.format is provided for backwards compatibility. Please update your code to use buster.formatio"); return buster.formatio; } }); } else { buster.assertions = buster.referee; buster.format = buster.formatio; } buster.testRunner.onCreate(function (runner) { buster.referee.on("pass", function () { runner.assertionPass(); }); buster.referee.on("failure", function (err) { runner.assertionFailure(err); }); runner.on("test:async", function () { buster.referee.throwOnFailure = false; }); runner.on("test:setUp", function () { buster.referee.throwOnFailure = true; }); runner.on("context:start", function (context) { if (context.testCase) { context.testCase.log = buster.log; } }); }); var sf = typeof stackFilter !== "undefined" && stackFilter; buster.sinon(sinon, buster, sf, logFormatter); buster.refereeSinon(buster.referee, sinon); }(typeof global != "undefined" ? global : this, typeof buster == "object" ? buster : null, typeof sinon == "object" ? sinon : null)); (function (B) { B.env = B.env || {}; // Globally uncaught errors will be emitted as messages through // the test runner function uncaughtErrors(runner) { window.onerror = function (message, url, line) { if (arguments.length === 3) { var cp = B.env.contextPath || window.location; var index = (url || "").indexOf(cp); if (index >= 0) { url = "." 
+ url.slice(index + cp.length); } if (line === 1 && message === "Error loading script") { message = "Unable to load script " + url; } else { message = url + ":" + line + " " + message; } } runner.emit("uncaughtException", { name: "UncaughtError", message: message }); return true; }; } // Emit messages from the evented logger buster.console through // the test runner function logger(runner) { B.console.on("log", function (msg) { runner.emit("log", msg); }); } // Collect test cases and specs created with buster.testCase // and buster.spec.describe function testContexts() { var contexts = []; B.addTestContext = function (context) { contexts.push(context); }; B.testContext.on("create", B.addTestContext); return contexts; } // Clear scripts and use the browserEnv object from buster-test to // reset the document between tests runs function documentState(runner) { var scripts = document.getElementsByTagName("script"), script; while ((script = scripts[0])) { script.parentNode.removeChild(script); } var env = B.browserEnv.create(document.body); env.listen(runner); } function shouldAutoRun(config) { var autoRunPropertyIsSet = config.hasOwnProperty("autoRun"); return config.autoRun || !autoRunPropertyIsSet; } function shouldResetDoc(config) { var resetDocumentPropertyIsSet = config.hasOwnProperty("resetDocument"); return config.resetDocument || !resetDocumentPropertyIsSet; } // Wire up the test runner. It will start running tests when // the environment is ready and when we've been told to run. // Note that run() and ready() may occur in any order, and // we cannot do anything until both have happened. // // When running tests with buster-server, we'll be ready() when // the server sends the "tests:run" message. This message is sent // by the server when it receives the "loaded all scripts" message // from the browser. We'll usually run as soon as we're ready. // However, if the autoRun option is false, we will not run // until buster.run() is explicitly called. 
// // For static browser runs, the environment is ready() when // ready() is called, which happens after all files have been // loaded in the browser. Tests will run immediately for autoRun: // true, and on run() otherwise. // function testRunner(runner) { var ctxts = B.wire.testContexts(); var ready, started, alreadyRunning, config; function attemptRun() { if (!ready || !started || alreadyRunning) { return; } alreadyRunning = true; if (typeof runner === "function") { runner = runner(); } if (shouldResetDoc(config)) { B.wire.documentState(runner); } if (config.captureConsole) { B.captureConsole(); } for (var prop in config) { runner[prop] = config[prop]; } runner.runSuite(B.testContext.compile(ctxts, config.filters)); } return { ready: function (options) { config = options || {}; ready = true; started = started || shouldAutoRun(config); attemptRun(); }, run: function () { started = true; attemptRun(); } }; } B.wire = function (testRunner) { var wiring = B.wire.testRunner(testRunner); B.ready = wiring.ready; B.run = wiring.run; return wiring; }; B.wire.uncaughtErrors = uncaughtErrors; B.wire.logger = logger; B.wire.testContexts = testContexts; B.wire.documentState = documentState; B.wire.testRunner = testRunner; }(buster)); // TMP Performance fix (function () { var i = 0; buster.nextTick = function (cb) { i += 1; if (i === 10) { setTimeout(function () { cb(); }, 0); i = 0; } else { cb(); } }; }()); buster.sinon = sinon; delete this.sinon; delete this.define; delete this.when; delete this.async; delete this.platform; delete this._; if (typeof module === "object" && typeof require === "function") { var buster = module.exports = require("./buster/buster-wiring"); } (function (glbl) { var tc = buster.testContext; if (tc.listeners && (tc.listeners.create || []).length > 0) { return; } tc.on("create", buster.autoRun({ cwd: typeof process != "undefined" ? process.cwd() : null })); }(typeof global != "undefined" ? global : this));<|fim▁end|>
proxyCall.callId = id;
<|file_name|>Migrate.js<|end_file_name|><|fim▁begin|>/* ************************************************************************ qooxdoo - the new era of web development http://qooxdoo.org Copyright: 2017 Christian Boulanger License: MIT: https://opensource.org/licenses/MIT See the LICENSE file in the project's top-level directory for details. Authors: * Christian Boulanger ([email protected], @cboulanger) ************************************************************************ */ const process = require("process"); const path = require("upath"); const semver = require("semver"); const fs = qx.tool.utils.Promisify.fs; /** * Installs a package */ qx.Class.define("qx.tool.cli.commands.package.Migrate", { extend: qx.tool.cli.commands.Package, statics: { /** * Flag to prevent recursive call to process() */ migrationInProcess: false, /** * Return the Yargs configuration object * @return {{}} */ getYargsCommand: function() { return { command: "migrate", describe: "migrates the package system to a newer version.", builder: { "verbose": { alias: "v", describe: "Verbose logging" }, "quiet": { alias: "q", describe: "No output" } } }; } }, members: { /** * Announces or applies a migration * @param {Boolean} announceOnly If true, announce the migration without * applying it. */ process: async function(announceOnly=false) { const self = qx.tool.cli.commands.package.Migrate; if (self.migrationInProcess) { return; } self.migrationInProcess = true; let needFix = false; // do not call this.base(arguments) here! 
let pkg = qx.tool.cli.commands.Package; let cwd = process.cwd(); let migrateFiles = [ [ path.join(cwd, pkg.lockfile.filename), path.join(cwd, pkg.lockfile.legacy_filename) ], [ path.join(cwd, pkg.cache_dir), path.join(cwd, pkg.legacy_cache_dir) ], [ path.join(qx.tool.cli.ConfigDb.getDirectory(), pkg.package_cache_name), path.join(qx.tool.cli.ConfigDb.getDirectory(), pkg.legacy_package_cache_name) ] ]; if (this.checkFilesToRename(migrateFiles).length) { let replaceInFiles = [{ files: path.join(cwd, ".gitignore"), from: pkg.legacy_cache_dir + "/", to: pkg.cache_dir + "/" }];<|fim▁hole|> await this.migrate(migrateFiles, replaceInFiles, announceOnly); if (announceOnly) { needFix = true; } else { if (!this.argv.quiet) { qx.tool.compiler.Console.info("Fixing path names in the lockfile..."); } this.argv.reinstall = true; await (new qx.tool.cli.commands.package.Upgrade(this.argv)).process(); } } // Migrate all manifest in a package const registryModel = qx.tool.config.Registry.getInstance(); let manifestModels =[]; if (await registryModel.exists()) { // we have a qooxdoo.json index file containing the paths of libraries in the repository await registryModel.load(); let libraries = registryModel.getLibraries(); for (let library of libraries) { manifestModels.push((new qx.tool.config.Abstract(qx.tool.config.Manifest.config)).set({baseDir: path.join(cwd, library.path)})); } } else if (fs.existsSync(qx.tool.config.Manifest.config.fileName)) { manifestModels.push(qx.tool.config.Manifest.getInstance()); } for (const manifestModel of manifestModels) { await manifestModel.set({warnOnly: true}).load(); manifestModel.setValidate(false); needFix = false; let s = ""; if (!qx.lang.Type.isArray(manifestModel.getValue("info.authors"))) { needFix = true; s += " missing info.authors\n"; } if (!semver.valid(manifestModel.getValue("info.version"))) { needFix = true; s += " missing or invalid info.version\n"; } let obj = { "info.qooxdoo-versions": null, "info.qooxdoo-range": null, 
"provides.type": null, "requires.qxcompiler": null, "requires.qooxdoo-sdk": null, "requires.qooxdoo-compiler": null }; if (manifestModel.keyExists(obj)) { needFix = true; s += " obsolete entry:\n"; for (let key in obj) { if (obj[key]) { s += " " + key + "\n"; } } } if (needFix) { if (announceOnly) { qx.tool.compiler.Console.warn("*** Manifest(s) need to be updated:\n" + s); } else { manifestModel .transform("info.authors", authors => { if (authors === "") { return []; } else if (qx.lang.Type.isString(authors)) { return [{name: authors}]; } else if (qx.lang.Type.isObject(authors)) { return [{ name: authors.name, email: authors.email }]; } else if (qx.lang.Type.isArray(authors)) { return authors.map(r => qx.lang.Type.isObject(r) ? { name: r.name, email: r.email } : { name: r } ); } return []; }) .transform("info.version", version => { let coerced = semver.coerce(version); if (coerced === null) { qx.tool.compiler.Console.warn(`*** Version string '${version}' could not be interpreted as semver, changing to 1.0.0`); return "1.0.0"; } return String(coerced); }) .unset("info.qooxdoo-versions") .unset("info.qooxdoo-range") .unset("provides.type") .unset("requires.qxcompiler") .unset("requires.qooxdoo-compiler") .unset("requires.qooxdoo-sdk"); await manifestModel.save(); if (!this.argv.quiet) { qx.tool.compiler.Console.info(`Updated settings in ${manifestModel.getRelativeDataPath()}.`); } } } // check framework and compiler dependencies // if none are given in the Manifest, use the present framework and compiler const compiler_version = qx.tool.compiler.Version.VERSION; const compiler_range = manifestModel.getValue("requires.@qooxdoo/compiler") || compiler_version; const framework_version = await this.getLibraryVersion(await this.getGlobalQxPath()); const framework_range = manifestModel.getValue("requires.@qooxdoo/framework") || framework_version; if ( !semver.satisfies(compiler_version, compiler_range) || !semver.satisfies(framework_version, framework_range)) { needFix = 
true; if (announceOnly) { qx.tool.compiler.Console.warn(`*** Mismatch between installed framework version (${framework_version}) and/or compiler version (${compiler_version}) and the declared dependencies in the Manifest.`); } else { manifestModel .setValue("requires.@qooxdoo/compiler", "^" + compiler_version) .setValue("requires.@qooxdoo/framework", "^" + framework_version); manifestModel.setWarnOnly(false); // now model should validate await manifestModel.save(); if (!this.argv.quiet) { qx.tool.compiler.Console.info(`Updated dependencies in ${manifestModel.getRelativeDataPath()}.`); } } } manifestModel.setValidate(true); } if (!announceOnly) { let compileJsonFilename = path.join(process.cwd(), "compile.json"); let replaceInFiles = [{ files: compileJsonFilename, from: "\"qx/browser\"", to: "\"@qooxdoo/qx/browser\"" }]; await this.migrate([compileJsonFilename], replaceInFiles); } let compileJsFilename = path.join(process.cwd(), "compile.js"); if (await fs.existsAsync(compileJsFilename)) { let data = await fs.readFileAsync(compileJsFilename, "utf8"); if (data.indexOf("module.exports") < 0) { qx.tool.compiler.Console.warn("*** Your compile.js appears to be missing a `module.exports` statement - please see https://git.io/fjBqU for more details"); } } self.migrationInProcess = false; if (needFix) { if (announceOnly) { qx.tool.compiler.Console.error(`*** Try executing 'qx package migrate' to apply the changes. Alternatively, upgrade or downgrade framework and/or compiler to match the library dependencies.`); process.exit(1); } qx.tool.compiler.Console.info("Migration completed."); } else if (!announceOnly && !this.argv.quiet) { qx.tool.compiler.Console.info("Everything is up-to-date. No migration necessary."); } } } });<|fim▁end|>
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/* */ var htmlparser = require('htmlparser2'); var _ = require('lodash'); var quoteRegexp = require('regexp-quote'); module.exports = sanitizeHtml; // Ignore the _recursing flag; it's there for recursive // invocation as a guard against this exploit: // https://github.com/fb55/htmlparser2/issues/105 function sanitizeHtml(html, options, _recursing) { var result = ''; <|fim▁hole|> function Frame(tag, attribs) { var that = this; this.tag = tag; this.attribs = attribs || {}; this.tagPosition = result.length; this.text = ''; // Node inner text this.updateParentNodeText = function() { if (stack.length) { var parentFrame = stack[stack.length - 1]; parentFrame.text += that.text; } }; } if (!options) { options = sanitizeHtml.defaults; } else { _.defaults(options, sanitizeHtml.defaults); } // Tags that contain something other than HTML. If we are not allowing // these tags, we should drop their content too. For other tags you would // drop the tag but keep its content. 
var nonTextTagsMap = { script: true, style: true }; var allowedTagsMap; if(options.allowedTags) { allowedTagsMap = {}; _.each(options.allowedTags, function(tag) { allowedTagsMap[tag] = true; }); } var selfClosingMap = {}; _.each(options.selfClosing, function(tag) { selfClosingMap[tag] = true; }); var allowedAttributesMap; var allowedAttributesGlobMap; if(options.allowedAttributes) { allowedAttributesMap = {}; allowedAttributesGlobMap = {}; _.each(options.allowedAttributes, function(attributes, tag) { allowedAttributesMap[tag] = {}; var globRegex = []; _.each(attributes, function(name) { if(name.indexOf('*') >= 0) { globRegex.push(quoteRegexp(name).replace(/\\\*/g, '.*')); } else { allowedAttributesMap[tag][name] = true; } }); allowedAttributesGlobMap[tag] = new RegExp('^(' + globRegex.join('|') + ')$'); }); } var allowedClassesMap = {}; _.each(options.allowedClasses, function(classes, tag) { // Implicitly allows the class attribute if(allowedAttributesMap) { if (!allowedAttributesMap[tag]) { allowedAttributesMap[tag] = {}; } allowedAttributesMap[tag]['class'] = true; } allowedClassesMap[tag] = {}; _.each(classes, function(name) { allowedClassesMap[tag][name] = true; }); }); var transformTagsMap = {}; _.each(options.transformTags, function(transform, tag){ if (typeof transform === 'function') { transformTagsMap[tag] = transform; } else if (typeof transform === "string") { transformTagsMap[tag] = sanitizeHtml.simpleTransform(transform); } }); var depth = 0; var stack = []; var skipMap = {}; var transformMap = {}; var skipText = false; var parser = new htmlparser.Parser({ onopentag: function(name, attribs) { var frame = new Frame(name, attribs); stack.push(frame); var skip = false; if (_.has(transformTagsMap, name)) { var transformedTag = transformTagsMap[name](name, attribs); frame.attribs = attribs = transformedTag.attribs; if (name !== transformedTag.tagName) { frame.name = name = transformedTag.tagName; transformMap[depth] = transformedTag.tagName; } } if 
(allowedTagsMap && !_.has(allowedTagsMap, name)) { skip = true; if (_.has(nonTextTagsMap, name)) { skipText = true; } skipMap[depth] = true; } depth++; if (skip) { // We want the contents but not this tag return; } result += '<' + name; if (!allowedAttributesMap || _.has(allowedAttributesMap, name)) { _.each(attribs, function(value, a) { if (!allowedAttributesMap || _.has(allowedAttributesMap[name], a) || (_.has(allowedAttributesGlobMap, name) && allowedAttributesGlobMap[name].test(a))) { if ((a === 'href') || (a === 'src')) { if (naughtyHref(value)) { delete frame.attribs[a]; return; } } if (a === 'class') { value = filterClasses(value, allowedClassesMap[name]); if (!value.length) { delete frame.attribs[a]; return; } } result += ' ' + a; if (value.length) { result += '="' + escapeHtml(value) + '"'; } } else { delete frame.attribs[a]; } }); } if (_.has(selfClosingMap, name)) { result += " />"; } else { result += ">"; } }, ontext: function(text) { if (skipText) { return; } var tag = stack[stack.length-1] && stack[stack.length-1].tag; if (_.has(nonTextTagsMap, tag)) { result += text; } else { var escaped = escapeHtml(text); if (options.textFilter) { result += options.textFilter(escaped); } else { result += escaped; } } if (stack.length) { var frame = stack[stack.length - 1]; frame.text += text; } }, onclosetag: function(name) { var frame = stack.pop(); if (!frame) { // Do not crash on bad markup return; } skipText = false; depth--; if (skipMap[depth]) { delete skipMap[depth]; frame.updateParentNodeText(); return; } if (transformMap[depth]) { name = transformMap[depth]; delete transformMap[depth]; } if (options.exclusiveFilter && options.exclusiveFilter(frame)) { result = result.substr(0, frame.tagPosition); return; } frame.updateParentNodeText(); if (_.has(selfClosingMap, name)) { // Already output /> return; } result += "</" + name + ">"; } }, { decodeEntities: true }); parser.write(html); parser.end(); return result; function escapeHtml(s) { if (typeof(s) !== 
'string') { s = s + ''; } return s.replace(/\&/g, '&amp;').replace(/</g, '&lt;').replace(/\>/g, '&gt;').replace(/\"/g, '&quot;'); } function naughtyHref(href) { // Browsers ignore character codes of 32 (space) and below in a surprising // number of situations. Start reading here: // https://www.owasp.org/index.php/XSS_Filter_Evasion_Cheat_Sheet#Embedded_tab href = href.replace(/[\x00-\x20]+/g, ''); // Clobber any comments in URLs, which the browser might // interpret inside an XML data island, allowing // a javascript: URL to be snuck through href = href.replace(/<\!\-\-.*?\-\-\>/g, ''); // Case insensitive so we don't get faked out by JAVASCRIPT #1 var matches = href.match(/^([a-zA-Z]+)\:/); if (!matches) { // No scheme = no way to inject js (right?) return false; } var scheme = matches[1].toLowerCase(); return (!_.contains(options.allowedSchemes, scheme)); } function filterClasses(classes, allowed) { if (!allowed) { // The class attribute is allowed without filtering on this tag return classes; } classes = classes.split(/\s+/); return _.filter(classes, function(c) { return _.has(allowed, c); }).join(' '); } } // Defaults are accessible to you so that you can use them as a starting point // programmatically if you wish sanitizeHtml.defaults = { allowedTags: [ 'h3', 'h4', 'h5', 'h6', 'blockquote', 'p', 'a', 'ul', 'ol', 'nl', 'li', 'b', 'i', 'strong', 'em', 'strike', 'code', 'hr', 'br', 'div', 'table', 'thead', 'caption', 'tbody', 'tr', 'th', 'td', 'pre' ], allowedAttributes: { a: [ 'href', 'name', 'target' ], // We don't currently allow img itself by default, but this // would make sense if we did img: [ 'src' ] }, // Lots of these won't come up by default because we don't allow them selfClosing: [ 'img', 'br', 'hr', 'area', 'base', 'basefont', 'input', 'link', 'meta' ], // URL schemes we permit allowedSchemes: [ 'http', 'https', 'ftp', 'mailto' ] }; sanitizeHtml.simpleTransform = function(newTagName, newAttribs, merge) { merge = (merge === undefined) ? 
true : merge; newAttribs = newAttribs || {}; return function(tagName, attribs) { var attrib; if (merge) { for (attrib in newAttribs) { attribs[attrib] = newAttribs[attrib]; } } else { attribs = newAttribs; } return { tagName: newTagName, attribs: attribs }; }; };<|fim▁end|>
<|file_name|>async-gen-method-yield-as-identifier-reference-escaped.js<|end_file_name|><|fim▁begin|>// This file was procedurally generated from the following sources: // - src/async-generators/yield-as-identifier-reference-escaped.case // - src/async-generators/syntax/async-class-expr-method.template /*--- description: yield is a reserved keyword within generator function bodies and may not be used as an identifier reference. (Async generator method as a ClassExpression element) esid: prod-AsyncGeneratorMethod features: [async-iteration] flags: [generated] negative: phase: parse type: SyntaxError info: | ClassElement : MethodDefinition MethodDefinition : AsyncGeneratorMethod Async Generator Function Definitions AsyncGeneratorMethod : async [no LineTerminator here] * PropertyName ( UniqueFormalParameters ) { AsyncGeneratorBody } IdentifierReference : Identifier It is a Syntax Error if this production has a [Yield] parameter and StringValue of Identifier is "yield". <|fim▁hole|>---*/ throw "Test262: This statement should not be evaluated."; var C = class { async *gen() { void yi\u0065ld; }};<|fim▁end|>
<|file_name|>issue_371.rs<|end_file_name|><|fim▁begin|>//! Checks that `executor.look_ahead().field_name()` is correct in presence of //! multiple query fields. //! See [#371](https://github.com/graphql-rust/juniper/issues/371) for details. //! //! Original author of this test is [@davidpdrsn](https://github.com/davidpdrsn). use juniper::{ graphql_object, graphql_vars, EmptyMutation, EmptySubscription, Executor, LookAheadMethods as _, RootNode, ScalarValue, }; pub struct Context; impl juniper::Context for Context {} pub struct Query; #[graphql_object(context = Context)] impl Query { fn users<__S: ScalarValue>(executor: &Executor<'_, '_, Context, __S>) -> Vec<User> { let lh = executor.look_ahead(); assert_eq!(lh.field_name(), "users"); vec![User] } fn countries<__S: ScalarValue>(executor: &Executor<'_, '_, Context, __S>) -> Vec<Country> { let lh = executor.look_ahead(); assert_eq!(lh.field_name(), "countries"); vec![Country] } } #[derive(Clone)] pub struct User; #[graphql_object(context = Context)] impl User { fn id() -> i32 { 1 } } #[derive(Clone)] pub struct Country; <|fim▁hole|>impl Country { fn id() -> i32 { 2 } } type Schema = RootNode<'static, Query, EmptyMutation<Context>, EmptySubscription<Context>>; #[tokio::test] async fn users() { let query = "{ users { id } }"; let schema = Schema::new(Query, EmptyMutation::new(), EmptySubscription::new()); let (_, errors) = juniper::execute(query, None, &schema, &graphql_vars! {}, &Context) .await .unwrap(); assert_eq!(errors.len(), 0); } #[tokio::test] async fn countries() { let query = "{ countries { id } }"; let schema = Schema::new(Query, EmptyMutation::new(), EmptySubscription::new()); let (_, errors) = juniper::execute(query, None, &schema, &graphql_vars! 
{}, &Context) .await .unwrap(); assert_eq!(errors.len(), 0); } #[tokio::test] async fn both() { let query = "{ countries { id } users { id } }"; let schema = Schema::new(Query, EmptyMutation::new(), EmptySubscription::new()); let (_, errors) = juniper::execute(query, None, &schema, &graphql_vars! {}, &Context) .await .unwrap(); assert_eq!(errors.len(), 0); } #[tokio::test] async fn both_in_different_order() { let query = "{ users { id } countries { id } }"; let schema = Schema::new(Query, EmptyMutation::new(), EmptySubscription::new()); let (_, errors) = juniper::execute(query, None, &schema, &graphql_vars! {}, &Context) .await .unwrap(); assert_eq!(errors.len(), 0); }<|fim▁end|>
#[graphql_object]
<|file_name|>connector.oracle.ts<|end_file_name|><|fim▁begin|>import {DBConnector, SqlBinding, SqlMaker} from './index'; class OracleSqlMaker extends SqlMaker { quote(value: string) { return `"${value.replace(/"/g, '""')}"`; } insert(table: string, columns: string[], sql_values: SqlBinding[], output_columns: string[]) : SqlBinding { let sql =`INSERT INTO ${this.quote(table)} (${columns.map(c => this.quote(c)).join(',')}) VALUES (${this.join_sqls(sql_values, ',')})`; if (output_columns.length > 0) sql += ` RETURNING ${output_columns.map((c, i) => this.quote(c)).join(',')} INTO ${output_columns.map((c, i) => `:r${i}`).join(',')}`; return { sql: sql, bind: this.join_bindings(sql_values) }; } admin_create_table_column_type(type: SqlMaker.ColumnType) { switch (type.is) { case 'integer': switch (type.bytes) { case 2: return 'NUMBER( 5,0)'; case 4: return 'NUMBER(10,0)'; case 8: return 'NUMBER(19,0)'; } return 'NUMBER(10,0)'; case 'autoincrement': return type.bytes === 4 ? 'NUMBER(10,0)' : 'NUMBER(19,0)'; case 'string': return `NVARCHAR2(${type.max_bytes})`; case 'text': return `NCLOB`; case 'decimal': return `NUMBER(${type.precision}, ${type.scale})`; case 'binary': return 'BLOB'; case 'double': return 'BINARY_DOUBLE'; case 'float': return 'BINARY_FLOAT'; case 'boolean': return 'NUMBER(1)'; } } admin_create_table(table: SqlMaker.Table) : SqlBinding[] { let ret = super.admin_create_table(table); for (let column of table.columns) { if (column.type.is === "autoincrement") { ret.push({ sql: `CREATE SEQUENCE ${this.quote(`${table.name}_${column.name}_seq`)} START WITH 1`, bind: [] }); ret.push({ sql: `CREATE OR REPLACE TRIGGER ${this.quote(`${table.name}_${column.name}_autoincrement`)} BEFORE INSERT ON ${this.quote(table.name)} FOR EACH ROW BEGIN SELECT ${this.quote(`${table.name}_${column.name}_seq`)}.NEXTVAL INTO :new."id" FROM dual; END;`, bind: [] }); } } return ret; } select_table_list() : SqlBinding { return { sql: `SELECT OWNER || "." 
|| TABLE_NAME table_name FROM ALL_TABLES`, bind: [] }; } select_index_list() : SqlBinding { return { sql: `SELECT OWNER || "." || INDEX_NAME index_name, OWNER || "." || TABLE_NAME table_name FROM ALL_INDEXES`, bind: [] }; } } export const OracleDBConnectorFactory = DBConnector.createSimple<any, { connectString: string, user?: string, password?: string }, any>({ maker: new OracleSqlMaker(), create(oracledb, options) { return oracledb.getConnection(options);<|fim▁hole|> return db.close(); }, select(oracledb, db, sql_select: SqlBinding) : Promise<object[]> { return new Promise<any>((resolve, reject) => { db.execute(sql_select.sql, sql_select.bind, (err, result) => { if (err) return reject(err); let rows = result.rows.map(row => { let r = {}; row.map((v, i) => r[result.metaData[i].name] = v); return r; }); resolve(rows); }); }); }, update(oracledb, db, sql_update: SqlBinding) : Promise<number> { return new Promise<any>((resolve, reject) => { db.execute(sql_update.sql, sql_update.bind, (err, result) => err ? reject(err) : resolve(result.rowsAffected)); }); }, delete(oracledb, db, sql_update: SqlBinding) : Promise<number> { return new Promise<any>((resolve, reject) => { db.execute(sql_update.sql, sql_update.bind, (err, result) => err ? reject(err) : resolve(result.rowsAffected)); }); }, insert(oracledb, db, sql_insert: SqlBinding, output_columns) : Promise<any[]> { return new Promise<any>((resolve, reject) => { let bind: object = {}; sql_insert.bind.forEach((v, i) => bind[`i${i}`] = v); output_columns.forEach((v, i) => bind[`r${i}`] = { type: oracledb.NUMBER, dir: oracledb.BIND_OUT }); db.execute(sql_insert.sql, bind, (err, result) => { err ? reject(err) : resolve(output_columns.map((c, i) => result.outBinds[`r${i}`][0])) }); }); }, run(oracledb, db, sql: SqlBinding) : Promise<any> { return new Promise<any>((resolve, reject) => { db.execute(sql.sql, sql.bind, (err, result) => err ? 
reject(err) : resolve()); }); }, beginTransaction(oracledb, db): Promise<void> { return new Promise<any>((resolve, reject) => { db.execute("SET TRANSACTION READ WRITE", [], (err) => err ? reject(err) : resolve()) }); }, commit(oracledb, db): Promise<void> { return db.commit(); }, rollback(oracledb, db): Promise<void> { return db.rollback(); }, transform(sql) { return DBConnector.transformBindings(sql, idx => `:i${idx}`); }, });<|fim▁end|>
}, destroy(oracledb, db) {
<|file_name|>Bluetooth.py<|end_file_name|><|fim▁begin|>import serial from sys import platform as platform import serial.tools.list_ports import serial.threaded from pymouse import PyMouse from Voice.GoogleTTS import speak import threading import math import copy import time import json data_repository_right = { "id" : [], "name" : [], "shortcuts" : [], "time_period": [], "0":[], # "max_acc_@R_x" : [], "1":[], # "max_acc_^R_x": [], "2":[], # "max_acc_#R_x": [], "3":[], # "max_acc_$R_x": [], "4":[], # "max_acc_%R_x": [], "5":[], # "max_acc_@R_y" : [], "6":[], # "max_acc_^R_y": [], "7":[], # "max_acc_#R_y": [], "8":[], # "max_acc_$R_y": [], "9":[], # "max_acc_%R_y": [], "10":[], # "max_acc_@R_z": [], "11":[], # "max_acc_^R_z": [], "12":[], # "max_acc_#R_z": [], "13":[], # "max_acc_$R_z": [], "14":[], # "max_acc_%R_z": [], "15":[], # "min_acc_@R_x": [], "16":[], # "min_acc_^R_x": [], "17":[], # "min_acc_#R_x": [], "18":[], # "min_acc_$R_x": [], "19":[], # "min_acc_%R_x": [], "20":[], # "min_acc_@R_y": [], "21":[], # "min_acc_^R_y": [], "22":[], # "min_acc_#R_y": [], "23":[], # "min_acc_$R_y": [], "24":[], # "min_acc_%R_y": [], "25":[], # "min_acc_@R_z": [], "26":[], # "min_acc_^R_z": [], "27":[], # "min_acc_#R_z": [], "28":[], # "min_acc_$R_z": [], "29":[], # "min_acc_%R_z": [], "30":[], # "start_angle_@R_x":[], "31":[], # "start_angle_^R_x": [], "32":[], # "start_angle_#R_x": [], "33":[], # "start_angle_$R_x": [], "34":[], # "start_angle_%R_x": [], "35":[], # "start_angle_@R_y": [], "36":[], # "start_angle_^R_y": [], "37":[], # "start_angle_#R_y": [], "38":[], # "start_angle_$R_y": [], "39":[], # "start_angle_%R_y": [], "40":[], # "start_angle_@R_z": [], "41":[], # "start_angle_^R_z": [], "42":[], # "start_angle_#R_z": [], "43":[], # "start_angle_$R_z": [], "44":[], # "start_angle_%R_z": [], "45":[], # "end_angle_@R_x": [], "46":[], # "end_angle_^R_x": [], "47":[], # "end_angle_#R_x": [], "48":[], # "end_angle_$R_x": [], "49":[], # "end_angle_%R_x": [], "50":[], # 
"end_angle_@R_y": [], "51":[], # "end_angle_^R_y": [], "52":[], # "end_angle_#R_y": [], "53":[], # "end_angle_$R_y": [], "54":[], # "end_angle_%R_y": [], "55":[], # "end_angle_@R_z": [], "56":[], # "end_angle_^R_z": [], "57":[], # "end_angle_#R_z": [], "58":[], # "end_angle_$R_z": [], "59":[], # "end_angle_%R_z": [], } data_repository_left = { "id": [], "name": [], "shortcuts": [], "time_period": [], 0: [], # "max_acc_@L_x" : [], 1: [], # "max_acc_^L_x": [], 2: [], # "max_acc_#L_x": [], 3: [], # "max_acc_$L_x": [], 4: [], # "max_acc_%L_x": [], 5: [], # "max_acc_@L_y" : [], 6: [], # "max_acc_^L_y": [], 7: [], # "max_acc_#L_y": [], 8: [], # "max_acc_$L_y": [], 9: [], # "max_acc_%L_y": [], 10: [], # "max_acc_@L_z": [], 11: [], # "max_acc_^L_z": [], 12: [], # "max_acc_#L_z": [], 13: [], # "max_acc_$L_z": [], 14: [], # "max_acc_%L_z": [], 15: [], # "min_acc_@L_x": [], 16: [], # "min_acc_^L_x": [], 17: [], # "min_acc_#L_x": [], 18: [], # "min_acc_$L_x": [], 19: [], # "min_acc_%L_x": [], 20: [], # "min_acc_@L_y": [], 21: [], # "min_acc_^L_y": [], 22: [], # "min_acc_#L_y": [], 23: [], # "min_acc_$L_y": [], 24: [], # "min_acc_%L_y": [], 25: [], # "min_acc_@L_z": [], 26: [], # "min_acc_^L_z": [], 27: [], # "min_acc_#L_z": [], 28: [], # "min_acc_$L_z": [], 29: [], # "min_acc_%L_z": [], 30: [], # "start_angle_@L_x":[], 31: [], # "start_angle_^L_x": [], 32: [], # "start_angle_#L_x": [], 33: [], # "start_angle_$L_x": [], 34: [], # "start_angle_%L_x": [], 35: [], # "start_angle_@L_y": [], 36: [], # "start_angle_^L_y": [], 37: [], # "start_angle_#L_y": [], 38: [], # "start_angle_$L_y": [], 39: [], # "start_angle_%L_y": [], 40: [], # "start_angle_@L_z": [], 41: [], # "start_angle_^L_z": [], 42: [], # "start_angle_#L_z": [], 43: [], # "start_angle_$L_z": [], 44: [], # "start_angle_%L_z": [], 45: [], # "end_angle_@L_x": [], 46: [], # "end_angle_^L_x": [], 47: [], # "end_angle_#L_x": [], 48: [], # "end_angle_$L_x": [], 49: [], # "end_angle_%L_x": [], 50: [], # "end_angle_@L_y": [], 
51: [], # "end_angle_^L_y": [], 52: [], # "end_angle_#L_y": [], 53: [], # "end_angle_$L_y": [], 54: [], # "end_angle_%L_y": [], 55: [], # "end_angle_@L_z": [], 56: [], # "end_angle_^L_z": [], 57: [], # "end_angle_#L_z": [], 58: [], # "end_angle_$L_z": [], 59: [], # "end_angle_%L_z": [], } right_data = { 0: 0, # "acc_@R_x" 1: 0, # "acc_^R_x" 2: 0, # "acc_#R_x" 3: 0, # "acc_$R_x" 4: 0, # "acc_%R_x" 5: 0, # "acc_@R_y" 6: 0, # "acc_^R_y" 7: 0, # "acc_#R_y" 8: 0, # "acc_$R_y" 9: 0, # "acc_%R_y" 10: 0, # "acc_@R_z" 11: 0, # "acc_^R_z" 12: 0, # "acc_#R_z" 13: 0, # "acc_$R_z" 14: 0, # "acc_%R_z" 15: 0, # "angle_@R_x" 16: 0, # "angle_^R_x" 17: 0, # "angle_#R_x" 18: 0, # "angle_$R_x" 19: 0, # "angle_%R_x" 20: 0, # "angle_@R_y" 21: 0, # "angle_^R_y" 22: 0, # "angle_#R_y" 23: 0, # "angle_$R_y" 24: 0, # "angle_%R_y" 25: 0, # "angle_@R_z" 26: 0, # "angle_^R_z" 27: 0, # "angle_#R_z" 28: 0, # "angle_$R_z" 29: 0 # "angle_%R_z" } left_data = { 0: 0, # "acc_@L_x" 1: 0, # "acc_^L_x" 2: 0, # "acc_#L_x" 3: 0, # "acc_$L_x" 4: 0, # "acc_%L_x" 5: 0, # "acc_@L_y" 6: 0, # "acc_^L_y" 7: 0, # "acc_#L_y" 8: 0, # "acc_$L_y" 9: 0, # "acc_%L_y" 10: 0, # "acc_@L_z" 11: 0, # "acc_^L_z" 12: 0, # "acc_#L_z" 13: 0, # "acc_$L_z" 14: 0, # "acc_%L_z" 15: 0, # "angle_@L_x" 16: 0, # "angle_^L_x" 17: 0, # "angle_#L_x" 18: 0, # "angle_$L_x" 19: 0, # "angle_%L_x" 20: 0, # "angle_@L_y" 21: 0, # "angle_^L_y" 22: 0, # "angle_#L_y" 23: 0, # "angle_$L_y" 24: 0, # "angle_%L_y" 25: 0, # "angle_@L_z" 26: 0, # "angle_^L_z" 27: 0, # "angle_#L_z" 28: 0, # "angle_$L_z" 29: 0 # "angle_%L_z" } pre_right_data = copy.deepcopy(right_data) pre_left_data = copy.deepcopy(left_data) average_right_data = copy.deepcopy(right_data) movement_Sensitivity_x= 2 movement_Sensitivity_y= 2 movement_Sensitivity_z= 2 threshold_movement_Sensitivity = 14000 recognition_Gap_Interval = 200 initial_Gap_Interval = 200 angle_tolerance = 5 acc_tolerance = 0.5 def get_OS_Right(): port = "/dev/tty.Right-DevB" # LINUX if platform == "linux" or platform 
== "linux2": port = "/dev/tty.Right-DevB" # MAC OS elif platform == "darwin": port = "/dev/tty.Right-DevB" # WINDOWS elif platform == "win32": port = "COM4" return port def get_OS_Left(): port = "/dev/tty.LEFT-DevB" # LINUX if platform == "linux" or platform == "linux2": port = "/dev/tty.LEFT-DevB" # MAC OS elif platform == "darwin": port = "/dev/tty.LEFT-DevB" # WINDOWS elif platform == "win32": port = "COM4" return port def bluetooth(serRight, serLeft, recognitionFlag=0): global pre_right_data global pre_left_data global average_right_data global right_data global left_data global data_repository_right iteration_Count = 0 averageFlag = True #------Recognition variables-------------- recognitionCount = 0 recognitionGapCount = 0 start_time = 0 recognitionMode = False #Get current id try: curr_id = data_repository_right["id"][-1] + 1 except: curr_id = 0 initialize_data_repository_right() while True: # %: Pinky finger, ^: index finger, @: thumb, $: ring #-------------RIGHT HAND-------------------------------- try: line = serRight.readline() line = line.decode('utf-8') line = line.strip('\r') line = line.strip('\n') if "@" in line: #THUMB #print(line[0]) right_data[0] = get_data(serRight) #print(right_data[0]) right_data[5] = get_data(serRight) # Meter per seconds square #print(right_data[5]) right_data[10] = get_data(serRight) #print(right_data[10]) right_data[15] = get_data(serRight) #print(right_data[15]) right_data[20] = get_data(serRight) # Angle in degrees #print(right_data[20]) right_data[25] = get_data(serRight) #print(right_data[25]) elif "^" in line: #INDEX FINGER #print(line[0]) right_data[1] = get_data(serRight) #print(right_data[1]) right_data[6] = get_data(serRight) # Meter per seconds square #print(right_data[6]) right_data[11] = get_data(serRight) #print(right_data[11]) right_data[16] = get_data(serRight) #print(right_data[16]) right_data[21] = get_data(serRight) # Angle in degrees #print(right_data[21]) right_data[26] = get_data(serRight) 
#print(right_data[26]) elif "#" in line: #MIDDLE FINGER #print(line[0]) right_data[2] = get_data(serRight) #print(right_data[2]) right_data[7] = get_data(serRight) # Meter per seconds square #print(right_data[7]) right_data[12] = get_data(serRight) #print(right_data[12]) right_data[17] = get_data(serRight) #print(right_data[17]) right_data[22] = get_data(serRight) # Angle in degrees #print(right_data[22]) right_data[27] = get_data(serRight) #print(right_data[27]) elif "$" in line: #RING FINGER #print(line[0]) right_data[3] = get_data(serRight) #print(right_data[3]) right_data[8] = get_data(serRight) # Meter per seconds square #print(right_data[8]) right_data[13] = get_data(serRight) #print(right_data[13]) right_data[18] = get_data(serRight) #print(right_data[18]) right_data[23] = get_data(serRight) # Angle in degrees #print(right_data[23]) right_data[28] = get_data(serRight) #print(right_data[28]) elif "%" in line: #PINKY FINGER #print(line[0]) right_data[4] = get_data(serRight) #print(right_data[4]) right_data[9] = get_data(serRight) # Meter per seconds square #print(right_data[9]) right_data[14] = get_data(serRight) #print(right_data[14]) right_data[19] = get_data(serRight) #print(right_data[19]) right_data[24] = get_data(serRight) # Angle in degrees #print(right_data[14]) right_data[29] = get_data(serRight) #print(right_data[29]) except Exception as e: print("Exception", format(e)) pass # Refining by taking average of values if iteration_Count < initial_Gap_Interval and averageFlag == True: count = 0 for curr_Key in right_data: if count > 14: break average_right_data[curr_Key] += right_data[curr_Key] elif iteration_Count >= initial_Gap_Interval and averageFlag == True: count = 0 for curr_Key in right_data: if count > 14: break try: average_right_data[curr_Key] /= initial_Gap_Interval except: pass count += 1 averageFlag = False elif iteration_Count >= initial_Gap_Interval and averageFlag == False: count = 0 for curr_Key in right_data: if count > 14: break try: 
right_data[curr_Key] /= average_right_data[curr_Key] except: pass count += 1 if recognitionFlag != 1: for eachID in data_repository_right["id"]: fingerCount = 0 #Finger Recognised count for max_x, max_y, max_z, min_x, min_y, min_z, start_angle_x, start_angle_y, start_angle_z, right_x, right_y, right_z, right_angle_x, right_angle_y, right_angle_z in zip(list(range(0,5)), list(range(5, 10)), list(range(10, 15)), list(range(15, 20)), list(range(20, 25)), list(range(25, 30)), list(range(30, 35)), list(range(35, 40)), list(range(40, 45)), list(range(0, 5)), list(range(5, 10)),list(range(10, 15)),list(range(15, 20)),list(range(20, 25)),list(range(25, 30))): if (right_data[right_x] > data_repository_right[str(max_x)][eachID] - acc_tolerance)\ and (right_data[right_x] < data_repository_right[str(max_x)][eachID] + acc_tolerance)\ and (right_data[right_y] > data_repository_right[str(max_y)][eachID] - acc_tolerance)\ and (right_data[right_y] < data_repository_right[str(max_y)][eachID] + acc_tolerance)\ and (right_data[right_z] > data_repository_right[str(max_z)][eachID] - acc_tolerance)\ and (right_data[right_z] < data_repository_right[str(max_z)][eachID] + acc_tolerance)\ and (right_data[right_angle_x] < (data_repository_right[str(start_angle_x)][eachID] + angle_tolerance))\ and (right_data[right_angle_x] > (data_repository_right[str(start_angle_x)][eachID] - angle_tolerance))\ and (right_data[right_angle_y] < (data_repository_right[str(start_angle_y)][eachID] + angle_tolerance))\ and (right_data[right_angle_y] > (data_repository_right[str(start_angle_y)][eachID] - angle_tolerance))\ and (right_data[right_angle_z] < (data_repository_right[str(start_angle_z)][eachID] + angle_tolerance))\ and (right_data[right_angle_z] > (data_repository_right[str(start_angle_z)][eachID] - angle_tolerance)): fingerCount += 1 if fingerCount == 3: print("Initial condition true") else: print("not matched", "\t", fingerCount) #print(data_repository_right, end="\n\n") #print(right_data, end="\n\n") 
# ----------------RECOGNITION---------------------------- i=0 j=0 pos=0 match = False while(i<len(data_repository_right.get(0))): while(j+15<60): #If current data of Thumb (angles and accln) is greater than min and less than max value if(right_data.get(j) < data_repository_right.get(j)[i]) and (right_data.get(j) > data_repository_right.get(j+15)[i]): pos = i match = True else: match = False j = j+5 if (j==15): j=30 i+=1 if match: shortcut = data_repository_right.get("shortcuts")[pos] #Implement Shortcut if recognitionFlag == 1 and iteration_Count > initial_Gap_Interval: if recognitionCount > 5: print(data_repository_right) print("Ok Recognized") recognitionFlag = 0 try: with open('DataRepositoryRight.json', 'w') as outfile: json.dump(data_repository_right, outfile) except: print("Could not write DataRepositoryRight.json") #return else: print("Repeat", recognitionCount) curr_time = time.time() for x_values, y_values, z_values in zip(list(range(5)), list(range(5, 10)),list(range(10, 15))): #only x, y, z acceleration values of each finger if math.fabs(right_data[x_values]) > movement_Sensitivity_x and math.fabs(right_data[y_values]) > movement_Sensitivity_y and math.fabs(right_data[z_values]) > movement_Sensitivity_z: if recognitionMode == False: print("Recognition period ON", "True") start_time = curr_time store_gesture(False, "right",name="Dummy", shortcuts="dummy", curr_id= curr_id) recognitionMode = True elif recognitionMode == True and recognitionGapCount > recognition_Gap_Interval: recognitionMode = False time_period = curr_time - start_time store_gesture(True, "right", time=time_period , curr_id=curr_id) print("Recognition period OFF", "False") recognitionCount += 1 recognitionGapCount = 0 break #----------------------------------------END---------------- pre_right_data = copy.deepcopy(right_data) pre_left_data = copy.deepcopy(left_data) iteration_Count += 1 if recognitionMode == True: recognitionGapCount += 1 def initialize_data_repository_right(): global 
data_repository_right data_repository_right["id"].append(0) data_repository_right["name"].append(" ") data_repository_right["shortcuts"].append(" ") data_repository_right["time_period"].append(0) for i in list(range(60)): data_repository_right[str(i)].append(0) def store_gesture(recognitionModeEnd, hand="right", time= 0, name="Dummy", shortcuts="dummy", curr_id = 0): if hand == "right": if recognitionModeEnd == False: data_repository_right["id"][curr_id] = curr_id data_repository_right["name"][curr_id] = name data_repository_right["shortcuts"][curr_id] = shortcuts for i in list(range(15)): # Max Acceleration # val = get_data_from_Data_Repository(str(i), curr_id) # if val < right_data[i]: data_repository_right[str(i)][curr_id] = right_data[i] for i, j in zip(list(range(15,30)), list(range(15))): #Min Acceleration # val = get_data_from_Data_Repository(str(i), curr_id) # if val > right_data[j] or val == 0: data_repository_right[str(i)][curr_id] = right_data[j] for i, j in zip(list(range(30, 45)), list(range(15, 30))): #Start Index # val = get_data_from_Data_Repository(str(i),curr_id) #if val == 0: # data_repository_right[str(i)][curr_id] = right_data[j] #else: data_repository_right[str(i)][curr_id] = right_data[j] #Average #------------------------------------------------------------------------------------------------ elif recognitionModeEnd == True: for i, j in zip(list(range(45, 60)), list(range(15, 30))): #End Index # val = get_data_from_Data_Repository(str(i), curr_id) <|fim▁hole|> # if val == 0: # data_repository_right[str(i)][curr_id] = right_data[j] # else: data_repository_right[str(i)][curr_id] = right_data[j] # val = get_data_from_Data_Repository("time_period", curr_id) # if val == 0: data_repository_right["time_period"][curr_id] = time # Time period # else: # data_repository_right["time_period"][curr_id] = (time + val) / 2 # Time period elif hand == "left": pass return def get_data_from_Data_Repository(key, curr_id): global data_repository_right try: val = 
data_repository_right[key][curr_id] except: val = 0 return val def mouse(acc_x, acc_y, acc_z, angle_x, angle_y, angle_z, pre_coor_x, pre_coor_y): # Condition for mouse ''' current_coor_x = dim_x current_coor_y = dim_y pre_coor_x = 0 pre_coor_y = 0 ''' m = PyMouse() dim_x, dim_y = m.screen_size() sensitivity = 10000 * 1.5 #between pixel_accel_x = (angle_x * 3779.5275591) / sensitivity # pixel per second square pixel_accel_y = (angle_y * 3779.5275591) / sensitivity pixel_accel_z = (angle_z * 3779.5275591) / sensitivity temp_dist_x = 0.5 * pixel_accel_x temp_dist_y = 0.5 * pixel_accel_y if temp_dist_x + pre_coor_x <= dim_x and temp_dist_x + pre_coor_x >= 0: current_coor_x = int(pre_coor_x + temp_dist_x) if temp_dist_y + pre_coor_y <= dim_y and temp_dist_y + pre_coor_y >= 0: current_coor_y = int(pre_coor_y + temp_dist_y) #m.move(current_coor_x, current_coor_y) print(current_coor_x, "\t", current_coor_y) pre_coor_x = current_coor_x pre_coor_y = current_coor_y return pre_coor_x, pre_coor_y def get_data(ser): line = ser.readline() line = line.decode() line = line.strip('\r') line = line.strip('\n') try: return int(line) except: return 0 def gesture_Recognition(): global data_repository_right global data_repository_left #Left and Right hand connection--------------------------------------------------------- serRight = serial.Serial(get_OS_Right(), baudrate=115200, timeout=1) print("Connected Right") # serLeft = serial.Serial(get_OS_Left(), baudrate=115200, timeout=1) # print("Connected Left") #Load Data repository ----------------------------------------------------------------------- try: with open('DataRepositoryRight.json', 'r') as inputFile: data_repository_right = json.load(inputFile) except: print("DataRepositoryRight.json file not found") try: with open('DataRepositoryLeft.json', 'r') as inputFile: data_repository_left = json.load(inputFile) except: print("DataRepositoryLeft.json file not found") 
#Connection----------------------------------------------------------------------------------------- if serRight.isOpen():# or serLeft.isOpen(): bluetooth(serRight,0, recognitionFlag=0) else: print("Both are unreachable") return 0 def main(): pass if __name__ == '__main__': gesture_Recognition()<|fim▁end|>
<|file_name|>articles.js<|end_file_name|><|fim▁begin|>/** * Shopware 5 * Copyright (c) shopware AG * * According to our dual licensing model, this program can be used either * under the terms of the GNU Affero General Public License, version 3, * or under a proprietary license. * * The texts of the GNU Affero General Public License with an additional * permission and of our proprietary license can be found at and * in the LICENSE file you have received along with this program. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * "Shopware" is a registered trademark of shopware AG. * The licensing of the program under the AGPLv3 does not imply a * trademark license. Therefore any rights, title and interest in * our trademarks remain entirely with us. * * @category Shopware * @package CanceledOrder * @subpackage Store * @version $Id$ * @author shopware AG */ /** * Shopware Store - canceled baskets' articles */ //{block name="backend/canceled_order/store/articles"} Ext.define('Shopware.apps.CanceledOrder.store.Articles', { extend: 'Ext.data.Store', // Do not load data, when not explicitly requested autoLoad: false, model : 'Shopware.apps.CanceledOrder.model.Articles', remoteFilter: true, remoteSort: true, /** * Configure the data communication * @object */ proxy: { type: 'ajax', /** * Configure the url mapping * @object */<|fim▁hole|> /** * Configure the data reader * @object */ reader: { type: 'json', root: 'data', totalProperty:'total' } } }); //{/block}<|fim▁end|>
api: { read: '{url controller=CanceledOrder action="getArticle"}' },
<|file_name|>feature-gate-crate_visibility_modifier.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. crate struct Bender { //~ ERROR `crate` visibility modifier is experimental earth: bool,<|fim▁hole|> fn main() {}<|fim▁end|>
fire: bool, air: bool, water: bool, }
<|file_name|>sched.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use std::mem; use std::rt::local::Local; use std::rt::mutex::NativeMutex; use std::rt::rtio::{RemoteCallback, PausableIdleCallback, Callback, EventLoop}; use std::rt::task::BlockedTask; use std::rt::task::Task; use std::sync::deque; use std::raw; use std::rand::{XorShiftRng, Rng, Rand}; use TaskState; use context::Context; use coroutine::Coroutine; use sleeper_list::SleeperList; use stack::StackPool; use task::{TypeSched, GreenTask, HomeSched, AnySched}; use message_queue as msgq; /// A scheduler is responsible for coordinating the execution of Tasks /// on a single thread. The scheduler runs inside a slightly modified /// Rust Task. When not running this task is stored in the scheduler /// struct. The scheduler struct acts like a baton, all scheduling /// actions are transfers of the baton. /// /// FIXME: This creates too many callbacks to run_sched_once, resulting /// in too much allocation and too many events. pub struct Scheduler { /// ID number of the pool that this scheduler is a member of. When /// reawakening green tasks, this is used to ensure that tasks aren't /// reawoken on the wrong pool of schedulers. pub pool_id: uint, /// The pool of stacks that this scheduler has cached pub stack_pool: StackPool, /// Bookkeeping for the number of tasks which are currently running around /// inside this pool of schedulers pub task_state: TaskState, /// There are N work queues, one per scheduler. 
work_queue: deque::Worker<Box<GreenTask>>, /// Work queues for the other schedulers. These are created by /// cloning the core work queues. work_queues: Vec<deque::Stealer<Box<GreenTask>>>, /// The queue of incoming messages from other schedulers. /// These are enqueued by SchedHandles after which a remote callback /// is triggered to handle the message. message_queue: msgq::Consumer<SchedMessage>, /// Producer used to clone sched handles from message_producer: msgq::Producer<SchedMessage>, /// A shared list of sleeping schedulers. We'll use this to wake /// up schedulers when pushing work onto the work queue. sleeper_list: SleeperList, /// Indicates that we have previously pushed a handle onto the /// SleeperList but have not yet received the Wake message. /// Being `true` does not necessarily mean that the scheduler is /// not active since there are multiple event sources that may /// wake the scheduler. It just prevents the scheduler from pushing /// multiple handles onto the sleeper list. sleepy: bool, /// A flag to indicate we've received the shutdown message and should /// no longer try to go to sleep, but exit instead. no_sleep: bool, /// The scheduler runs on a special task. When it is not running /// it is stored here instead of the work queue. sched_task: Option<Box<GreenTask>>, /// An action performed after a context switch on behalf of the /// code running before the context switch cleanup_job: Option<CleanupJob>, /// If the scheduler shouldn't run some tasks, a friend to send /// them to. friend_handle: Option<SchedHandle>, /// Should this scheduler run any task, or only pinned tasks? run_anything: bool, /// A fast XorShift rng for scheduler use rng: XorShiftRng, /// A toggleable idle callback idle_callback: Option<Box<PausableIdleCallback + Send>>, /// A countdown that starts at a random value and is decremented /// every time a yield check is performed. When it hits 0 a task /// will yield. 
yield_check_count: uint, /// A flag to tell the scheduler loop it needs to do some stealing /// in order to introduce randomness as part of a yield steal_for_yield: bool, // n.b. currently destructors of an object are run in top-to-bottom in order // of field declaration. Due to its nature, the pausable idle callback // must have some sort of handle to the event loop, so it needs to get // destroyed before the event loop itself. For this reason, we destroy // the event loop last to ensure that any unsafe references to it are // destroyed before it's actually destroyed. /// The event loop used to drive the scheduler and perform I/O pub event_loop: Box<EventLoop + Send>, } /// An indication of how hard to work on a given operation, the difference /// mainly being whether memory is synchronized or not #[deriving(PartialEq)] enum EffortLevel { DontTryTooHard, GiveItYourBest } static MAX_YIELD_CHECKS: uint = 20000; fn reset_yield_check(rng: &mut XorShiftRng) -> uint { let r: uint = Rand::rand(rng); r % MAX_YIELD_CHECKS + 1 } impl Scheduler { // * Initialization Functions pub fn new(pool_id: uint, event_loop: Box<EventLoop + Send>, work_queue: deque::Worker<Box<GreenTask>>, work_queues: Vec<deque::Stealer<Box<GreenTask>>>, sleeper_list: SleeperList, state: TaskState) -> Scheduler { Scheduler::new_special(pool_id, event_loop, work_queue, work_queues, sleeper_list, true, None, state) } pub fn new_special(pool_id: uint, event_loop: Box<EventLoop + Send>, work_queue: deque::Worker<Box<GreenTask>>, work_queues: Vec<deque::Stealer<Box<GreenTask>>>, sleeper_list: SleeperList, run_anything: bool, friend: Option<SchedHandle>, state: TaskState) -> Scheduler { let (consumer, producer) = msgq::queue(); let mut sched = Scheduler { pool_id: pool_id, sleeper_list: sleeper_list, message_queue: consumer, message_producer: producer, sleepy: false, no_sleep: false, event_loop: event_loop, work_queue: work_queue, work_queues: work_queues, stack_pool: StackPool::new(), sched_task: None, 
cleanup_job: None, run_anything: run_anything, friend_handle: friend, rng: new_sched_rng(), idle_callback: None, yield_check_count: 0, steal_for_yield: false, task_state: state, }; sched.yield_check_count = reset_yield_check(&mut sched.rng); return sched; } // FIXME: This may eventually need to be refactored so that // the scheduler itself doesn't have to call event_loop.run. // That will be important for embedding the runtime into external // event loops. // Take a main task to run, and a scheduler to run it in. Create a // scheduler task and bootstrap into it. pub fn bootstrap(mut self: Box<Scheduler>) { // Build an Idle callback. let cb = box SchedRunner as Box<Callback + Send>; self.idle_callback = Some(self.event_loop.pausable_idle_callback(cb)); // Create a task for the scheduler with an empty context. let sched_task = GreenTask::new_typed(Some(Coroutine::empty()), TypeSched); // Before starting our first task, make sure the idle callback // is active. As we do not start in the sleep state this is // important. self.idle_callback.get_mut_ref().resume(); // Now, as far as all the scheduler state is concerned, we are inside // the "scheduler" context. The scheduler immediately hands over control // to the event loop, and this will only exit once the event loop no // longer has any references (handles or I/O objects). rtdebug!("starting scheduler {}", self.sched_id()); let mut sched_task = self.run(sched_task); // Close the idle callback. let mut sched = sched_task.sched.take().unwrap(); sched.idle_callback.take(); // Make one go through the loop to run the close callback. let mut stask = sched.run(sched_task); // Now that we are done with the scheduler, clean up the // scheduler task. Do so by removing it from TLS and manually // cleaning up the memory it uses. As we didn't actually call // task.run() on the scheduler task we never get through all // the cleanup code it runs. 
rtdebug!("stopping scheduler {}", stask.sched.get_ref().sched_id()); // Should not have any messages let message = stask.sched.get_mut_ref().message_queue.pop(); rtassert!(match message { msgq::Empty => true, _ => false }); stask.task.take().unwrap().drop(); } // This does not return a scheduler, as the scheduler is placed // inside the task. pub fn run(mut self: Box<Scheduler>, stask: Box<GreenTask>) -> Box<GreenTask> { // This is unsafe because we need to place the scheduler, with // the event_loop inside, inside our task. But we still need a // mutable reference to the event_loop to give it the "run" // command. unsafe { let event_loop: *mut Box<EventLoop + Send> = &mut self.event_loop; // Our scheduler must be in the task before the event loop // is started. stask.put_with_sched(self); (*event_loop).run(); } // This is a serious code smell, but this function could be done away // with if necessary. The ownership of `stask` was transferred into // local storage just before the event loop ran, so it is possible to // transmute `stask` as a uint across the running of the event loop to // re-acquire ownership here. // // This would involve removing the Task from TLS, removing the runtime, // forgetting the runtime, and then putting the task into `stask`. For // now, because we have `GreenTask::convert`, I chose to take this // method for cleanliness. This function is *not* a fundamental reason // why this function should exist. GreenTask::convert(Local::take()) } // * Execution Functions - Core Loop Logic // This function is run from the idle callback on the uv loop, indicating // that there are no I/O events pending. When this function returns, we will // fall back to epoll() in the uv event loop, waiting for more things to // happen. We may come right back off epoll() if the idle callback is still // active, in which case we're truly just polling to see if I/O events are // complete. 
// // The model for this function is to execute as much work as possible while // still fairly considering I/O tasks. Falling back to epoll() frequently is // often quite expensive, so we attempt to avoid it as much as possible. If // we have any active I/O on the event loop, then we're forced to fall back // to epoll() in order to provide fairness, but as long as we're doing work // and there's no active I/O, we can continue to do work. // // If we try really hard to do some work, but no work is available to be // done, then we fall back to epoll() to block this thread waiting for more // work (instead of busy waiting). fn run_sched_once(mut self: Box<Scheduler>, stask: Box<GreenTask>) { // Make sure that we're not lying in that the `stask` argument is indeed // the scheduler task for this scheduler. assert!(self.sched_task.is_none()); // Assume that we need to continue idling unless we reach the // end of this function without performing an action. self.idle_callback.get_mut_ref().resume(); // First we check for scheduler messages, these are higher // priority than regular tasks. let (mut sched, mut stask, mut did_work) = self.interpret_message_queue(stask, DontTryTooHard); // After processing a message, we consider doing some more work on the // event loop. The "keep going" condition changes after the first // iteration because we don't want to spin here infinitely. // // Once we start doing work we can keep doing work so long as the // iteration does something. Note that we don't want to starve the // message queue here, so each iteration when we're done working we // check the message queue regardless of whether we did work or not. 
let mut keep_going = !did_work || !sched.event_loop.has_active_io(); while keep_going { let (a, b, c) = match sched.do_work(stask) { (sched, task, false) => { sched.interpret_message_queue(task, GiveItYourBest) } (sched, task, true) => { let (sched, task, _) = sched.interpret_message_queue(task, GiveItYourBest); (sched, task, true) } }; sched = a; stask = b; did_work = c; // We only keep going if we managed to do something productive and // also don't have any active I/O. If we didn't do anything, we // should consider going to sleep, and if we have active I/O we need // to poll for completion. keep_going = did_work && !sched.event_loop.has_active_io(); } // If we ever did some work, then we shouldn't put our scheduler // entirely to sleep just yet. Leave the idle callback active and fall // back to epoll() to see what's going on. if did_work { return stask.put_with_sched(sched); } // If we got here then there was no work to do. // Generate a SchedHandle and push it to the sleeper list so // somebody can wake us up later. if !sched.sleepy && !sched.no_sleep { rtdebug!("scheduler has no work to do, going to sleep"); sched.sleepy = true; let handle = sched.make_handle(); sched.sleeper_list.push(handle); // Since we are sleeping, deactivate the idle callback. sched.idle_callback.get_mut_ref().pause(); } else { rtdebug!("not sleeping, already doing so or no_sleep set"); // We may not be sleeping, but we still need to deactivate // the idle callback. sched.idle_callback.get_mut_ref().pause(); } // Finished a cycle without using the Scheduler. Place it back // in TLS. stask.put_with_sched(sched); } // This function returns None if the scheduler is "used", or it // returns the still-available scheduler. At this point all // message-handling will count as a turn of work, and as a result // return None. 
fn interpret_message_queue(mut self: Box<Scheduler>, stask: Box<GreenTask>, effort: EffortLevel) -> (Box<Scheduler>, Box<GreenTask>, bool) { let msg = if effort == DontTryTooHard { self.message_queue.casual_pop() } else { // When popping our message queue, we could see an "inconsistent" // state which means that we *should* be able to pop data, but we // are unable to at this time. Our options are: // // 1. Spin waiting for data // 2. Ignore this and pretend we didn't find a message // // If we choose route 1, then if the pusher in question is currently // pre-empted, we're going to take up our entire time slice just // spinning on this queue. If we choose route 2, then the pusher in // question is still guaranteed to make a send() on its async // handle, so we will guaranteed wake up and see its message at some // point. // // I have chosen to take route #2. match self.message_queue.pop() { msgq::Data(t) => Some(t), msgq::Empty | msgq::Inconsistent => None } }; match msg { Some(PinnedTask(task)) => { let mut task = task; task.give_home(HomeSched(self.make_handle())); let (sched, task) = self.resume_task_immediately(stask, task); (sched, task, true) } Some(TaskFromFriend(task)) => { rtdebug!("got a task from a friend. lovely!"); let (sched, task) = self.process_task(stask, task, Scheduler::resume_task_immediately_cl); (sched, task, true) } Some(RunOnce(task)) => { // bypass the process_task logic to force running this task once // on this home scheduler. This is often used for I/O (homing). let (sched, task) = self.resume_task_immediately(stask, task); (sched, task, true) } Some(Wake) => { self.sleepy = false; (self, stask, true) } Some(Shutdown) => { rtdebug!("shutting down"); if self.sleepy { // There may be an outstanding handle on the // sleeper list. Pop them all to make sure that's // not the case. loop { match self.sleeper_list.pop() { Some(handle) => { let mut handle = handle; handle.send(Wake); } None => break } } } // No more sleeping. 
After there are no outstanding // event loop references we will shut down. self.no_sleep = true; self.sleepy = false; (self, stask, true) } Some(NewNeighbor(neighbor)) => { self.work_queues.push(neighbor); (self, stask, false) } None => (self, stask, false) } } fn do_work(mut self: Box<Scheduler>, stask: Box<GreenTask>) -> (Box<Scheduler>, Box<GreenTask>, bool) { rtdebug!("scheduler calling do work"); match self.find_work() { Some(task) => { rtdebug!("found some work! running the task"); let (sched, task) = self.process_task(stask, task, Scheduler::resume_task_immediately_cl); (sched, task, true) } None => { rtdebug!("no work was found, returning the scheduler struct"); (self, stask, false) } } } // Workstealing: In this iteration of the runtime each scheduler // thread has a distinct work queue. When no work is available // locally, make a few attempts to steal work from the queues of // other scheduler threads. If a few steals fail we end up in the // old "no work" path which is fine. // First step in the process is to find a task. This function does // that by first checking the local queue, and if there is no work // there, trying to steal from the remote work queues. fn find_work(&mut self) -> Option<Box<GreenTask>> { rtdebug!("scheduler looking for work"); if !self.steal_for_yield { match self.work_queue.pop() { Some(task) => { rtdebug!("found a task locally"); return Some(task) } None => { rtdebug!("scheduler trying to steal"); return self.try_steals(); } } } else { // During execution of the last task, it performed a 'yield', // so we're doing some work stealing in order to introduce some // scheduling randomness. Otherwise we would just end up popping // that same task again. This is pretty lame and is to work around // the problem that work stealing is not designed for 'non-strict' // (non-fork-join) task parallelism. 
self.steal_for_yield = false; match self.try_steals() { Some(task) => { rtdebug!("stole a task after yielding"); return Some(task); } None => { rtdebug!("did not steal a task after yielding"); // Back to business return self.find_work(); } } } } // Try stealing from all queues the scheduler knows about. This // naive implementation can steal from our own queue or from other // special schedulers. fn try_steals(&mut self) -> Option<Box<GreenTask>> { let work_queues = &mut self.work_queues; let len = work_queues.len(); let start_index = self.rng.gen_range(0, len); for index in range(0, len).map(|i| (i + start_index) % len) { match work_queues.get_mut(index).steal() { deque::Data(task) => { rtdebug!("found task by stealing"); return Some(task) } _ => () } }; rtdebug!("giving up on stealing"); return None; } // * Task Routing Functions - Make sure tasks send up in the right // place. fn process_task(mut self: Box<Scheduler>, cur: Box<GreenTask>, mut next: Box<GreenTask>, schedule_fn: SchedulingFn) -> (Box<Scheduler>, Box<GreenTask>) { rtdebug!("processing a task"); match next.take_unwrap_home() { HomeSched(home_handle) => { if home_handle.sched_id != self.sched_id() { rtdebug!("sending task home"); next.give_home(HomeSched(home_handle)); Scheduler::send_task_home(next); (self, cur) } else { rtdebug!("running task here"); next.give_home(HomeSched(home_handle)); schedule_fn(self, cur, next) } } AnySched if self.run_anything => { rtdebug!("running anysched task here"); next.give_home(AnySched); schedule_fn(self, cur, next) } AnySched => { rtdebug!("sending task to friend"); next.give_home(AnySched); self.send_to_friend(next); (self, cur) } } } fn send_task_home(task: Box<GreenTask>) { let mut task = task; match task.take_unwrap_home() { HomeSched(mut home_handle) => home_handle.send(PinnedTask(task)), AnySched => rtabort!("error: cannot send anysched task home"), } } /// Take a non-homed task we aren't allowed to run here and send /// it to the designated friend scheduler 
to execute. fn send_to_friend(&mut self, task: Box<GreenTask>) { rtdebug!("sending a task to friend"); match self.friend_handle { Some(ref mut handle) => { handle.send(TaskFromFriend(task)); } None => { rtabort!("tried to send task to a friend but scheduler has no friends"); } } } /// Schedule a task to be executed later. /// /// Pushes the task onto the work stealing queue and tells the /// event loop to run it later. Always use this instead of pushing /// to the work queue directly. pub fn enqueue_task(&mut self, task: Box<GreenTask>) { // We push the task onto our local queue clone. assert!(!task.is_sched()); self.work_queue.push(task); match self.idle_callback { Some(ref mut idle) => idle.resume(), None => {} // allow enqueuing before the scheduler starts } // We've made work available. Notify a // sleeping scheduler. match self.sleeper_list.casual_pop() { Some(handle) => { let mut handle = handle; handle.send(Wake) } None => { (/* pass */) } }; } // * Core Context Switching Functions // The primary function for changing contexts. In the current // design the scheduler is just a slightly modified GreenTask, so // all context swaps are from GreenTask to GreenTask. The only difference // between the various cases is where the inputs come from, and // what is done with the resulting task. That is specified by the // cleanup function f, which takes the scheduler and the // old task as inputs. pub fn change_task_context(mut self: Box<Scheduler>, mut current_task: Box<GreenTask>, mut next_task: Box<GreenTask>, f: |&mut Scheduler, Box<GreenTask>|) -> Box<GreenTask> { let f_opaque = ClosureConverter::from_fn(f); let current_task_dupe = &mut *current_task as *mut GreenTask; // The current task is placed inside an enum with the cleanup // function. This enum is then placed inside the scheduler. self.cleanup_job = Some(CleanupJob::new(current_task, f_opaque)); // The scheduler is then placed inside the next task. 
next_task.sched = Some(self); // However we still need an internal mutable pointer to the // original task. The strategy here was "arrange memory, then // get pointers", so we crawl back up the chain using // transmute to eliminate borrowck errors. unsafe { let sched: &mut Scheduler = mem::transmute(&**next_task.sched.get_mut_ref()); let current_task: &mut GreenTask = match sched.cleanup_job { Some(CleanupJob { task: ref mut task, .. }) => &mut **task, None => rtabort!("no cleanup job") }; let (current_task_context, next_task_context) = Scheduler::get_contexts(current_task, &mut *next_task); // Done with everything - put the next task in TLS. This // works because due to transmute the borrow checker // believes that we have no internal pointers to // next_task. mem::forget(next_task); // The raw context swap operation. The next action taken // will be running the cleanup job from the context of the // next task. Context::swap(current_task_context, next_task_context); } // When the context swaps back to this task we immediately // run the cleanup job, as expected by the previously called // swap_contexts function. let mut current_task: Box<GreenTask> = unsafe { mem::transmute(current_task_dupe) }; current_task.sched.get_mut_ref().run_cleanup_job(); // See the comments in switch_running_tasks_and_then for why a lock // is acquired here. This is the resumption points and the "bounce" // that it is referring to. unsafe { let _guard = current_task.nasty_deschedule_lock.lock(); } return current_task; } // Returns a mutable reference to both contexts involved in this // swap. This is unsafe - we are getting mutable internal // references to keep even when we don't own the tasks. It looks // kinda safe because we are doing transmutes before passing in // the arguments. 
pub fn get_contexts<'a>(current_task: &mut GreenTask, next_task: &mut GreenTask) -> (&'a mut Context, &'a mut Context) { let current_task_context = &mut current_task.coroutine.get_mut_ref().saved_context; let next_task_context = &mut next_task.coroutine.get_mut_ref().saved_context; unsafe { (mem::transmute(current_task_context), mem::transmute(next_task_context)) } } // * Context Swapping Helpers - Here be ugliness! pub fn resume_task_immediately(self: Box<Scheduler>, cur: Box<GreenTask>, next: Box<GreenTask>) -> (Box<Scheduler>, Box<GreenTask>) { assert!(cur.is_sched()); let mut cur = self.change_task_context(cur, next, |sched, stask| { assert!(sched.sched_task.is_none()); sched.sched_task = Some(stask); }); (cur.sched.take().unwrap(), cur) } fn resume_task_immediately_cl(sched: Box<Scheduler>, cur: Box<GreenTask>, next: Box<GreenTask>) -> (Box<Scheduler>, Box<GreenTask>) { sched.resume_task_immediately(cur, next) } /// Block a running task, context switch to the scheduler, then pass the /// blocked task to a closure. /// /// # Safety note /// /// The closure here is a *stack* closure that lives in the /// running task. It gets transmuted to the scheduler's lifetime /// and called while the task is blocked. /// /// This passes a Scheduler pointer to the fn after the context switch /// in order to prevent that fn from performing further scheduling operations. /// Doing further scheduling could easily result in infinite recursion. /// /// Note that if the closure provided relinquishes ownership of the /// BlockedTask, then it is possible for the task to resume execution before /// the closure has finished executing. This would naturally introduce a /// race if the closure and task shared portions of the environment. /// /// This situation is currently prevented, or in other words it is /// guaranteed that this function will not return before the given closure /// has returned. 
pub fn deschedule_running_task_and_then(mut self: Box<Scheduler>, cur: Box<GreenTask>, f: |&mut Scheduler, BlockedTask|) { // Trickier - we need to get the scheduler task out of self // and use it as the destination. let stask = self.sched_task.take().unwrap(); // Otherwise this is the same as below. self.switch_running_tasks_and_then(cur, stask, f) } pub fn switch_running_tasks_and_then(self: Box<Scheduler>, cur: Box<GreenTask>, next: Box<GreenTask>, f: |&mut Scheduler, BlockedTask|) { // And here comes one of the sad moments in which a lock is used in a // core portion of the rust runtime. As always, this is highly // undesirable, so there's a good reason behind it. // // There is an excellent outline of the problem in issue #8132, and it's // summarized in that `f` is executed on a sched task, but its // environment is on the previous task. If `f` relinquishes ownership of // the BlockedTask, then it may introduce a race where `f` is using the // environment as well as the code after the 'deschedule' block. // // The solution we have chosen to adopt for now is to acquire a // task-local lock around this block. The resumption of the task in // context switching will bounce on the lock, thereby waiting for this // block to finish, eliminating the race mentioned above. // fail!("should never return!"); // // To actually maintain a handle to the lock, we use an unsafe pointer // to it, but we're guaranteed that the task won't exit until we've // unlocked the lock so there's no worry of this memory going away. 
let cur = self.change_task_context(cur, next, |sched, mut task| { let lock: *mut NativeMutex = &mut task.nasty_deschedule_lock; unsafe { let _guard = (*lock).lock(); f(sched, BlockedTask::block(task.swap())); } }); cur.put(); } fn switch_task(sched: Box<Scheduler>, cur: Box<GreenTask>, next: Box<GreenTask>) -> (Box<Scheduler>, Box<GreenTask>) { let mut cur = sched.change_task_context(cur, next, |sched, last_task| { if last_task.is_sched() { assert!(sched.sched_task.is_none()); sched.sched_task = Some(last_task); } else { sched.enqueue_task(last_task); } }); (cur.sched.take().unwrap(), cur) } // * Task Context Helpers /// Called by a running task to end execution, after which it will /// be recycled by the scheduler for reuse in a new task. pub fn terminate_current_task(mut self: Box<Scheduler>, cur: Box<GreenTask>) -> ! { // Similar to deschedule running task and then, but cannot go through // the task-blocking path. The task is already dying. let stask = self.sched_task.take().unwrap(); let _cur = self.change_task_context(cur, stask, |sched, mut dead_task| { let coroutine = dead_task.coroutine.take().unwrap(); coroutine.recycle(&mut sched.stack_pool); sched.task_state.decrement(); }); fail!("should never return!"); } pub fn run_task(self: Box<Scheduler>, cur: Box<GreenTask>, next: Box<GreenTask>) { let (sched, task) = self.process_task(cur, next, Scheduler::switch_task); task.put_with_sched(sched); } pub fn run_task_later(mut cur: Box<GreenTask>, next: Box<GreenTask>) { let mut sched = cur.sched.take().unwrap(); sched.enqueue_task(next); cur.put_with_sched(sched); } /// Yield control to the scheduler, executing another task. This is guaranteed /// to introduce some amount of randomness to the scheduler. Currently the /// randomness is a result of performing a round of work stealing (which /// may end up stealing from the current scheduler). 
pub fn yield_now(mut self: Box<Scheduler>, cur: Box<GreenTask>) { // Async handles trigger the scheduler by calling yield_now on the local // task, which eventually gets us to here. See comments in SchedRunner // for more info on this. if cur.is_sched() { assert!(self.sched_task.is_none()); self.run_sched_once(cur); } else { self.yield_check_count = reset_yield_check(&mut self.rng); // Tell the scheduler to start stealing on the next iteration self.steal_for_yield = true; let stask = self.sched_task.take().unwrap(); let cur = self.change_task_context(cur, stask, |sched, task| { sched.enqueue_task(task); }); cur.put() } } pub fn maybe_yield(mut self: Box<Scheduler>, cur: Box<GreenTask>) { // It's possible for sched tasks to possibly call this function, and it // just means that they're likely sending on channels (which // occasionally call this function). Sched tasks follow different paths // when executing yield_now(), which may possibly trip the assertion // below. For this reason, we just have sched tasks bail out soon. // // Sched tasks have no need to yield anyway because as soon as they // return they'll yield to other threads by falling back to the event // loop. Additionally, we completely control sched tasks, so we can make // sure that they never execute more than enough code. if cur.is_sched() { return cur.put_with_sched(self) } // The number of times to do the yield check before yielding, chosen // arbitrarily. 
rtassert!(self.yield_check_count > 0); self.yield_check_count -= 1; if self.yield_check_count == 0 { self.yield_now(cur); } else { cur.put_with_sched(self); } } // * Utility Functions pub fn sched_id(&self) -> uint { self as *const Scheduler as uint } pub fn run_cleanup_job(&mut self) { let cleanup_job = self.cleanup_job.take().unwrap(); cleanup_job.run(self) } pub fn make_handle(&mut self) -> SchedHandle { let remote = self.event_loop.remote_callback(box SchedRunner); return SchedHandle { remote: remote, queue: self.message_producer.clone(), sched_id: self.sched_id() } } } // Supporting types type SchedulingFn = fn(Box<Scheduler>, Box<GreenTask>, Box<GreenTask>) -> (Box<Scheduler>, Box<GreenTask>); pub enum SchedMessage { Wake, Shutdown, NewNeighbor(deque::Stealer<Box<GreenTask>>), PinnedTask(Box<GreenTask>), TaskFromFriend(Box<GreenTask>), RunOnce(Box<GreenTask>), } pub struct SchedHandle { remote: Box<RemoteCallback + Send>, queue: msgq::Producer<SchedMessage>, pub sched_id: uint } impl SchedHandle { pub fn send(&mut self, msg: SchedMessage) { self.queue.push(msg); self.remote.fire(); } } struct SchedRunner; impl Callback for SchedRunner { fn call(&mut self) { // In theory, this function needs to invoke the `run_sched_once` // function on the scheduler. Sadly, we have no context here, except for // knowledge of the local `Task`. In order to avoid a call to // `GreenTask::convert`, we just call `yield_now` and the scheduler will // detect when a sched task performs a yield vs a green task performing // a yield (and act accordingly). // // This function could be converted to `GreenTask::convert` if // absolutely necessary, but for cleanliness it is much better to not // use the conversion function. 
let task: Box<Task> = Local::take(); task.yield_now(); } } struct CleanupJob { task: Box<GreenTask>, f: UnsafeTaskReceiver } impl CleanupJob { pub fn new(task: Box<GreenTask>, f: UnsafeTaskReceiver) -> CleanupJob { CleanupJob { task: task, f: f } } pub fn run(self, sched: &mut Scheduler) { let CleanupJob { task: task, f: f } = self; f.to_fn()(sched, task) } } // FIXME: Some hacks to put a || closure in Scheduler without borrowck // complaining type UnsafeTaskReceiver = raw::Closure; trait ClosureConverter { fn from_fn(|&mut Scheduler, Box<GreenTask>|) -> Self; fn to_fn(self) -> |&mut Scheduler, Box<GreenTask>|:'static ; } impl ClosureConverter for UnsafeTaskReceiver { fn from_fn(f: |&mut Scheduler, Box<GreenTask>|) -> UnsafeTaskReceiver { unsafe { mem::transmute(f) } } fn to_fn(self) -> |&mut Scheduler, Box<GreenTask>|:'static { unsafe { mem::transmute(self) } } } // On unix, we read randomness straight from /dev/urandom, but the // default constructor of an XorShiftRng does this via io::fs, which // relies on the scheduler existing, so we have to manually load // randomness. Windows has its own C API for this, so we don't need to // worry there. #[cfg(windows)] fn new_sched_rng() -> XorShiftRng { use std::rand::OsRng; match OsRng::new() { Ok(mut r) => r.gen(), Err(e) => { rtabort!("sched: failed to create seeded RNG: {}", e) } } } #[cfg(unix)] fn new_sched_rng() -> XorShiftRng { use libc; use std::mem; use std::rand::SeedableRng; let fd = "/dev/urandom".with_c_str(|name| { unsafe { libc::open(name, libc::O_RDONLY, 0) } }); if fd == -1 { rtabort!("could not open /dev/urandom for reading.") } let mut seeds = [0u32, .. 
4]; let size = mem::size_of_val(&seeds); loop { let nbytes = unsafe { libc::read(fd, seeds.as_mut_ptr() as *mut libc::c_void, size as libc::size_t) }; rtassert!(nbytes as uint == size); if !seeds.iter().all(|x| *x == 0) { break; } } unsafe {libc::close(fd);} SeedableRng::from_seed(seeds) } #[cfg(test)] mod test { use rustuv; use std::rt::task::TaskOpts; use std::rt::task::Task; use std::rt::local::Local; use std::time::Duration; use {TaskState, PoolConfig, SchedPool}; use basic; use sched::{TaskFromFriend, PinnedTask}; use task::{GreenTask, HomeSched, AnySched}; fn pool() -> SchedPool { SchedPool::new(PoolConfig { threads: 1, event_loop_factory: basic::event_loop, }) } fn run(f: proc():Send) { let mut pool = pool(); pool.spawn(TaskOpts::new(), f); pool.shutdown(); } fn sched_id() -> uint { let mut task = Local::borrow(None::<Task>); match task.maybe_take_runtime::<GreenTask>() { Some(green) => { let ret = green.sched.get_ref().sched_id(); task.put_runtime(green); return ret; } None => fail!() } } #[test] fn trivial_run_in_newsched_task_test() { let mut task_ran = false; let task_ran_ptr: *mut bool = &mut task_ran; run(proc() { unsafe { *task_ran_ptr = true }; rtdebug!("executed from the new scheduler") }); assert!(task_ran); } #[test] fn multiple_task_test() { let total = 10; let mut task_run_count = 0; let task_run_count_ptr: *mut uint = &mut task_run_count; // with only one thread this is safe to run in without worries of // contention. 
run(proc() { for _ in range(0u, total) { spawn(proc() { unsafe { *task_run_count_ptr = *task_run_count_ptr + 1}; }); } }); assert!(task_run_count == total); } #[test] fn multiple_task_nested_test() { let mut task_run_count = 0; let task_run_count_ptr: *mut uint = &mut task_run_count; run(proc() { spawn(proc() { unsafe { *task_run_count_ptr = *task_run_count_ptr + 1 }; spawn(proc() { unsafe { *task_run_count_ptr = *task_run_count_ptr + 1 }; spawn(proc() { unsafe { *task_run_count_ptr = *task_run_count_ptr + 1 }; }) }) }) }); assert!(task_run_count == 3); } // A very simple test that confirms that a task executing on the // home scheduler notices that it is home. #[test] fn test_home_sched() { let mut pool = pool(); let (dtx, drx) = channel(); { let (tx, rx) = channel(); let mut handle1 = pool.spawn_sched(); let mut handle2 = pool.spawn_sched(); handle1.send(TaskFromFriend(pool.task(TaskOpts::new(), proc() { tx.send(sched_id()); }))); let sched1_id = rx.recv(); let mut task = pool.task(TaskOpts::new(), proc() { assert_eq!(sched_id(), sched1_id); dtx.send(()); }); task.give_home(HomeSched(handle1)); handle2.send(TaskFromFriend(task)); } drx.recv(); pool.shutdown(); } // An advanced test that checks all four possible states that a // (task,sched) can be in regarding homes. 
#[test] fn test_schedule_home_states() { use sleeper_list::SleeperList; use super::{Shutdown, Scheduler, SchedHandle}; use std::rt::thread::Thread; use std::sync::deque::BufferPool; Thread::start(proc() { let sleepers = SleeperList::new(); let pool = BufferPool::new(); let (normal_worker, normal_stealer) = pool.deque(); let (special_worker, special_stealer) = pool.deque(); let queues = vec![normal_stealer, special_stealer]; let (_p, state) = TaskState::new(); // Our normal scheduler let mut normal_sched = box Scheduler::new( 1, basic::event_loop(), normal_worker, queues.clone(), sleepers.clone(), state.clone()); let normal_handle = normal_sched.make_handle(); let friend_handle = normal_sched.make_handle(); // Our special scheduler let mut special_sched = box Scheduler::new_special( 1, basic::event_loop(), special_worker, queues.clone(), sleepers.clone(), false, Some(friend_handle), state); let special_handle = special_sched.make_handle(); let t1_handle = special_sched.make_handle(); let t4_handle = special_sched.make_handle(); // Four test tasks: // 1) task is home on special // 2) task not homed, sched doesn't care // 3) task not homed, sched requeues // 4) task not home, send home // Grab both the scheduler and the task from TLS and check if the // task is executing on an appropriate scheduler. fn on_appropriate_sched() -> bool { use task::{TypeGreen, TypeSched, HomeSched}; let task = GreenTask::convert(Local::take()); let sched_id = task.sched.get_ref().sched_id(); let run_any = task.sched.get_ref().run_anything; let ret = match task.task_type { TypeGreen(Some(AnySched)) => { run_any } TypeGreen(Some(HomeSched(SchedHandle { sched_id: ref id, .. 
}))) => { *id == sched_id } TypeGreen(None) => { fail!("task without home"); } TypeSched => { fail!("expected green task"); } }; task.put(); ret } let task1 = GreenTask::new_homed(&mut special_sched.stack_pool, None, HomeSched(t1_handle), proc() { rtassert!(on_appropriate_sched()); }); let task2 = GreenTask::new(&mut normal_sched.stack_pool, None, proc() { rtassert!(on_appropriate_sched()); }); let task3 = GreenTask::new(&mut normal_sched.stack_pool, None, proc() { rtassert!(on_appropriate_sched()); }); let task4 = GreenTask::new_homed(&mut special_sched.stack_pool, None, HomeSched(t4_handle), proc() { rtassert!(on_appropriate_sched()); }); // Signal from the special task that we are done. let (tx, rx) = channel::<()>(); fn run(next: Box<GreenTask>) { let mut task = GreenTask::convert(Local::take()); let sched = task.sched.take().unwrap(); sched.run_task(task, next) } let normal_task = GreenTask::new(&mut normal_sched.stack_pool, None, proc() { run(task2); run(task4); rx.recv(); let mut nh = normal_handle; nh.send(Shutdown); let mut sh = special_handle; sh.send(Shutdown); }); normal_sched.enqueue_task(normal_task); let special_task = GreenTask::new(&mut special_sched.stack_pool, None, proc() { run(task1); run(task3); tx.send(()); }); special_sched.enqueue_task(special_task); let normal_sched = normal_sched; let normal_thread = Thread::start(proc() { normal_sched.bootstrap() }); let special_sched = special_sched; let special_thread = Thread::start(proc() { special_sched.bootstrap() }); normal_thread.join(); special_thread.join(); }).join(); } //#[test] //fn test_stress_schedule_task_states() { // if util::limit_thread_creation_due_to_osx_and_valgrind() { return; } // let n = stress_factor() * 120; // for _ in range(0, n as int) { // test_schedule_home_states(); // } //} #[test] fn test_io_callback() { use std::io::timer; let mut pool = SchedPool::new(PoolConfig {<|fim▁hole|> // This is a regression test that when there are no schedulable tasks in // the work queue, 
but we are performing I/O, that once we do put // something in the work queue again the scheduler picks it up and // doesn't exit before emptying the work queue pool.spawn(TaskOpts::new(), proc() { spawn(proc() { timer::sleep(Duration::milliseconds(10)); }); }); pool.shutdown(); } #[test] fn wakeup_across_scheds() { let (tx1, rx1) = channel(); let (tx2, rx2) = channel(); let mut pool1 = pool(); let mut pool2 = pool(); pool1.spawn(TaskOpts::new(), proc() { let id = sched_id(); tx1.send(()); rx2.recv(); assert_eq!(id, sched_id()); }); pool2.spawn(TaskOpts::new(), proc() { let id = sched_id(); rx1.recv(); assert_eq!(id, sched_id()); tx2.send(()); }); pool1.shutdown(); pool2.shutdown(); } // A regression test that the final message is always handled. // Used to deadlock because Shutdown was never recvd. #[test] fn no_missed_messages() { let mut pool = pool(); let task = pool.task(TaskOpts::new(), proc()()); pool.spawn_sched().send(TaskFromFriend(task)); pool.shutdown(); } #[test] fn multithreading() { run(proc() { let mut rxs = vec![]; for _ in range(0u, 10) { let (tx, rx) = channel(); spawn(proc() { tx.send(()); }); rxs.push(rx); } loop { match rxs.pop() { Some(rx) => rx.recv(), None => break, } } }); } #[test] fn thread_ring() { run(proc() { let (end_tx, end_rx) = channel(); let n_tasks = 10; let token = 2000; let (tx1, mut rx) = channel(); tx1.send((token, end_tx)); let mut i = 2; while i <= n_tasks { let (tx, next_rx) = channel(); let imm_i = i; let imm_rx = rx; spawn(proc() { roundtrip(imm_i, n_tasks, &imm_rx, &tx); }); rx = next_rx; i += 1; } let rx = rx; spawn(proc() { roundtrip(1, n_tasks, &rx, &tx1); }); end_rx.recv(); }); fn roundtrip(id: int, n_tasks: int, rx: &Receiver<(int, Sender<()>)>, tx: &Sender<(int, Sender<()>)>) { loop { match rx.recv() { (1, end_tx) => { debug!("{}\n", id); end_tx.send(()); return; } (token, end_tx) => { debug!("thread: {} got token: {}", id, token); tx.send((token - 1, end_tx)); if token <= n_tasks { return; } } } } } } #[test] fn 
start_closure_dtor() { // Regression test that the `start` task entrypoint can // contain dtors that use task resources run(proc() { struct S { field: () } impl Drop for S { fn drop(&mut self) { let _foo = box 0i; } } let s = S { field: () }; spawn(proc() { let _ss = &s; }); }); } #[test] fn dont_starve_1() { let mut pool = SchedPool::new(PoolConfig { threads: 2, // this must be > 1 event_loop_factory: basic::event_loop, }); pool.spawn(TaskOpts::new(), proc() { let (tx, rx) = channel(); // This task should not be able to starve the sender; // The sender should get stolen to another thread. spawn(proc() { while rx.try_recv().is_err() { } }); tx.send(()); }); pool.shutdown(); } #[test] fn dont_starve_2() { run(proc() { let (tx1, rx1) = channel(); let (tx2, _rx2) = channel(); // This task should not be able to starve the other task. // The sends should eventually yield. spawn(proc() { while rx1.try_recv().is_err() { tx2.send(()); } }); tx1.send(()); }); } // Regression test for a logic bug that would cause single-threaded // schedulers to sleep forever after yielding and stealing another task. 
#[test] fn single_threaded_yield() { use std::task::deschedule; run(proc() { for _ in range(0u, 5) { deschedule(); } }); } #[test] fn test_spawn_sched_blocking() { use std::rt::mutex::{StaticNativeMutex, NATIVE_MUTEX_INIT}; static mut LOCK: StaticNativeMutex = NATIVE_MUTEX_INIT; // Testing that a task in one scheduler can block in foreign code // without affecting other schedulers for _ in range(0u, 20) { let mut pool = pool(); let (start_tx, start_rx) = channel(); let (fin_tx, fin_rx) = channel(); let mut handle = pool.spawn_sched(); handle.send(PinnedTask(pool.task(TaskOpts::new(), proc() { unsafe { let guard = LOCK.lock(); start_tx.send(()); guard.wait(); // block the scheduler thread guard.signal(); // let them know we have the lock } fin_tx.send(()); }))); drop(handle); let mut handle = pool.spawn_sched(); handle.send(PinnedTask(pool.task(TaskOpts::new(), proc() { // Wait until the other task has its lock start_rx.recv(); fn pingpong(po: &Receiver<int>, ch: &Sender<int>) { let mut val = 20; while val > 0 { val = po.recv(); let _ = ch.send_opt(val - 1); } } let (setup_tx, setup_rx) = channel(); let (parent_tx, parent_rx) = channel(); spawn(proc() { let (child_tx, child_rx) = channel(); setup_tx.send(child_tx); pingpong(&child_rx, &parent_tx); }); let child_tx = setup_rx.recv(); child_tx.send(20); pingpong(&parent_rx, &child_tx); unsafe { let guard = LOCK.lock(); guard.signal(); // wakeup waiting scheduler guard.wait(); // wait for them to grab the lock } }))); drop(handle); fin_rx.recv(); pool.shutdown(); } unsafe { LOCK.destroy(); } } }<|fim▁end|>
threads: 2, event_loop_factory: rustuv::event_loop, });
<|file_name|>prepare.js<|end_file_name|><|fim▁begin|>// list of points object, map_type dependant // fields: // .pt_index // .pt var river_points = []; var current_parcours_id; var current_deb; // debarquement (1) ou embarquement (0) var current_river_obj; /* Creates a point object */ function Point(lat,lon,previous_pt) { this.lat = lat; // latitude in decimal degrees this.lon = lon; // longitude in decimal degrees //[optional] ele : elevation in m //[optional] elelbl : elevation label if(typeof(previous_pt)=='undefined') { this.dist = 0.0; } else { //console.log('plat='+previous_pt.lat); this.dist = previous_pt.dist + geodeticDist(previous_pt.lat,previous_pt.lon,lat,lon); } //console.log('thisdist='+this.dist); } function getRiver(name) { $.getJSON('/river/'+name, loadRiverToMap).fail(function(err){$("#svrresponse").html(err.responseText);}); } function nextRiver(name) { console.log('nextRiver'); } function prevRiver(name) { console.log('prevRiver'); } function setEmbDeb(elem,river,parcours_id,deb) { $(".emb_deb").css("color","black"); elem.style.color = 'red'; current_parcours_id=parcours_id; current_deb=deb; console.log('setEmbDeb(%s,%d,%d)',river,parcours_id,deb); } function setEmb(elem,river,parcours_id) { setEmbDeb(elem,river,parcours_id,0); } function setDeb(elem,river,parcours_id) { setEmbDeb(elem,river,parcours_id,1); } function save() { $.get('/flush',function(){$("#svrresponse").html('OK');}).fail(function(err){$("#svrresponse").html(err.responseText);}); } function search_river(evt,river_name) { if(evt.keyCode==13) { // Enter getRiver(river_name); } else if(evt.keyCode==40) { // Down nextRiver(river_name); } else if(evt.keyCode==38) { // Up prevRiver(river_name); } } function selPathsChange() { var i; var sel=$('input[name=paths_sel]'); for(i=0;i<sel.length;i++) { toogleRiverPath(i,sel[i].checked); } } function splitPaths() { if ($('#split_btn').attr('value')==='Apply split') { var i; var sel=$('input[name=paths_sel]'); var names=[]; 
for(i=0;i<sel.length;i++) { names.push($('#path'+i+'_name').val()); } console.log("names=%o",names); $.getJSON('/split_paths/'+current_river_obj._id+'/'+names.join('^'), function(river_obj){ $("#svrresponse").html('OK'); clearMapObjects(); $("#river_name_input").attr('value',''); }).fail(function(err){$("#svrresponse").html(err.responseText);}); } else { var i; var sel=$('input[name=paths_sel]'); var html=''; for(i=0;i<sel.length;i++) { html += 'Path'+i+'<input type="text" id="path'+i+'_name" name="path'+i+'_name" value="'+current_river_obj._id+'"/>'; } $('#split_btn').attr('value','Apply split'); $('#split_names').html(html); } } function removeUnselectedPaths() { var i; var sel=$('input[name=paths_sel]'); to_remove_list = []; for(i=0;i<sel.length;i++) { if (!sel[i].checked) { to_remove_list.push(i); } } $.getJSON('/remove_paths/'+current_river_obj._id+'/'+to_remove_list.join(','), function(river_obj){ $("#svrresponse").html('OK'); loadRiverToMap(river_obj); }).fail(function(err){$("#svrresponse").html(err.responseText);}); } function mergeSelectedPaths() { var i; var sel=$('input[name=paths_sel]'); var to_merge_list = []; for(i=0;i<sel.length;i++) { if (sel[i].checked) { to_merge_list.push(i); } } console.log("to_merge_list=%o",to_merge_list); if(to_merge_list.length!=2) { $("#svrresponse").html('Error: Can only merge two paths'); } else { // Try to merge consecutive paths var d1 = geodeticDist(current_river_obj.osm.paths[to_merge_list[0]][0][0], current_river_obj.osm.paths[to_merge_list[0]][0][1], current_river_obj.osm.paths[to_merge_list[1]][current_river_obj.osm.paths[to_merge_list[1]].length-1][0], current_river_obj.osm.paths[to_merge_list[1]][current_river_obj.osm.paths[to_merge_list[1]].length-1][1]); var d2 = geodeticDist(current_river_obj.osm.paths[to_merge_list[1]][0][0], current_river_obj.osm.paths[to_merge_list[1]][0][1], current_river_obj.osm.paths[to_merge_list[0]][current_river_obj.osm.paths[to_merge_list[0]].length-1][0], 
current_river_obj.osm.paths[to_merge_list[0]][current_river_obj.osm.paths[to_merge_list[0]].length-1][1]); console.log('d1='+d1+' d2='+d2); if ((d1>1000)&&(d2>1000)) { $("#svrresponse").html('Error: Paths are more than 1km one from another'); } else { if (d1<d2) { $.getJSON('/merge_paths_a_after_b/'+current_river_obj._id+'/'+to_merge_list[0]+'/'+to_merge_list[1], function(river_obj){ $("#svrresponse").html('OK'); loadRiverToMap(river_obj); }).fail(function(err){$("#svrresponse").html(err.responseText);}); } else { $.getJSON('/merge_paths_a_after_b/'+current_river_obj._id+'/'+to_merge_list[1]+'/'+to_merge_list[0], function(river_obj){ $("#svrresponse").html('OK'); loadRiverToMap(river_obj); }).fail(function(err){$("#svrresponse").html(err.responseText);}); } } } /* var j; var to_merge_commands = []; while(1) { // Get closest paths var min_d = 1000; var min_a = -1; var min_b = -1; for(i=0;i<to_merge_list.length;i++) { for(j=0;j<to_merge_list.length;j++) { console.log("%o %o %o",current_river_obj.osm.paths,to_merge_list,i); //to_merge_list is bijective if ((i!=j)&&(typeof(current_river_obj.osm.paths[to_merge_list[i]]!=='undefined'))&&(typeof(current_river_obj.osm.paths[to_merge_list[j]]!=='undefined'))) { var d = geodeticDist(current_river_obj.osm.paths[to_merge_list[i]][0][0], current_river_obj.osm.paths[to_merge_list[i]][0][1], current_river_obj.osm.paths[to_merge_list[j]][current_river_obj.osm.paths[to_merge_list[j]].length-1][0], current_river_obj.osm.paths[to_merge_list[j]][current_river_obj.osm.paths[to_merge_list[j]].length-1][1]); if (d<min_d) { min_d = d; min_a = to_merge_list[i]; min_b = to_merge_list[j]; } } } } if (min_d<1000) { current_river_obj.osm.paths[min_b].push.apply(current_river_obj.osm.paths[min_b], current_river_obj.osm.paths[min_a]); delete current_river_obj.osm.paths[min_a]; to_merge_commands.push([min_a,min_b]); to_merge_list.splice( } else { break; } } var nbpaths_of_to_merge_list = 0; for(i=0;i<nbpaths_of_to_merge_list.length;i++) { if 
(typeof(current_river_obj.osm.paths[to_merge_list[i]]!=='undefined')) { nbpaths_of_to_merge_list += 1; } } if (nbpaths_of_to_merge_list==1) { console.log(to_merge_commands); } else { $("#svrresponse").html('Cannot find consecutive paths in selected list'); }*/ } function routeCkFiumi2Html(r) { return '<a href="'+r.src_url+'" target="_blank">' + r.name + '</a> (' + r.wwgrade + ',' + r.length + 'km,' + r.duration + 'h)' + '<ul><li><b>Start:</b> ' + r.start + '</li><li><b>End:</b> ' + r.end + '</li></ul>'; } function dispDist(d) {<|fim▁hole|> } else return Math.round(d) + '&nbsp;m'; } function loadRiverToMap(river_obj) { console.log(river_obj); clearMapObjects(); current_river_obj = river_obj; // Names $("#river_name_evo").html('name_evo' in river_obj?(river_obj['name_evo']+' ('+river_obj.evo.length+')'):'/'); $("#river_name_rivermap").html('name_rivermap' in river_obj?(river_obj['name_rivermap']+' ('+river_obj.rivermap.length+')'):'/'); $("#river_name_ckfiumi").html('name_ckfiumi' in river_obj?(river_obj['name_ckfiumi']+' ('+river_obj.ckfiumi.length+')'):'/'); // OSM river_points = []; var i; var j; var pts=[]; var pathshtml=''; for(i=0;i<river_obj.osm.paths.length;i++) { pathshtml += ' <span ondblclick="zoomToPath('+i+');" onmouseover="highlightPath(this,'+i+');" onmouseout="unhighlightPath(this,'+i+');"><input type="checkbox" name="paths_sel" value="path'+i+'" onclick="selPathsChange();" checked> Path #'+i+'</span>'; pts[i]=[]; var lg = 0.0; for(j=0;j<river_obj.osm.paths[i].length;j++) { pts[i][j] = {"lat": river_obj.osm.paths[i][j][0], "lng": river_obj.osm.paths[i][j][1], "lon": river_obj.osm.paths[i][j][1]}; if (j<river_obj.osm.paths[i].length-1) { lg += geodeticDist(river_obj.osm.paths[i][j][0],river_obj.osm.paths[i][j][1],river_obj.osm.paths[i][j+1][0],river_obj.osm.paths[i][j+1][1]); } } pathshtml += ' (' + dispDist(lg) + ')'; } pathshtml += '<input type="button" value="Merge selected" onclick="mergeSelectedPaths();"> <input type="button" value="Remove 
unselected" onclick="removeUnselectedPaths();"> <input type="button" id="split_btn" value="Split all" onclick="splitPaths();"><span id="split_names"></span>'; $("#paths").html(pathshtml); addRiverPaths(pts); // Evo if ("evo" in river_obj) { var html=''; for(i=0;i<river_obj.evo.length;i++) { html += '<h2>EVO</h2><span class="source">(<a href="'+river_obj.evo[i].src_url+'" target="_blank">source</a>)</span>'; if ("presentation" in river_obj.evo[i]) { html += '<h3>Presentation</h3><p>'+river_obj.evo[i].presentation+'</p>'; } if ("parcours" in river_obj.evo[i]) { var k=-1; html += '<h3>Parcours</h3><ul><li>'+river_obj.evo[i].parcours.map(function(p) { k++; return p.name + ' (' + p.cotation + ',' + p.duree + ')' + '<ul><li onClick="setEmb(this,\''+river_obj['name']+'\','+k+');" class="emb_deb">Emb:'+p.embarquement+'</li><li onClick="setDeb(this,\''+river_obj['name']+'\','+k+');" class="emb_deb">Deb:'+p.debarquement+'</li></ul>'; }).join('</li><li>')+'</li></ul>' } } $("#evo").html(html); } // RiverMap if ("rivermap" in river_obj) { var html=''; for(i=0;i<river_obj.rivermap.length;i++) { if ("routes_rivermap" in river_obj.rivermap[i]) { var k=-1; html += '<h2>RiverMap</h2><ul><li>'+river_obj.rivermap[i].routes_rivermap.map(function(p) { k++; return ''+k+': ' +p.name + ': ' + p.length + ' km '+p.ww_class }).join('</li><li>')+'</li></ul>' for(k=0;k<river_obj.rivermap[i].routes_rivermap.length;k++) { addPointRivermap(river_obj.rivermap[i].routes_rivermap[k].start,'S'+k); addPointRivermap(river_obj.rivermap[i].routes_rivermap[k].end,'E'+k); } } } $("#rivermap").html(html); } // CKFiumi if ("ckfiumi" in river_obj) { var html=''; for(i=0;i<river_obj.ckfiumi.length;i++) { html += '<h2>CKFiumi</h2>'; if ("regions" in river_obj.ckfiumi[i]) { html += river_obj.ckfiumi[i].regions + ' '; } if ("provinces" in river_obj.ckfiumi[i]) { html += river_obj.ckfiumi[i].provinces; } if ("routes_ckfiumi" in river_obj.ckfiumi[i]) { html += 
'<h3>Routes</h3><ul><li>'+river_obj.ckfiumi[i].routes_ckfiumi.map(routeCkFiumi2Html).join('</li><li>')+'</li></ul>'; } } $("#ckfiumi").html(html); } } function lg2pt(evt,lg) { if(evt.keyCode==13) { // Enter computelg2pt(parseInt(lg)); } } function computelg2pt(lg) { if(current_river_obj.osm.paths.length!=1) { $("#lg2pterror").html("Valid only if one path"); } else { var i=0; var l=0.0; while(l<lg) { if (i+2 > current_river_obj.osm.paths[0].length) { $("#lg2pterror").html("Length bigger than river"); return; } l += geodeticDist(current_river_obj.osm.paths[0][i][0],current_river_obj.osm.paths[0][i][1],current_river_obj.osm.paths[0][i+1][0],current_river_obj.osm.paths[0][i+1][1]); i++; } console.log(current_river_obj.osm.paths[0][i]); addPoint(new Point(current_river_obj.osm.paths[0][i][0],current_river_obj.osm.paths[0][i][1])); } }<|fim▁end|>
if (d>1000.0) { return Math.round(d/1000.0) + '&nbsp;km';
<|file_name|>ShowFileTransfersDialogCommand.cpp<|end_file_name|><|fim▁begin|>/* UOL Messenger * Copyright (c) 2005 Universo Online S/A * * Direitos Autorais Reservados * All rights reserved * * Este programa é software livre; você pode redistribuí-lo e/ou modificá-lo * sob os termos da Licença Pública Geral GNU conforme publicada pela Free * Software Foundation; tanto a versão 2 da Licença, como (a seu critério) * qualquer versão posterior. * Este programa é distribuído na expectativa de que seja útil, porém, * SEM NENHUMA GARANTIA; nem mesmo a garantia implícita de COMERCIABILIDADE * OU ADEQUAÇÃO A UMA FINALIDADE ESPECÍFICA. Consulte a Licença Pública Geral * do GNU para mais detalhes. * Você deve ter recebido uma cópia da Licença Pública Geral do GNU junto * com este programa; se não, escreva para a Free Software Foundation, Inc., * no endereço 59 Temple Street, Suite 330, Boston, MA 02111-1307 USA. * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. * * Universo Online S/A - A/C: UOL Messenger 5o. 
Andar * Avenida Brigadeiro Faria Lima, 1.384 - Jardim Paulistano * São Paulo SP - CEP 01452-002 - BRASIL */ #include "StdAfx.h" #include <commands/ShowFileTransfersDialogcommand.h> #include "../UIMApplication.h" CShowFileTransfersDialogCommand::CShowFileTransfersDialogCommand() { } CShowFileTransfersDialogCommand::~CShowFileTransfersDialogCommand(void) { } <|fim▁hole|>}<|fim▁end|>
void CShowFileTransfersDialogCommand::Execute() { CUIMApplication::GetApplication()->GetUIManager()->ShowFileTransferDialog(NULL);
<|file_name|>replication_controller_utils.go<|end_file_name|><|fim▁begin|>/* Copyright 2015 The Kubernetes Authors All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // If you make changes to this file, you should also make the corresponding change in ReplicaSet. package replication import ( "github.com/golang/glog" "k8s.io/kubernetes/pkg/api" client "k8s.io/kubernetes/pkg/client/unversioned" ) // updateReplicaCount attempts to update the Status.Replicas of the given controller, with a single GET/PUT retry. func updateReplicaCount(rcClient client.ReplicationControllerInterface, controller api.ReplicationController, numReplicas int) (updateErr error) { // This is the steady state. It happens when the rc doesn't have any expectations, since // we do a periodic relist every 30s. If the generations differ but the replicas are // the same, a caller might've resized to the same replica count. if controller.Status.Replicas == numReplicas && controller.Generation == controller.Status.ObservedGeneration { return nil } // Save the generation number we acted on, otherwise we might wrongfully indicate // that we've seen a spec update when we retry. 
// TODO: This can clobber an update if we allow multiple agents to write to the // same status.<|fim▁hole|> generation := controller.Generation var getErr error for i, rc := 0, &controller; ; i++ { glog.V(4).Infof("Updating replica count for rc: %v, %d->%d (need %d), sequence No: %v->%v", controller.Name, controller.Status.Replicas, numReplicas, controller.Spec.Replicas, controller.Status.ObservedGeneration, generation) rc.Status = api.ReplicationControllerStatus{Replicas: numReplicas, ObservedGeneration: generation} _, updateErr = rcClient.UpdateStatus(rc) if updateErr == nil || i >= statusUpdateRetries { return updateErr } // Update the controller with the latest resource version for the next poll if rc, getErr = rcClient.Get(controller.Name); getErr != nil { // If the GET fails we can't trust status.Replicas anymore. This error // is bound to be more interesting than the update failure. return getErr } } } // OverlappingControllers sorts a list of controllers by creation timestamp, using their names as a tie breaker. type OverlappingControllers []api.ReplicationController func (o OverlappingControllers) Len() int { return len(o) } func (o OverlappingControllers) Swap(i, j int) { o[i], o[j] = o[j], o[i] } func (o OverlappingControllers) Less(i, j int) bool { if o[i].CreationTimestamp.Equal(o[j].CreationTimestamp) { return o[i].Name < o[j].Name } return o[i].CreationTimestamp.Before(o[j].CreationTimestamp) }<|fim▁end|>
<|file_name|>capturegame.py<|end_file_name|><|fim▁begin|>__author__ = 'Tom Schaul, [email protected]' from random import choice from scipy import zeros from twoplayergame import TwoPlayerGame # TODO: undo operation class CaptureGame(TwoPlayerGame): """ the capture game is a simplified version of the Go game: the first player to capture a stone wins! Pass moves are forbidden.""" # CHECKME: suicide allowed? BLACK = 1 WHITE = -1 EMPTY = 0 startcolor = BLACK def __init__(self, size, suicideenabled = True): """ the size of the board is generally between 3 and 19. """ self.size = size self.suicideenabled = suicideenabled self.reset() def _iterPos(self): """ an iterator over all the positions of the board. """ for i in range(self.size): for j in range(self.size): yield (i,j) def reset(self): """ empty the board. """ TwoPlayerGame.reset(self) self.movesDone = 0 self.b = {} for p in self._iterPos(): self.b[p] = self.EMPTY # which stone belongs to which group self.groups = {} # how many liberties does each group have self.liberties = {} @property def indim(self): return self.size**2 @property def outdim(self): return 2*self.size**2 def getBoardArray(self): """ an array with thow boolean values per position, indicating 'white stone present' and 'black stone present' respectively. """ a = zeros(self.outdim) for i, p in enumerate(self._iterPos()): if self.b[p] == self.WHITE: a[2*i] = 1 elif self.b[p] == self.BLACK: a[2*i+1] = 1 return a def isLegal(self, c, pos): if pos not in self.b: return False elif self.b[pos] != self.EMPTY: return False elif not self.suicideenabled: return not self._suicide(c, pos) return True def doMove(self, c, pos): """ the action is a (color, position) tuple, for the next stone to move. returns True if the move was legal. 
""" self.movesDone += 1 if pos == 'resign': self.winner = -c return True elif not self.isLegal(c, pos): return False elif self._suicide(c, pos): assert self.suicideenabled self.b[pos] = 'y' self.winner = -c return True elif self._capture(c, pos): self.winner = c self.b[pos] = 'x' return True else: self._setStone(c, pos) return True def getSensors(self): """ just a list of the board position states. """ return map(lambda x: x[1], sorted(self.b.items())) def __str__(self): s = '' for i in range(self.size): for j in range(self.size): val = self.b[(i,j)] if val == self.EMPTY: s += ' .' elif val == self.BLACK: s += ' X' elif val == self.WHITE: s += ' O' else: s += ' '+str(val) s += '\n' if self.winner: if self.winner == self.BLACK: w = 'Black (#)' elif self.winner == self.WHITE: w = 'White (*)' else: w = self.winner s += 'Winner: '+w s += ' (moves done:'+str(self.movesDone)+')\n' return s def _neighbors(self, pos): """ the 4 neighboring positions """ res = [] if pos[1] < self.size -1: res.append((pos[0], pos[1]+1)) if pos[1] > 0: res.append((pos[0], pos[1]-1)) if pos[0] < self.size -1: res.append((pos[0]+1, pos[1])) if pos[0] > 0: res.append((pos[0]-1, pos[1])) return res def _setStone(self, c, pos): """ set stone, and update liberties and groups. 
""" self.b[pos] = c merge = False self.groups[pos] = self.size*pos[0]+pos[1] freen = filter(lambda n: self.b[n] == self.EMPTY, self._neighbors(pos)) self.liberties[self.groups[pos]] = set(freen) for n in self._neighbors(pos): if self.b[n] == -c: self.liberties[self.groups[n]].difference_update([pos]) elif self.b[n] == c: if merge: newg = self.groups[pos] oldg = self.groups[n]<|fim▁hole|> else: # merging 2 groups for p in self.groups.keys(): if self.groups[p] == oldg: self.groups[p] = newg self.liberties[newg].update(self.liberties[oldg]) self.liberties[newg].difference_update([pos]) del self.liberties[oldg] else: # connect to this group del self.liberties[self.groups[pos]] self.groups[pos] = self.groups[n] self.liberties[self.groups[n]].update(freen) self.liberties[self.groups[n]].difference_update([pos]) merge = True def _suicide(self, c, pos): """ would putting a stone here be suicide for c? """ # any free neighbors? for n in self._neighbors(pos): if self.b[n] == self.EMPTY: return False # any friendly neighbor with extra liberties? for n in self._neighbors(pos): if self.b[n] == c: if len(self.liberties[self.groups[n]]) > 1: return False # capture all surrounding ennemies? if self._capture(c, pos): return False return True def _capture(self, c, pos): """ would putting a stone here lead to a capture? """ for n in self._neighbors(pos): if self.b[n] == -c: if len(self.liberties[self.groups[n]]) == 1: return True return False def getLiberties(self, pos): """ how many liberties does the stone at pos have? """ if self.b[pos] == self.EMPTY: return None return len(self.liberties[self.groups[pos]]) def getGroupSize(self, pos): """ what size is the worm that this stone is part of? 
""" if self.b[pos] == self.EMPTY: return None g = self.groups[pos] return len(filter(lambda x: x==g, self.groups.values())) def getLegals(self, c): """ return all the legal positions for a color """ return filter(lambda p: self.b[p] == self.EMPTY, self._iterPos()) def getAcceptable(self, c): """ return all legal positions for a color that don't commit suicide. """ return filter(lambda p: not self._suicide(c, p), self.getLegals(c)) def getKilling(self, c): """ return all legal positions for a color that immediately kill the opponent. """ return filter(lambda p: self._capture(c, p), self.getAcceptable(c)) def randomBoard(self, nbmoves): """ produce a random, undecided and legal capture-game board, after at most nbmoves. @return: the number of moves actually done. """ c = self.BLACK self.reset() for i in range(nbmoves): l = set(self.getAcceptable(c)) l.difference_update(self.getKilling(c)) if len(l) == 0: return i self._setStone(c, choice(list(l))) c = -c return nbmoves def giveHandicap(self, h, color = BLACK): i = 0 for pos in self._handicapIterator(): i += 1 if i > h: return if self.isLegal(color, pos): self._setStone(color, pos) def _handicapIterator(self): s = self.size assert s > 2 yield (1,1) if s > 3: # 4 corners yield (s-2, s-2) yield (1, s-2) yield (s-2, 1) if s > 4: for i in range(2,s-2): yield (i, 1) yield (i, s-2) yield (1, i) yield (s-2, i) def playToTheEnd(self, p1, p2): """ alternate playing moves between players until the game is over. """ assert p1.color == -p2.color i = 0 p1.game = self p2.game = self players = [p1, p2] while not self.gameOver(): p = players[i] self.performAction(p.getAction()) i = (i+1)%2<|fim▁end|>
if newg == oldg: self.liberties[newg].difference_update([pos])