prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>bucketSort.cpp<|end_file_name|><|fim▁begin|>#include<iostream>
using namespace std;
int a[]={1,255,8,6,25,47,14,35,58,75,96,158,657};
const int len = sizeof(a)/sizeof(int);
int b[10][len+1] = { 0 }; //将b全部置0
void bucketSort(int a[]);//桶排序函数
void distributeElments(int a[],int b[10][len+1],int digits);
void collectElments(int a[],int b[10][len+1]);
int numOfDigits(int a[]);<|fim▁hole|>int main()
{
cout<<"原始数组:";
for(int i=0;i<len;i++)
cout<<a[i]<<",";
cout<<endl;
bucketSort(a);
cout<<"排序后数组:";
for(int i=0;i<len;i++)
cout<<a[i]<<",";
cout<<endl;
return 0;
}
void bucketSort(int a[])
{
int digits=numOfDigits(a);
for(int i=1;i<=digits;i++)
{
distributeElments(a,b,i);
collectElments(a,b);
if(i!=digits)
zeroBucket(b);
}
}
int numOfDigits(int a[])
{
int largest=0;
for(int i=0;i<len;i++)//获取最大值
if(a[i]>largest)
largest=a[i];
int digits=0;//digits为最大值的位数
while(largest)
{
digits++;
largest/=10;
}
return digits;
}
void distributeElments(int a[],int b[10][len+1],int digits)
{
int divisor=10;//除数
for(int i=1;i<digits;i++)
divisor*=10;
for(int j=0;j<len;j++)
{
int numOfDigist=(a[j]%divisor-a[j]%(divisor/10))/(divisor/10);
//numOfDigits为相应的(divisor/10)位的值,如当divisor=10时,求的是个位数
int num=++b[numOfDigist][0];//用b中第一列的元素来储存每行中元素的个数
b[numOfDigist][num]=a[j];
}
}
void collectElments(int a[],int b[10][len+1])
{
int k=0;
for(int i=0;i<10;i++)
for(int j=1;j<=b[i][0];j++)
a[k++]=b[i][j];
}
void zeroBucket(int b[][len+1])
{
for(int i=0;i<10;i++)
for(int j=0;j<len+1;j++)
b[i][j]=0;
}<|fim▁end|>
|
void zeroBucket(int b[10][len+1]);//将b数组中的全部元素置0
|
<|file_name|>box.js<|end_file_name|><|fim▁begin|>// closure to avoid namespace collision
(function(){
//-------
var html_form = '<div id="text_box-form">\
<p class="popup_submit_wrap"><span class="wpp_helper_box"><a onclick="open_win(\'http://www.youtube.com/watch?v=Y_7snOfYato&list=PLI8Gq0WzVWvJ60avoe8rMyfoV5qZr3Atm&index=10\')">Видео урок</a></span><input type="button" id="text_box_submit" class="button-primary" value="Вставить" name="submit" /></p>\
<div class="ps_text_box_form coach_box">\
<div>\
<label class="ps_text_box wppage_checkbox ps_text_box_1"><input type="radio" name="text_box_style" value="1" /></label>\
<label class="ps_text_box wppage_checkbox ps_text_box_1_1"><input type="radio" name="text_box_style" value="1_1" /></label>\
<label class="ps_text_box wppage_checkbox ps_text_box_2"><input type="radio" name="text_box_style" value="2" /></label>\
<label class="ps_text_box wppage_checkbox ps_text_box_2_1"><input type="radio" name="text_box_style" value="2_1" /></label>\
<label class="ps_text_box wppage_checkbox ps_text_box_3"><input type="radio" name="text_box_style" value="3" /></label>\
<label class="ps_text_box wppage_checkbox ps_text_box_3_1"><input type="radio" name="text_box_style" value="3_1" /></label>\
<label class="ps_text_box wppage_checkbox ps_text_box_4"><input type="radio" name="text_box_style" value="4" /></label>\
<label class="ps_text_box wppage_checkbox ps_text_box_4_1"><input type="radio" name="text_box_style" value="4_1" /></label>\
<label class="ps_text_box wppage_checkbox ps_text_box_5"><input type="radio" name="text_box_style" value="5" /></label>\
<label class="ps_text_box wppage_checkbox ps_text_box_5_1"><input type="radio" name="text_box_style" value="5_1" /></label>\
<label class="ps_text_box wppage_checkbox ps_text_box_6"><input type="radio" name="text_box_style" value="6" /></label>\
</div>\
</div></div>';
<|fim▁hole|> // creates the plugin
tinymce.create('tinymce.plugins.textbox', {
// creates control instances based on the control's id.
// our button's id is "smartresponder_button"
createControl : function(id, controlManager) {
if (id == 'text_box_button') {
// creates the button
var button = controlManager.createButton('text_box_button', {
title : 'Боксы', // title of the button
image : plugin_url+'/wppage/i/box.png', // path to the button's image
onclick : function() {
// triggers the thickbox
tb_show( 'Боксы', '#TB_inline?inlineId=text_box-form' );
jQuery('#TB_ajaxContent').css({'width': '640', 'height': (jQuery('#TB_window').height()-50)+'px'});
jQuery(window).resize(function(){
jQuery('#TB_ajaxContent').css({'width': '640', 'height': (jQuery('#TB_window').height()-50)+'px'});
});
}
});
return button;
}
return null;
}
});
// registers the plugin. DON'T MISS THIS STEP!!!
tinymce.PluginManager.add('textbox', tinymce.plugins.textbox);
// executes this when the DOM is ready
jQuery(function(){
// creates a form to be displayed everytime the button is clicked
// you should achieve this using AJAX instead of direct html code like this
var form = jQuery(html_form);
form.appendTo('body').hide();
// handles the click event of the submit button
form.find('#text_box_submit').click(function(){
// defines the options and their default values
// again, this is not the most elegant way to do this
// but well, this gets the job done nonetheless
var options = {
'id' : ''
};
var text_box_style = jQuery('input[name=text_box_style]:checked').val();
var shortcode = '<p class="aligncenter"><div class="ps_text_box ps_text_box_'+text_box_style+'"><p class="ps_text_box_text">Текст</p></div></p><p> </p>';
// inserts the shortcode into the active editor
tinyMCE.activeEditor.execCommand('mceInsertContent', 0, shortcode);
// closes Thickbox
tb_remove();
});
});
})()<|fim▁end|>
|
//-------
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import unittest
import testRObject
import testVector
import testArray
import testDataFrame
import testFormula
import testFunction
import testEnvironment
import testRobjects
import testMethods
import testPackages
import testHelp
import testLanguage
# wrap this nicely so a warning is issued if no numpy present
import testNumpyConversions
def suite():
suite_RObject = testRObject.suite()
suite_Vector = testVector.suite()
suite_Array = testArray.suite()
suite_DataFrame = testDataFrame.suite()
suite_Function = testFunction.suite()
suite_Environment = testEnvironment.suite()
suite_Formula = testFormula.suite()
suite_Robjects = testRobjects.suite()
suite_NumpyConversions = testNumpyConversions.suite()
suite_Methods = testMethods.suite()
suite_Packages = testPackages.suite()
suite_Help = testHelp.suite()
suite_Language = testLanguage.suite()
alltests = unittest.TestSuite([suite_RObject,
suite_Vector,
suite_Array,
suite_DataFrame,
suite_Function,
suite_Environment,
suite_Formula,
suite_Robjects,
suite_Methods,<|fim▁hole|> suite_Help,
suite_Language
])
return alltests
def main():
r = unittest.TestResult()
suite().run(r)
return r
if __name__ == '__main__':
tr = unittest.TextTestRunner(verbosity = 2)
suite = suite()
tr.run(suite)<|fim▁end|>
|
suite_NumpyConversions,
suite_Packages,
|
<|file_name|>sidebar.js<|end_file_name|><|fim▁begin|>export default function() {
var links = [
{
icon: "fa-sign-in",
title: "Login",
url: "/login"
},
{
icon: "fa-dashboard",
title: "Dashboard",
url: "/"
},
{
icon: "fa-calendar",
title: "Scheduler",<|fim▁hole|>
return m("#main-sidebar", [
m("ul", {class: "navigation"},
links.map(function(link) {
return m("li", [
m("a", {href: link.url, config: m.route}, [
m("i.menu-icon", {class: "fa " + link.icon}), " ", link.title
])
]);
})
)
]);
}<|fim▁end|>
|
url: "/scheduler"
}
];
|
<|file_name|>participant.model.ts<|end_file_name|><|fim▁begin|>import { Converter, json } from './../../helpers';
import { Mastery, Rune } from './../general';
import { MatchSummoner, ParticipantStats, ParticipantTimeline } from './';
import { Tier } from './../../enums';
export class Participant {
public participantId: number;
public championId: number;
@json(MatchSummoner)
public summoner: MatchSummoner;
public spell1Id: number;
public spell2Id: number;
@json('highestAchievedSeasonTier', Converter.TierConverter)
public highestSeasonLeague: Tier;
@json(ParticipantStats)
public stats: ParticipantStats;
@json(ParticipantTimeline)
public timeline: ParticipantTimeline;
@json(Rune)
public runes: Rune[];
@json(Mastery)
public masteries: Mastery[];<|fim▁hole|>
constructor() {
this.participantId = void 0;
this.championId = void 0;
this.summoner = void 0;
this.spell1Id = void 0;
this.spell2Id = void 0;
this.highestSeasonLeague = void 0;
this.stats = void 0;
this.timeline = void 0;
this.runes = void 0;
this.masteries = void 0;
}
}<|fim▁end|>
| |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
from collections import OrderedDict
from django.conf import settings
from django.utils.encoding import python_2_unicode_compatible
from django.utils.text import slugify
from django.db import models
from decimal import Decimal
# Create your models here.
@python_2_unicode_compatible
class Installation(models.Model):
name = models.CharField(max_length=255, unique=True)
full_name = models.CharField(max_length=255)
is_active = models.BooleanField(default=False, help_text="Needs be active to show on dataverse.org map")
lat = models.DecimalField(max_digits=9, decimal_places=6, default=Decimal('0.0000'))
lng = models.DecimalField(max_digits=9, decimal_places=6, default=Decimal('0.0000'))
logo = models.ImageField(upload_to='logos/', null=True, blank=True)
marker = models.ImageField(upload_to='logos/', null=True, blank=True)
description = models.TextField(null=True, blank=True)
url = models.URLField(null=True, blank=True)
slug = models.SlugField(max_length=255, blank=True, help_text='auto-filled on save')
version = models.CharField(max_length=6, blank=True, help_text='Dataversion version. e.g. "4.3", "3.6.2", etc')
def __str__(self):
return self.name
class Meta:
ordering = ('name',)
def save(self, *args, **kwargs):
self.slug = slugify(self.name)
super(Installation, self).save(*args, **kwargs)
def view_logo_100(self):
#return self.logo.url
if self.logo:
return self.view_logo(force_width=100)
return 'n/a'
view_logo_100.allow_tags=True
def view_marker(self):
#return self.logo.url
if self.marker:
im = '<img src="%s" />' % (self.marker.url)
return im
return 'n/a'
view_marker.allow_tags=True<|fim▁hole|>
def view_logo(self, force_width=None):
#return self.logo.url
if self.logo:
if force_width:
im = ('<img src="{0}" width="{1}"/ >'
'<br />(width forced to {1}px)').format(\
self.logo.url, force_width)
return im
else:
im = '<img src="%s" />' % (self.logo.url)
return im
return 'n/a'
view_logo.allow_tags=True
def to_json(self, as_string=False, pretty=False):
"""Returns an OrderedDict of the installation attributes"""
od = OrderedDict()
od['id'] = self.id
od['name'] = self.name
od['full_name'] = self.full_name
od['is_active'] = self.is_active
od['description'] = self.description
od['lat'] = self.lat
od['lng'] = self.lng
od['logo'] = '%s://%s%s' % (settings.SWAGGER_SCHEME,
settings.SWAGGER_HOST,
self.logo.url)
#marker = self.marker
od['url'] = self.url
od['slug'] = self.slug
od['version'] = self.version if self.version else None
return od
@python_2_unicode_compatible
class Institution(models.Model):
name = models.CharField(max_length=255)
lat = models.DecimalField(max_digits=9, decimal_places=6, blank=False, default=Decimal('0.0000'))
lng = models.DecimalField(max_digits=9, decimal_places=6, blank=False, default=Decimal('0.0000'))
host = models.ForeignKey(
Installation,
on_delete=models.SET_NULL,
null=True,
blank=True
)
def __str__(self):
return self.name<|fim▁end|>
| |
<|file_name|>features.py<|end_file_name|><|fim▁begin|>from __future__ import division
import abc
import numpy as n
import scipy.linalg as linalg
import scipy.optimize as opt
import scipy.spatial.distance as dist
class Feature(object):
'''
Abstract class that represents a feature to be used
with :py:class:`pyransac.ransac.RansacFeature`
'''
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def __init__(self):
pass
@abc.abstractproperty
def min_points(self):
'''int: Minimum number of points needed to define the feature.'''
pass
@abc.abstractmethod
def points_distance(self,points):
'''
This function implements a method to compute the distance
of points from the feature.
Args:
points (numpy.ndarray): a numpy array of points the distance must be
computed of.
Returns:
distances (numpy.ndarray): the computed distances of the points from the feature.
'''
pass
@abc.abstractmethod
def print_feature(self,num_points):
'''
This method returns an array of x,y coordinates for
points that are in the feature.
Args:
num_points (numpy.ndarray): the number of points to be returned
Returns:
coords (numpy.ndarray): a num_points x 2 numpy array that contains
the points coordinates
'''
class Circle(Feature):
'''
Feature class for a Circle :math:`(x-x_c)^2 + (y-y_c)^2 - r = 0`
'''
min_points = 3
'''int: Minimum number of points needed to define the circle (3).'''
def __init__(self,points):
self.radius,self.xc,self.yc = self.__gen(points)
def __gen(self,points):
'''
Compute the radius and the center coordinates of a
circumference given three points
Args:
points (numpy.ndarray): a (3,2) numpy array, each row is a 2D Point.
Returns:
(tuple): A 3 elements tuple that contains the circumference radius
and center coordinates [radius,xc,yc]
Raises:
RuntimeError: If the circle computation does not succeed
a RuntimeError is raised.
'''
# Linear system for (D,E,F) in circle
# equations: D*xi + E*yi + F = -(xi**2 + yi**2)
# where xi, yi are the coordinate of the i-th point.
# Generating A matrix
A = n.array([(x,y,1) for x,y in points])
# Generating rhs
rhs = n.array([-(x**2+y**2) for x,y in points])
try:
#Solving linear system
D,E,F = linalg.lstsq(A,rhs)[0]
except linalg.LinAlgError:
raise RuntimeError('Circle calculation not successful. Please\
check the input data, probable collinear points')
xc = -D/2
yc = -E/2
r = n.sqrt(xc**2+yc**2-F)
return (r,xc,yc)
def points_distance(self,points):
r'''
Compute the distance of the points from the feature
:math:`d = \left| \sqrt{(x_i - x_c)^2 + (y_i-y_c)^2} - r \right|`
Args:
points (numpy.ndarray): a (3,2) numpy array, each row is a 2D Point.
Returns:
d (numpy.ndarray): the computed distances of the points from the feature.
'''
xa = n.array([self.xc,self.yc]).reshape((1,2))
d = n.abs(dist.cdist(points,xa) - self.radius)
return d
def print_feature(self, num_points):
'''
This method returns an array of x,y coordinates for
points that are in the feature.
Args:
num_points (numpy.ndarray): the number of points to be returned
Returns:
coords (numpy.ndarray): a num_points x 2 numpy array that contains
the points coordinates
'''
theta = n.linspace(0,2*n.pi,num_points)
x = self.xc + self.radius*n.cos(theta)
y = self.yc + self.radius*n.sin(theta)
return n.vstack((x,y))
<|fim▁hole|> Feature Class for an exponential curve :math:`y=ax^{k} + b`
'''
min_points = 3
def __init__(self,points):
self.a,self.k,self.b = self.__gen(points)
def __gen(self,points):
'''
Compute the three parameters that univocally determine the
exponential curve
Args:
points(numpy.ndarray): a (3,2) numpy array, each row is a 2D Point.
Returns:
exp(numpy.ndarray): A (3,) numpy array that contains the a,n,b parameters
[a,k,b]
Raises:
RuntimeError: If the circle computation does not succeed
a RuntimeError is raised.
'''
def exponential(x,points):
''' Non linear system function to use
with :py:func:`scypy.optimize.root`
'''
aa = x[0]
nn = x[1]
bb = x[2]
f = n.zeros((3,))
f[0] = n.abs(aa)*n.power(points[0,0],nn)+bb - points[0,1]
f[1] = n.abs(aa)*n.power(points[1,0],nn)+bb - points[1,1]
f[2] = n.abs(aa)*n.power(points[2,0],nn)+bb - points[2,1]
return f
exp = opt.root(exponential,[1,1,1],points,method='lm')['x']
return exp
def points_distance(self,points):
r'''
Compute the distance of the points from the feature
:math:`d = \sqrt{(x_i - x_c)^2 + (y_i-y_c)^2}`
Args:
points (numpy.ndarray): a (3,2) numpy array, each row is a 2D Point.
Returns:
d (numpy.ndarray): the computed distances of the points from the feature.
'''
x = points[:,0]
xa = n.array([x,self.a*n.power(x,self.k)+self.b])
xa = xa.T
d = dist.cdist(points,xa)
return n.diag(d)
def print_feature(self, num_points, a,b):
'''
This method returns an array of x,y coordinates for
points that are in the feature in the interval [a,b].
Args:
num_points (numpy.ndarray): the number of points to be returned
a (float): left end of the interval
b (float): right end of the interval
Returns:
coords (numpy.ndarray): a num_points x 2 numpy array that contains
the points coordinates
'''
x = n.linspace(a,b,num_points)
y = self.a*x**self.k + self.b
return n.vstack((x,y))<|fim▁end|>
|
class Exponential (Feature):
'''
|
<|file_name|>btformats.py<|end_file_name|><|fim▁begin|># This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#<|fim▁hole|># MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Written by Bram Cohen
import re
from BitTorrent import BTFailure
allowed_path_re = re.compile(r'^[^/\\.~][^/\\]*$')
ints = (long, int)
def check_info(info, check_paths=True):
if type(info) != dict:
raise BTFailure, 'bad metainfo - not a dictionary'
pieces = info.get('pieces')
if type(pieces) != str or len(pieces) % 20 != 0:
raise BTFailure, 'bad metainfo - bad pieces key'
piecelength = info.get('piece length')
if type(piecelength) not in ints or piecelength <= 0:
raise BTFailure, 'bad metainfo - illegal piece length'
name = info.get('name')
if type(name) != str:
raise BTFailure, 'bad metainfo - bad name'
if not allowed_path_re.match(name):
raise BTFailure, 'name %s disallowed for security reasons' % name
if info.has_key('files') == info.has_key('length'):
raise BTFailure, 'single/multiple file mix'
if info.has_key('length'):
length = info.get('length')
if type(length) not in ints or length < 0:
raise BTFailure, 'bad metainfo - bad length'
else:
files = info.get('files')
if type(files) != list:
raise BTFailure, 'bad metainfo - "files" is not a list of files'
for f in files:
if type(f) != dict:
raise BTFailure, 'bad metainfo - bad file value'
length = f.get('length')
if type(length) not in ints or length < 0:
raise BTFailure, 'bad metainfo - bad length'
path = f.get('path')
if type(path) != list or path == []:
raise BTFailure, 'bad metainfo - bad path'
for p in path:
if type(p) != str:
raise BTFailure, 'bad metainfo - bad path dir'
if check_paths and not allowed_path_re.match(p):
raise BTFailure, 'path %s disallowed for security reasons' % p
f = ['/'.join(x['path']) for x in files]
f.sort()
i = iter(f)
try:
name2 = i.next()
while True:
name1 = name2
name2 = i.next()
if name2.startswith(name1):
if name1 == name2:
raise BTFailure, 'bad metainfo - duplicate path'
elif name2[len(name1)] == '/':
raise BTFailure('bad metainfo - name used as both '
'file and subdirectory name')
except StopIteration:
pass
def check_message(message, check_paths=True):
if type(message) != dict:
raise BTFailure, 'bad metainfo - wrong object type'
check_info(message.get('info'), check_paths)
if type(message.get('announce')) != str:
raise BTFailure, 'bad metainfo - no announce URL string'
def check_peers(message):
if type(message) != dict:
raise BTFailure
if message.has_key('failure reason'):
if type(message['failure reason']) != str:
raise BTFailure, 'non-text failure reason'
return
if message.has_key('warning message'):
if type(message['warning message']) != str:
raise BTFailure, 'non-text warning message'
peers = message.get('peers')
if type(peers) == list:
for p in peers:
if type(p) != dict:
raise BTFailure, 'invalid entry in peer list'
if type(p.get('ip')) != str:
raise BTFailure, 'invalid entry in peer list'
port = p.get('port')
if type(port) not in ints or p <= 0:
raise BTFailure, 'invalid entry in peer list'
if p.has_key('peer id'):
peerid = p.get('peer id')
if type(peerid) != str or len(peerid) != 20:
raise BTFailure, 'invalid entry in peer list'
elif type(peers) != str or len(peers) % 6 != 0:
raise BTFailure, 'invalid peer list'
interval = message.get('interval', 1)
if type(interval) not in ints or interval <= 0:
raise BTFailure, 'invalid announce interval'
minint = message.get('min interval', 1)
if type(minint) not in ints or minint <= 0:
raise BTFailure, 'invalid min announce interval'
if type(message.get('tracker id', '')) != str:
raise BTFailure, 'invalid tracker id'
npeers = message.get('num peers', 0)
if type(npeers) not in ints or npeers < 0:
raise BTFailure, 'invalid peer count'
dpeers = message.get('done peers', 0)
if type(dpeers) not in ints or dpeers < 0:
raise BTFailure, 'invalid seed count'
last = message.get('last', 0)
if type(last) not in ints or last < 0:
raise BTFailure, 'invalid "last" entry'<|fim▁end|>
|
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
<|file_name|>rbprmbuilder.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright (c) 2014 CNRS
# Author: Steve Tonneau
#
# This file is part of hpp-rbprm-corba.
# hpp-rbprm-corba is free software: you can redistribute it
# and/or modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either version
# 3 of the License, or (at your option) any later version.
#
# hpp-manipulation-corba is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Lesser Public License for more details. You should have
# received a copy of the GNU Lesser General Public License along with
# hpp-manipulation-corba. If not, see
# <http://www.gnu.org/licenses/>.
from hpp.corbaserver.rbprm import Client as RbprmClient
from hpp.corbaserver import Client as BasicClient
import hpp.gepetto.blender.exportmotion as em
## Corba clients to the various servers
#
class CorbaClient:
"""
Container for corba clients to various interfaces.
"""
def __init__ (self):
self.basic = BasicClient ()
self.rbprm = RbprmClient ()
## Load and handle a RbprmDevice robot for rbprm planning
#
# A RbprmDevice robot is a dual representation of a robots. One robot describes the <|fim▁hole|> self.tf_root = "base_link"
self.rootJointType = dict()
self.client = CorbaClient ()
self.load = load
## Virtual function to load the robot model.
#
# \param urdfName urdf description of the robot trunk,
# \param urdfNameroms either a string, or an array of strings, indicating the urdf of the different roms to add.
# \param rootJointType type of root joint among ("freeflyer", "planar",
# "anchor"),
# \param meshPackageName name of the meshpackage from where the robot mesh will be loaded
# \param packageName name of the package from where the robot will be loaded
# \param urdfSuffix optional suffix for the urdf of the robot package
# \param srdfSuffix optional suffix for the srdf of the robot package
def loadModel (self, urdfName, urdfNameroms, rootJointType, meshPackageName, packageName, urdfSuffix, srdfSuffix):
if(isinstance(urdfNameroms, list)):
for urdfNamerom in urdfNameroms:
self.client.rbprm.rbprm.loadRobotRomModel(urdfNamerom, rootJointType, packageName, urdfNamerom, urdfSuffix, srdfSuffix)
else:
self.client.rbprm.rbprm.loadRobotRomModel(urdfNameroms, rootJointType, packageName, urdfNameroms, urdfSuffix, srdfSuffix)
self.client.rbprm.rbprm.loadRobotCompleteModel(urdfName, rootJointType, packageName, urdfName, urdfSuffix, srdfSuffix)
self.name = urdfName
self.displayName = urdfName
self.tf_root = "base_link"
self.rootJointType = rootJointType
self.jointNames = self.client.basic.robot.getJointNames ()
self.allJointNames = self.client.basic.robot.getAllJointNames ()
self.client.basic.robot.meshPackageName = meshPackageName
self.meshPackageName = meshPackageName
self.rankInConfiguration = dict ()
self.rankInVelocity = dict ()
self.packageName = packageName
self.urdfName = urdfName
self.urdfSuffix = urdfSuffix
self.srdfSuffix = srdfSuffix
rankInConfiguration = rankInVelocity = 0
for j in self.jointNames:
self.rankInConfiguration [j] = rankInConfiguration
rankInConfiguration += self.client.basic.robot.getJointConfigSize (j)
self.rankInVelocity [j] = rankInVelocity
rankInVelocity += self.client.basic.robot.getJointNumberDof (j)
## Init RbprmShooter
#
def initshooter (self):
return self.client.rbprm.rbprm.initshooter ()
## Sets limits on robot orientation, described according to Euler's ZYX rotation order
#
# \param bounds 6D vector with the lower and upperBound for each rotation axis in sequence
def boundSO3 (self, bounds):
return self.client.rbprm.rbprm.boundSO3 (bounds)
## Specifies a preferred affordance for a given rom.
# This constrains the planner to accept a rom configuration only if
# it collides with a surface the normal of which has these properties.
#
# \param rom name of the rome,
# \param affordances list of affordance names
def setAffordanceFilter (self, rom, affordances):
return self.client.rbprm.rbprm.setAffordanceFilter (rom, affordances)
## Specifies a rom constraint for the planner.
# A configuration will be valid if and only if the considered rom collides
# with the environment.
#
# \param romFilter array of roms indicated by name, which determine the constraint.
def setFilter (self, romFilter):
return self.client.rbprm.rbprm.setFilter (romFilter)
## Export a computed path for blender
#
# \param problem the problem associated with the path computed for the robot
# \param stepsize increment along the path
# \param pathId if of the considered path
# \param filename name of the output file where to save the output
def exportPath (self, viewer, problem, pathId, stepsize, filename):
em.exportPath(viewer, self.client.basic.robot, problem, pathId, stepsize, filename)
## \name Degrees of freedom
# \{
## Get size of configuration
# \return size of configuration
def getConfigSize (self):
return self.client.basic.robot.getConfigSize ()
# Get size of velocity
# \return size of velocity
def getNumberDof (self):
return self.client.basic.robot.getNumberDof ()
## \}
## \name Joints
#\{
## Get joint names in the same order as in the configuration.
def getJointNames (self):
return self.client.basic.robot.getJointNames ()
## Get joint names in the same order as in the configuration.
def getAllJointNames (self):
return self.client.basic.robot.getAllJointNames ()
## Get joint position.
def getJointPosition (self, jointName):
return self.client.basic.robot.getJointPosition (jointName)
## Set static position of joint in its parent frame
def setJointPosition (self, jointName, position):
return self.client.basic.robot.setJointPosition (jointName, position)
## Get joint number degrees of freedom.
def getJointNumberDof (self, jointName):
return self.client.basic.robot.getJointNumberDof (jointName)
## Get joint number config size.
def getJointConfigSize (self, jointName):
return self.client.basic.robot.getJointConfigSize (jointName)
## set bounds for the joint
def setJointBounds (self, jointName, inJointBound):
return self.client.basic.robot.setJointBounds (jointName, inJointBound)
## Set bounds on the translation part of the freeflyer joint.
#
# Valid only if the robot has a freeflyer joint.
def setTranslationBounds (self, xmin, xmax, ymin, ymax, zmin, zmax):
self.client.basic.robot.setJointBounds \
(self.displayName + "base_joint_x", [xmin, xmax])
self.client.basic.robot.setJointBounds \
(self.displayName + "base_joint_y", [ymin, ymax])
self.client.basic.robot.setJointBounds \
(self.displayName + "base_joint_z", [zmin, zmax])
## Get link position in joint frame
#
# Joints are oriented in a different way as in urdf standard since
# rotation and uni-dimensional translation joints act around or along
# their x-axis. This method returns the position of the urdf link in
# world frame.
#
# \param jointName name of the joint
# \return position of the link in world frame.
def getLinkPosition (self, jointName):
return self.client.basic.robot.getLinkPosition (jointName)
## Get link name
#
# \param jointName name of the joint,
# \return name of the link.
def getLinkName (self, jointName):
return self.client.basic.robot.getLinkName (jointName)
## \}
## \name Access to current configuration
#\{
## Set current configuration of composite robot
#
# \param q configuration of the composite robot
def setCurrentConfig (self, q):
self.client.basic.robot.setCurrentConfig (q)
## Get current configuration of composite robot
#
# \return configuration of the composite robot
def getCurrentConfig (self):
return self.client.basic.robot.getCurrentConfig ()
## Shoot random configuration
# \return dofArray Array of degrees of freedom.
def shootRandomConfig(self):
return self.client.basic.robot.shootRandomConfig ()
## \}
## \name Bodies
# \{
## Get the list of objects attached to a joint.
# \param inJointName name of the joint.
# \return list of names of CollisionObject attached to the body.
def getJointInnerObjects (self, jointName):
return self.client.basic.robot.getJointInnerObjects (jointName)
## Get list of collision objects tested with the body attached to a joint
# \param inJointName name of the joint.
# \return list of names of CollisionObject
def getJointOuterObjects (self, jointName):
return self.client.basic.robot.getJointOuterObjects (jointName)
## Get position of robot object
# \param objectName name of the object.
# \return transformation as a hpp.Transform object
def getObjectPosition (self, objectName):
return Transform (self.client.basic.robot.getObjectPosition
(objectName))
## \brief Remove an obstacle from outer objects of a joint body
#
# \param objectName name of the object to remove,
# \param jointName name of the joint owning the body,
# \param collision whether collision with object should be computed,
# \param distance whether distance to object should be computed.
def removeObstacleFromJoint (self, objectName, jointName, collision,
distance):
return self.client.basic.obstacle.removeObstacleFromJoint \
(objectName, jointName, collision, distance)
## \}
## \name Collision checking and distance computation
# \{
## Test collision with obstacles and auto-collision.
#
# Check whether current configuration of robot is valid by calling
# CkwsDevice::collisionTest ().
# \return whether configuration is valid
# \note Deprecated. Use isConfigValid instead.
def collisionTest (self):
print "Deprecated. Use isConfigValid instead"
return self.client.basic.robot.collisionTest ()
## Check the validity of a configuration.
#
# Check whether a configuration of robot is valid.
# \param cfg a configuration
# \return whether configuration is valid
def isConfigValid (self, cfg):
return self.client.basic.robot.isConfigValid (cfg)
## Compute distances between bodies and obstacles
#
# \return list of distances,
# \return names of the objects belonging to a body
# \return names of the objects tested with inner objects,
# \return closest points on the body,
# \return closest points on the obstacles
# \note outer objects for a body can also be inner objects of another
# body.
def distancesToCollision (self):
return self.client.basic.robot.distancesToCollision ()
## \}
## \}
## \name Mass and inertia
# \{
## Get mass of robot
def getMass (self):
return self.client.basic.robot.getMass ()
## Get position of center of mass
def getCenterOfMass (self):
return self.client.basic.robot.getCenterOfMass ()
## Get Jacobian of the center of mass
def getJacobianCenterOfMass (self):
return self.client.basic.robot.getJacobianCenterOfMass ()
##\}
## Get the dimension of the extra configuration space
def getDimensionExtraConfigSpace(self):
return self.client.basic.robot.getDimensionExtraConfigSpace()
## Convert a direction vector to a quaternion (use Eigen::Quaterniond::FromTwoVectors with Z vector)
# \param u the vector director
def quaternionFromVector(self,vector):
return self.client.basic.robot.quaternionFromVector(vector)<|fim▁end|>
|
# trunk of the robot, and a set of robots describe the range of motion of each limb of the robot.
class Builder (object):
## Constructor
def __init__ (self, load = True):
|
<|file_name|>synonyms.py<|end_file_name|><|fim▁begin|>#Synonyms experiment. Pass a string to see its "synonyms"
from pyspark.sql import SparkSession, Row
from pyspark.ml.feature import Word2Vec, Tokenizer, StopWordsRemover, Word2VecModel
import sys;
from string import punctuation
def strip_punctuation(arr):
return [''.join(c for c in s if c not in punctuation) for s in arr]
def main():
spark = SparkSession.builder \
.appName("Spark CV-job ad matching") \
.config("spark.some.config.option", "some-value") \
.master("local[*]") \
.getOrCreate()
df_categories = spark.read.json("allcategories4rdd/allcategories.jsonl")
tokenizer = Tokenizer(inputCol="skillText", outputCol="words")
tokenized = tokenizer.transform(df_categories)
remover = StopWordsRemover(inputCol="words", outputCol="filtered")
removed = remover.transform(tokenized)
stripped = removed.select('filtered').rdd.map(lambda x: strip_punctuation(x[0]))\
.map(lambda x: Row(filtered=x)).toDF(['filtered'])<|fim▁hole|>
# word2vec = Word2Vec(vectorSize=100, inputCol="filtered", outputCol="result")
# model = word2vec.fit(stripped)
#model.save("word2vec-model")
model = Word2VecModel.load("word2vec-model")
synonyms = model.findSynonyms(sys.argv[1], 10)
synonyms.show(truncate=False)
# for word, cosine_distance in synonyms:
# print("{}: {}".format(word, cosine_distance))
if __name__ == '__main__':
main()<|fim▁end|>
| |
<|file_name|>aes.rs<|end_file_name|><|fim▁begin|>/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
pub const ECB: usize=0;
pub const CBC: usize=1;
pub const CFB1: usize=2;
pub const CFB2: usize=3;
pub const CFB4: usize=5;
pub const OFB1: usize=14;
pub const OFB2: usize=15;
pub const OFB4: usize=17;
pub const OFB8:usize=21;
pub const OFB16: usize=29;
pub const CTR1: usize=30;
pub const CTR2: usize=31;
pub const CTR4: usize=33;
pub const CTR8: usize=37;
pub const CTR16: usize=45;
const INCO : [u8;4] = [0xB,0xD,0x9,0xE]; /* Inverse Coefficients */
const PTAB : [u8;256] = [
1, 3, 5, 15, 17, 51, 85, 255, 26, 46, 114, 150, 161, 248, 19, 53,
95, 225, 56, 72, 216, 115, 149, 164, 247, 2, 6, 10, 30, 34, 102, 170,
229, 52, 92, 228, 55, 89, 235, 38, 106, 190, 217, 112, 144, 171, 230, 49,
83, 245, 4, 12, 20, 60, 68, 204, 79, 209, 104, 184, 211, 110, 178, 205,
76, 212, 103, 169, 224, 59, 77, 215, 98, 166, 241, 8, 24, 40, 120, 136,
131, 158, 185, 208, 107, 189, 220, 127, 129, 152, 179, 206, 73, 219, 118, 154,
181, 196, 87, 249, 16, 48, 80, 240, 11, 29, 39, 105, 187, 214, 97, 163,
254, 25, 43, 125, 135, 146, 173, 236, 47, 113, 147, 174, 233, 32, 96, 160,
251, 22, 58, 78, 210, 109, 183, 194, 93, 231, 50, 86, 250, 21, 63, 65,
195, 94, 226, 61, 71, 201, 64, 192, 91, 237, 44, 116, 156, 191, 218, 117,
159, 186, 213, 100, 172, 239, 42, 126, 130, 157, 188, 223, 122, 142, 137, 128,
155, 182, 193, 88, 232, 35, 101, 175, 234, 37, 111, 177, 200, 67, 197, 84,
252, 31, 33, 99, 165, 244, 7, 9, 27, 45, 119, 153, 176, 203, 70, 202,
69, 207, 74, 222, 121, 139, 134, 145, 168, 227, 62, 66, 198, 81, 243, 14,
18, 54, 90, 238, 41, 123, 141, 140, 143, 138, 133, 148, 167, 242, 13, 23,
57, 75, 221, 124, 132, 151, 162, 253, 28, 36, 108, 180, 199, 82, 246, 1];
const LTAB : [u8;256] = [
0, 255, 25, 1, 50, 2, 26, 198, 75, 199, 27, 104, 51, 238, 223, 3,
100, 4, 224, 14, 52, 141, 129, 239, 76, 113, 8, 200, 248, 105, 28, 193,
125, 194, 29, 181, 249, 185, 39, 106, 77, 228, 166, 114, 154, 201, 9, 120,
101, 47, 138, 5, 33, 15, 225, 36, 18, 240, 130, 69, 53, 147, 218, 142,
150, 143, 219, 189, 54, 208, 206, 148, 19, 92, 210, 241, 64, 70, 131, 56,
102, 221, 253, 48, 191, 6, 139, 98, 179, 37, 226, 152, 34, 136, 145, 16,
126, 110, 72, 195, 163, 182, 30, 66, 58, 107, 40, 84, 250, 133, 61, 186,
43, 121, 10, 21, 155, 159, 94, 202, 78, 212, 172, 229, 243, 115, 167, 87,
175, 88, 168, 80, 244, 234, 214, 116, 79, 174, 233, 213, 231, 230, 173, 232,
44, 215, 117, 122, 235, 22, 11, 245, 89, 203, 95, 176, 156, 169, 81, 160,
127, 12, 246, 111, 23, 196, 73, 236, 216, 67, 31, 45, 164, 118, 123, 183,
204, 187, 62, 90, 251, 96, 177, 134, 59, 82, 161, 108, 170, 85, 41, 157,
151, 178, 135, 144, 97, 190, 220, 252, 188, 149, 207, 205, 55, 63, 91, 209,
83, 57, 132, 60, 65, 162, 109, 71, 20, 42, 158, 93, 86, 242, 211, 171,
68, 17, 146, 217, 35, 32, 46, 137, 180, 124, 184, 38, 119, 153, 227, 165,
103, 74, 237, 222, 197, 49, 254, 24, 13, 99, 140, 128, 192, 247, 112, 7];
const FBSUB : [u8;256] = [
99, 124, 119, 123, 242, 107, 111, 197, 48, 1, 103, 43, 254, 215, 171, 118,
202, 130, 201, 125, 250, 89, 71, 240, 173, 212, 162, 175, 156, 164, 114, 192,
183, 253, 147, 38, 54, 63, 247, 204, 52, 165, 229, 241, 113, 216, 49, 21,
4, 199, 35, 195, 24, 150, 5, 154, 7, 18, 128, 226, 235, 39, 178, 117,
9, 131, 44, 26, 27, 110, 90, 160, 82, 59, 214, 179, 41, 227, 47, 132,
83, 209, 0, 237, 32, 252, 177, 91, 106, 203, 190, 57, 74, 76, 88, 207,
208, 239, 170, 251, 67, 77, 51, 133, 69, 249, 2, 127, 80, 60, 159, 168,
81, 163, 64, 143, 146, 157, 56, 245, 188, 182, 218, 33, 16, 255, 243, 210,
205, 12, 19, 236, 95, 151, 68, 23, 196, 167, 126, 61, 100, 93, 25, 115,
96, 129, 79, 220, 34, 42, 144, 136, 70, 238, 184, 20, 222, 94, 11, 219,
224, 50, 58, 10, 73, 6, 36, 92, 194, 211, 172, 98, 145, 149, 228, 121,
231, 200, 55, 109, 141, 213, 78, 169, 108, 86, 244, 234, 101, 122, 174, 8,
186, 120, 37, 46, 28, 166, 180, 198, 232, 221, 116, 31, 75, 189, 139, 138,
112, 62, 181, 102, 72, 3, 246, 14, 97, 53, 87, 185, 134, 193, 29, 158,
225, 248, 152, 17, 105, 217, 142, 148, 155, 30, 135, 233, 206, 85, 40, 223,
140, 161, 137, 13, 191, 230, 66, 104, 65, 153, 45, 15, 176, 84, 187, 22];
const RBSUB : [u8;256] = [
82, 9, 106, 213, 48, 54, 165, 56, 191, 64, 163, 158, 129, 243, 215, 251,
124, 227, 57, 130, 155, 47, 255, 135, 52, 142, 67, 68, 196, 222, 233, 203,
84, 123, 148, 50, 166, 194, 35, 61, 238, 76, 149, 11, 66, 250, 195, 78,
8, 46, 161, 102, 40, 217, 36, 178, 118, 91, 162, 73, 109, 139, 209, 37,
114, 248, 246, 100, 134, 104, 152, 22, 212, 164, 92, 204, 93, 101, 182, 146,
108, 112, 72, 80, 253, 237, 185, 218, 94, 21, 70, 87, 167, 141, 157, 132,
144, 216, 171, 0, 140, 188, 211, 10, 247, 228, 88, 5, 184, 179, 69, 6,
208, 44, 30, 143, 202, 63, 15, 2, 193, 175, 189, 3, 1, 19, 138, 107,
58, 145, 17, 65, 79, 103, 220, 234, 151, 242, 207, 206, 240, 180, 230, 115,
150, 172, 116, 34, 231, 173, 53, 133, 226, 249, 55, 232, 28, 117, 223, 110,
71, 241, 26, 113, 29, 41, 197, 137, 111, 183, 98, 14, 170, 24, 190, 27,
252, 86, 62, 75, 198, 210, 121, 32, 154, 219, 192, 254, 120, 205, 90, 244,
31, 221, 168, 51, 136, 7, 199, 49, 177, 18, 16, 89, 39, 128, 236, 95,
96, 81, 127, 169, 25, 181, 74, 13, 45, 229, 122, 159, 147, 201, 156, 239,
160, 224, 59, 77, 174, 42, 245, 176, 200, 235, 187, 60, 131, 83, 153, 97,
23, 43, 4, 126, 186, 119, 214, 38, 225, 105, 20, 99, 85, 33, 12, 125];
const RCO : [u8;16] = [1,2,4,8,16,32,64,128,27,54,108,216,171,77,154,47];
const FTABLE : [u32;256] = [
0xa56363c6,0x847c7cf8,0x997777ee,0x8d7b7bf6,0xdf2f2ff,0xbd6b6bd6,
0xb16f6fde,0x54c5c591,0x50303060,0x3010102,0xa96767ce,0x7d2b2b56,
0x19fefee7,0x62d7d7b5,0xe6abab4d,0x9a7676ec,0x45caca8f,0x9d82821f,
0x40c9c989,0x877d7dfa,0x15fafaef,0xeb5959b2,0xc947478e,0xbf0f0fb,
0xecadad41,0x67d4d4b3,0xfda2a25f,0xeaafaf45,0xbf9c9c23,0xf7a4a453,
0x967272e4,0x5bc0c09b,0xc2b7b775,0x1cfdfde1,0xae93933d,0x6a26264c,<|fim▁hole|> 0x26ebebcd,0x6927274e,0xcdb2b27f,0x9f7575ea,0x1b090912,0x9e83831d,
0x742c2c58,0x2e1a1a34,0x2d1b1b36,0xb26e6edc,0xee5a5ab4,0xfba0a05b,
0xf65252a4,0x4d3b3b76,0x61d6d6b7,0xceb3b37d,0x7b292952,0x3ee3e3dd,
0x712f2f5e,0x97848413,0xf55353a6,0x68d1d1b9,0x0,0x2cededc1,
0x60202040,0x1ffcfce3,0xc8b1b179,0xed5b5bb6,0xbe6a6ad4,0x46cbcb8d,
0xd9bebe67,0x4b393972,0xde4a4a94,0xd44c4c98,0xe85858b0,0x4acfcf85,
0x6bd0d0bb,0x2aefefc5,0xe5aaaa4f,0x16fbfbed,0xc5434386,0xd74d4d9a,
0x55333366,0x94858511,0xcf45458a,0x10f9f9e9,0x6020204,0x817f7ffe,
0xf05050a0,0x443c3c78,0xba9f9f25,0xe3a8a84b,0xf35151a2,0xfea3a35d,
0xc0404080,0x8a8f8f05,0xad92923f,0xbc9d9d21,0x48383870,0x4f5f5f1,
0xdfbcbc63,0xc1b6b677,0x75dadaaf,0x63212142,0x30101020,0x1affffe5,
0xef3f3fd,0x6dd2d2bf,0x4ccdcd81,0x140c0c18,0x35131326,0x2fececc3,
0xe15f5fbe,0xa2979735,0xcc444488,0x3917172e,0x57c4c493,0xf2a7a755,
0x827e7efc,0x473d3d7a,0xac6464c8,0xe75d5dba,0x2b191932,0x957373e6,
0xa06060c0,0x98818119,0xd14f4f9e,0x7fdcdca3,0x66222244,0x7e2a2a54,
0xab90903b,0x8388880b,0xca46468c,0x29eeeec7,0xd3b8b86b,0x3c141428,
0x79dedea7,0xe25e5ebc,0x1d0b0b16,0x76dbdbad,0x3be0e0db,0x56323264,
0x4e3a3a74,0x1e0a0a14,0xdb494992,0xa06060c,0x6c242448,0xe45c5cb8,
0x5dc2c29f,0x6ed3d3bd,0xefacac43,0xa66262c4,0xa8919139,0xa4959531,
0x37e4e4d3,0x8b7979f2,0x32e7e7d5,0x43c8c88b,0x5937376e,0xb76d6dda,
0x8c8d8d01,0x64d5d5b1,0xd24e4e9c,0xe0a9a949,0xb46c6cd8,0xfa5656ac,
0x7f4f4f3,0x25eaeacf,0xaf6565ca,0x8e7a7af4,0xe9aeae47,0x18080810,
0xd5baba6f,0x887878f0,0x6f25254a,0x722e2e5c,0x241c1c38,0xf1a6a657,
0xc7b4b473,0x51c6c697,0x23e8e8cb,0x7cdddda1,0x9c7474e8,0x211f1f3e,
0xdd4b4b96,0xdcbdbd61,0x868b8b0d,0x858a8a0f,0x907070e0,0x423e3e7c,
0xc4b5b571,0xaa6666cc,0xd8484890,0x5030306,0x1f6f6f7,0x120e0e1c,
0xa36161c2,0x5f35356a,0xf95757ae,0xd0b9b969,0x91868617,0x58c1c199,
0x271d1d3a,0xb99e9e27,0x38e1e1d9,0x13f8f8eb,0xb398982b,0x33111122,
0xbb6969d2,0x70d9d9a9,0x898e8e07,0xa7949433,0xb69b9b2d,0x221e1e3c,
0x92878715,0x20e9e9c9,0x49cece87,0xff5555aa,0x78282850,0x7adfdfa5,
0x8f8c8c03,0xf8a1a159,0x80898909,0x170d0d1a,0xdabfbf65,0x31e6e6d7,
0xc6424284,0xb86868d0,0xc3414182,0xb0999929,0x772d2d5a,0x110f0f1e,
0xcbb0b07b,0xfc5454a8,0xd6bbbb6d,0x3a16162c];
const RTABLE : [u32;256] = [
0x50a7f451,0x5365417e,0xc3a4171a,0x965e273a,0xcb6bab3b,0xf1459d1f,
0xab58faac,0x9303e34b,0x55fa3020,0xf66d76ad,0x9176cc88,0x254c02f5,
0xfcd7e54f,0xd7cb2ac5,0x80443526,0x8fa362b5,0x495ab1de,0x671bba25,
0x980eea45,0xe1c0fe5d,0x2752fc3,0x12f04c81,0xa397468d,0xc6f9d36b,
0xe75f8f03,0x959c9215,0xeb7a6dbf,0xda595295,0x2d83bed4,0xd3217458,
0x2969e049,0x44c8c98e,0x6a89c275,0x78798ef4,0x6b3e5899,0xdd71b927,
0xb64fe1be,0x17ad88f0,0x66ac20c9,0xb43ace7d,0x184adf63,0x82311ae5,
0x60335197,0x457f5362,0xe07764b1,0x84ae6bbb,0x1ca081fe,0x942b08f9,
0x58684870,0x19fd458f,0x876cde94,0xb7f87b52,0x23d373ab,0xe2024b72,
0x578f1fe3,0x2aab5566,0x728ebb2,0x3c2b52f,0x9a7bc586,0xa50837d3,
0xf2872830,0xb2a5bf23,0xba6a0302,0x5c8216ed,0x2b1ccf8a,0x92b479a7,
0xf0f207f3,0xa1e2694e,0xcdf4da65,0xd5be0506,0x1f6234d1,0x8afea6c4,
0x9d532e34,0xa055f3a2,0x32e18a05,0x75ebf6a4,0x39ec830b,0xaaef6040,
0x69f715e,0x51106ebd,0xf98a213e,0x3d06dd96,0xae053edd,0x46bde64d,
0xb58d5491,0x55dc471,0x6fd40604,0xff155060,0x24fb9819,0x97e9bdd6,
0xcc434089,0x779ed967,0xbd42e8b0,0x888b8907,0x385b19e7,0xdbeec879,
0x470a7ca1,0xe90f427c,0xc91e84f8,0x0,0x83868009,0x48ed2b32,
0xac70111e,0x4e725a6c,0xfbff0efd,0x5638850f,0x1ed5ae3d,0x27392d36,
0x64d90f0a,0x21a65c68,0xd1545b9b,0x3a2e3624,0xb1670a0c,0xfe75793,
0xd296eeb4,0x9e919b1b,0x4fc5c080,0xa220dc61,0x694b775a,0x161a121c,
0xaba93e2,0xe52aa0c0,0x43e0223c,0x1d171b12,0xb0d090e,0xadc78bf2,
0xb9a8b62d,0xc8a91e14,0x8519f157,0x4c0775af,0xbbdd99ee,0xfd607fa3,
0x9f2601f7,0xbcf5725c,0xc53b6644,0x347efb5b,0x7629438b,0xdcc623cb,
0x68fcedb6,0x63f1e4b8,0xcadc31d7,0x10856342,0x40229713,0x2011c684,
0x7d244a85,0xf83dbbd2,0x1132f9ae,0x6da129c7,0x4b2f9e1d,0xf330b2dc,
0xec52860d,0xd0e3c177,0x6c16b32b,0x99b970a9,0xfa489411,0x2264e947,
0xc48cfca8,0x1a3ff0a0,0xd82c7d56,0xef903322,0xc74e4987,0xc1d138d9,
0xfea2ca8c,0x360bd498,0xcf81f5a6,0x28de7aa5,0x268eb7da,0xa4bfad3f,
0xe49d3a2c,0xd927850,0x9bcc5f6a,0x62467e54,0xc2138df6,0xe8b8d890,
0x5ef7392e,0xf5afc382,0xbe805d9f,0x7c93d069,0xa92dd56f,0xb31225cf,
0x3b99acc8,0xa77d1810,0x6e639ce8,0x7bbb3bdb,0x97826cd,0xf418596e,
0x1b79aec,0xa89a4f83,0x656e95e6,0x7ee6ffaa,0x8cfbc21,0xe6e815ef,
0xd99be7ba,0xce366f4a,0xd4099fea,0xd67cb029,0xafb2a431,0x31233f2a,
0x3094a5c6,0xc066a235,0x37bc4e74,0xa6ca82fc,0xb0d090e0,0x15d8a733,
0x4a9804f1,0xf7daec41,0xe50cd7f,0x2ff69117,0x8dd64d76,0x4db0ef43,
0x544daacc,0xdf0496e4,0xe3b5d19e,0x1b886a4c,0xb81f2cc1,0x7f516546,
0x4ea5e9d,0x5d358c01,0x737487fa,0x2e410bfb,0x5a1d67b3,0x52d2db92,
0x335610e9,0x1347d66d,0x8c61d79a,0x7a0ca137,0x8e14f859,0x893c13eb,
0xee27a9ce,0x35c961b7,0xede51ce1,0x3cb1477a,0x59dfd29c,0x3f73f255,
0x79ce1418,0xbf37c773,0xeacdf753,0x5baafd5f,0x146f3ddf,0x86db4478,
0x81f3afca,0x3ec468b9,0x2c342438,0x5f40a3c2,0x72c31d16,0xc25e2bc,
0x8b493c28,0x41950dff,0x7101a839,0xdeb30c08,0x9ce4b4d8,0x90c15664,
0x6184cb7b,0x70b632d5,0x745c6c48,0x4257b8d0];
pub struct AES {
nk: usize,
nr: usize,
mode: usize,
fkey: [u32;60],
rkey: [u32;60],
pub f: [u8;16]
}
impl AES {
fn rotl8(x: u32) -> u32 {
return ((x)<<8)|((x)>>24);
}
fn rotl16(x: u32) -> u32 {
return ((x)<<16)|((x)>>16);
}
fn rotl24(x: u32) -> u32 {
return ((x)<<24)|((x)>>8);
}
fn pack(b: [u8;4]) -> u32 { /* pack bytes into a 32-bit Word */
return ((((b[3])&0xff) as u32)<<24)|((((b[2])&0xff) as u32)<<16)|((((b[1])&0xff) as u32)<<8)|(((b[0])&0xff) as u32);
}
fn unpack(a: u32) -> [u8;4] { /* unpack bytes from a word */
let b:[u8;4]=[(a&0xff) as u8,((a>>8)&0xff) as u8,((a>>16)&0xff) as u8,((a>>24)&0xff) as u8];
return b;
}
fn bmul(x: u8,y: u8) -> u8 { /* x.y= AntiLog(Log(x) + Log(y)) */
let ix=(x as usize)&0xff;
let iy=(y as usize)&0xff;
let lx=(LTAB[ix] as usize)&0xff;
let ly=(LTAB[iy] as usize)&0xff;
if x != 0 && y != 0 {
return PTAB[(lx+ly)%255];
} else {return 0}
}
fn subbyte(a: u32) -> u32 {
let mut b=AES::unpack(a);
b[0]=FBSUB[b[0] as usize];
b[1]=FBSUB[b[1] as usize];
b[2]=FBSUB[b[2] as usize];
b[3]=FBSUB[b[3] as usize];
return AES::pack(b);
}
fn product(x: u32,y: u32) -> u8 { /* dot product of two 4-byte arrays */
let xb=AES::unpack(x);
let yb=AES::unpack(y);
return AES::bmul(xb[0],yb[0])^AES::bmul(xb[1],yb[1])^AES::bmul(xb[2],yb[2])^AES::bmul(xb[3],yb[3]);
}
fn invmixcol(x: u32) -> u32 { /* matrix Multiplication */
let mut b:[u8;4]=[0;4];
let mut m=AES::pack(INCO);
b[3]=AES::product(m,x);
m=AES::rotl24(m);
b[2]=AES::product(m,x);
m=AES::rotl24(m);
b[1]=AES::product(m,x);
m=AES::rotl24(m);
b[0]=AES::product(m,x);
let y=AES::pack(b);
return y;
}
fn increment(f: &mut [u8;16]) {
for i in 0..16 {
f[i]+=1;
if f[i]!=0 {break}
}
}
pub fn new() -> AES {
AES {
nk:0,
nr:0,
mode:0,
fkey:[0;60],
rkey:[0;60],
f:[0;16]
}
}
/* reset cipher */
pub fn reset(&mut self,m: usize,iv: Option<[u8;16]>) { /* reset mode, or reset iv */
self.mode=m;
for i in 0..16 {self.f[i]=0}
if self.mode != ECB
{
if let Some(x) = iv {
for i in 0..16 {self.f[i]=x[i]}
}
}
}
pub fn init(&mut self,m: usize,nkey: usize,key: &[u8],iv: Option<[u8;16]>) -> bool {
/* Key Scheduler. Create expanded encryption key */
let mut cipherkey:[u32;8]=[0;8];
let mut b:[u8;4]=[0;4];
let nk=nkey/4;
if nk!=4 && nk!=6 && nk!=8 {return false}
let nr=6+nk;
self.nk=nk;
self.nr=nr;
self.reset(m,iv);
let n=4*(nr+1);
let mut j=0;
for i in 0..nk {
for k in 0..4 {b[k]=key[j+k]}
cipherkey[i]=AES::pack(b);
j+=4;
}
for i in 0..nk {self.fkey[i]=cipherkey[i]}
j=nk;
let mut k=0;
while j<n {
self.fkey[j]=self.fkey[j-nk]^AES::subbyte(AES::rotl24(self.fkey[j-1]))^(RCO[k] as u32);
for i in 1..nk {
if (i+j) >= n {break}
self.fkey[i+j]=self.fkey[i+j-nk]^self.fkey[i+j-1];
}
j+=nk;
k+=1;
}
/* now for the expanded decrypt key in reverse order */
for j in 0..4 {self.rkey[j+n-4]=self.fkey[j]}
let mut i=4;
while i<n-4 {
let k=n-4-i;
for j in 0..4 {self.rkey[k+j]=AES::invmixcol(self.fkey[i+j])}
i+=4;
}
for j in n-4..n {self.rkey[j-n+4]=self.fkey[j]}
return true;
}
pub fn getreg(&mut self) -> [u8;16] {
let mut ir:[u8;16]=[0;16];
for i in 0..16 {ir[i]=self.f[i]}
return ir;
}
/* Encrypt a single block */
pub fn ecb_encrypt(&mut self,buff: &mut [u8;16]) {
let mut b:[u8;4]=[0;4];
let mut p:[u32;4]=[0;4];
let mut q:[u32;4]=[0;4];
let mut j=0;
for i in 0..4 {
for k in 0..4 {b[k]=buff[j+k]}
p[i]=AES::pack(b);
p[i]^=self.fkey[i];
j+=4;
}
let mut k=4;
/* State alternates between p and q */
for _ in 1..self.nr {
q[0]=self.fkey[k]^FTABLE[(p[0]&0xff) as usize]^AES::rotl8(FTABLE[((p[1]>>8)&0xff) as usize])^AES::rotl16(FTABLE[((p[2]>>16)&0xff) as usize])^AES::rotl24(FTABLE[((p[3]>>24)&0xff) as usize]);
q[1]=self.fkey[k+1]^FTABLE[(p[1]&0xff) as usize]^AES::rotl8(FTABLE[((p[2]>>8)&0xff) as usize])^AES::rotl16(FTABLE[((p[3]>>16)&0xff) as usize])^AES::rotl24(FTABLE[((p[0]>>24)&0xff) as usize]);
q[2]=self.fkey[k+2]^FTABLE[(p[2]&0xff) as usize]^AES::rotl8(FTABLE[((p[3]>>8)&0xff) as usize])^AES::rotl16(FTABLE[((p[0]>>16)&0xff) as usize])^AES::rotl24(FTABLE[((p[1]>>24)&0xff) as usize]);
q[3]=self.fkey[k+3]^FTABLE[(p[3]&0xff) as usize]^AES::rotl8(FTABLE[((p[0]>>8)&0xff) as usize])^AES::rotl16(FTABLE[((p[1]>>16)&0xff) as usize])^AES::rotl24(FTABLE[((p[2]>>24)&0xff) as usize]);
k+=4;
for j in 0..4 {
let t=p[j]; p[j]=q[j]; q[j]=t;
}
}
/* Last Round */
q[0]=self.fkey[k]^(FBSUB[(p[0]&0xff) as usize] as u32)^AES::rotl8((FBSUB[((p[1]>>8)&0xff) as usize]) as u32)^AES::rotl16((FBSUB[((p[2]>>16)&0xff) as usize]) as u32)^AES::rotl24((FBSUB[((p[3]>>24)&0xff) as usize]) as u32);
q[1]=self.fkey[k+1]^(FBSUB[(p[1]&0xff) as usize] as u32)^AES::rotl8((FBSUB[((p[2]>>8)&0xff) as usize]) as u32)^AES::rotl16((FBSUB[((p[3]>>16)&0xff) as usize]) as u32)^AES::rotl24((FBSUB[((p[0]>>24)&0xff) as usize]) as u32);
q[2]=self.fkey[k+2]^(FBSUB[(p[2]&0xff) as usize] as u32)^AES::rotl8((FBSUB[((p[3]>>8)&0xff) as usize]) as u32)^AES::rotl16((FBSUB[((p[0]>>16)&0xff) as usize]) as u32)^AES::rotl24((FBSUB[((p[1]>>24)&0xff) as usize]) as u32);
q[3]=self.fkey[k+3]^(FBSUB[(p[3]&0xff) as usize] as u32)^AES::rotl8((FBSUB[((p[0]>>8)&0xff) as usize]) as u32)^AES::rotl16((FBSUB[((p[1]>>16)&0xff) as usize]) as u32)^AES::rotl24((FBSUB[((p[2]>>24)&0xff) as usize]) as u32);
j=0;
for i in 0..4 {
b=AES::unpack(q[i]);
for k in 0..4 {buff[j+k]=b[k]}
j+=4;
}
}
/* Decrypt a single block */
pub fn ecb_decrypt(&mut self,buff: &mut [u8;16]) {
let mut b:[u8;4]=[0;4];
let mut p:[u32;4]=[0;4];
let mut q:[u32;4]=[0;4];
let mut j=0;
for i in 0..4 {
for k in 0..4 {b[k]=buff[j+k]}
p[i]=AES::pack(b);
p[i]^=self.rkey[i];
j+=4;
}
let mut k=4;
/* State alternates between p and q */
for _ in 1..self.nr {
q[0]=self.rkey[k]^RTABLE[(p[0]&0xff) as usize]^AES::rotl8(RTABLE[((p[3]>>8)&0xff) as usize])^AES::rotl16(RTABLE[((p[2]>>16)&0xff) as usize])^AES::rotl24(RTABLE[((p[1]>>24)&0xff) as usize]);
q[1]=self.rkey[k+1]^RTABLE[(p[1]&0xff) as usize]^AES::rotl8(RTABLE[((p[0]>>8)&0xff) as usize])^AES::rotl16(RTABLE[((p[3]>>16)&0xff) as usize])^AES::rotl24(RTABLE[((p[2]>>24)&0xff) as usize]);
q[2]=self.rkey[k+2]^RTABLE[(p[2]&0xff) as usize]^AES::rotl8(RTABLE[((p[1]>>8)&0xff) as usize])^AES::rotl16(RTABLE[((p[0]>>16)&0xff) as usize])^AES::rotl24(RTABLE[((p[3]>>24)&0xff) as usize]);
q[3]=self.rkey[k+3]^RTABLE[(p[3]&0xff) as usize]^AES::rotl8(RTABLE[((p[2]>>8)&0xff) as usize])^AES::rotl16(RTABLE[((p[1]>>16)&0xff) as usize])^AES::rotl24(RTABLE[((p[0]>>24)&0xff) as usize]);
k+=4;
for j in 0..4 {
let t=p[j]; p[j]=q[j]; q[j]=t;
}
}
/* Last Round */
q[0]=self.rkey[k]^(RBSUB[(p[0]&0xff) as usize] as u32)^AES::rotl8((RBSUB[((p[3]>>8)&0xff) as usize]) as u32)^AES::rotl16((RBSUB[((p[2]>>16)&0xff) as usize]) as u32)^AES::rotl24((RBSUB[((p[1]>>24)&0xff) as usize]) as u32);
q[1]=self.rkey[k+1]^(RBSUB[(p[1]&0xff) as usize] as u32)^AES::rotl8((RBSUB[((p[0]>>8)&0xff) as usize]) as u32)^AES::rotl16((RBSUB[((p[3]>>16)&0xff) as usize]) as u32)^AES::rotl24((RBSUB[((p[2]>>24)&0xff) as usize]) as u32);
q[2]=self.rkey[k+2]^(RBSUB[(p[2]&0xff) as usize] as u32)^AES::rotl8((RBSUB[((p[1]>>8)&0xff) as usize]) as u32)^AES::rotl16((RBSUB[((p[0]>>16)&0xff) as usize]) as u32)^AES::rotl24((RBSUB[((p[3]>>24)&0xff) as usize]) as u32);
q[3]=self.rkey[k+3]^(RBSUB[((p[3])&0xff) as usize] as u32)^AES::rotl8((RBSUB[((p[2]>>8)&0xff) as usize]) as u32)^AES::rotl16((RBSUB[((p[1]>>16)&0xff) as usize]) as u32)^AES::rotl24((RBSUB[((p[0]>>24)&0xff) as usize]) as u32);
j=0;
for i in 0..4 {
b=AES::unpack(q[i]);
for k in 0..4 {buff[j+k]=b[k]}
j+=4;
}
}
/* Encrypt using selected mode of operation */
pub fn encrypt(&mut self,buff: &mut [u8;16]) -> u32 {
let mut st:[u8;16]=[0;16];
// Supported Modes of Operation
let mut fell_off: u32=0;
match self.mode {
ECB => {
self.ecb_encrypt(buff);
return 0;
},
CBC => {
for j in 0..16 {buff[j]^=self.f[j]}
self.ecb_encrypt(buff);
for j in 0..16 {self.f[j]=buff[j]}
return 0;
},
CFB1 | CFB2 | CFB4 => {
let bytes=self.mode-CFB1+1;
for j in 0..bytes {fell_off=(fell_off<<8)|(self.f[j] as u32)}
for j in 0..16 {st[j]=self.f[j]}
for j in bytes..16 {self.f[j-bytes]=self.f[j]}
self.ecb_encrypt(&mut st);
for j in 0..bytes {
buff[j]^=st[j];
self.f[16-bytes+j]=buff[j];
}
return fell_off;
},
OFB1 | OFB2 | OFB4 | OFB8 | OFB16 => {
let bytes=self.mode-OFB1+1;
for j in 0..16 {st[j]=self.f[j]}
self.ecb_encrypt(&mut st);
for j in 0..bytes {buff[j]^=st[j]}
for j in 0..16 {self.f[j]=st[j]}
//self.ecb_encrypt(&mut (self.f));
//for j in 0..bytes {buff[j]^=self.f[j]}
return 0;
},
CTR1 | CTR2 | CTR4 | CTR8 | CTR16 => {
let bytes=self.mode-CTR1+1;
for j in 0..16 {st[j]=self.f[j]}
self.ecb_encrypt(&mut st);
for j in 0..bytes {buff[j]^=st[j]}
AES::increment(&mut (self.f));
return 0;
},
_ => {
return 0;
}
}
}
/* Decrypt using selected mode of operation */
pub fn decrypt(&mut self,buff: &mut [u8;16]) -> u32 {
let mut st:[u8;16]=[0;16];
// Supported Modes of Operation
let mut fell_off: u32=0;
match self.mode {
ECB => {
self.ecb_decrypt(buff);
return 0;
},
CBC => {
for j in 0..16 {
st[j]=self.f[j];
self.f[j]=buff[j];
}
self.ecb_decrypt(buff);
for j in 0..16 {
buff[j]^=st[j];
st[j]=0;
}
return 0;
},
CFB1 | CFB2 | CFB4 => {
let bytes=self.mode-CFB1+1;
for j in 0..bytes {fell_off=(fell_off<<8)|(self.f[j] as u32)}
for j in 0..16 {st[j]=self.f[j]}
for j in bytes..16 {self.f[j-bytes]=self.f[j]}
self.ecb_encrypt(&mut st);
for j in 0..bytes {
self.f[16-bytes+j]=buff[j];
buff[j]^=st[j];
}
return fell_off;
},
OFB1 | OFB2 | OFB4 | OFB8 | OFB16 => {
let bytes=self.mode-OFB1+1;
for j in 0..16 {st[j]=self.f[j]}
self.ecb_encrypt(&mut st);
for j in 0..bytes {buff[j]^=st[j]}
for j in 0..16 {self.f[j]=st[j]}
// self.ecb_encrypt(A.f[:]);
// for j in 0..bytes {buff[j]^=self.f[j]}
return 0;
},
CTR1 | CTR2 | CTR4 | CTR8 | CTR16 => {
let bytes=self.mode-CTR1+1;
for j in 0..16 {st[j]=self.f[j]}
self.ecb_encrypt(&mut st);
for j in 0..bytes {buff[j]^=st[j]}
AES::increment(&mut (self.f));
return 0;
},
_ => {
return 0;
}
}
}
/* Clean up and delete left-overs */
pub fn end(&mut self) { // clean up
for i in 0..4*(self.nr+1) {self.fkey[i]=0; self.rkey[i]=0}
for i in 0..16 {self.f[i]=0}
}
}
/*
fn main()
{
let mut key:[u8;32]=[0;32];
let mut block:[u8;16]=[0;16];
let mut iv: [u8;16] = [0;16];
for i in 0..32 {key[i]=0}
key[0]=1;
for i in 0..16 {iv[i]=i as u8}
for i in 0..16 {block[i]=i as u8}
let mut aes=AES::new();
aes.init(CTR16,32,&key,Some(iv));
println!("Plain= ");
for i in 0..16 {print!("{:02x} ",block[i])}
println!("");
aes.encrypt(&mut block);
println!("Encrypt= ");
for i in 0..16 {print!("{:02x} ",block[i])}
println!("");
aes.reset(CTR16,Some(iv));
aes.decrypt(&mut block);
println!("Decrypt= ");
for i in 0..16 {print!("{:02x} ",block[i])}
println!("");
aes.end();
}
*/<|fim▁end|>
|
0x5a36366c,0x413f3f7e,0x2f7f7f5,0x4fcccc83,0x5c343468,0xf4a5a551,
0x34e5e5d1,0x8f1f1f9,0x937171e2,0x73d8d8ab,0x53313162,0x3f15152a,
0xc040408,0x52c7c795,0x65232346,0x5ec3c39d,0x28181830,0xa1969637,
0xf05050a,0xb59a9a2f,0x907070e,0x36121224,0x9b80801b,0x3de2e2df,
|
<|file_name|>db.go<|end_file_name|><|fim▁begin|>package db
import (
"hg/messages"
"fmt"
"gopkg.in/mgo.v2"
)
const (
mongoURL = "127.0.0.1:27017"
)
func persist(operation string,historyEntry messages.HistoryMessage) {
fmt.Println("Do db operation")
session, err := mgo.Dial(mongoURL)
if err != nil {
panic(err)<|fim▁hole|> historyCollection := session.DB("historys").C("history")
switch operation {
case "Insert":
err = historyCollection.Insert(historyEntry)
if err != nil {
fmt.Printf("Can't insert document: %v\n", err)
panic(err)
}
case "Update":
fmt.Println("Update method is not supported yet!")
case "Delete":
fmt.Println("Delete method is not supported yet!")
case "Put":
fmt.Println("Put method is not supported yet!")
}
}
func RecordHistory(historyEntry messages.HistoryMessage){
fmt.Println("Start to record history object")
persist("Insert",historyEntry)
}<|fim▁end|>
|
}
defer session.Close()
|
<|file_name|>iter.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Composable external iterators
//!
//! # The `Iterator` trait
//!
//! This module defines Rust's core iteration trait. The `Iterator` trait has
//! one unimplemented method, `next`. All other methods are derived through
//! default methods to perform operations such as `zip`, `chain`, `enumerate`,
//! and `fold`.
//!
//! The goal of this module is to unify iteration across all containers in Rust.
//! An iterator can be considered as a state machine which is used to track
//! which element will be yielded next.
//!
//! There are various extensions also defined in this module to assist with
//! various types of iteration, such as the `DoubleEndedIterator` for iterating
//! in reverse, the `FromIterator` trait for creating a container from an
//! iterator, and much more.
//!
//! # Rust's `for` loop
//!
//! The special syntax used by rust's `for` loop is based around the
//! `IntoIterator` trait defined in this module. `for` loops can be viewed as a
//! syntactical expansion into a `loop`, for example, the `for` loop in this
//! example is essentially translated to the `loop` below.
//!
//! ```
//! let values = vec![1, 2, 3];
//!
//! for x in values {
//! println!("{}", x);
//! }
//!
//! // Rough translation of the iteration without a `for` iterator.
//! # let values = vec![1, 2, 3];
//! let mut it = values.into_iter();
//! loop {
//! match it.next() {
//! Some(x) => println!("{}", x),
//! None => break,
//! }
//! }
//! ```
//!
//! Because `Iterator`s implement `IntoIterator`, this `for` loop syntax can be applied to any
//! iterator over any type.
#![stable(feature = "rust1", since = "1.0.0")]
use self::MinMaxResult::*;
use clone::Clone;
use cmp;
use cmp::{Ord, PartialOrd, PartialEq};
use default::Default;
use marker;
use mem;
use num::{Zero, One};
use ops::{self, Add, Sub, FnMut, Mul, RangeFrom};
use option::Option::{self, Some, None};
use marker::Sized;
use usize;
fn _assert_is_object_safe(_: &Iterator<Item=()>) {}
/// An interface for dealing with "external iterators". These types of iterators
/// can be resumed at any time as all state is stored internally as opposed to
/// being located on the call stack.
///
/// The Iterator protocol states that an iterator yields a (potentially-empty,
/// potentially-infinite) sequence of values, and returns `None` to signal that
/// it's finished. The Iterator protocol does not define behavior after `None`
/// is returned. A concrete Iterator implementation may choose to behave however
/// it wishes, either by returning `None` infinitely, or by doing something
/// else.
#[lang = "iterator"]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented = "`{Self}` is not an iterator; maybe try calling \
`.iter()` or a similar method"]
pub trait Iterator {
/// The type of the elements being iterated
#[stable(feature = "rust1", since = "1.0.0")]
type Item;
/// Advances the iterator and returns the next value. Returns `None` when the
/// end is reached.
#[stable(feature = "rust1", since = "1.0.0")]
fn next(&mut self) -> Option<Self::Item>;
/// Returns a lower and upper bound on the remaining length of the iterator.
///
/// An upper bound of `None` means either there is no known upper bound, or
/// the upper bound does not fit within a `usize`.
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn size_hint(&self) -> (usize, Option<usize>) { (0, None) }
/// Counts the number of elements in this iterator.
///
/// # Overflow Behavior
///
/// The method does no guarding against overflows, so counting elements of
/// an iterator with more than `usize::MAX` elements either produces the
/// wrong result or panics. If debug assertions are enabled, a panic is
/// guaranteed.
///
/// # Panics
///
/// This functions might panic if the iterator has more than `usize::MAX`
/// elements.
///
/// # Examples
///
/// ```
/// let a = [1, 2, 3, 4, 5];
/// assert_eq!(a.iter().count(), 5);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn count(self) -> usize where Self: Sized {
// Might overflow.
self.fold(0, |cnt, _| cnt + 1)
}
/// Loops through the entire iterator, returning the last element.
///
/// # Examples
///
/// ```
/// let a = [1, 2, 3, 4, 5];
/// assert_eq!(a.iter().last(), Some(&5));
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn last(self) -> Option<Self::Item> where Self: Sized {
let mut last = None;
for x in self { last = Some(x); }
last
}
/// Loops through `n` iterations, returning the `n`th element of the
/// iterator.
///
/// # Examples
///
/// ```
/// let a = [1, 2, 3, 4, 5];
/// let mut it = a.iter();
/// assert_eq!(it.nth(2), Some(&3));
/// assert_eq!(it.nth(2), None);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn nth(&mut self, mut n: usize) -> Option<Self::Item> where Self: Sized {
for x in self.by_ref() {
if n == 0 { return Some(x) }
n -= 1;
}
None
}
/// Chain this iterator with another, returning a new iterator that will
/// finish iterating over the current iterator, and then iterate
/// over the other specified iterator.
///
/// # Examples
///
/// ```
/// let a = [0];
/// let b = [1];
/// let mut it = a.iter().chain(b.iter());
/// assert_eq!(it.next(), Some(&0));
/// assert_eq!(it.next(), Some(&1));
/// assert!(it.next().is_none());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn chain<U>(self, other: U) -> Chain<Self, U::IntoIter> where
Self: Sized, U: IntoIterator<Item=Self::Item>,
{
Chain{a: self, b: other.into_iter(), flag: false}
}
/// Creates an iterator that iterates over both this and the specified
/// iterators simultaneously, yielding the two elements as pairs. When
/// either iterator returns `None`, all further invocations of `next()`
/// will return `None`.
///
/// # Examples
///
/// ```
/// let a = [0];
/// let b = [1];
/// let mut it = a.iter().zip(b.iter());
/// assert_eq!(it.next(), Some((&0, &1)));
/// assert!(it.next().is_none());
/// ```
///
/// `zip` can provide similar functionality to `enumerate`:
///
/// ```
/// for pair in "foo".chars().enumerate() {
/// println!("{:?}", pair);
/// }
///
/// for pair in (0..).zip("foo".chars()) {
/// println!("{:?}", pair);
/// }
/// ```
///
/// both produce the same output.
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn zip<U>(self, other: U) -> Zip<Self, U::IntoIter> where
Self: Sized, U: IntoIterator
{
Zip{a: self, b: other.into_iter()}
}
/// Creates a new iterator that will apply the specified function to each
/// element returned by the first, yielding the mapped element instead.
///
/// # Examples
///
/// ```
/// let a = [1, 2];
/// let mut it = a.iter().map(|&x| 2 * x);
/// assert_eq!(it.next(), Some(2));
/// assert_eq!(it.next(), Some(4));
/// assert!(it.next().is_none());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn map<B, F>(self, f: F) -> Map<Self, F> where
Self: Sized, F: FnMut(Self::Item) -> B,
{
Map{iter: self, f: f}
}
/// Creates an iterator that applies the predicate to each element returned
/// by this iterator. The only elements that will be yielded are those that
/// make the predicate evaluate to `true`.
///
/// # Examples
///
/// ```
/// let a = [1, 2];
/// let mut it = a.iter().filter(|&x| *x > 1);
/// assert_eq!(it.next(), Some(&2));
/// assert!(it.next().is_none());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn filter<P>(self, predicate: P) -> Filter<Self, P> where
Self: Sized, P: FnMut(&Self::Item) -> bool,
{
Filter{iter: self, predicate: predicate}
}
/// Creates an iterator that both filters and maps elements.
/// If the specified function returns `None`, the element is skipped.
/// Otherwise the option is unwrapped and the new value is yielded.
///
/// # Examples
///
/// ```
/// let a = [1, 2];
/// let mut it = a.iter().filter_map(|&x| if x > 1 {Some(2 * x)} else {None});
/// assert_eq!(it.next(), Some(4));
/// assert!(it.next().is_none());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F> where
Self: Sized, F: FnMut(Self::Item) -> Option<B>,
{
FilterMap { iter: self, f: f }
}
/// Creates an iterator that yields pairs `(i, val)` where `i` is the
/// current index of iteration and `val` is the value returned by the
/// iterator.
///
/// `enumerate` keeps its count as a `usize`. If you want to count by a
/// different sized integer, the `zip` function provides similar
/// functionality.
///
/// # Overflow Behavior
///
/// The method does no guarding against overflows, so enumerating more than
/// `usize::MAX` elements either produces the wrong result or panics. If
/// debug assertions are enabled, a panic is guaranteed.
///
/// # Panics
///
/// The returned iterator might panic if the to-be-returned index would
/// overflow a `usize`.
///
/// # Examples
///
/// ```
/// let a = [100, 200];
/// let mut it = a.iter().enumerate();
/// assert_eq!(it.next(), Some((0, &100)));
/// assert_eq!(it.next(), Some((1, &200)));
/// assert!(it.next().is_none());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn enumerate(self) -> Enumerate<Self> where Self: Sized {
Enumerate { iter: self, count: 0 }
}
/// Creates an iterator that has a `.peek()` method
/// that returns an optional reference to the next element.
///
/// # Examples
///
/// ```
/// # #![feature(core)]
/// let xs = [100, 200, 300];
/// let mut it = xs.iter().cloned().peekable();
/// assert_eq!(*it.peek().unwrap(), 100);
/// assert_eq!(it.next().unwrap(), 100);
/// assert_eq!(it.next().unwrap(), 200);
/// assert_eq!(*it.peek().unwrap(), 300);
/// assert_eq!(*it.peek().unwrap(), 300);
/// assert_eq!(it.next().unwrap(), 300);
/// assert!(it.peek().is_none());
/// assert!(it.next().is_none());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn peekable(self) -> Peekable<Self> where Self: Sized {
Peekable{iter: self, peeked: None}
}
/// Creates an iterator that invokes the predicate on elements
/// until it returns false. Once the predicate returns false, that
/// element and all further elements are yielded.
///
/// # Examples
///
/// ```
/// let a = [1, 2, 3, 4, 5];
/// let mut it = a.iter().skip_while(|&a| *a < 3);
/// assert_eq!(it.next(), Some(&3));
/// assert_eq!(it.next(), Some(&4));
/// assert_eq!(it.next(), Some(&5));
/// assert!(it.next().is_none());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P> where
Self: Sized, P: FnMut(&Self::Item) -> bool,
{
SkipWhile{iter: self, flag: false, predicate: predicate}
}
/// Creates an iterator that yields elements so long as the predicate
/// returns true. After the predicate returns false for the first time, no
/// further elements will be yielded.
///
/// # Examples
///
/// ```
/// let a = [1, 2, 3, 4, 5];
/// let mut it = a.iter().take_while(|&a| *a < 3);
/// assert_eq!(it.next(), Some(&1));
/// assert_eq!(it.next(), Some(&2));
/// assert!(it.next().is_none());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P> where
Self: Sized, P: FnMut(&Self::Item) -> bool,
{
TakeWhile{iter: self, flag: false, predicate: predicate}
}
/// Creates an iterator that skips the first `n` elements of this iterator,
/// and then yields all further items.
///
/// # Examples
///
/// ```
/// let a = [1, 2, 3, 4, 5];
/// let mut it = a.iter().skip(3);
/// assert_eq!(it.next(), Some(&4));
/// assert_eq!(it.next(), Some(&5));
/// assert!(it.next().is_none());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn skip(self, n: usize) -> Skip<Self> where Self: Sized {
Skip{iter: self, n: n}
}
/// Creates an iterator that yields the first `n` elements of this
/// iterator.
///
/// # Examples
///
/// ```
/// let a = [1, 2, 3, 4, 5];
/// let mut it = a.iter().take(3);
/// assert_eq!(it.next(), Some(&1));
/// assert_eq!(it.next(), Some(&2));
/// assert_eq!(it.next(), Some(&3));
/// assert!(it.next().is_none());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn take(self, n: usize) -> Take<Self> where Self: Sized, {
Take{iter: self, n: n}
}
/// Creates a new iterator that behaves in a similar fashion to fold.
/// There is a state which is passed between each iteration and can be
/// mutated as necessary. The yielded values from the closure are yielded
/// from the Scan instance when not `None`.
///
/// # Examples
///
/// ```
/// let a = [1, 2, 3, 4, 5];
/// let mut it = a.iter().scan(1, |fac, &x| {
/// *fac = *fac * x;
/// Some(*fac)
/// });
/// assert_eq!(it.next(), Some(1));
/// assert_eq!(it.next(), Some(2));
/// assert_eq!(it.next(), Some(6));
/// assert_eq!(it.next(), Some(24));
/// assert_eq!(it.next(), Some(120));
/// assert!(it.next().is_none());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
where Self: Sized, F: FnMut(&mut St, Self::Item) -> Option<B>,
{
Scan{iter: self, f: f, state: initial_state}
}
/// Creates an iterator that maps each element to an iterator,
/// and yields the elements of the produced iterators.
///
/// # Examples
///
/// ```
/// # #![feature(core)]
/// let xs = [2, 3];
/// let ys = [0, 1, 0, 1, 2];
/// let it = xs.iter().flat_map(|&x| (0..).take(x));
/// // Check that `it` has the same elements as `ys`
/// for (i, x) in it.enumerate() {
/// assert_eq!(x, ys[i]);
/// }
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
where Self: Sized, U: IntoIterator, F: FnMut(Self::Item) -> U,
{
FlatMap{iter: self, f: f, frontiter: None, backiter: None }
}
/// Creates an iterator that yields `None` forever after the underlying
/// iterator yields `None`. Random-access iterator behavior is not
/// affected, only single and double-ended iterator behavior.
///
/// # Examples
///
/// ```
/// fn process<U: Iterator<Item=i32>>(it: U) -> i32 {
/// let mut it = it.fuse();
/// let mut sum = 0;
/// for x in it.by_ref() {
/// if x > 5 {
/// break;
/// }
/// sum += x;
/// }
/// // did we exhaust the iterator?
/// if it.next().is_none() {
/// sum += 1000;
/// }
/// sum
/// }
/// let x = vec![1, 2, 3, 7, 8, 9];
/// assert_eq!(process(x.into_iter()), 6);
/// let x = vec![1, 2, 3];
/// assert_eq!(process(x.into_iter()), 1006);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn fuse(self) -> Fuse<Self> where Self: Sized {
Fuse{iter: self, done: false}
}
/// Creates an iterator that calls a function with a reference to each
/// element before yielding it. This is often useful for debugging an
/// iterator pipeline.
///
/// # Examples
///
/// ```
/// # #![feature(core)]
///
/// let a = [1, 4, 2, 3, 8, 9, 6];
/// let sum: i32 = a.iter()
/// .map(|x| *x)
/// .inspect(|&x| println!("filtering {}", x))
/// .filter(|&x| x % 2 == 0)
/// .inspect(|&x| println!("{} made it through", x))
/// .sum();
/// println!("{}", sum);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn inspect<F>(self, f: F) -> Inspect<Self, F> where
Self: Sized, F: FnMut(&Self::Item),
{
Inspect{iter: self, f: f}
}
/// Creates a wrapper around a mutable reference to the iterator.
///
/// This is useful to allow applying iterator adaptors while still
/// retaining ownership of the original iterator value.
///
/// # Examples
///
/// ```
/// let mut it = 0..10;
/// // sum the first five values
/// let partial_sum = it.by_ref().take(5).fold(0, |a, b| a + b);
/// assert_eq!(partial_sum, 10);
/// assert_eq!(it.next(), Some(5));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn by_ref(&mut self) -> &mut Self where Self: Sized { self }
/// Loops through the entire iterator, collecting all of the elements into
/// a container implementing `FromIterator`.
///
/// # Examples
///
/// ```
/// let expected = [1, 2, 3, 4, 5];
/// let actual: Vec<_> = expected.iter().cloned().collect();
/// assert_eq!(actual, expected);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn collect<B: FromIterator<Self::Item>>(self) -> B where Self: Sized {
FromIterator::from_iter(self)
}
/// Loops through the entire iterator, collecting all of the elements into
/// one of two containers, depending on a predicate. The elements of the
/// first container satisfy the predicate, while the elements of the second
/// do not.
///
/// ```
/// # #![feature(core)]
/// let vec = vec![1, 2, 3, 4];
/// let (even, odd): (Vec<_>, Vec<_>) = vec.into_iter().partition(|&n| n % 2 == 0);
/// assert_eq!(even, [2, 4]);
/// assert_eq!(odd, [1, 3]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn partition<B, F>(self, mut f: F) -> (B, B) where
Self: Sized,
B: Default + Extend<Self::Item>,
F: FnMut(&Self::Item) -> bool
{
let mut left: B = Default::default();
let mut right: B = Default::default();
for x in self {
if f(&x) {
left.extend(Some(x).into_iter())
} else {
right.extend(Some(x).into_iter())
}
}
(left, right)
}
/// Performs a fold operation over the entire iterator, returning the
/// eventual state at the end of the iteration.
///
/// This operation is sometimes called 'reduce' or 'inject'.
///
/// # Examples
///
/// ```
/// let a = [1, 2, 3, 4, 5];
/// assert_eq!(a.iter().fold(0, |acc, &item| acc + item), 15);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn fold<B, F>(self, init: B, mut f: F) -> B where
Self: Sized, F: FnMut(B, Self::Item) -> B,
{
let mut accum = init;
for x in self {
accum = f(accum, x);
}
accum
}
/// Tests whether the predicate holds true for all elements in the iterator.
///
/// # Examples
///
/// ```
/// let a = [1, 2, 3, 4, 5];
/// assert!(a.iter().all(|x| *x > 0));
/// assert!(!a.iter().all(|x| *x > 2));
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn all<F>(&mut self, mut f: F) -> bool where
Self: Sized, F: FnMut(Self::Item) -> bool
{
for x in self.by_ref() {
if !f(x) {
return false;
}
}
true
}
/// Tests whether any element of an iterator satisfies the specified
/// predicate.
///
/// Does not consume the iterator past the first found element.
///
/// # Examples
///
/// ```
/// let a = [1, 2, 3, 4, 5];
/// let mut it = a.iter();
/// assert!(it.any(|x| *x == 3));
/// assert_eq!(it.collect::<Vec<_>>(), [&4, &5]);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn any<F>(&mut self, mut f: F) -> bool where
Self: Sized,
F: FnMut(Self::Item) -> bool
{
for x in self.by_ref() {
if f(x) {
return true;
}
}
false
}
/// Returns the first element satisfying the specified predicate.
///
/// Does not consume the iterator past the first found element.
///
/// # Examples
///
/// ```
/// let a = [1, 2, 3, 4, 5];
/// let mut it = a.iter();
/// assert_eq!(it.find(|&x| *x == 3), Some(&3));
/// assert_eq!(it.collect::<Vec<_>>(), [&4, &5]);
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn find<P>(&mut self, mut predicate: P) -> Option<Self::Item> where
Self: Sized,
P: FnMut(&Self::Item) -> bool,
{
for x in self.by_ref() {
if predicate(&x) { return Some(x) }
}
None
}
/// Returns the index of the first element satisfying the specified predicate
///
/// Does not consume the iterator past the first found element.
///
/// # Overflow Behavior
///
/// The method does no guarding against overflows, so if there are more
/// than `usize::MAX` non-matching elements, it either produces the wrong
/// result or panics. If debug assertions are enabled, a panic is
/// guaranteed.
///
/// # Panics
///
/// This functions might panic if the iterator has more than `usize::MAX`
/// non-matching elements.
///
/// # Examples
///
/// ```
/// let a = [1, 2, 3, 4, 5];
/// let mut it = a.iter();
/// assert_eq!(it.position(|x| *x == 3), Some(2));
/// assert_eq!(it.collect::<Vec<_>>(), [&4, &5]);
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn position<P>(&mut self, mut predicate: P) -> Option<usize> where
Self: Sized,
P: FnMut(Self::Item) -> bool,
{
// `enumerate` might overflow.
for (i, x) in self.by_ref().enumerate() {
if predicate(x) {
return Some(i);
}
}
None
}
/// Returns the index of the last element satisfying the specified predicate
///
/// If no element matches, `None` is returned.
///
/// Does not consume the iterator *before* the first found element.
///
/// # Examples
///
/// ```
/// let a = [1, 2, 2, 4, 5];
/// let mut it = a.iter();
/// assert_eq!(it.rposition(|x| *x == 2), Some(2));
/// assert_eq!(it.collect::<Vec<_>>(), [&1, &2]);
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn rposition<P>(&mut self, mut predicate: P) -> Option<usize> where
P: FnMut(Self::Item) -> bool,
Self: Sized + ExactSizeIterator + DoubleEndedIterator
{
let mut i = self.len();
while let Some(v) = self.next_back() {
if predicate(v) {
return Some(i - 1);
}
// No need for an overflow check here, because `ExactSizeIterator`
// implies that the number of elements fits into a `usize`.
i -= 1;
}
None
}
/// Consumes the entire iterator to return the maximum element.
///
/// Returns the rightmost element if the comparison determines two elements
/// to be equally maximum.
///
/// # Examples
///
/// ```
/// let a = [1, 2, 3, 4, 5];
/// assert_eq!(a.iter().max(), Some(&5));
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn max(self) -> Option<Self::Item> where Self: Sized, Self::Item: Ord
{
select_fold1(self,
|_| (),
// switch to y even if it is only equal, to preserve
// stability.
|_, x, _, y| *x <= *y)
.map(|(_, x)| x)
}
/// Consumes the entire iterator to return the minimum element.
///
/// Returns the leftmost element if the comparison determines two elements
/// to be equally minimum.
///
/// # Examples
///
/// ```
/// let a = [1, 2, 3, 4, 5];
/// assert_eq!(a.iter().min(), Some(&1));
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn min(self) -> Option<Self::Item> where Self: Sized, Self::Item: Ord
{
select_fold1(self,
|_| (),
// only switch to y if it is strictly smaller, to
// preserve stability.
|_, x, _, y| *x > *y)
.map(|(_, x)| x)
}
/// `min_max` finds the minimum and maximum elements in the iterator.
///
/// The return type `MinMaxResult` is an enum of three variants:
///
/// - `NoElements` if the iterator is empty.
/// - `OneElement(x)` if the iterator has exactly one element.
/// - `MinMax(x, y)` is returned otherwise, where `x <= y`. Two
/// values are equal if and only if there is more than one
/// element in the iterator and all elements are equal.
///
/// On an iterator of length `n`, `min_max` does `1.5 * n` comparisons,
/// and so is faster than calling `min` and `max` separately which does `2 *
/// n` comparisons.
///
/// # Examples
///
/// ```
/// # #![feature(core)]
/// use std::iter::MinMaxResult::{NoElements, OneElement, MinMax};
///
/// let a: [i32; 0] = [];
/// assert_eq!(a.iter().min_max(), NoElements);
///
/// let a = [1];
/// assert_eq!(a.iter().min_max(), OneElement(&1));
///
/// let a = [1, 2, 3, 4, 5];
/// assert_eq!(a.iter().min_max(), MinMax(&1, &5));
///
/// let a = [1, 1, 1, 1];
/// assert_eq!(a.iter().min_max(), MinMax(&1, &1));
/// ```
#[unstable(feature = "core", reason = "return type may change")]
fn min_max(mut self) -> MinMaxResult<Self::Item> where Self: Sized, Self::Item: Ord
{
let (mut min, mut max) = match self.next() {
None => return NoElements,
Some(x) => {
match self.next() {
None => return OneElement(x),
Some(y) => if x <= y {(x, y)} else {(y, x)}
}
}
};
loop {
// `first` and `second` are the two next elements we want to look
// at. We first compare `first` and `second` (#1). The smaller one
// is then compared to current minimum (#2). The larger one is
// compared to current maximum (#3). This way we do 3 comparisons
// for 2 elements.
let first = match self.next() {
None => break,
Some(x) => x
};
let second = match self.next() {
None => {
if first < min {
min = first;
} else if first >= max {
max = first;
}
break;
}
Some(x) => x
};
if first <= second {
if first < min { min = first }
if second >= max { max = second }
} else {
if second < min { min = second }
if first >= max { max = first }
}
}
MinMax(min, max)
}
/// Returns the element that gives the maximum value from the
/// specified function.
///
/// Returns the rightmost element if the comparison determines two elements
/// to be equally maximum.
///
/// # Examples
///
/// ```
/// # #![feature(core)]
///
/// let a = [-3_i32, 0, 1, 5, -10];
/// assert_eq!(*a.iter().max_by(|x| x.abs()).unwrap(), -10);
/// ```
#[inline]
#[unstable(feature = "core",
reason = "may want to produce an Ordering directly; see #15311")]
fn max_by<B: Ord, F>(self, f: F) -> Option<Self::Item> where
Self: Sized,
F: FnMut(&Self::Item) -> B,
{
select_fold1(self,
f,
// switch to y even if it is only equal, to preserve
// stability.
|x_p, _, y_p, _| x_p <= y_p)
.map(|(_, x)| x)
}
/// Returns the element that gives the minimum value from the
/// specified function.
///
/// Returns the leftmost element if the comparison determines two elements
/// to be equally minimum.
///
/// # Examples
///
/// ```
/// # #![feature(core)]
///
/// let a = [-3_i32, 0, 1, 5, -10];
/// assert_eq!(*a.iter().min_by(|x| x.abs()).unwrap(), 0);
/// ```
#[inline]
#[unstable(feature = "core",
reason = "may want to produce an Ordering directly; see #15311")]
fn min_by<B: Ord, F>(self, f: F) -> Option<Self::Item> where
Self: Sized,
F: FnMut(&Self::Item) -> B,
{
select_fold1(self,
f,
// only switch to y if it is strictly smaller, to
// preserve stability.
|x_p, _, y_p, _| x_p > y_p)
.map(|(_, x)| x)
}
/// Change the direction of the iterator
///
/// The flipped iterator swaps the ends on an iterator that can already
/// be iterated from the front and from the back.
///
///
/// If the iterator also implements RandomAccessIterator, the flipped
/// iterator is also random access, with the indices starting at the back
/// of the original iterator.
///
/// Note: Random access with flipped indices still only applies to the first
/// `std::usize::MAX` elements of the original iterator.
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn rev(self) -> Rev<Self> where Self: Sized + DoubleEndedIterator {
Rev{iter: self}
}
/// Converts an iterator of pairs into a pair of containers.
///
/// Loops through the entire iterator, collecting the first component of
/// each item into one new container, and the second component into another.
///
/// # Examples
///
/// ```
/// # #![feature(core)]
/// let a = [(1, 2), (3, 4)];
/// let (left, right): (Vec<_>, Vec<_>) = a.iter().cloned().unzip();
/// assert_eq!(left, [1, 3]);
/// assert_eq!(right, [2, 4]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB) where
FromA: Default + Extend<A>,
FromB: Default + Extend<B>,
Self: Sized + Iterator<Item=(A, B)>,
{
struct SizeHint<A>(usize, Option<usize>, marker::PhantomData<A>);
impl<A> Iterator for SizeHint<A> {
type Item = A;
fn next(&mut self) -> Option<A> { None }
fn size_hint(&self) -> (usize, Option<usize>) {
(self.0, self.1)
}
}
let (lo, hi) = self.size_hint();
let mut ts: FromA = Default::default();
let mut us: FromB = Default::default();
ts.extend(SizeHint(lo, hi, marker::PhantomData));
us.extend(SizeHint(lo, hi, marker::PhantomData));
for (t, u) in self {
ts.extend(Some(t).into_iter());
us.extend(Some(u).into_iter());
}
(ts, us)
}
/// Creates an iterator that clones the elements it yields. Useful for
/// converting an Iterator<&T> to an Iterator<T>.
#[stable(feature = "rust1", since = "1.0.0")]
fn cloned<'a, T: 'a>(self) -> Cloned<Self>
where Self: Sized + Iterator<Item=&'a T>, T: Clone
{
Cloned { it: self }
}
/// Repeats an iterator endlessly
///
/// # Examples
///
/// ```
/// let a = [1, 2];
/// let mut it = a.iter().cycle();
/// assert_eq!(it.next(), Some(&1));
/// assert_eq!(it.next(), Some(&2));
/// assert_eq!(it.next(), Some(&1));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
fn cycle(self) -> Cycle<Self> where Self: Sized + Clone {
Cycle{orig: self.clone(), iter: self}
}
/// Use an iterator to reverse a container in place.
#[unstable(feature = "core",
reason = "uncertain about placement or widespread use")]
fn reverse_in_place<'a, T: 'a>(&mut self) where
Self: Sized + Iterator<Item=&'a mut T> + DoubleEndedIterator
{
loop {
match (self.next(), self.next_back()) {
(Some(x), Some(y)) => mem::swap(x, y),
_ => break
}
}
}
/// Iterates over the entire iterator, summing up all the elements
///
/// # Examples
///
/// ```
/// # #![feature(core)]
///
/// let a = [1, 2, 3, 4, 5];
/// let mut it = a.iter().cloned();
/// assert_eq!(it.sum::<i32>(), 15);
/// ```
#[unstable(feature="core")]
fn sum<S=<Self as Iterator>::Item>(self) -> S where
S: Add<Self::Item, Output=S> + Zero,
Self: Sized,
{
self.fold(Zero::zero(), |s, e| s + e)
}
/// Iterates over the entire iterator, multiplying all the elements
///
/// # Examples
///
/// ```
/// # #![feature(core)]
///
/// fn factorial(n: u32) -> u32 {
/// (1..).take_while(|&i| i <= n).product()
/// }
/// assert_eq!(factorial(0), 1);
/// assert_eq!(factorial(1), 1);
/// assert_eq!(factorial(5), 120);
/// ```
#[unstable(feature="core")]
fn product<P=<Self as Iterator>::Item>(self) -> P where
P: Mul<Self::Item, Output=P> + One,
Self: Sized,
{
self.fold(One::one(), |p, e| p * e)
}
}
/// Select an element from an iterator based on the given projection
/// and "comparison" function.
///
/// This is an idiosyncratic helper to try to factor out the
/// commonalities of {max,min}{,_by}. In particular, this avoids
/// having to implement optimisations several times.
#[inline]
fn select_fold1<I,B, FProj, FCmp>(mut it: I,
mut f_proj: FProj,
mut f_cmp: FCmp) -> Option<(B, I::Item)>
where I: Iterator,
FProj: FnMut(&I::Item) -> B,
FCmp: FnMut(&B, &I::Item, &B, &I::Item) -> bool
{
// start with the first element as our selection. This avoids
// having to use `Option`s inside the loop, translating to a
// sizeable performance gain (6x in one case).
it.next().map(|mut sel| {
let mut sel_p = f_proj(&sel);
for x in it {
let x_p = f_proj(&x);
if f_cmp(&sel_p, &sel, &x_p, &x) {
sel = x;
sel_p = x_p;
}
}
(sel_p, sel)
})
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, I: Iterator + ?Sized> Iterator for &'a mut I {
type Item = I::Item;
fn next(&mut self) -> Option<I::Item> { (**self).next() }
fn size_hint(&self) -> (usize, Option<usize>) { (**self).size_hint() }
}
/// Conversion from an `Iterator`
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented="a collection of type `{Self}` cannot be \
built from an iterator over elements of type `{A}`"]
pub trait FromIterator<A> {
/// Builds a container with elements from something iterable.
///
/// # Examples
///
/// ```
/// use std::collections::HashSet;
/// use std::iter::FromIterator;
///
/// let colors_vec = vec!["red", "red", "yellow", "blue"];
/// let colors_set = HashSet::<&str>::from_iter(colors_vec);
/// assert_eq!(colors_set.len(), 3);
/// ```
///
/// `FromIterator` is more commonly used implicitly via the
/// `Iterator::collect` method:
///
/// ```
/// use std::collections::HashSet;
///
/// let colors_vec = vec!["red", "red", "yellow", "blue"];
/// let colors_set = colors_vec.into_iter().collect::<HashSet<&str>>();
/// assert_eq!(colors_set.len(), 3);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn from_iter<T: IntoIterator<Item=A>>(iterator: T) -> Self;
}
/// Conversion into an `Iterator`
///
/// Implementing this trait allows you to use your type with Rust's `for` loop. See
/// the [module level documentation](index.html) for more details.
#[stable(feature = "rust1", since = "1.0.0")]
pub trait IntoIterator {
/// The type of the elements being iterated
#[stable(feature = "rust1", since = "1.0.0")]
type Item;
/// A container for iterating over elements of type `Item`
#[stable(feature = "rust1", since = "1.0.0")]
type IntoIter: Iterator<Item=Self::Item>;
/// Consumes `Self` and returns an iterator over it
#[stable(feature = "rust1", since = "1.0.0")]
fn into_iter(self) -> Self::IntoIter;
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: Iterator> IntoIterator for I {
type Item = I::Item;
type IntoIter = I;
fn into_iter(self) -> I {
self
}
}
/// A type growable from an `Iterator` implementation
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Extend<A> {
/// Extends a container with the elements yielded by an arbitrary iterator
#[stable(feature = "rust1", since = "1.0.0")]
fn extend<T: IntoIterator<Item=A>>(&mut self, iterable: T);
}
/// A range iterator able to yield elements from both ends
///
/// A `DoubleEndedIterator` can be thought of as a deque in that `next()` and
/// `next_back()` exhaust elements from the *same* range, and do not work
/// independently of each other.
#[stable(feature = "rust1", since = "1.0.0")]
pub trait DoubleEndedIterator: Iterator {
/// Yields an element from the end of the range, returning `None` if the
/// range is empty.
#[stable(feature = "rust1", since = "1.0.0")]
fn next_back(&mut self) -> Option<Self::Item>;
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, I: DoubleEndedIterator + ?Sized> DoubleEndedIterator for &'a mut I {
fn next_back(&mut self) -> Option<I::Item> { (**self).next_back() }
}
/// An object implementing random access indexing by `usize`
///
/// A `RandomAccessIterator` should be either infinite or a
/// `DoubleEndedIterator`. Calling `next()` or `next_back()` on a
/// `RandomAccessIterator` reduces the indexable range accordingly. That is,
/// `it.idx(1)` will become `it.idx(0)` after `it.next()` is called.
#[unstable(feature = "core",
reason = "not widely used, may be better decomposed into Index \
and ExactSizeIterator")]
pub trait RandomAccessIterator: Iterator {
/// Returns the number of indexable elements. At most `std::usize::MAX`
/// elements are indexable, even if the iterator represents a longer range.
fn indexable(&self) -> usize;
/// Returns an element at an index, or `None` if the index is out of bounds
fn idx(&mut self, index: usize) -> Option<Self::Item>;
}
/// An iterator that knows its exact length
///
/// This trait is a helper for iterators like the vector iterator, so that
/// it can support double-ended enumeration.
///
/// `Iterator::size_hint` *must* return the exact size of the iterator.
/// Note that the size must fit in `usize`.
#[stable(feature = "rust1", since = "1.0.0")]
pub trait ExactSizeIterator: Iterator {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
/// Returns the exact length of the iterator.
fn len(&self) -> usize {
let (lower, upper) = self.size_hint();
// Note: This assertion is overly defensive, but it checks the invariant
// guaranteed by the trait. If this trait were rust-internal,
// we could use debug_assert!; assert_eq! will check all Rust user
// implementations too.
assert_eq!(upper, Some(lower));
lower
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, I: ExactSizeIterator + ?Sized> ExactSizeIterator for &'a mut I {}
// All adaptors that preserve the size of the wrapped iterator are fine
// Adaptors that may overflow in `size_hint` are not, i.e. `Chain`.
#[stable(feature = "rust1", since = "1.0.0")]
impl<I> ExactSizeIterator for Enumerate<I> where I: ExactSizeIterator {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: ExactSizeIterator, F> ExactSizeIterator for Inspect<I, F> where
F: FnMut(&I::Item),
{}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I> ExactSizeIterator for Rev<I>
where I: ExactSizeIterator + DoubleEndedIterator {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<B, I: ExactSizeIterator, F> ExactSizeIterator for Map<I, F> where
F: FnMut(I::Item) -> B,
{}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A, B> ExactSizeIterator for Zip<A, B>
where A: ExactSizeIterator, B: ExactSizeIterator {}
/// An double-ended iterator with the direction inverted
#[derive(Clone)]
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Rev<T> {
iter: T
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I> Iterator for Rev<I> where I: DoubleEndedIterator {
type Item = <I as Iterator>::Item;
#[inline]
fn next(&mut self) -> Option<<I as Iterator>::Item> { self.iter.next_back() }
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I> DoubleEndedIterator for Rev<I> where I: DoubleEndedIterator {
#[inline]
fn next_back(&mut self) -> Option<<I as Iterator>::Item> { self.iter.next() }
}
#[unstable(feature = "core", reason = "trait is experimental")]
impl<I> RandomAccessIterator for Rev<I>
where I: DoubleEndedIterator + RandomAccessIterator
{
#[inline]
fn indexable(&self) -> usize { self.iter.indexable() }
#[inline]
fn idx(&mut self, index: usize) -> Option<<I as Iterator>::Item> {
let amt = self.indexable();
if amt > index {
self.iter.idx(amt - index - 1)
} else {
None
}
}
}
/// `MinMaxResult` is an enum returned by `min_max`. See `Iterator::min_max` for
/// more detail.
#[derive(Clone, PartialEq, Debug)]
#[unstable(feature = "core",
reason = "unclear whether such a fine-grained result is widely useful")]
pub enum MinMaxResult<T> {
/// Empty iterator
NoElements,
/// Iterator with one element, so the minimum and maximum are the same
OneElement(T),
/// More than one element in the iterator, the first element is not larger
/// than the second
MinMax(T, T)
}
impl<T: Clone> MinMaxResult<T> {
/// `into_option` creates an `Option` of type `(T,T)`. The returned `Option`
/// has variant `None` if and only if the `MinMaxResult` has variant
/// `NoElements`. Otherwise variant `Some(x,y)` is returned where `x <= y`.
/// If `MinMaxResult` has variant `OneElement(x)`, performing this operation
/// will make one clone of `x`.
///
/// # Examples
///
/// ```
/// # #![feature(core)]
/// use std::iter::MinMaxResult::{self, NoElements, OneElement, MinMax};
///
/// let r: MinMaxResult<i32> = NoElements;
/// assert_eq!(r.into_option(), None);
///
/// let r = OneElement(1);
/// assert_eq!(r.into_option(), Some((1, 1)));
///
/// let r = MinMax(1, 2);
/// assert_eq!(r.into_option(), Some((1, 2)));
/// ```
#[unstable(feature = "core", reason = "type is unstable")]
pub fn into_option(self) -> Option<(T,T)> {
match self {
NoElements => None,
OneElement(x) => Some((x.clone(), x)),
MinMax(x, y) => Some((x, y))
}
}
}
/// An iterator that clones the elements of an underlying iterator
#[stable(feature = "iter_cloned", since = "1.1.0")]
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[derive(Clone)]
pub struct Cloned<I> {
it: I,
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, I, T: 'a> Iterator for Cloned<I>
where I: Iterator<Item=&'a T>, T: Clone
{
type Item = T;
fn next(&mut self) -> Option<T> {
self.it.next().cloned()
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.it.size_hint()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, I, T: 'a> DoubleEndedIterator for Cloned<I>
where I: DoubleEndedIterator<Item=&'a T>, T: Clone
{
fn next_back(&mut self) -> Option<T> {
self.it.next_back().cloned()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, I, T: 'a> ExactSizeIterator for Cloned<I>
where I: ExactSizeIterator<Item=&'a T>, T: Clone
{}
#[unstable(feature = "core", reason = "trait is experimental")]
impl<'a, I, T: 'a> RandomAccessIterator for Cloned<I>
where I: RandomAccessIterator<Item=&'a T>, T: Clone
{
#[inline]
fn indexable(&self) -> usize {
self.it.indexable()
}
#[inline]
fn idx(&mut self, index: usize) -> Option<T> {
self.it.idx(index).cloned()
}
}
/// An iterator that repeats endlessly
#[derive(Clone)]
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Cycle<I> {
orig: I,
iter: I,
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I> Iterator for Cycle<I> where I: Clone + Iterator {
type Item = <I as Iterator>::Item;
#[inline]
fn next(&mut self) -> Option<<I as Iterator>::Item> {
match self.iter.next() {
None => { self.iter = self.orig.clone(); self.iter.next() }
y => y
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
// the cycle iterator is either empty or infinite
match self.orig.size_hint() {
sz @ (0, Some(0)) => sz,
(0, _) => (0, None),
_ => (usize::MAX, None)
}
}
}
#[unstable(feature = "core", reason = "trait is experimental")]
impl<I> RandomAccessIterator for Cycle<I> where
I: Clone + RandomAccessIterator,
{
#[inline]
fn indexable(&self) -> usize {
if self.orig.indexable() > 0 {
usize::MAX
} else {
0
}
}
#[inline]
fn idx(&mut self, index: usize) -> Option<<I as Iterator>::Item> {
let liter = self.iter.indexable();
let lorig = self.orig.indexable();
if lorig == 0 {
None
} else if index < liter {
self.iter.idx(index)
} else {
self.orig.idx((index - liter) % lorig)
}
}
}
/// An iterator that strings two iterators together
#[derive(Clone)]
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Chain<A, B> {
a: A,
b: B,
flag: bool,
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A, B> Iterator for Chain<A, B> where
A: Iterator,
B: Iterator<Item = A::Item>
{
type Item = A::Item;
#[inline]
fn next(&mut self) -> Option<A::Item> {
if self.flag {
self.b.next()
} else {
match self.a.next() {
Some(x) => return Some(x),
_ => ()
}
self.flag = true;
self.b.next()
}
}
#[inline]
fn count(self) -> usize {
(if !self.flag { self.a.count() } else { 0 }) + self.b.count()
}
#[inline]
fn nth(&mut self, mut n: usize) -> Option<A::Item> {
if !self.flag {
for x in self.a.by_ref() {
if n == 0 {
return Some(x)
}
n -= 1;
}
self.flag = true;
}
self.b.nth(n)
}
#[inline]
fn last(self) -> Option<A::Item> {
let a_last = if self.flag { None } else { self.a.last() };
let b_last = self.b.last();
b_last.or(a_last)
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let (a_lower, a_upper) = self.a.size_hint();
let (b_lower, b_upper) = self.b.size_hint();
let lower = a_lower.saturating_add(b_lower);
let upper = match (a_upper, b_upper) {
(Some(x), Some(y)) => x.checked_add(y),
_ => None
};
(lower, upper)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A, B> DoubleEndedIterator for Chain<A, B> where
A: DoubleEndedIterator,
B: DoubleEndedIterator<Item=A::Item>,
{
#[inline]
fn next_back(&mut self) -> Option<A::Item> {
match self.b.next_back() {
Some(x) => Some(x),
None => self.a.next_back()
}
}
}
#[unstable(feature = "core", reason = "trait is experimental")]
impl<A, B> RandomAccessIterator for Chain<A, B> where
A: RandomAccessIterator,
B: RandomAccessIterator<Item = A::Item>,
{
#[inline]
fn indexable(&self) -> usize {
let (a, b) = (self.a.indexable(), self.b.indexable());
a.saturating_add(b)
}
#[inline]
fn idx(&mut self, index: usize) -> Option<A::Item> {
let len = self.a.indexable();
if index < len {
self.a.idx(index)
} else {
self.b.idx(index - len)
}
}
}
/// An iterator that iterates two other iterators simultaneously
#[derive(Clone)]
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Zip<A, B> {
a: A,
b: B
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A, B> Iterator for Zip<A, B> where A: Iterator, B: Iterator
{
type Item = (A::Item, B::Item);
#[inline]
fn next(&mut self) -> Option<(A::Item, B::Item)> {
self.a.next().and_then(|x| {
self.b.next().and_then(|y| {
Some((x, y))
})
})
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let (a_lower, a_upper) = self.a.size_hint();
let (b_lower, b_upper) = self.b.size_hint();
let lower = cmp::min(a_lower, b_lower);
let upper = match (a_upper, b_upper) {
(Some(x), Some(y)) => Some(cmp::min(x,y)),
(Some(x), None) => Some(x),
(None, Some(y)) => Some(y),
(None, None) => None
};
(lower, upper)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A, B> DoubleEndedIterator for Zip<A, B> where
A: DoubleEndedIterator + ExactSizeIterator,
B: DoubleEndedIterator + ExactSizeIterator,
{
#[inline]
fn next_back(&mut self) -> Option<(A::Item, B::Item)> {
let a_sz = self.a.len();
let b_sz = self.b.len();
if a_sz != b_sz {
// Adjust a, b to equal length
if a_sz > b_sz {
for _ in 0..a_sz - b_sz { self.a.next_back(); }
} else {
for _ in 0..b_sz - a_sz { self.b.next_back(); }
}
}
match (self.a.next_back(), self.b.next_back()) {
(Some(x), Some(y)) => Some((x, y)),
(None, None) => None,
_ => unreachable!(),
}
}
}
#[unstable(feature = "core", reason = "trait is experimental")]
impl<A, B> RandomAccessIterator for Zip<A, B> where
A: RandomAccessIterator,
B: RandomAccessIterator
{
#[inline]
fn indexable(&self) -> usize {
cmp::min(self.a.indexable(), self.b.indexable())
}
#[inline]
fn idx(&mut self, index: usize) -> Option<(A::Item, B::Item)> {
self.a.idx(index).and_then(|x| {
self.b.idx(index).and_then(|y| {
Some((x, y))
})
})
}
}
/// An iterator that maps the values of `iter` with `f`
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Clone)]
pub struct Map<I, F> {
iter: I,
f: F,
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<B, I: Iterator, F> Iterator for Map<I, F> where F: FnMut(I::Item) -> B {
type Item = B;
#[inline]
fn next(&mut self) -> Option<B> {
self.iter.next().map(|a| (self.f)(a))
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<B, I: DoubleEndedIterator, F> DoubleEndedIterator for Map<I, F> where
F: FnMut(I::Item) -> B,
{
#[inline]
fn next_back(&mut self) -> Option<B> {
self.iter.next_back().map(|a| (self.f)(a))
}
}
#[unstable(feature = "core", reason = "trait is experimental")]
impl<B, I: RandomAccessIterator, F> RandomAccessIterator for Map<I, F> where
F: FnMut(I::Item) -> B,
{
#[inline]
fn indexable(&self) -> usize {
self.iter.indexable()
}
#[inline]
fn idx(&mut self, index: usize) -> Option<B> {
self.iter.idx(index).map(|a| (self.f)(a))
}
}
/// An iterator that filters the elements of `iter` with `predicate`
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Clone)]
pub struct Filter<I, P> {
iter: I,
predicate: P,
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: Iterator, P> Iterator for Filter<I, P> where P: FnMut(&I::Item) -> bool {
type Item = I::Item;
#[inline]
fn next(&mut self) -> Option<I::Item> {
for x in self.iter.by_ref() {
if (self.predicate)(&x) {
return Some(x);
}
}
None
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let (_, upper) = self.iter.size_hint();
(0, upper) // can't know a lower bound, due to the predicate
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: DoubleEndedIterator, P> DoubleEndedIterator for Filter<I, P>
where P: FnMut(&I::Item) -> bool,
{
#[inline]
fn next_back(&mut self) -> Option<I::Item> {
for x in self.iter.by_ref().rev() {
if (self.predicate)(&x) {
return Some(x);
}
}
None
}
}
/// An iterator that uses `f` to both filter and map elements from `iter`
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Clone)]
pub struct FilterMap<I, F> {
iter: I,
f: F,
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<B, I: Iterator, F> Iterator for FilterMap<I, F>
where F: FnMut(I::Item) -> Option<B>,
{
type Item = B;
#[inline]
fn next(&mut self) -> Option<B> {
for x in self.iter.by_ref() {
if let Some(y) = (self.f)(x) {
return Some(y);
}
}
None
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let (_, upper) = self.iter.size_hint();
(0, upper) // can't know a lower bound, due to the predicate
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<B, I: DoubleEndedIterator, F> DoubleEndedIterator for FilterMap<I, F>
where F: FnMut(I::Item) -> Option<B>,
{
#[inline]
fn next_back(&mut self) -> Option<B> {
for x in self.iter.by_ref().rev() {
if let Some(y) = (self.f)(x) {
return Some(y);
}
}
None
}
}
/// An iterator that yields the current count and the element during iteration
#[derive(Clone)]
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Enumerate<I> {
iter: I,
count: usize,
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I> Iterator for Enumerate<I> where I: Iterator {
type Item = (usize, <I as Iterator>::Item);
/// # Overflow Behavior
///
/// The method does no guarding against overflows, so enumerating more than
/// `usize::MAX` elements either produces the wrong result or panics. If
/// debug assertions are enabled, a panic is guaranteed.
///
/// # Panics
///
/// Might panic if the index of the element overflows a `usize`.
#[inline]
fn next(&mut self) -> Option<(usize, <I as Iterator>::Item)> {
self.iter.next().map(|a| {
let ret = (self.count, a);
// Possible undefined overflow.
self.count += 1;
ret
})
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
#[inline]
fn nth(&mut self, n: usize) -> Option<(usize, I::Item)> {
self.iter.nth(n).map(|a| {
let i = self.count + n;
self.count = i + 1;
(i, a)
})
}
#[inline]
fn count(self) -> usize {
self.iter.count()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I> DoubleEndedIterator for Enumerate<I> where
I: ExactSizeIterator + DoubleEndedIterator
{
#[inline]
fn next_back(&mut self) -> Option<(usize, <I as Iterator>::Item)> {
self.iter.next_back().map(|a| {
let len = self.iter.len();
// Can safely add, `ExactSizeIterator` promises that the number of
// elements fits into a `usize`.
(self.count + len, a)
})
}
}
#[unstable(feature = "core", reason = "trait is experimental")]
impl<I> RandomAccessIterator for Enumerate<I> where I: RandomAccessIterator {
#[inline]
fn indexable(&self) -> usize {
self.iter.indexable()
}
#[inline]
fn idx(&mut self, index: usize) -> Option<(usize, <I as Iterator>::Item)> {
// Can safely add, `ExactSizeIterator` (ancestor of
// `RandomAccessIterator`) promises that the number of elements fits
// into a `usize`.
self.iter.idx(index).map(|a| (self.count + index, a))
}
}
/// An iterator with a `peek()` that returns an optional reference to the next element.
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Peekable<I: Iterator> {
iter: I,
peeked: Option<I::Item>,
}
impl<I: Iterator + Clone> Clone for Peekable<I> where I::Item: Clone {
fn clone(&self) -> Peekable<I> {
Peekable {
iter: self.iter.clone(),
peeked: self.peeked.clone(),
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: Iterator> Iterator for Peekable<I> {
type Item = I::Item;
#[inline]
fn next(&mut self) -> Option<I::Item> {
match self.peeked {
Some(_) => self.peeked.take(),
None => self.iter.next(),
}
}
#[inline]
fn count(self) -> usize {
(if self.peeked.is_some() { 1 } else { 0 }) + self.iter.count()
}
#[inline]
fn nth(&mut self, n: usize) -> Option<I::Item> {
match self.peeked {
Some(_) if n == 0 => self.peeked.take(),
Some(_) => {
self.peeked = None;
self.iter.nth(n-1)
},
None => self.iter.nth(n)
}
}
#[inline]
fn last(self) -> Option<I::Item> {
self.iter.last().or(self.peeked)
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let (lo, hi) = self.iter.size_hint();
if self.peeked.is_some() {
let lo = lo.saturating_add(1);
let hi = hi.and_then(|x| x.checked_add(1));
(lo, hi)
} else {
(lo, hi)
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: ExactSizeIterator> ExactSizeIterator for Peekable<I> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: Iterator> Peekable<I> {
/// Returns a reference to the next element of the iterator with out
/// advancing it, or None if the iterator is exhausted.
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn peek(&mut self) -> Option<&I::Item> {
if self.peeked.is_none() {
self.peeked = self.iter.next();
}
match self.peeked {
Some(ref value) => Some(value),
None => None,
}
}
/// Checks whether peekable iterator is empty or not.
#[inline]
pub fn is_empty(&mut self) -> bool {
self.peek().is_none()
}
}
/// An iterator that rejects elements while `predicate` is true
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Clone)]
pub struct SkipWhile<I, P> {
iter: I,
flag: bool,
predicate: P,
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: Iterator, P> Iterator for SkipWhile<I, P>
where P: FnMut(&I::Item) -> bool
{
type Item = I::Item;
#[inline]
fn next(&mut self) -> Option<I::Item> {
for x in self.iter.by_ref() {
if self.flag || !(self.predicate)(&x) {
self.flag = true;
return Some(x);
}
}
None
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let (_, upper) = self.iter.size_hint();
(0, upper) // can't know a lower bound, due to the predicate
}
}
/// An iterator that only accepts elements while `predicate` is true
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Clone)]
pub struct TakeWhile<I, P> {
iter: I,
flag: bool,
predicate: P,
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: Iterator, P> Iterator for TakeWhile<I, P>
where P: FnMut(&I::Item) -> bool
{
type Item = I::Item;
#[inline]
fn next(&mut self) -> Option<I::Item> {
if self.flag {
None
} else {
self.iter.next().and_then(|x| {
if (self.predicate)(&x) {
Some(x)
} else {
self.flag = true;
None
}
})
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let (_, upper) = self.iter.size_hint();
(0, upper) // can't know a lower bound, due to the predicate
}
}
/// An iterator that skips over `n` elements of `iter`.
#[derive(Clone)]
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Skip<I> {
iter: I,
n: usize
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I> Iterator for Skip<I> where I: Iterator {
type Item = <I as Iterator>::Item;
#[inline]
fn next(&mut self) -> Option<I::Item> {
if self.n == 0 {
self.iter.next()
} else {
let old_n = self.n;
self.n = 0;
self.iter.nth(old_n)
}
}
#[inline]
fn nth(&mut self, n: usize) -> Option<I::Item> {
// Can't just add n + self.n due to overflow.
if self.n == 0 {
self.iter.nth(n)
} else {
let to_skip = self.n;
self.n = 0;
// nth(n) skips n+1
if self.iter.nth(to_skip-1).is_none() {
return None;
}
self.iter.nth(n)
}
}
#[inline]
fn count(self) -> usize {
self.iter.count().saturating_sub(self.n)
}
#[inline]
fn last(mut self) -> Option<I::Item> {
if self.n == 0 {
self.iter.last()
} else {
let next = self.next();
if next.is_some() {
// recurse. n should be 0.
self.last().or(next)
} else {
None
}
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let (lower, upper) = self.iter.size_hint();
let lower = lower.saturating_sub(self.n);
let upper = upper.map(|x| x.saturating_sub(self.n));
(lower, upper)
}
}
#[unstable(feature = "core", reason = "trait is experimental")]
impl<I> RandomAccessIterator for Skip<I> where I: RandomAccessIterator{
#[inline]
fn indexable(&self) -> usize {
self.iter.indexable().saturating_sub(self.n)
}
#[inline]
fn idx(&mut self, index: usize) -> Option<<I as Iterator>::Item> {
if index >= self.indexable() {
None
} else {
self.iter.idx(index + self.n)
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I> ExactSizeIterator for Skip<I> where I: ExactSizeIterator {}
/// An iterator that only iterates over the first `n` iterations of `iter`.
#[derive(Clone)]
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Take<I> {
iter: I,
n: usize
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I> Iterator for Take<I> where I: Iterator{
type Item = <I as Iterator>::Item;
#[inline]
fn next(&mut self) -> Option<<I as Iterator>::Item> {
if self.n != 0 {
self.n -= 1;
self.iter.next()
} else {
None
}
}
#[inline]
fn nth(&mut self, n: usize) -> Option<I::Item> {
if self.n > n {
self.n -= n + 1;
self.iter.nth(n)
} else {
if self.n > 0 {
self.iter.nth(self.n - 1);
self.n = 0;
}
None
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let (lower, upper) = self.iter.size_hint();
let lower = cmp::min(lower, self.n);
let upper = match upper {
Some(x) if x < self.n => Some(x),
_ => Some(self.n)
};
(lower, upper)
}
}
#[unstable(feature = "core", reason = "trait is experimental")]
impl<I> RandomAccessIterator for Take<I> where I: RandomAccessIterator{
#[inline]
fn indexable(&self) -> usize {
cmp::min(self.iter.indexable(), self.n)
}
#[inline]
fn idx(&mut self, index: usize) -> Option<<I as Iterator>::Item> {
if index >= self.n {
None
} else {
self.iter.idx(index)
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I> ExactSizeIterator for Take<I> where I: ExactSizeIterator {}
/// An iterator to maintain state while iterating another iterator
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Clone)]
pub struct Scan<I, St, F> {
iter: I,
f: F,
/// The current internal state to be passed to the closure next.
#[unstable(feature = "core")]
pub state: St,
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<B, I, St, F> Iterator for Scan<I, St, F> where
I: Iterator,
F: FnMut(&mut St, I::Item) -> Option<B>,
{
type Item = B;
#[inline]
fn next(&mut self) -> Option<B> {
self.iter.next().and_then(|a| (self.f)(&mut self.state, a))
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let (_, upper) = self.iter.size_hint();
(0, upper) // can't know a lower bound, due to the scan function
}
}
/// An iterator that maps each element to an iterator,
/// and yields the elements of the produced iterators
///
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Clone)]
pub struct FlatMap<I, U: IntoIterator, F> {
iter: I,
f: F,
frontiter: Option<U::IntoIter>,
backiter: Option<U::IntoIter>,
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: Iterator, U: IntoIterator, F> Iterator for FlatMap<I, U, F>
where F: FnMut(I::Item) -> U,
{
type Item = U::Item;
#[inline]
fn next(&mut self) -> Option<U::Item> {
loop {
if let Some(ref mut inner) = self.frontiter {
if let Some(x) = inner.by_ref().next() {
return Some(x)
}
}
match self.iter.next().map(|x| (self.f)(x)) {
None => return self.backiter.as_mut().and_then(|it| it.next()),
next => self.frontiter = next.map(IntoIterator::into_iter),
}
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let (flo, fhi) = self.frontiter.as_ref().map_or((0, Some(0)), |it| it.size_hint());
let (blo, bhi) = self.backiter.as_ref().map_or((0, Some(0)), |it| it.size_hint());
let lo = flo.saturating_add(blo);
match (self.iter.size_hint(), fhi, bhi) {
((0, Some(0)), Some(a), Some(b)) => (lo, a.checked_add(b)),
_ => (lo, None)
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: DoubleEndedIterator, U, F> DoubleEndedIterator for FlatMap<I, U, F> where
F: FnMut(I::Item) -> U,
U: IntoIterator,
U::IntoIter: DoubleEndedIterator
{
#[inline]
fn next_back(&mut self) -> Option<U::Item> {
loop {
if let Some(ref mut inner) = self.backiter {
if let Some(y) = inner.next_back() {
return Some(y)
}
}
match self.iter.next_back().map(|x| (self.f)(x)) {
None => return self.frontiter.as_mut().and_then(|it| it.next_back()),
next => self.backiter = next.map(IntoIterator::into_iter),
}
}
}
}
/// An iterator that yields `None` forever after the underlying iterator
/// yields `None` once.
#[derive(Clone)]
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Fuse<I> {
iter: I,
done: bool
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I> Iterator for Fuse<I> where I: Iterator {
type Item = <I as Iterator>::Item;
#[inline]
fn next(&mut self) -> Option<<I as Iterator>::Item> {
if self.done {
None
} else {
let next = self.iter.next();
self.done = next.is_none();
next
}
}
#[inline]
fn nth(&mut self, n: usize) -> Option<I::Item> {
if self.done {
None
} else {
let nth = self.iter.nth(n);
self.done = nth.is_none();
nth
}
}
#[inline]
fn last(self) -> Option<I::Item> {
if self.done {
None
} else {
self.iter.last()
}
}
#[inline]
fn count(self) -> usize {
if self.done {
0
} else {
self.iter.count()
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
if self.done {
(0, Some(0))
} else {
self.iter.size_hint()
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I> DoubleEndedIterator for Fuse<I> where I: DoubleEndedIterator {
#[inline]
fn next_back(&mut self) -> Option<<I as Iterator>::Item> {
if self.done {
None
} else {
let next = self.iter.next_back();
self.done = next.is_none();
next
}
}
}
// Allow RandomAccessIterators to be fused without affecting random-access behavior
#[unstable(feature = "core", reason = "trait is experimental")]
impl<I> RandomAccessIterator for Fuse<I> where I: RandomAccessIterator {
#[inline]
fn indexable(&self) -> usize {
self.iter.indexable()
}
#[inline]
fn idx(&mut self, index: usize) -> Option<<I as Iterator>::Item> {
self.iter.idx(index)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I> ExactSizeIterator for Fuse<I> where I: ExactSizeIterator {}
impl<I> Fuse<I> {
/// Resets the `Fuse` such that the next call to `.next()` or
/// `.next_back()` will call the underlying iterator again even if it
/// previously returned `None`.
#[inline]
#[unstable(feature = "core", reason = "seems marginal")]
pub fn reset_fuse(&mut self) {
self.done = false
}
}
/// An iterator that calls a function with a reference to each
/// element before yielding it.
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Clone)]
pub struct Inspect<I, F> {
iter: I,
f: F,
}
impl<I: Iterator, F> Inspect<I, F> where F: FnMut(&I::Item) {
#[inline]
fn do_inspect(&mut self, elt: Option<I::Item>) -> Option<I::Item> {
if let Some(ref a) = elt {
(self.f)(a);
}
elt
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: Iterator, F> Iterator for Inspect<I, F> where F: FnMut(&I::Item) {
type Item = I::Item;
#[inline]
fn next(&mut self) -> Option<I::Item> {
let next = self.iter.next();
self.do_inspect(next)
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: DoubleEndedIterator, F> DoubleEndedIterator for Inspect<I, F>
where F: FnMut(&I::Item),
{
#[inline]
fn next_back(&mut self) -> Option<I::Item> {
let next = self.iter.next_back();
self.do_inspect(next)
}
}
#[unstable(feature = "core", reason = "trait is experimental")]
impl<I: RandomAccessIterator, F> RandomAccessIterator for Inspect<I, F>
where F: FnMut(&I::Item),
{
#[inline]
fn indexable(&self) -> usize {
self.iter.indexable()
}
#[inline]
fn idx(&mut self, index: usize) -> Option<I::Item> {
let element = self.iter.idx(index);
self.do_inspect(element)
}
}
/// An iterator that passes mutable state to a closure and yields the result.
///
/// # Examples
///
/// An iterator that yields sequential Fibonacci numbers, and stops on overflow.
///
/// ```
/// #![feature(core)]
/// use std::iter::Unfold;
///
/// // This iterator will yield up to the last Fibonacci number before the max
/// // value of `u32`. You can simply change `u32` to `u64` in this line if
/// // you want higher values than that.
/// let mut fibonacci = Unfold::new((Some(0u32), Some(1u32)),
/// |&mut (ref mut x2, ref mut x1)| {
/// // Attempt to get the next Fibonacci number
/// // `x1` will be `None` if previously overflowed.
/// let next = match (*x2, *x1) {
/// (Some(x2), Some(x1)) => x2.checked_add(x1),
/// _ => None,
/// };
///
/// // Shift left: ret <- x2 <- x1 <- next
/// let ret = *x2;
/// *x2 = *x1;
/// *x1 = next;
///
/// ret
/// });
///
/// for i in fibonacci {
/// println!("{}", i);
/// }
/// ```
#[unstable(feature = "core")]
#[derive(Clone)]
pub struct Unfold<St, F> {
f: F,
/// Internal state that will be passed to the closure on the next iteration
#[unstable(feature = "core")]
pub state: St,
}
#[unstable(feature = "core")]
impl<A, St, F> Unfold<St, F> where F: FnMut(&mut St) -> Option<A> {
/// Creates a new iterator with the specified closure as the "iterator
/// function" and an initial state to eventually pass to the closure
#[inline]
pub fn new(initial_state: St, f: F) -> Unfold<St, F> {
Unfold {
f: f,
state: initial_state
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A, St, F> Iterator for Unfold<St, F> where F: FnMut(&mut St) -> Option<A> {
type Item = A;
#[inline]
fn next(&mut self) -> Option<A> {
(self.f)(&mut self.state)
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
// no possible known bounds at this point
(0, None)
}
}
/// Objects that can be stepped over in both directions.
///
/// The `steps_between` function provides a way to efficiently compare
/// two `Step` objects.
#[unstable(feature = "step_trait",
reason = "likely to be replaced by finer-grained traits")]
pub trait Step: PartialOrd {
/// Steps `self` if possible.
fn step(&self, by: &Self) -> Option<Self>;
/// Returns the number of steps between two step objects. The count is
/// inclusive of `start` and exclusive of `end`.
///
/// Returns `None` if it is not possible to calculate `steps_between`
/// without overflow.
fn steps_between(start: &Self, end: &Self, by: &Self) -> Option<usize>;
}
macro_rules! step_impl_unsigned {
($($t:ty)*) => ($(
impl Step for $t {
#[inline]
fn step(&self, by: &$t) -> Option<$t> {
(*self).checked_add(*by)
}
#[inline]
#[allow(trivial_numeric_casts)]
fn steps_between(start: &$t, end: &$t, by: &$t) -> Option<usize> {
if *by == 0 { return None; }
if *start < *end {
// Note: We assume $t <= usize here
let diff = (*end - *start) as usize;
let by = *by as usize;
if diff % by > 0 {
Some(diff / by + 1)
} else {
Some(diff / by)
}
} else {
Some(0)
}
}
}
)*)
}
macro_rules! step_impl_signed {
($($t:ty)*) => ($(
impl Step for $t {
#[inline]
fn step(&self, by: &$t) -> Option<$t> {
(*self).checked_add(*by)
}
#[inline]
#[allow(trivial_numeric_casts)]
fn steps_between(start: &$t, end: &$t, by: &$t) -> Option<usize> {
if *by == 0 { return None; }
let mut diff: usize;
let mut by_u: usize;
if *by > 0 {
if *start >= *end {
return Some(0);
}
// Note: We assume $t <= isize here
// Use .wrapping_sub and cast to usize to compute the
// difference that may not fit inside the range of isize.
diff = (*end as isize).wrapping_sub(*start as isize) as usize;
by_u = *by as usize;
} else {
if *start <= *end {
return Some(0);
}
diff = (*start as isize).wrapping_sub(*end as isize) as usize;
by_u = (*by as isize).wrapping_mul(-1) as usize;
}
if diff % by_u > 0 {
Some(diff / by_u + 1)
} else {
Some(diff / by_u)
}
}
}
)*)
}
macro_rules! step_impl_no_between {
($($t:ty)*) => ($(
impl Step for $t {
#[inline]
fn step(&self, by: &$t) -> Option<$t> {
(*self).checked_add(*by)
}
#[inline]
fn steps_between(_a: &$t, _b: &$t, _by: &$t) -> Option<usize> {
None
}
}
)*)
}
step_impl_unsigned!(usize u8 u16 u32);
step_impl_signed!(isize i8 i16 i32);
#[cfg(target_pointer_width = "64")]
step_impl_unsigned!(u64);
#[cfg(target_pointer_width = "64")]
step_impl_signed!(i64);
#[cfg(target_pointer_width = "32")]
step_impl_no_between!(u64 i64);
/// An adapter for stepping range iterators by a custom amount.
///
/// The resulting iterator handles overflow by stopping. The `A`
/// parameter is the type being iterated over, while `R` is the range
/// type (usually one of `std::ops::{Range, RangeFrom}`.
#[derive(Clone)]
#[unstable(feature = "step_by", reason = "recent addition")]
pub struct StepBy<A, R> {
step_by: A,
range: R,
}
impl<A: Step> RangeFrom<A> {
/// Creates an iterator starting at the same point, but stepping by
/// the given amount at each iteration.
///
/// # Examples
///
/// ```ignore
/// for i in (0u8..).step_by(2) {
/// println!("{}", i);
/// }
/// ```
///
/// This prints all even `u8` values.
#[unstable(feature = "step_by", reason = "recent addition")]
pub fn step_by(self, by: A) -> StepBy<A, Self> {
StepBy {
step_by: by,
range: self
}
}
}
#[allow(deprecated)]
impl<A: Step> ops::Range<A> {
/// Creates an iterator with the same range, but stepping by the
/// given amount at each iteration.
///
/// The resulting iterator handles overflow by stopping.
///
/// # Examples
///
/// ```
/// # #![feature(step_by, core)]
/// for i in (0..10).step_by(2) {
/// println!("{}", i);
/// }
/// ```
///
/// This prints:
///
/// ```text
/// 0
/// 2
/// 4
/// 6
/// 8
/// ```
#[unstable(feature = "step_by", reason = "recent addition")]
pub fn step_by(self, by: A) -> StepBy<A, Self> {
StepBy {
step_by: by,
range: self
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A> Iterator for StepBy<A, RangeFrom<A>> where
A: Clone,
for<'a> &'a A: Add<&'a A, Output = A>
{
type Item = A;
#[inline]
fn next(&mut self) -> Option<A> {
let mut n = &self.range.start + &self.step_by;
mem::swap(&mut n, &mut self.range.start);
Some(n)
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
(usize::MAX, None) // Too bad we can't specify an infinite lower bound
}
}
/// An iterator over the range [start, stop]
#[derive(Clone)]
#[unstable(feature = "core",
reason = "likely to be replaced by range notation and adapters")]
pub struct RangeInclusive<A> {
range: ops::Range<A>,
done: bool,
}
/// Returns an iterator over the range [start, stop].
#[inline]
#[unstable(feature = "core",
reason = "likely to be replaced by range notation and adapters")]
pub fn range_inclusive<A>(start: A, stop: A) -> RangeInclusive<A>
where A: Step + One + Clone
{
RangeInclusive {
range: start..stop,
done: false,
}
}
#[unstable(feature = "core",
reason = "likely to be replaced by range notation and adapters")]
impl<A> Iterator for RangeInclusive<A> where
A: PartialEq + Step + One + Clone,
for<'a> &'a A: Add<&'a A, Output = A>
{
type Item = A;
#[inline]
fn next(&mut self) -> Option<A> {
self.range.next().or_else(|| {
if !self.done && self.range.start == self.range.end {
self.done = true;
Some(self.range.end.clone())
} else {
None
}
})
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let (lo, hi) = self.range.size_hint();
if self.done {
(lo, hi)
} else {
let lo = lo.saturating_add(1);
let hi = hi.and_then(|x| x.checked_add(1));
(lo, hi)
}
}
}
#[unstable(feature = "core",
reason = "likely to be replaced by range notation and adapters")]
impl<A> DoubleEndedIterator for RangeInclusive<A> where
A: PartialEq + Step + One + Clone,
for<'a> &'a A: Add<&'a A, Output = A>,
for<'a> &'a A: Sub<Output=A>
{
#[inline]
fn next_back(&mut self) -> Option<A> {
if self.range.end > self.range.start {
let result = self.range.end.clone();
self.range.end = &self.range.end - &A::one();
Some(result)
} else if !self.done && self.range.start == self.range.end {
self.done = true;
Some(self.range.end.clone())
} else {
None
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
#[allow(deprecated)]
impl<A: Step + Zero + Clone> Iterator for StepBy<A, ops::Range<A>> {
type Item = A;
#[inline]
fn next(&mut self) -> Option<A> {
let rev = self.step_by < A::zero();
if (rev && self.range.start > self.range.end) ||
(!rev && self.range.start < self.range.end)
{
match self.range.start.step(&self.step_by) {
Some(mut n) => {
mem::swap(&mut self.range.start, &mut n);
Some(n)
},
None => {
let mut n = self.range.end.clone();
mem::swap(&mut self.range.start, &mut n);
Some(n)
}
}
} else {
None
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
match Step::steps_between(&self.range.start,
&self.range.end,
&self.step_by) {
Some(hint) => (hint, Some(hint)),
None => (0, None)
}
}
}
macro_rules! range_exact_iter_impl {
($($t:ty)*) => ($(
#[stable(feature = "rust1", since = "1.0.0")]
impl ExactSizeIterator for ops::Range<$t> { }
)*)
}
#[stable(feature = "rust1", since = "1.0.0")]
#[allow(deprecated)]
impl<A: Step + One> Iterator for ops::Range<A> where
for<'a> &'a A: Add<&'a A, Output = A>
{
type Item = A;
#[inline]
fn next(&mut self) -> Option<A> {
if self.start < self.end {
let mut n = &self.start + &A::one();
mem::swap(&mut n, &mut self.start);
Some(n)
} else {
None
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
match Step::steps_between(&self.start, &self.end, &A::one()) {
Some(hint) => (hint, Some(hint)),
None => (0, None)
}
}
}
// Ranges of u64 and i64 are excluded because they cannot guarantee having
// a length <= usize::MAX, which is required by ExactSizeIterator.
range_exact_iter_impl!(usize u8 u16 u32 isize i8 i16 i32);
#[stable(feature = "rust1", since = "1.0.0")]
#[allow(deprecated)]
impl<A: Step + One + Clone> DoubleEndedIterator for ops::Range<A> where
for<'a> &'a A: Add<&'a A, Output = A>,
for<'a> &'a A: Sub<&'a A, Output = A>
{
#[inline]
fn next_back(&mut self) -> Option<A> {
if self.start < self.end {
self.end = &self.end - &A::one();
Some(self.end.clone())
} else {
None
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
#[allow(deprecated)]
impl<A: Step + One> Iterator for ops::RangeFrom<A> where
for<'a> &'a A: Add<&'a A, Output = A>
{
type Item = A;
#[inline]
fn next(&mut self) -> Option<A> {
let mut n = &self.start + &A::one();
mem::swap(&mut n, &mut self.start);
Some(n)
}
}
/// An iterator that repeats an element endlessly
#[derive(Clone)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Repeat<A> {
element: A
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: Clone> Iterator for Repeat<A> {
type Item = A;
#[inline]
fn next(&mut self) -> Option<A> { self.idx(0) }
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) { (usize::MAX, None) }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: Clone> DoubleEndedIterator for Repeat<A> {
#[inline]
fn next_back(&mut self) -> Option<A> { self.idx(0) }
}
#[unstable(feature = "core", reason = "trait is experimental")]
impl<A: Clone> RandomAccessIterator for Repeat<A> {
#[inline]
fn indexable(&self) -> usize { usize::MAX }
#[inline]
fn idx(&mut self, _: usize) -> Option<A> { Some(self.element.clone()) }
}
type IterateState<T, F> = (F, Option<T>, bool);
/// An iterator that repeatedly applies a given function, starting
/// from a given seed value.
#[unstable(feature = "core")]
pub type Iterate<T, F> = Unfold<IterateState<T, F>, fn(&mut IterateState<T, F>) -> Option<T>>;
/// Creates a new iterator that produces an infinite sequence of
/// repeated applications of the given function `f`.
#[unstable(feature = "core")]
pub fn iterate<T, F>(seed: T, f: F) -> Iterate<T, F> where
T: Clone,
F: FnMut(T) -> T,
{
fn next<T, F>(st: &mut IterateState<T, F>) -> Option<T> where
T: Clone,
F: FnMut(T) -> T,
{
let &mut (ref mut f, ref mut val, ref mut first) = st;
if *first {
*first = false;
} else if let Some(x) = val.take() {
*val = Some((*f)(x))
}
val.clone()
}
// coerce to a fn pointer
let next: fn(&mut IterateState<T,F>) -> Option<T> = next;
Unfold::new((f, Some(seed), true), next)
}
/// Creates a new iterator that endlessly repeats the element `elt`.
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn repeat<T: Clone>(elt: T) -> Repeat<T> {
Repeat{element: elt}
}
/// Functions for lexicographical ordering of sequences.
///
/// Lexicographical ordering through `<`, `<=`, `>=`, `>` requires
/// that the elements implement both `PartialEq` and `PartialOrd`.
///
/// If two sequences are equal up until the point where one ends,
/// the shorter sequence compares less.
#[unstable(feature = "core", reason = "needs review and revision")]
pub mod order {
use cmp;
use cmp::{Eq, Ord, PartialOrd, PartialEq};
use cmp::Ordering::{Equal, Less, Greater};
use option::Option;
use option::Option::{Some, None};
use super::Iterator;
/// Compare `a` and `b` for equality using `Eq`
pub fn equals<A, L, R>(mut a: L, mut b: R) -> bool where
A: Eq,
L: Iterator<Item=A>,
R: Iterator<Item=A>,
{
loop {
match (a.next(), b.next()) {
(None, None) => return true,
(None, _) | (_, None) => return false,
(Some(x), Some(y)) => if x != y { return false },
}
}
}
/// Order `a` and `b` lexicographically using `Ord`
pub fn cmp<A, L, R>(mut a: L, mut b: R) -> cmp::Ordering where
A: Ord,
L: Iterator<Item=A>,
R: Iterator<Item=A>,
{
loop {
match (a.next(), b.next()) {
(None, None) => return Equal,
(None, _ ) => return Less,
(_ , None) => return Greater,
(Some(x), Some(y)) => match x.cmp(&y) {
Equal => (),
non_eq => return non_eq,
},
}
}
}
/// Order `a` and `b` lexicographically using `PartialOrd`
pub fn partial_cmp<L: Iterator, R: Iterator>(mut a: L, mut b: R) -> Option<cmp::Ordering> where
L::Item: PartialOrd<R::Item><|fim▁hole|> loop {
match (a.next(), b.next()) {
(None, None) => return Some(Equal),
(None, _ ) => return Some(Less),
(_ , None) => return Some(Greater),
(Some(x), Some(y)) => match x.partial_cmp(&y) {
Some(Equal) => (),
non_eq => return non_eq,
},
}
}
}
/// Compare `a` and `b` for equality (Using partial equality, `PartialEq`)
pub fn eq<L: Iterator, R: Iterator>(mut a: L, mut b: R) -> bool where
L::Item: PartialEq<R::Item>,
{
loop {
match (a.next(), b.next()) {
(None, None) => return true,
(None, _) | (_, None) => return false,
(Some(x), Some(y)) => if !x.eq(&y) { return false },
}
}
}
/// Compares `a` and `b` for nonequality (Using partial equality, `PartialEq`)
pub fn ne<L: Iterator, R: Iterator>(mut a: L, mut b: R) -> bool where
L::Item: PartialEq<R::Item>,
{
loop {
match (a.next(), b.next()) {
(None, None) => return false,
(None, _) | (_, None) => return true,
(Some(x), Some(y)) => if x.ne(&y) { return true },
}
}
}
/// Returns `a` < `b` lexicographically (Using partial order, `PartialOrd`)
pub fn lt<L: Iterator, R: Iterator>(mut a: L, mut b: R) -> bool where
L::Item: PartialOrd<R::Item>,
{
loop {
match (a.next(), b.next()) {
(None, None) => return false,
(None, _ ) => return true,
(_ , None) => return false,
(Some(x), Some(y)) => if x.ne(&y) { return x.lt(&y) },
}
}
}
/// Returns `a` <= `b` lexicographically (Using partial order, `PartialOrd`)
pub fn le<L: Iterator, R: Iterator>(mut a: L, mut b: R) -> bool where
L::Item: PartialOrd<R::Item>,
{
loop {
match (a.next(), b.next()) {
(None, None) => return true,
(None, _ ) => return true,
(_ , None) => return false,
(Some(x), Some(y)) => if x.ne(&y) { return x.le(&y) },
}
}
}
/// Returns `a` > `b` lexicographically (Using partial order, `PartialOrd`)
pub fn gt<L: Iterator, R: Iterator>(mut a: L, mut b: R) -> bool where
L::Item: PartialOrd<R::Item>,
{
loop {
match (a.next(), b.next()) {
(None, None) => return false,
(None, _ ) => return false,
(_ , None) => return true,
(Some(x), Some(y)) => if x.ne(&y) { return x.gt(&y) },
}
}
}
/// Returns `a` >= `b` lexicographically (Using partial order, `PartialOrd`)
pub fn ge<L: Iterator, R: Iterator>(mut a: L, mut b: R) -> bool where
L::Item: PartialOrd<R::Item>,
{
loop {
match (a.next(), b.next()) {
(None, None) => return true,
(None, _ ) => return false,
(_ , None) => return true,
(Some(x), Some(y)) => if x.ne(&y) { return x.ge(&y) },
}
}
}
}<|fim▁end|>
|
{
|
<|file_name|>float_macros.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at<|fim▁hole|>// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![macro_escape]
#![doc(hidden)]
macro_rules! assert_approx_eq(
($a:expr, $b:expr) => ({
use num::Float;
let (a, b) = (&$a, &$b);
assert!((*a - *b).abs() < 1.0e-6,
"{} is not approximately equal to {}", *a, *b);
})
)<|fim▁end|>
|
// http://rust-lang.org/COPYRIGHT.
//
|
<|file_name|>utilities.py<|end_file_name|><|fim▁begin|>"""
Utilities for plotting various figures and animations in EEG101.
"""
<|fim▁hole|>import numpy as np
import matplotlib.pylab as plt
import collections
from scipy import signal
def dot_plot(x, labels, step=1, figsize=(12,8)):
"""
Make a 1D dot plot.
Inputs
x : 1D array containing the points to plot
labels : 1D array containing the label for each point in x
step : vertical space between two points
"""
# Get the histogram for each class
classes = np.unique(labels)
hist = [np.histogram(x[labels==c], density=True) for c in classes]
# Prepare the figure
fig, ax = plt.subplots(figsize=figsize)
for hi, h in enumerate(hist):
bin_centers = (h[1][1:] + h[1][0:-1])/2. # Get bin centers
# Format the data so that each bin has as many points as the histogram bar for that bin
x1 = []
y1 = []
for i, j in zip(np.round(h[0]).astype(int), bin_centers):
y = range(0, i, step)
y1 += y
x1 += [j]*len(y)
# Plot
ax.plot(x1, (-1)**hi*np.array(y1), 'o', markersize=10, label=classes[hi])
ax.legend(scatterpoints=1)
ax.set_xlabel('Alpha power')
ax.set_ylabel('Number of points')
ax.set_yticklabels([])
ax.set_yticks([])
ax.legend()
plt.tight_layout()
def psd_with_bands_plot(f, psd, figsize=(12,8)):
"""
Plot a static PSD.
INPUTS
f : 1D array containing frequencies of the PSD
psd : 1D array containing the power at each frequency in f
figsize : figure size
"""
bands = collections.OrderedDict()
bands[r'$\delta$'] = (0,4)
bands[r'$\theta$'] = (4,8)
bands[r'$\alpha$'] = (8,13)
bands[r'$\beta$'] = (13, 30)
bands[r'$\gamma$'] = (30, 120)
fig, ax = plt.subplots(figsize=figsize)
ax.plot(f, psd)
ax.set_xlabel('Frequency (Hz)')
ax.set_ylabel('Power (dB)')
ylim = ax.get_ylim()
for i, [bkey, bfreq] in enumerate(bands.iteritems()):
ind = (f>=bfreq[0]) & (f<=bfreq[1])
f1 = f[ind]
y1 = psd[ind]
ax.fill_between(f1, y1, ylim[0], facecolor=[(0.7, i/5., 0.7)], alpha=0.5)
ax.text(np.mean(f1), (ylim[0] + ylim[1])/1.22, bkey, fontsize=16, verticalalignment='top', horizontalalignment='center')
ax.set_xlim([min(f), max(f)])
def sinewave(A, f, phi, t):
"""
Return a sine wave with specified parameters at the given time points.
INPUTS
A : Amplitude
f : Frequency (Hz)
phi : Phase (rad)
t : time (in s)
"""
return A*np.sin(2*np.pi*f*t + phi)
def animate_signals(nb_signals, incre, fs=256, refresh_rate=30., anim_dur=10., figsize=(12,8)):
"""
Draw and update a figure in real-time representing the summation of many
sine waves, to explain the concept of Fourier decomposition.
INPUTS
nb_signals : number of signals to sum together
incre : increment, in Hz, between each of the signals
fs : sampling frequency
refresh_rate : refresh rate of the animation
anim_dur : approximate duration of the animation, in seconds
"""
# Initialize values that remain constant throughout the animation
A = 1
t = np.linspace(0, 2, fs)
offsets = np.arange(nb_signals+1).reshape((nb_signals+1,1))*(A*(nb_signals+1))
freqs = np.arange(nb_signals)*incre
# Initialize the figure
fig, ax = plt.subplots(figsize=figsize)
ax.hold(True)
plt.xlabel('Time')
ax.yaxis.set_ticks(offsets)
ax.set_yticklabels([str(f)+' Hz' for f in freqs] + ['Sum'])
ax.xaxis.set_ticks([])
# Initialize the Line2D elements for each signal
sines = np.array([sinewave(A, f, 0, t) for f in freqs])
sines = np.vstack((sines, np.sum(sines, axis=0))) + offsets
points = [ax.plot(t, x)[0] for x in sines]
# Animation refresh loop
for i in np.arange(anim_dur*refresh_rate):
# Update time
t = np.linspace(0, 2, fs) + i*fs/refresh_rate
# Update signals
sines = np.array([sinewave(A, f, 0, t) for f in freqs])
sines = np.vstack((sines, np.sum(sines, axis=0))) + offsets
# Update figure
for p, x in zip(points, sines):
p.set_ydata(x)
# Wait before starting another cycle
plt.pause(1./refresh_rate)
if __name__ == '__main__':
# 1) DISTRIBUTION OF TRAINING DATA
# Generate fake data
nb_points = 10*10
relax_data = np.random.normal(0.01, 0.01, size=(nb_points,))
focus_data = np.random.normal(0.03, 0.01, size=(nb_points,))
dot_plot(x=np.concatenate((relax_data, focus_data)),
labels=np.concatenate((np.zeros((nb_points,)), np.ones((nb_points,)))),
step=4)
# 2) PSD PLOT
# Generate fake data
f = np.arange(0, 110, 1) # one-second windows = 1-Hz bins
psd = 10*np.log10(1./f)
psd_with_bands_plot(f, psd)
# 3) FOURIER DECOMPOSITION ANIMATION
animate_signals(4, 2)<|fim▁end|>
|
# Author: Hubert Banville <[email protected]>
#
# License: TBD
|
<|file_name|>generate_label_testvectors.py<|end_file_name|><|fim▁begin|>import numpy as np
from scipy.ndimage import label
<|fim▁hole|> def bitimage(l):
return np.array([[c for c in s] for s in l]) == '1'
data = [np.ones((7, 7)),
bitimage(["1110111",
"1100011",
"1010101",
"0001000",
"1010101",
"1100011",
"1110111"]),
bitimage(["1011101",
"0001000",
"1001001",
"1111111",
"1001001",
"0001000",
"1011101"])]
strels = [np.ones((3, 3)),
np.zeros((3, 3)),
bitimage(["010", "111", "010"]),
bitimage(["101", "010", "101"]),
bitimage(["100", "010", "001"]),
bitimage(["000", "111", "000"]),
bitimage(["110", "010", "011"]),
bitimage(["110", "111", "011"])]
strels = strels + [np.flipud(s) for s in strels]
strels = strels + [np.rot90(s) for s in strels]
strels = [np.fromstring(s, dtype=int).reshape((3, 3))
for s in set(t.astype(int).tostring() for t in strels)]
inputs = np.vstack(data)
results = np.vstack([label(d, s)[0] for d in data for s in strels])
strels = np.vstack(strels)
np.savetxt(infile, inputs, fmt="%d")
np.savetxt(strelfile, strels, fmt="%d")
np.savetxt(resultfile, results, fmt="%d")
generate_test_vecs("label_inputs.txt", "label_strels.txt", "label_results.txt")<|fim▁end|>
|
def generate_test_vecs(infile, strelfile, resultfile):
"test label with different structuring element neighborhoods"
|
<|file_name|>DatabaseTest.java<|end_file_name|><|fim▁begin|>package sk.atris.netxms.confrepo.tests.service.database;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import sk.atris.netxms.confrepo.exceptions.DatabaseException;
import sk.atris.netxms.confrepo.service.database.DbConnectionManager;
import sk.atris.netxms.confrepo.service.database.DbObjectHandler;
import sk.atris.netxms.confrepo.tests.MockedDatabase;
public class DatabaseTest {
@Before
public void environmentSetup() throws Exception {
MockedDatabase.setup();
}
@After
public void environmentCleanup() throws Exception {
MockedDatabase.cleanup();
}
@Test
public void testDatabase() throws DatabaseException {
Revision object = new Revision("test", "test", 1);
// make sure database works
DbObjectHandler.getInstance().saveToDb(object);
DbObjectHandler.getInstance().removeFromDb(object);
// test shutdown
DbConnectionManager.getInstance().shutdown();
}<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>strategy_test.go<|end_file_name|><|fim▁begin|>/*
Copyright 2015 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package node
import (
"reflect"
"testing"
"k8s.io/apimachinery/pkg/fields"
"k8s.io/apimachinery/pkg/labels"
"k8s.io/apimachinery/pkg/util/diff"
apitesting "k8s.io/kubernetes/pkg/api/testing"
api "k8s.io/kubernetes/pkg/apis/core"
utilfeature "k8s.io/apiserver/pkg/util/feature"
featuregatetesting "k8s.io/component-base/featuregate/testing"
"k8s.io/kubernetes/pkg/features"
// install all api groups for testing
_ "k8s.io/kubernetes/pkg/api/testapi"
)
func TestMatchNode(t *testing.T) {
testFieldMap := map[bool][]fields.Set{
true: {
{"metadata.name": "foo"},
},
false: {
{"foo": "bar"},
},
}
for expectedResult, fieldSet := range testFieldMap {
for _, field := range fieldSet {
m := MatchNode(labels.Everything(), field.AsSelector())
_, matchesSingle := m.MatchesSingle()
if e, a := expectedResult, matchesSingle; e != a {
t.Errorf("%+v: expected %v, got %v", fieldSet, e, a)
}
}
}
}
func TestSelectableFieldLabelConversions(t *testing.T) {
apitesting.TestSelectableFieldLabelConversionsOfKind(t,
"v1",
"Node",
NodeToSelectableFields(&api.Node{}),
nil,
)
}
// helper creates a NodeNode with a set of PodCIDRs, Spec.ConfigSource, Status.Config
func makeNode(podCIDRs []string, addSpecDynamicConfig bool, addStatusDynamicConfig bool) *api.Node {
node := &api.Node{
Spec: api.NodeSpec{
PodCIDRs: podCIDRs,
},
}
if addSpecDynamicConfig {
node.Spec.ConfigSource = &api.NodeConfigSource{}
}
if addStatusDynamicConfig {
node.Status = api.NodeStatus{
Config: &api.NodeConfigStatus{},
}
}
return node
}
func TestDropFields(t *testing.T) {
testCases := []struct {
name string
node *api.Node
oldNode *api.Node
compareNode *api.Node
enableDualStack bool
enableNodeDynamicConfig bool
}{
{
name: "nil pod cidrs",
enableDualStack: false,
node: makeNode(nil, false, false),
oldNode: nil,
compareNode: makeNode(nil, false, false),
},
{
name: "empty pod ips",
enableDualStack: false,
node: makeNode([]string{}, false, false),
oldNode: nil,
compareNode: makeNode([]string{}, false, false),
},
{
name: "single family ipv6",
enableDualStack: false,
node: makeNode([]string{"2000::/10"}, false, false),
compareNode: makeNode([]string{"2000::/10"}, false, false),
},
{
name: "single family ipv4",
enableDualStack: false,
node: makeNode([]string{"10.0.0.0/8"}, false, false),
compareNode: makeNode([]string{"10.0.0.0/8"}, false, false),
},
{
name: "dualstack 4-6",
enableDualStack: true,
node: makeNode([]string{"10.0.0.0/8", "2000::/10"}, false, false),
compareNode: makeNode([]string{"10.0.0.0/8", "2000::/10"}, false, false),
},
{
name: "dualstack 6-4",
enableDualStack: true,
node: makeNode([]string{"2000::/10", "10.0.0.0/8"}, false, false),
compareNode: makeNode([]string{"2000::/10", "10.0.0.0/8"}, false, false),
},
{
name: "not dualstack 6-4=>4only",
enableDualStack: false,
node: makeNode([]string{"2000::/10", "10.0.0.0/8"}, false, false),
oldNode: nil,
compareNode: makeNode([]string{"2000::/10"}, false, false),
},
{
name: "not dualstack 6-4=>as is (used in old)",
enableDualStack: false,
node: makeNode([]string{"2000::/10", "10.0.0.0/8"}, false, false),
oldNode: makeNode([]string{"2000::/10", "10.0.0.0/8"}, false, false),
compareNode: makeNode([]string{"2000::/10", "10.0.0.0/8"}, false, false),
},
{
name: "not dualstack 6-4=>6only",
enableDualStack: false,
node: makeNode([]string{"2000::/10", "10.0.0.0/8"}, false, false),
oldNode: nil,
compareNode: makeNode([]string{"2000::/10"}, false, false),
},
{
name: "not dualstack 6-4=>as is (used in old)",
enableDualStack: false,
node: makeNode([]string{"2000::/10", "10.0.0.0/8"}, false, false),
oldNode: makeNode([]string{"2000::/10", "10.0.0.0/8"}, false, false),
compareNode: makeNode([]string{"2000::/10", "10.0.0.0/8"}, false, false),
},
{
name: "new with no Spec.ConfigSource and no Status.Config , enableNodeDynamicConfig disabled",
enableDualStack: false,
enableNodeDynamicConfig: false,
node: makeNode(nil, false, false),
oldNode: nil,
compareNode: makeNode(nil, false, false),
},
{
name: "new with Spec.ConfigSource and no Status.Config, enableNodeDynamicConfig disabled",
enableDualStack: false,
enableNodeDynamicConfig: false,
node: makeNode(nil, true, false),
oldNode: nil,
compareNode: makeNode(nil, false, false),
},
{
name: "new with Spec.ConfigSource and Status.Config, enableNodeDynamicConfig disabled",
enableDualStack: false,
enableNodeDynamicConfig: false,
node: makeNode(nil, true, true),
oldNode: nil,
compareNode: makeNode(nil, false, false),
},
{
name: "update with Spec.ConfigSource and Status.Config (old has none), enableNodeDynamicConfig disabled",
enableDualStack: false,
enableNodeDynamicConfig: false,
node: makeNode(nil, true, true),
oldNode: makeNode(nil, false, false),
compareNode: makeNode(nil, false, true),
},
{
name: "update with Spec.ConfigSource and Status.Config (old has them), enableNodeDynamicConfig disabled",
enableDualStack: false,
enableNodeDynamicConfig: false,
node: makeNode(nil, true, true),
oldNode: makeNode(nil, true, true),
compareNode: makeNode(nil, true, true),
},
{
name: "update with Spec.ConfigSource and Status.Config (old has Status.Config), enableNodeDynamicConfig disabled",
enableDualStack: false,<|fim▁hole|> compareNode: makeNode(nil, false, true),
},
{
name: "new with Spec.ConfigSource and Status.Config, enableNodeDynamicConfig enabled",
enableDualStack: false,
enableNodeDynamicConfig: true,
node: makeNode(nil, true, true),
oldNode: nil,
compareNode: makeNode(nil, true, true),
},
}
for _, tc := range testCases {
func() {
defer featuregatetesting.SetFeatureGateDuringTest(t, utilfeature.DefaultFeatureGate, features.IPv6DualStack, tc.enableDualStack)()
defer featuregatetesting.SetFeatureGateDuringTest(t, utilfeature.DefaultFeatureGate, features.DynamicKubeletConfig, tc.enableNodeDynamicConfig)()
dropDisabledFields(tc.node, tc.oldNode)
old := tc.oldNode.DeepCopy()
// old node should never be changed
if !reflect.DeepEqual(tc.oldNode, old) {
t.Errorf("%v: old node changed: %v", tc.name, diff.ObjectReflectDiff(tc.oldNode, old))
}
if !reflect.DeepEqual(tc.node, tc.compareNode) {
t.Errorf("%v: unexpected node spec: %v", tc.name, diff.ObjectReflectDiff(tc.node, tc.compareNode))
}
}()
}
}<|fim▁end|>
|
enableNodeDynamicConfig: false,
node: makeNode(nil, true, true),
oldNode: makeNode(nil, false, true),
|
<|file_name|>animator.py<|end_file_name|><|fim▁begin|>"""
Simple utility code for animations.
"""
# Author: Prabhu Ramachandran <prabhu at aerodotiitbdotacdotin>
# Copyright (c) 2009, Enthought, Inc.
# License: BSD Style.
import types
from functools import wraps
try:
from decorator import decorator
HAS_DECORATOR = True
except ImportError:
HAS_DECORATOR = False
<|fim▁hole|>from traitsui.api import View, Group, Item
###############################################################################
# `Animator` class.
###############################################################################
class Animator(HasTraits):
""" Convenience class to manage a timer and present a convenient
UI. This is based on the code in `tvtk.tools.visual`.
Here is a simple example of using this class::
>>> from mayavi import mlab
>>> def anim():
... f = mlab.gcf()
... while 1:
... f.scene.camera.azimuth(10)
... f.scene.render()
... yield
...
>>> anim = anim()
>>> t = Animator(500, anim.next)
>>> t.edit_traits()
This makes it very easy to animate your visualizations and control
it from a simple UI.
**Notes**
If you want to modify the data plotted by an `mlab` function call,
please refer to the section on: :ref:`mlab-animating-data`
"""
########################################
# Traits.
start = Button('Start Animation')
stop = Button('Stop Animation')
delay = Range(10, 100000, 500,
desc='frequency with which timer is called')
# The internal timer we manage.
timer = Instance(Timer)
######################################################################
# User interface view
traits_view = View(Group(Item('start'),
Item('stop'),
show_labels=False),
Item('_'),
Item(name='delay'),
title='Animation Controller',
buttons=['OK'])
######################################################################
# Initialize object
def __init__(self, millisec, callable, *args, **kwargs):
"""Constructor.
**Parameters**
:millisec: int specifying the delay in milliseconds
between calls to the callable.
:callable: callable function to call after the specified
delay.
:\*args: optional arguments to be passed to the callable.
:\*\*kwargs: optional keyword arguments to be passed to the callable.
"""
HasTraits.__init__(self)
self.delay = millisec
self.ui = None
self.timer = Timer(millisec, callable, *args, **kwargs)
######################################################################
# `Animator` protocol.
######################################################################
def show(self):
"""Show the animator UI.
"""
self.ui = self.edit_traits()
def close(self):
"""Close the animator UI.
"""
if self.ui is not None:
self.ui.dispose()
######################################################################
# Non-public methods, Event handlers
def _start_fired(self):
self.timer.Start(self.delay)
def _stop_fired(self):
self.timer.Stop()
def _delay_changed(self, value):
t = self.timer
if t is None:
return
if t.IsRunning():
t.Stop()
t.Start(value)
###############################################################################
# Decorators.
def animate(func=None, delay=500, ui=True):
""" A convenient decorator to animate a generator that performs an
animation. The `delay` parameter specifies the delay (in
milliseconds) between calls to the decorated function. If `ui` is
True, then a simple UI for the animator is also popped up. The
decorated function will return the `Animator` instance used and a
user may call its `Stop` method to stop the animation.
If an ordinary function is decorated a `TypeError` will be raised.
**Parameters**
:delay: int specifying the time interval in milliseconds between
calls to the function.
:ui: bool specifying if a UI controlling the animation is to be
provided.
**Returns**
The decorated function returns an `Animator` instance.
**Examples**
Here is the example provided in the Animator class documentation::
>>> from mayavi import mlab
>>> @mlab.animate
... def anim():
... f = mlab.gcf()
... while 1:
... f.scene.camera.azimuth(10)
... f.scene.render()
... yield
...
>>> a = anim() # Starts the animation.
For more specialized use you can pass arguments to the decorator::
>>> from mayavi import mlab
>>> @mlab.animate(delay=500, ui=False)
... def anim():
... f = mlab.gcf()
... while 1:
... f.scene.camera.azimuth(10)
... f.scene.render()
... yield
...
>>> a = anim() # Starts the animation without a UI.
**Notes**
If you want to modify the data plotted by an `mlab` function call,
please refer to the section on: :ref:`mlab-animating-data`.
"""
class Wrapper(object):
# The wrapper which calls the decorated function.
def __init__(self, function):
self.func = function
self.ui = ui
self.delay = delay
def __call__(self, *args, **kw):
if isinstance(self.func, types.GeneratorType):
f = self.func
else:
f = self.func(*args, **kw)
if isinstance(f, types.GeneratorType):
a = Animator(self.delay, f.next)
if self.ui:
a.show()
return a
else:
msg = 'The function "%s" must be a generator '\
'(use yield)!' % (self.func.__name__)
raise TypeError(msg)
def decorator_call(self, func, *args, **kw):
return self(*args, **kw)
def _wrapper(function):
# Needed to create the Wrapper in the right scope.
if HAS_DECORATOR:
# The decorator calls a callable with (func, *args, **kw) signature
return decorator(Wrapper(function).decorator_call, function)
else:
return wraps(function)(Wrapper(function))
if func is None:
return _wrapper
else:
return _wrapper(func)<|fim▁end|>
|
from pyface.timer.api import Timer
from traits.api import HasTraits, Button, Instance, Range
|
<|file_name|>test_new_tests.py<|end_file_name|><|fim▁begin|>from cStringIO import StringIO
import unittest
import unittest2
from unittest2.test.support import resultFactory
class TestUnittest(unittest2.TestCase):
def assertIsSubclass(self, actual, klass):
self.assertTrue(issubclass(actual, klass), "Not a subclass.")
def testInheritance(self):
self.assertIsSubclass(unittest2.TestCase, unittest.TestCase)
self.assertIsSubclass(unittest2.TestResult, unittest.TestResult)
self.assertIsSubclass(unittest2.TestSuite, unittest.TestSuite)
self.assertIsSubclass(
unittest2.TextTestRunner,
unittest.TextTestRunner)
self.assertIsSubclass(unittest2.TestLoader, unittest.TestLoader)
self.assertIsSubclass(unittest2.TextTestResult, unittest.TestResult)
def test_new_runner_old_case(self):
runner = unittest2.TextTestRunner(resultclass=resultFactory,
stream=StringIO())
class Test(unittest.TestCase):
def testOne(self):
pass
suite = unittest2.TestSuite((Test('testOne'),))
result = runner.run(suite)
self.assertEqual(result.testsRun, 1)
self.assertEqual(len(result.errors), 0)
def test_old_runner_new_case(self):
runner = unittest.TextTestRunner(stream=StringIO())
class Test(unittest2.TestCase):
def testOne(self):
self.assertDictEqual({}, {})
suite = unittest.TestSuite((Test('testOne'),))
result = runner.run(suite)<|fim▁hole|>
if __name__ == '__main__':
unittest2.main()<|fim▁end|>
|
self.assertEqual(result.testsRun, 1)
self.assertEqual(len(result.errors), 0)
|
<|file_name|>factory_method.cpp<|end_file_name|><|fim▁begin|>#include <iostream>
#include <string>
class Shape {
public :
virtual void draw (void) = 0;
static Shape *Create (std::string type);
};
class circle : public Shape {
public :
void draw(void){
std::cout << "circle" << std::endl;
}
};
class square : public Shape {
public :
void draw(void){
std::cout << "square" << std::endl;
}
};
Shape * Shape::Create (std::string type){
if(type == "circle"){
std::cout << "creating circle" << std::endl;
return new circle();
}
if(type == "square") {
std::cout << "creating circle" << std::endl;
return new square();<|fim▁hole|> }
return NULL;
};
int main (){
Shape *cir = Shape::Create("circle");
if ( cir != NULL ) cir->draw();
return 0;
}<|fim▁end|>
| |
<|file_name|>health_test.py<|end_file_name|><|fim▁begin|>import mox
import time
import unittest
from zoom.agent.predicate.health import PredicateHealth
from zoom.common.types import PlatformType
class PredicateHealthTest(unittest.TestCase):
def setUp(self):
self.mox = mox.Mox()
self.interval = 0.1
def tearDown(self):
self.mox.UnsetStubs()
def test_start(self):
pred = PredicateHealth("test", "echo", self.interval, PlatformType.LINUX)
self.mox.StubOutWithMock(pred, "_run")
pred._run().MultipleTimes()<|fim▁hole|> self.mox.ReplayAll()
print "This test should complete quickly"
pred.start()
pred.start() # should noop
pred.start() # should noop
time.sleep(0.25) # give other thread time to check
pred.stop()
self.mox.VerifyAll()
def test_stop(self):
pred = PredicateHealth("test", "echo", self.interval, PlatformType.LINUX)
self.mox.StubOutWithMock(pred, "_run")
pred._run().MultipleTimes()
self.mox.ReplayAll()
pred.start()
time.sleep(0.25) # give other thread time to check
pred.stop()
pred.stop()
pred.stop()
self.mox.VerifyAll()<|fim▁end|>
| |
<|file_name|>multi_profile_credentials_filter_unittest.cc<|end_file_name|><|fim▁begin|>// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/password_manager/multi_profile_credentials_filter.h"
#include <string>
#include "base/bind.h"
#include "base/callback.h"
#include "base/callback_helpers.h"
#include "chrome/browser/browser_process.h"
#include "chrome/browser/profiles/profile_attributes_entry.h"
#include "chrome/browser/profiles/profile_attributes_storage.h"
#include "chrome/browser/signin/chrome_signin_client_factory.h"
#include "chrome/browser/signin/chrome_signin_client_test_util.h"
#include "chrome/browser/signin/dice_web_signin_interceptor.h"
#include "chrome/browser/signin/identity_test_environment_profile_adaptor.h"
#include "chrome/browser/ui/tabs/tab_strip_model.h"
#include "chrome/common/pref_names.h"
#include "chrome/test/base/browser_with_test_window_test.h"
#include "chrome/test/base/testing_profile.h"
#include "chrome/test/base/testing_profile_manager.h"
#include "components/password_manager/core/browser/password_form.h"
#include "components/password_manager/core/browser/stub_password_manager_client.h"
#include "components/password_manager/core/browser/sync_username_test_base.h"
#include "components/signin/public/identity_manager/account_info.h"
#include "components/signin/public/identity_manager/identity_test_environment.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "url/gurl.h"
namespace {
// Dummy DiceWebSigninInterceptor::Delegate that does nothing.
class TestDiceWebSigninInterceptorDelegate
: public DiceWebSigninInterceptor::Delegate {
public:
std::unique_ptr<ScopedDiceWebSigninInterceptionBubbleHandle>
ShowSigninInterceptionBubble(
content::WebContents* web_contents,
const BubbleParameters& bubble_parameters,
base::OnceCallback<void(SigninInterceptionResult)> callback) override {
return nullptr;
}
void ShowProfileCustomizationBubble(Browser* browser) override {}
};
class TestPasswordManagerClient
: public password_manager::StubPasswordManagerClient {
public:
// PasswordManagerClient:
signin::IdentityManager* GetIdentityManager() override {
return identity_manager_;
}
void set_identity_manager(signin::IdentityManager* manager) {
identity_manager_ = manager;
}
private:
signin::IdentityManager* identity_manager_ = nullptr;
};
} // namespace
class MultiProfileCredentialsFilterTest : public BrowserWithTestWindowTest {
public:
MultiProfileCredentialsFilterTest()
: sync_filter_(&test_password_manager_client_, GetSyncServiceCallback()) {
}
password_manager::SyncCredentialsFilter::SyncServiceFactoryFunction
GetSyncServiceCallback() {
return base::BindRepeating(&MultiProfileCredentialsFilterTest::sync_service,
base::Unretained(this));
}
signin::IdentityTestEnvironment* identity_test_env() {
return identity_test_env_profile_adaptor_->identity_test_env();
}
password_manager::PasswordManagerClient* password_manager_client() {
return &test_password_manager_client_;
}
DiceWebSigninInterceptor* dice_web_signin_interceptor() {
return dice_web_signin_interceptor_.get();
}
// Creates a profile, a tab and an account so that signing in this account
// will be intercepted in the tab.
AccountInfo SetupInterception() {
std::string email = "[email protected]";
AccountInfo account_info = identity_test_env()->MakeAccountAvailable(email);
Profile* profile_2 = profile_manager()->CreateTestingProfile("Profile 2");
ProfileAttributesEntry* entry =
profile_manager()
->profile_attributes_storage()
->GetProfileAttributesWithPath(profile_2->GetPath());
entry->SetAuthInfo(account_info.gaia, base::UTF8ToUTF16(email),
/*is_consented_primary_account=*/false);
AddTab(browser(), GURL("http://foo/1"));
return account_info;
}
// BrowserWithTestWindowTest:
void SetUp() override {
BrowserWithTestWindowTest::SetUp();
identity_test_env_profile_adaptor_ =
std::make_unique<IdentityTestEnvironmentProfileAdaptor>(profile());
identity_test_env()->SetTestURLLoaderFactory(&test_url_loader_factory_);
dice_web_signin_interceptor_ = std::make_unique<DiceWebSigninInterceptor>(
profile(), std::make_unique<TestDiceWebSigninInterceptorDelegate>());
test_password_manager_client_.set_identity_manager(
identity_test_env()->identity_manager());
// If features::kEnablePasswordsAccountStorage is enabled, then the browser
// never asks to save the primary account's password. So fake-signin an
// arbitrary primary account here, so that any follow-up signs to the Gaia
// page aren't considered primary account sign-ins and hence trigger the
// password save prompt.
identity_test_env()->MakePrimaryAccountAvailable(
"[email protected]", signin::ConsentLevel::kSync);
}
void TearDown() override {
dice_web_signin_interceptor_->Shutdown();
identity_test_env_profile_adaptor_.reset();
BrowserWithTestWindowTest::TearDown();
}
TestingProfile::TestingFactories GetTestingFactories() override {
TestingProfile::TestingFactories factories =
IdentityTestEnvironmentProfileAdaptor::
GetIdentityTestEnvironmentFactories();
factories.push_back(
{ChromeSigninClientFactory::GetInstance(),
base::BindRepeating(&BuildChromeSigninClientWithURLLoader,
&test_url_loader_factory_)});
return factories;
}
protected:
const syncer::SyncService* sync_service() { return &sync_service_; }
network::TestURLLoaderFactory test_url_loader_factory_;
TestPasswordManagerClient test_password_manager_client_;
std::unique_ptr<IdentityTestEnvironmentProfileAdaptor>
identity_test_env_profile_adaptor_;
syncer::TestSyncService sync_service_;
password_manager::SyncCredentialsFilter sync_filter_;
std::unique_ptr<DiceWebSigninInterceptor> dice_web_signin_interceptor_;
};
// Checks that MultiProfileCredentialsFilter returns false when
// SyncCredentialsFilter returns false.
TEST_F(MultiProfileCredentialsFilterTest, SyncCredentialsFilter) {
password_manager::PasswordForm form =
password_manager::SyncUsernameTestBase::SimpleGaiaForm(
"[email protected]");
form.form_data.is_gaia_with_skip_save_password_form = true;
ASSERT_FALSE(sync_filter_.ShouldSave(form));
MultiProfileCredentialsFilter multi_profile_filter(
password_manager_client(), GetSyncServiceCallback(),
/*dice_web_signin_interceptor=*/nullptr);
EXPECT_FALSE(multi_profile_filter.ShouldSave(form));
}
// Returns true when the interceptor is nullptr.
TEST_F(MultiProfileCredentialsFilterTest, NullInterceptor) {
password_manager::PasswordForm form =
password_manager::SyncUsernameTestBase::SimpleGaiaForm(
"[email protected]");
ASSERT_TRUE(sync_filter_.ShouldSave(form));
MultiProfileCredentialsFilter multi_profile_filter(
password_manager_client(), GetSyncServiceCallback(),
/*dice_web_signin_interceptor=*/nullptr);
EXPECT_TRUE(multi_profile_filter.ShouldSave(form));
}
// Returns true for non-gaia forms.
TEST_F(MultiProfileCredentialsFilterTest, NonGaia) {
password_manager::PasswordForm form =
password_manager::SyncUsernameTestBase::SimpleNonGaiaForm(
"[email protected]");
ASSERT_TRUE(sync_filter_.ShouldSave(form));
MultiProfileCredentialsFilter multi_profile_filter(
password_manager_client(), GetSyncServiceCallback(),
dice_web_signin_interceptor());
EXPECT_TRUE(multi_profile_filter.ShouldSave(form));
}
// Returns false when interception is already in progress.
TEST_F(MultiProfileCredentialsFilterTest, InterceptInProgress) {
password_manager::PasswordForm form =
password_manager::SyncUsernameTestBase::SimpleGaiaForm(
"[email protected]");
ASSERT_TRUE(sync_filter_.ShouldSave(form));
// Start an interception for the sign-in.
AccountInfo account_info = SetupInterception();
dice_web_signin_interceptor_->MaybeInterceptWebSignin(
browser()->tab_strip_model()->GetActiveWebContents(),
account_info.account_id, /*is_new_account=*/true,
/*is_sync_signin=*/false);
ASSERT_TRUE(dice_web_signin_interceptor_->is_interception_in_progress());
MultiProfileCredentialsFilter multi_profile_filter(
password_manager_client(), GetSyncServiceCallback(),
dice_web_signin_interceptor());
EXPECT_FALSE(multi_profile_filter.ShouldSave(form));
}
// Returns false when the signin is not in progress yet, but the signin will be
// intercepted.
TEST_F(MultiProfileCredentialsFilterTest, SigninIntercepted) {
const char kFormEmail[] = "[email protected]";
password_manager::PasswordForm form =
password_manager::SyncUsernameTestBase::SimpleGaiaForm(kFormEmail);
ASSERT_TRUE(sync_filter_.ShouldSave(form));
// Setup the account for interception, but do not intercept.
AccountInfo account_info = SetupInterception();
ASSERT_FALSE(dice_web_signin_interceptor_->is_interception_in_progress());
ASSERT_EQ(dice_web_signin_interceptor_->GetHeuristicOutcome(
/*is_new_account=*/true, /*is_sync_signin=*/false,
account_info.email),
SigninInterceptionHeuristicOutcome::kInterceptProfileSwitch);
MultiProfileCredentialsFilter multi_profile_filter(
password_manager_client(), GetSyncServiceCallback(),
dice_web_signin_interceptor());
EXPECT_FALSE(multi_profile_filter.ShouldSave(form));
}
// Returns false when the outcome of the interception is unknown.
TEST_F(MultiProfileCredentialsFilterTest, SigninInterceptionUnknown) {
const char kFormEmail[] = "[email protected]";
password_manager::PasswordForm form =
password_manager::SyncUsernameTestBase::SimpleGaiaForm(kFormEmail);
ASSERT_TRUE(sync_filter_.ShouldSave(form));
// Add extra Gaia account with incomplete info, so that interception outcome
// is unknown.
std::string dummy_email = "[email protected]";
AccountInfo account_info =
identity_test_env()->MakeAccountAvailable(dummy_email);
ASSERT_FALSE(dice_web_signin_interceptor_->is_interception_in_progress());
ASSERT_FALSE(dice_web_signin_interceptor_->GetHeuristicOutcome(<|fim▁hole|> password_manager_client(), GetSyncServiceCallback(),
dice_web_signin_interceptor());
EXPECT_FALSE(multi_profile_filter.ShouldSave(form));
}
// Returns true when the signin is not intercepted.
TEST_F(MultiProfileCredentialsFilterTest, SigninNotIntercepted) {
// Disallow profile creation to prevent the intercept.
g_browser_process->local_state()->SetBoolean(prefs::kBrowserAddPersonEnabled,
false);
password_manager::PasswordForm form =
password_manager::SyncUsernameTestBase::SimpleGaiaForm(
"[email protected]");
ASSERT_TRUE(sync_filter_.ShouldSave(form));
// Not interception, credentials should be saved.
ASSERT_FALSE(dice_web_signin_interceptor_->is_interception_in_progress());
MultiProfileCredentialsFilter multi_profile_filter(
password_manager_client(), GetSyncServiceCallback(),
dice_web_signin_interceptor());
EXPECT_TRUE(multi_profile_filter.ShouldSave(form));
}<|fim▁end|>
|
/*is_new_account=*/true, /*is_sync_signin=*/false, kFormEmail));
MultiProfileCredentialsFilter multi_profile_filter(
|
<|file_name|>UAsyncIO.hh<|end_file_name|><|fim▁begin|>/******************************************************************************
*******************************************************************************
*******************************************************************************
libferris
Copyright (C) 2001 Ben Martin
libferris is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libferris is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libferris. If not, see <http://www.gnu.org/licenses/>.
For more details see the COPYING file in the root directory of this
distribution.
$Id: UAsyncIO.hh,v 1.2 2010/09/24 21:31:06 ben Exp $
*******************************************************************************
*******************************************************************************
******************************************************************************/
#ifndef _ALREADY_INCLUDED_FERRIS_UASYNCIO_H_
#define _ALREADY_INCLUDED_FERRIS_UASYNCIO_H_
#include <Ferris/HiddenSymbolSupport.hh>
#include <Ferris/Ferris.hh>
#include <Ferris/Runner.hh>
#include <Ferris/AsyncIO.hh>
namespace Ferris
{
template <class StreamClass = fh_stringstream >
class FERRISEXP_API GTK_StreamCollector
:
public StreamCollector< StreamClass >
{
typedef GTK_StreamCollector< StreamClass > _Self;
typedef StreamCollector< StreamClass > _Base;
GtkProgressBar* w_progress;
public:
GTK_StreamCollector( GtkProgressBar* w, StreamClass oss = StreamClass() )
:
_Base( oss ),
w_progress( w )
{}
virtual fh_istream io_cb( fh_istream iss )
{
iss = _Base::io_cb( iss );
if( this->m_totalsz > 0 )
{
double d = this->m_donesz;
d /= this->m_totalsz;
gtk_progress_set_percentage( GTK_PROGRESS(w_progress), d );
}
if( this->w_progress )
{
fh_stringstream ss;
ss << this->m_donesz << " / " << this->m_totalsz;
gtk_progress_bar_set_text( GTK_PROGRESS_BAR(w_progress), tostr(ss).c_str() );
}
return iss;
}
};
typedef GTK_StreamCollector< fh_stringstream > GTK_StringStreamCollector;
FERRIS_SMARTPTR( GTK_StringStreamCollector, fh_gtk_sstreamcol );
<|fim▁hole|> FERRIS_SMARTPTR( GTK_FileStreamCollector, fh_gtk_fstreamcol );
namespace Factory
{
FERRISEXP_API fh_sstreamcol MakeGTKStringStreamCol( GtkProgressBar* w );
FERRISEXP_API fh_fstreamcol MakeGTKFileStreamCol(
GtkProgressBar* w,
const std::string& s,
std::ios_base::openmode m = std::ios_base::out );
};
};
#endif<|fim▁end|>
|
typedef GTK_StreamCollector< fh_fstream > GTK_FileStreamCollector;
|
<|file_name|>SVG.js<|end_file_name|><|fim▁begin|>/*
* L.SVG renders vector layers with SVG. All SVG-specific code goes here.
*/
L.SVG = L.Renderer.extend({
_initContainer: function () {
this._container = L.SVG.create('svg');
// makes it possible to click through svg root; we'll reset it back in individual paths
this._container.setAttribute('pointer-events', 'none');
},
_update: function () {
if (this._map._animatingZoom && this._bounds) { return; }
L.Renderer.prototype._update.call(this);
var b = this._bounds,
size = b.getSize(),
container = this._container;
L.DomUtil.setPosition(container, b.min);
// set size of svg-container if changed
if (!this._svgSize || !this._svgSize.equals(size)) {
this._svgSize = size;
container.setAttribute('width', size.x);
container.setAttribute('height', size.y);
}
// movement: update container viewBox so that we don't have to change coordinates of individual layers
L.DomUtil.setPosition(container, b.min);
container.setAttribute('viewBox', [b.min.x, b.min.y, size.x, size.y].join(' '));
},
// methods below are called by vector layers implementations
_initPath: function (layer) {
var path = layer._path = L.SVG.create('path');
if (layer.options.className) {
L.DomUtil.addClass(path, layer.options.className);
}
if (layer.options.interactive) {
L.DomUtil.addClass(path, 'leaflet-interactive');
}
this._updateStyle(layer);
},
_addPath: function (layer) {
this._container.appendChild(layer._path);
layer.addInteractiveTarget(layer._path);
},
_removePath: function (layer) {<|fim▁hole|> },
_updatePath: function (layer) {
layer._project();
layer._update();
},
_updateStyle: function (layer) {
var path = layer._path,
options = layer.options;
if (!path) { return; }
if (options.stroke) {
path.setAttribute('stroke', options.color);
path.setAttribute('stroke-opacity', options.opacity);
path.setAttribute('stroke-width', options.weight);
path.setAttribute('stroke-linecap', options.lineCap);
path.setAttribute('stroke-linejoin', options.lineJoin);
if (options.dashArray) {
path.setAttribute('stroke-dasharray', options.dashArray);
} else {
path.removeAttribute('stroke-dasharray');
}
if (options.dashOffset) {
path.setAttribute('stroke-dashoffset', options.dashOffset);
} else {
path.removeAttribute('stroke-dashoffset');
}
} else {
path.setAttribute('stroke', 'none');
}
if (options.fill) {
path.setAttribute('fill', options.fillColor || options.color);
path.setAttribute('fill-opacity', options.fillOpacity);
path.setAttribute('fill-rule', options.fillRule || 'evenodd');
} else {
path.setAttribute('fill', 'none');
}
path.setAttribute('pointer-events', options.pointerEvents || (options.interactive ? 'visiblePainted' : 'none'));
},
_updatePoly: function (layer, closed) {
this._setPath(layer, L.SVG.pointsToPath(layer._parts, closed));
},
_updateCircle: function (layer) {
var p = layer._point,
r = layer._radius,
r2 = layer._radiusY || r,
arc = 'a' + r + ',' + r2 + ' 0 1,0 ';
// drawing a circle with two half-arcs
var d = layer._empty() ? 'M0 0' :
'M' + (p.x - r) + ',' + p.y +
arc + (r * 2) + ',0 ' +
arc + (-r * 2) + ',0 ';
this._setPath(layer, d);
},
_setPath: function (layer, path) {
layer._path.setAttribute('d', path);
},
// SVG does not have the concept of zIndex so we resort to changing the DOM order of elements
_bringToFront: function (layer) {
L.DomUtil.toFront(layer._path);
},
_bringToBack: function (layer) {
L.DomUtil.toBack(layer._path);
}
});
L.extend(L.SVG, {
create: function (name) {
return document.createElementNS('http://www.w3.org/2000/svg', name);
},
// generates SVG path string for multiple rings, with each ring turning into "M..L..L.." instructions
pointsToPath: function (rings, closed) {
var str = '',
i, j, len, len2, points, p;
for (i = 0, len = rings.length; i < len; i++) {
points = rings[i];
for (j = 0, len2 = points.length; j < len2; j++) {
p = points[j];
str += (j ? 'L' : 'M') + p.x + ' ' + p.y;
}
// closes the ring for polygons; "x" is VML syntax
str += closed ? (L.Browser.svg ? 'z' : 'x') : '';
}
// SVG complains about empty path strings
return str || 'M0 0';
}
});
L.Browser.svg = !!(document.createElementNS && L.SVG.create('svg').createSVGRect);
L.svg = function (options) {
return L.Browser.svg || L.Browser.vml ? new L.SVG(options) : null;
};<|fim▁end|>
|
L.DomUtil.remove(layer._path);
layer.removeInteractiveTarget(layer._path);
|
<|file_name|>fraunhofer_orientdb.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""Transport functions for `Fraunhofer's OrientDB <http://graphstore.scai.fraunhofer.de>`_.
`Fraunhofer <https://www.scai.fraunhofer.de/en/business-research-areas/bioinformatics.html>`_ hosts
an instance of `OrientDB <https://orientdb.com/>`_ that contains BEL in a schema similar to
:mod:`pybel.io.umbrella_nodelink`. However, they include custom relations that do not come
from a controlled vocabulary, and have not made the schema, ETL scripts, or documentation available.
Unlike BioDati and BEL Commons, the Fraunhofer OrientDB does not allow for uploads, so only
a single function :func:`pybel.from_fraunhofer_orientdb` is provided by PyBEL.
"""
import logging
from typing import Any, Iterable, Mapping, Optional
from urllib.parse import quote_plus
import requests
from pyparsing import ParseException
from .. import constants as pc
from ..parser import BELParser
from ..struct import BELGraph
__all__ = [
"from_fraunhofer_orientdb",
]
logger = logging.getLogger(__name__)
def from_fraunhofer_orientdb( # noqa:S107
database: str = "covid",
user: str = "covid_user",
password: str = "covid",
query: Optional[str] = None,
) -> BELGraph:
"""Get a BEL graph from the Fraunhofer OrientDB.
:param database: The OrientDB database to connect to
:param user: The user to connect to OrientDB
:param password: The password to connect to OrientDB
:param query: The query to run. Defaults to the URL encoded version of ``select from E``,
where ``E`` is all edges in the OrientDB edge database. Likely does not need to be changed,
except in the case of selecting specific subsets of edges. Make sure you URL encode it
properly, because OrientDB's RESTful API puts it in the URL's path.
By default, this function connects to the ``covid`` database, that corresponds to the
COVID-19 Knowledge Graph [0]_. If other databases in the Fraunhofer OrientDB are
published and demo username/password combinations are given, the following table will
be updated.
+----------+------------+----------+
| Database | Username | Password |
+==========+============+==========+
| covid | covid_user | covid |
+----------+------------+----------+
The ``covid`` database can be downloaded and converted to a BEL graph like this:
.. code-block:: python
import pybel
graph = pybel.from_fraunhofer_orientdb(
database='covid',
user='covid_user',
password='covid',
)
graph.summarize()
However, because the source BEL scripts for the COVID-19 Knowledge Graph are available on
`GitHub <https://github.com/covid19kg/covid19kg>`_ and the authors pre-enabled it for PyBEL, it can
be downloaded with ``pip install git+https://github.com/covid19kg/covid19kg.git`` and used
with the following python code:
.. code-block:: python
import covid19kg
graph = covid19kg.get_graph()
graph.summarize()
.. warning::
It was initially planned to handle some of the non-standard relationships listed in the
Fraunhofer OrientDB's `schema <http://graphstore.scai.fraunhofer.de/studio/index.html#/database/covid/schema>`_
in their OrientDB Studio instance, but none of them actually appear in the only network that is accessible.
If this changes, please leave an issue at https://github.com/pybel/pybel/issues so it can be addressed.
.. [0] Domingo-Fernández, D., *et al.* (2020). `COVID-19 Knowledge Graph: a computable, multi-modal,
cause-and-effect knowledge model of COVID-19 pathophysiology
<https://doi.org/10.1101/2020.04.14.040667>`_. *bioRxiv* 2020.04.14.040667.
"""
graph = BELGraph(name="Fraunhofer OrientDB: {}".format(database))
parser = BELParser(graph, skip_validation=True)
results = _request_graphstore(database, user, password, select_query_template=query)
for result in results:
_parse_result(parser, result)
return graph
def _parse_result(parser: BELParser, result: Mapping[str, Any]) -> None:
citation_db, citation_id = pc.CITATION_TYPE_PUBMED, result.get("pmid")
if citation_id is None:
citation_db, citation_id = pc.CITATION_TYPE_PMC, result.get("pmc")
if citation_id is None:
if "citation" in result:
logger.warning(
"incorrect citation information for %s: %s",
result["@rid"],
result["citation"],
)
else:
logger.debug("no citation information for %s", result["@rid"])
return
parser.control_parser.clear()
parser.control_parser.citation_db = citation_db
parser.control_parser.citation_db_id = citation_id
parser.control_parser.evidence = result["evidence"]
parser.control_parser.annotations.update(result["annotation"])
source = result["in"]["bel"]
relation = result["@class"]
relation = RELATION_MAP.get(relation, relation)
target = result["out"]["bel"]
statement = " ".join([source, relation, target])
try:
parser.parseString(statement)
except ParseException:
logger.warning("could not parse %s", statement)
RELATION_MAP = {
"causes_no_change": pc.CAUSES_NO_CHANGE,
"positive_correlation": pc.POSITIVE_CORRELATION,
"negative_correlation": pc.NEGATIVE_CORRELATION,
"is_a": pc.IS_A,
"has_member": "hasMember",
"has_members": "hasMembers",
"has_component": "hasComponent",
"has_components": "hasComponents",
}
def _request_graphstore(
database: str,
user: str,
password: str,
count_query: Optional[str] = None,
select_query_template: Optional[str] = None,
page_size: int = 500,
base: str = "http://graphstore.scai.fraunhofer.de/query",
) -> Iterable[Mapping[str, Any]]:
"""Make an API call to the OrientDB."""<|fim▁hole|> count_query = quote_plus(count_query)
count_url = "{base}/{database}/sql/{count_query}".format(base=base, database=database, count_query=count_query)
count_res = requests.get(count_url, auth=(user, password))
count = count_res.json()["result"][0]["count"]
logging.debug("fraunhofer orientdb has %d edges", count)
if select_query_template is None:
select_query_template = "select from E order by @rid limit {limit} offset {offset}"
offsets = count // page_size
for offset in range(offsets + 1):
select_query = select_query_template.format(limit=page_size, offset=offset * page_size)
logger.debug("query: %s", select_query)
select_query = quote_plus(select_query)
select_url = "{base}/{database}/sql/{select_query}/{page_size}/*:1".format(
base=base,
database=database,
select_query=select_query,
page_size=page_size,
)
res = requests.get(select_url, auth=(user, password))
res_json = res.json()
result = res_json["result"]
yield from result<|fim▁end|>
|
if count_query is None:
count_query = "select count(@rid) from E"
|
<|file_name|>custom_build.rs<|end_file_name|><|fim▁begin|>use std::collections::HashMap;
use std::fs::{self, File};
use std::io::prelude::*;
use std::path::PathBuf;
use std::str;
use std::sync::Mutex;
use core::{Package, Target, PackageId, PackageSet, Profile};
use util::{CargoResult, human, Human};
use util::{internal, ChainError, profile};
use super::job::Work;
use super::{fingerprint, process, Kind, Context, Platform};
use super::CommandType;
use util::Freshness;
/// Contains the parsed output of a custom build script.
#[derive(Clone, Debug)]
pub struct BuildOutput {
/// Paths to pass to rustc with the `-L` flag
pub library_paths: Vec<PathBuf>,
/// Names and link kinds of libraries, suitable for the `-l` flag
pub library_links: Vec<String>,
/// Various `--cfg` flags to pass to the compiler
pub cfgs: Vec<String>,
/// Metadata to pass to the immediate dependencies
pub metadata: Vec<(String, String)>,
}
pub type BuildMap = HashMap<(PackageId, Kind), BuildOutput>;
pub struct BuildState {
pub outputs: Mutex<BuildMap>,
}
/// Prepares a `Work` that executes the target as a custom build script.
///
/// The `req` given is the requirement which this run of the build script will
/// prepare work for. If the requirement is specified as both the target and the
/// host platforms it is assumed that the two are equal and the build script is
/// only run once (not twice).
pub fn prepare(pkg: &Package, target: &Target, req: Platform,
cx: &mut Context) -> CargoResult<(Work, Work, Freshness)> {
let _p = profile::start(format!("build script prepare: {}/{}",
pkg, target.name()));
let kind = match req { Platform::Plugin => Kind::Host, _ => Kind::Target, };
let (script_output, build_output) = {
(cx.layout(pkg, Kind::Host).build(pkg),
cx.layout(pkg, kind).build_out(pkg))
};
// Building the command to execute
let to_exec = script_output.join(target.name());
// Start preparing the process to execute, starting out with some
// environment variables. Note that the profile-related environment
// variables are not set with this the build script's profile but rather the
// package's library profile.
let profile = cx.lib_profile(pkg.package_id());
let to_exec = to_exec.into_os_string();
let mut p = try!(super::process(CommandType::Host(to_exec), pkg, target, cx));
p.env("OUT_DIR", &build_output)
.env("CARGO_MANIFEST_DIR", pkg.root())
.env("NUM_JOBS", &cx.jobs().to_string())
.env("TARGET", &match kind {
Kind::Host => &cx.config.rustc_info().host[..],
Kind::Target => cx.target_triple(),
})
.env("DEBUG", &profile.debuginfo.to_string())
.env("OPT_LEVEL", &profile.opt_level.to_string())
.env("PROFILE", if cx.build_config.release {"release"} else {"debug"})
.env("HOST", &cx.config.rustc_info().host);
// Be sure to pass along all enabled features for this package, this is the
// last piece of statically known information that we have.
match cx.resolve.features(pkg.package_id()) {
Some(features) => {
for feat in features.iter() {
p.env(&format!("CARGO_FEATURE_{}", super::envify(feat)), "1");
}
}
None => {}
}
// Gather the set of native dependencies that this package has along with
// some other variables to close over.
//
// This information will be used at build-time later on to figure out which
// sorts of variables need to be discovered at that time.
let lib_deps = {
let not_custom = pkg.targets().iter().find(|t| {
!t.is_custom_build()
}).unwrap();
cx.dep_targets(pkg, not_custom, profile).iter().filter_map(|&(pkg, t, _)| {
if !t.linkable() { return None }
pkg.manifest().links().map(|links| {
(links.to_string(), pkg.package_id().clone())
})
}).collect::<Vec<_>>()
};
let pkg_name = pkg.to_string();
let build_state = cx.build_state.clone();
let id = pkg.package_id().clone();
let all = (id.clone(), pkg_name.clone(), build_state.clone(),
build_output.clone());
let plugin_deps = super::load_build_deps(cx, pkg, target, profile,
Kind::Host);
try!(fs::create_dir_all(&cx.layout(pkg, Kind::Target).build(pkg)));
try!(fs::create_dir_all(&cx.layout(pkg, Kind::Host).build(pkg)));
let exec_engine = cx.exec_engine.clone();
// Prepare the unit of "dirty work" which will actually run the custom build
// command.
//
// Note that this has to do some extra work just before running the command
// to determine extra environment variables and such.
let work = Work::new(move |desc_tx| {
// Make sure that OUT_DIR exists.
//
// If we have an old build directory, then just move it into place,
// otherwise create it!
if fs::metadata(&build_output).is_err() {
try!(fs::create_dir(&build_output).chain_error(|| {
internal("failed to create script output directory for \
build command")
}));
}
// For all our native lib dependencies, pick up their metadata to pass
// along to this custom build command. We're also careful to augment our
// dynamic library search path in case the build script depended on any
// native dynamic libraries.
{
let build_state = build_state.outputs.lock().unwrap();
for &(ref name, ref id) in lib_deps.iter() {
let data = &build_state[&(id.clone(), kind)].metadata;
for &(ref key, ref value) in data.iter() {
p.env(&format!("DEP_{}_{}", super::envify(name),
super::envify(key)), value);
}
}
try!(super::add_plugin_deps(&mut p, &build_state, plugin_deps));
}
// And now finally, run the build command itself!
desc_tx.send(p.to_string()).ok();
let output = try!(exec_engine.exec_with_output(p).map_err(|mut e| {
e.desc = format!("failed to run custom build command for `{}`\n{}",
pkg_name, e.desc);
Human(e)
}));
// After the build command has finished running, we need to be sure to
// remember all of its output so we can later discover precisely what it
// was, even if we don't run the build command again (due to freshness).
//
// This is also the location where we provide feedback into the build
// state informing what variables were discovered via our script as
// well.
let output = try!(str::from_utf8(&output.stdout).map_err(|_| {
human("build script output was not valid utf-8")
}));
let parsed_output = try!(BuildOutput::parse(output, &pkg_name));
build_state.insert(id, req, parsed_output);
try!(File::create(&build_output.parent().unwrap().join("output"))
.and_then(|mut f| f.write_all(output.as_bytes()))
.map_err(|e| {
human(format!("failed to write output of custom build command: {}",
e))
}));
Ok(())
});
// Now that we've prepared our work-to-do, we need to prepare the fresh work
// itself to run when we actually end up just discarding what we calculated
// above.
//
// Note that the freshness calculation here is the build_cmd freshness, not
// target specific freshness. This is because we don't actually know what
// the inputs are to this command!
//
// Also note that a fresh build command needs to
let (freshness, dirty, fresh) =
try!(fingerprint::prepare_build_cmd(cx, pkg, kind));
let dirty = Work::new(move |tx| {
try!(work.call((tx.clone())));
dirty.call(tx)
});
let fresh = Work::new(move |tx| {
let (id, pkg_name, build_state, build_output) = all;
let new_loc = build_output.parent().unwrap().join("output");
let mut f = try!(File::open(&new_loc).map_err(|e| {
human(format!("failed to read cached build command output: {}", e))
}));
let mut contents = String::new();
try!(f.read_to_string(&mut contents));
let output = try!(BuildOutput::parse(&contents, &pkg_name));
build_state.insert(id, req, output);
fresh.call(tx)
});
Ok((dirty, fresh, freshness))
}
impl BuildState {
pub fn new(config: &super::BuildConfig,
packages: &PackageSet) -> BuildState {
let mut sources = HashMap::new();
for package in packages.iter() {
match package.manifest().links() {
Some(links) => {
sources.insert(links.to_string(),
package.package_id().clone());
}
None => {}
}
}
let mut outputs = HashMap::new();
let i1 = config.host.overrides.iter().map(|p| (p, Kind::Host));
let i2 = config.target.overrides.iter().map(|p| (p, Kind::Target));
for ((name, output), kind) in i1.chain(i2) {
// If no package is using the library named `name`, then this is
// just an override that we ignore.
if let Some(id) = sources.get(name) {
outputs.insert((id.clone(), kind), output.clone());
}
}
BuildState { outputs: Mutex::new(outputs) }
}
fn insert(&self, id: PackageId, req: Platform,
output: BuildOutput) {
let mut outputs = self.outputs.lock().unwrap();
match req {
Platform::Target => { outputs.insert((id, Kind::Target), output); }
Platform::Plugin => { outputs.insert((id, Kind::Host), output); }
// If this build output was for both the host and target platforms,
// we need to insert it at both places.
Platform::PluginAndTarget => {
outputs.insert((id.clone(), Kind::Host), output.clone());
outputs.insert((id, Kind::Target), output);
}
}
}
}
impl BuildOutput {
// Parses the output of a script.
// The `pkg_name` is used for error messages.
pub fn parse(input: &str, pkg_name: &str) -> CargoResult<BuildOutput> {
let mut library_paths = Vec::new();
let mut library_links = Vec::new();
let mut cfgs = Vec::new();
let mut metadata = Vec::new();
let whence = format!("build script of `{}`", pkg_name);
for line in input.lines() {
let mut iter = line.splitn(2, ':');
if iter.next() != Some("cargo") {
// skip this line since it doesn't start with "cargo:"
continue;
}
let data = match iter.next() {
Some(val) => val,
None => continue
};
// getting the `key=value` part of the line
let mut iter = data.splitn(2, '=');
let key = iter.next();
let value = iter.next();
let (key, value) = match (key, value) {
(Some(a), Some(b)) => (a, b.trim_right()),
// line started with `cargo:` but didn't match `key=value`
_ => return Err(human(format!("Wrong output in {}: `{}`",
whence, line)))
};
match key {<|fim▁hole|> let (libs, links) = try!(
BuildOutput::parse_rustc_flags(value, &whence)
);
library_links.extend(links.into_iter());
library_paths.extend(libs.into_iter());
}
"rustc-link-lib" => library_links.push(value.to_string()),
"rustc-link-search" => library_paths.push(PathBuf::from(value)),
"rustc-cfg" => cfgs.push(value.to_string()),
_ => metadata.push((key.to_string(), value.to_string())),
}
}
Ok(BuildOutput {
library_paths: library_paths,
library_links: library_links,
cfgs: cfgs,
metadata: metadata,
})
}
pub fn parse_rustc_flags(value: &str, whence: &str)
-> CargoResult<(Vec<PathBuf>, Vec<String>)> {
let value = value.trim();
let mut flags_iter = value.split(|c: char| c.is_whitespace())
.filter(|w| w.chars().any(|c| !c.is_whitespace()));
let (mut library_links, mut library_paths) = (Vec::new(), Vec::new());
loop {
let flag = match flags_iter.next() {
Some(f) => f,
None => break
};
if flag != "-l" && flag != "-L" {
return Err(human(format!("Only `-l` and `-L` flags are allowed \
in {}: `{}`",
whence, value)))
}
let value = match flags_iter.next() {
Some(v) => v,
None => return Err(human(format!("Flag in rustc-flags has no \
value in {}: `{}`",
whence, value)))
};
match flag {
"-l" => library_links.push(value.to_string()),
"-L" => library_paths.push(PathBuf::from(value)),
// was already checked above
_ => return Err(human("only -l and -L flags are allowed"))
};
}
Ok((library_paths, library_links))
}
}
/// Compute the `build_scripts` map in the `Context` which tracks what build
/// scripts each package depends on.
///
/// The global `build_scripts` map lists for all (package, kind) tuples what set
/// of packages' build script outputs must be considered. For example this lists
/// all dependencies' `-L` flags which need to be propagated transitively.
///
/// The given set of targets to this function is the initial set of
/// targets/profiles which are being built.
pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>,
pkg: &'b Package,
targets: &[(&'b Target, &'b Profile)]) {
let mut ret = HashMap::new();
for &(target, profile) in targets {
build(&mut ret, Kind::Target, pkg, target, profile, cx);
build(&mut ret, Kind::Host, pkg, target, profile, cx);
}
// Make the output a little more deterministic by sorting all dependencies
for (&(id, target, _, kind), slot) in ret.iter_mut() {
slot.sort();
slot.dedup();
debug!("script deps: {}/{}/{:?} => {:?}", id, target.name(), kind,
slot.iter().map(|s| s.to_string()).collect::<Vec<_>>());
}
cx.build_scripts = ret;
// Recursive function to build up the map we're constructing. This function
// memoizes all of its return values as it goes along.
fn build<'a, 'b, 'cfg>(out: &'a mut HashMap<(&'b PackageId, &'b Target,
&'b Profile, Kind),
Vec<&'b PackageId>>,
kind: Kind,
pkg: &'b Package,
target: &'b Target,
profile: &'b Profile,
cx: &Context<'b, 'cfg>)
-> &'a [&'b PackageId] {
// If this target has crossed into "host-land" we need to change the
// kind that we're compiling for, and otherwise just do a quick
// pre-flight check to see if we've already calculated the set of
// dependencies.
let kind = if target.for_host() {Kind::Host} else {kind};
let id = pkg.package_id();
if out.contains_key(&(id, target, profile, kind)) {
return &out[&(id, target, profile, kind)]
}
// This loop is both the recursive and additive portion of this
// function, the key part of the logic being around determining the
// right `kind` to recurse on. If a dependency fits in the kind that
// we've got specified, then we just keep plazing a trail, but otherwise
// we *switch* the kind we're looking at because it must fit into the
// other category.
//
// We always recurse, but only add to our own array if the target is
// linkable to us (e.g. not a binary) and it's for the same original
// `kind`.
let mut ret = Vec::new();
for &(pkg, target, p) in cx.dep_targets(pkg, target, profile).iter() {
let req = cx.get_requirement(pkg, target);
let dep_kind = if req.includes(kind) {
kind
} else if kind == Kind::Target {
Kind::Host
} else {
Kind::Target
};
let dep_scripts = build(out, dep_kind, pkg, target, p, cx);
if target.linkable() && kind == dep_kind {
if pkg.has_custom_build() {
ret.push(pkg.package_id());
}
ret.extend(dep_scripts.iter().cloned());
}
}
let prev = out.entry((id, target, profile, kind)).or_insert(Vec::new());
prev.extend(ret);
return prev
}
}<|fim▁end|>
|
"rustc-flags" => {
|
<|file_name|>class_reader.go<|end_file_name|><|fim▁begin|>package classfile
import "encoding/binary"
type ClassReader struct {
data []byte
}
func (self *ClassReader) readUint8() uint8 {
val := self.data[0]
self.data = self.data[1:]
return val
}
func (self *ClassReader) readUint16() uint16 {
val := binary.BigEndian.Uint16(self.data)
self.data = self.data[2:]
return val
}
func (self *ClassReader) readUint32() uint32 {<|fim▁hole|> return val
}
func (self *ClassReader) readUint64() uint64 {
val := binary.BigEndian.Uint64(self.data)
self.data = self.data[8:]
return val
}
func (self *ClassReader) readUint16s() []uint16 {
n := self.readUint16()
s := make([]uint16, n)
for i := range s {
s[i] = self.readUint16()
}
return s
}
func (self *ClassReader) readBytes(n uint32) []byte {
bytes := self.data[:n]
self.data = self.data[n:]
return bytes
}<|fim▁end|>
|
val := binary.BigEndian.Uint32(self.data)
self.data = self.data[4:]
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! Contains the types and functions to communicate with the MusicBrainz API.
use crate::error::{Error, ErrorKind};
use crate::entities::{Mbid, ResourceOld, Resource};
use reqwest_mock::Client as MockClient;
use reqwest_mock::GenericClient as HttpClient;
use reqwest_mock::{StatusCode, Url};
use reqwest_mock::header::UserAgent;
use xpath_reader::reader::{FromXml, Reader};
use std::time::{Duration, Instant};
use std::thread::sleep;
use crate::search::{ReleaseGroupSearchBuilder, SearchBuilder};
mod error;
pub(crate) use self::error::check_response_error;
/// Helper extracting the number of milliseconds from a `Duration`.
fn as_millis(duration: &Duration) -> u64 {
((duration.as_secs() as f64) + (duration.subsec_nanos() as f64) * 1e6) as u64
}
/// Returns an `Instant` at least 1000 seconds ago.<|fim▁hole|>/// Configuration for the client.
#[derive(Clone, Debug)]
pub struct ClientConfig {
/// The user-agent to be sent with every request to the API.
///
/// Provide a meaningful one as it will be used by MusicBrainz to identify
/// your application and without a user agent sever throttling will be
/// undertaken. The official suggestion is to use either one of the
/// following two options:
///
/// * `Application name/<version> ( contact-url )`
/// * `Application name/<version> ( contact-email )`
///
/// For more information see:
/// https://musicbrainz.org/doc/XML_Web_Service/Rate_Limiting
pub user_agent: String,
/// How many times to retry requests where MusicBrainz returned 503 because
/// too many requests were being made.
pub max_retries: u8,
/// Specifies amounts of time to wait between certain actions.
pub waits: ClientWaits,
}
/// Specification of the wait time between requests.
///
/// Times are specified in miliseconds.
#[derive(Clone, Debug)]
pub struct ClientWaits {
/// Initial wait time after a ServiceUnavailable to use for the exponential
/// backoff strategy.
pub backoff_init: u64,
// TODO: Make this configurable if and only if a custom server instance is used,
// to make abuse of the main servers harder.
/// Minimal time between requests
requests: u64,
}
impl Default for ClientWaits {
fn default() -> Self {
ClientWaits {
backoff_init: 400,
requests: 1000,
}
}
}
/// The main struct to be used to communicate with the MusicBrainz API.
///
/// Please create only one instance and use it troughout your application
/// as it will ensure appropriate wait times between requests to prevent
/// being blocked for making to many requests.
pub struct Client {
http_client: HttpClient,
config: ClientConfig,
/// The time the last request was made.
/// According to the documentation we have to wait at least one second
/// between any two requests
/// to the MusicBrainz API.
last_request: Instant,
}
/// A request to be performed on the client.
///
/// Note: You most likely won't have to use it directly, it's public for trait visibility
/// reasons.
#[derive(Clone, Debug)]
pub struct Request {
pub name: String,
pub include: String,
}
impl Client {
/// Create a new `Client` instance.
pub fn new(config: ClientConfig) -> Self {
Client {
config: config,
http_client: HttpClient::direct(),
last_request: past_instant(),
}
}
/// Create a new `Client` instance with the specified `HttpClient`.
///
/// This is useful for testing purposes where you can inject a different
/// `HttpClient`, i. e. one replaying requests to save API calls or one
/// providing explicit stubbing.
pub fn with_http_client(config: ClientConfig, client: HttpClient) -> Self {
Client {
config: config,
http_client: client,
last_request: past_instant(),
}
}
/// Waits until we are allowed to make the next request to the MusicBrainz
/// API.
fn wait_if_needed(&mut self) {
let now = Instant::now();
let elapsed = now.duration_since(self.last_request);
if as_millis(&elapsed) < self.config.waits.requests {
sleep(Duration::from_millis(self.config.waits.requests) - elapsed);
}
self.last_request = now;
}
pub fn get_by_mbid<Res, Resp, Opt>(&mut self, mbid: &Mbid, options: Opt) -> Result<Res, Error>
where
Res: Resource<Options = Opt, Response = Resp>,
Resp: FromXml,
{
let request = Res::request(&options);
let url = request.get_by_mbid_url(mbid);
let response_body = self.get_body(url.parse()?)?;
let context = crate::util::musicbrainz_context();
let reader = Reader::from_str(response_body.as_str(), Some(&context))?;
check_response_error(&reader)?;
let response = Resp::from_xml(&reader)?;
Ok(Res::from_response(response, options))
}
/// Fetch the specified resource from the server and parse it.
pub fn get_by_mbid_old<Res>(&mut self, mbid: &Mbid) -> Result<Res, Error>
where
Res: ResourceOld + FromXml,
{
let url = Res::get_url(mbid);
let response_body = self.get_body(url.parse()?)?;
// Parse the response.
let context = crate::util::musicbrainz_context();
let reader = Reader::from_str(&response_body[..], Some(&context))?;
check_response_error(&reader)?;
Ok(Res::from_xml(&reader)?)
}
pub(crate) fn get_body(&mut self, url: Url) -> Result<String, Error> {
self.wait_if_needed();
let mut attempts = 0;
let mut backoff = self.config.waits.backoff_init;
while attempts < self.config.max_retries {
let response = self
.http_client
.get(url.clone())
.header(UserAgent::new(self.config.user_agent.clone()))
.send()?;
if response.status == StatusCode::ServiceUnavailable {
sleep(Duration::from_millis(backoff));
attempts += 1;
backoff *= 2;
// If we are in testing we want to avoid always failing.
self.http_client.force_record_next();
} else {
let response_body = response.body_to_utf8()?;
return Ok(response_body);
}
}
Err(Error::new(
"MusicBrainz returned 503 (ServiceUnavailable) too many times.",
ErrorKind::Communication,
))
}
/*
/// Returns a search builder to search for an area.
pub fn search_area<'cl>(&'cl mut self) -> AreaSearchBuilder<'cl> {
AreaSearchBuilder::new(self)
}
/// Returns a search biulder to search for an artist.
pub fn search_artist<'cl>(&'cl mut self) -> ArtistSearchBuilder<'cl> {
ArtistSearchBuilder::new(self)
}*/
/// Returns a search builder to search for a release group.
pub fn search_release_group<'cl>(&'cl mut self) -> ReleaseGroupSearchBuilder<'cl> {
ReleaseGroupSearchBuilder::new(self)
}
}
impl Request {
/// Returns the url where one can get a resource in the valid format for
/// parsing from.
fn get_by_mbid_url(&self, mbid: &Mbid) -> String {
format!(
"https://musicbrainz.org/ws/2/{}/{}?inc={}",
self.name, mbid, self.include
)
}
}
#[cfg(test)]
mod tests {
use super::*;
fn get_client(testname: &str) -> Client {
Client::with_http_client(
ClientConfig {
user_agent: "MusicBrainz-Rust/Testing".to_string(),
max_retries: 5,
waits: ClientWaits::default(),
},
HttpClient::replay_file(format!("replay/test_client/search/{}.json", testname)),
)
}
#[test]
fn search_release_group() {
let mut client = get_client("release_group_01");
let results = client
.search_release_group()
.add(crate::search::fields::release_group::ReleaseGroupName(
"霊魂消滅".to_owned(),
))
.search()
.unwrap();
assert_eq!(results.len(), 1);
assert_eq!(results[0].score, 100);
assert_eq!(
results[0].entity.mbid,
"739de9cd-7e81-4bb0-9fdb-0feb7ea709c7".parse().unwrap()
);
assert_eq!(results[0].entity.title, "霊魂消滅".to_string());
}
}<|fim▁end|>
|
fn past_instant() -> Instant {
Instant::now() - Duration::new(1000, 0)
}
|
<|file_name|>region-object-lifetime-5.rs<|end_file_name|><|fim▁begin|>// Various tests related to testing how region inference works
// with respect to the object receivers.
trait Foo {
fn borrowed<'a>(&'a self) -> &'a ();
}
// Here, the object is bounded by an anonymous lifetime and returned
// as `&'static`, so you get an error.<|fim▁hole|>}
fn main() {}<|fim▁end|>
|
fn owned_receiver(x: Box<dyn Foo>) -> &'static () {
x.borrowed() //~ ERROR cannot return reference to local data `*x`
|
<|file_name|>DihedralFieldOfViewDetector.java<|end_file_name|><|fim▁begin|>/* Copyright 2002-2014 CS Systèmes d'Information
* Licensed to CS Systèmes d'Information (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orekit.propagation.events;
import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
import org.apache.commons.math3.util.FastMath;
import org.orekit.errors.OrekitException;
import org.orekit.propagation.SpacecraftState;
import org.orekit.propagation.events.handlers.EventHandler;
import org.orekit.propagation.events.handlers.StopOnDecreasing;
import org.orekit.utils.PVCoordinatesProvider;
/** Finder for body entering/exiting dihedral FOV events.
* <p>This class finds dihedral field of view events (i.e. body entry and exit in FOV).</p>
* <p>The default implementation behavior is to {@link
* org.orekit.propagation.events.handlers.EventHandler.Action#CONTINUE continue}
* propagation at entry and to {@link
* org.orekit.propagation.events.handlers.EventHandler.Action#STOP stop} propagation
* at exit. This can be changed by calling
* {@link #withHandler(EventHandler)} after construction.</p>
* @see org.orekit.propagation.Propagator#addEventDetector(EventDetector)
* @see CircularFieldOfViewDetector
* @author Véronique Pommier-Maurussane
*/
public class DihedralFieldOfViewDetector extends AbstractDetector<DihedralFieldOfViewDetector> {
/** Serializable UID. */
private static final long serialVersionUID = 20131118L;
/** Position/velocity provider of the considered target. */
private final PVCoordinatesProvider targetPVProvider;
/** Direction of the FOV center. */
private final Vector3D center;
/** FOV dihedral axis 1. */
private final Vector3D axis1;
/** FOV normal to first center plane. */
private final Vector3D normalCenterPlane1;
/** FOV dihedral half aperture angle 1. */
private final double halfAperture1;
/** FOV dihedral axis 2. */
private final Vector3D axis2;
/** FOV normal to second center plane. */
private final Vector3D normalCenterPlane2;
/** FOV dihedral half aperture angle 2. */
private final double halfAperture2;
/** Build a new instance.
* <p>The maximal interval between distance to FOV boundary checks should
* be smaller than the half duration of the minimal pass to handle,
* otherwise some short passes could be missed.</p>
* @param maxCheck maximal interval in seconds
* @param pvTarget Position/velocity provider of the considered target
* @param center Direction of the FOV center
* @param axis1 FOV dihedral axis 1
* @param halfAperture1 FOV dihedral half aperture angle 1
* @param axis2 FOV dihedral axis 2
* @param halfAperture2 FOV dihedral half aperture angle 2<|fim▁hole|> */
public DihedralFieldOfViewDetector(final double maxCheck,
final PVCoordinatesProvider pvTarget, final Vector3D center,
final Vector3D axis1, final double halfAperture1,
final Vector3D axis2, final double halfAperture2) {
this(maxCheck, 1.0e-3, DEFAULT_MAX_ITER, new StopOnDecreasing<DihedralFieldOfViewDetector>(),
pvTarget, center, axis1, halfAperture1, axis2, halfAperture2);
}
/** Private constructor with full parameters.
* <p>
* This constructor is private as users are expected to use the builder
* API with the various {@code withXxx()} methods to set up the instance
* in a readable manner without using a huge amount of parameters.
* </p>
* @param maxCheck maximum checking interval (s)
* @param threshold convergence threshold (s)
* @param maxIter maximum number of iterations in the event time search
* @param handler event handler to call at event occurrences
* @param pvTarget Position/velocity provider of the considered target
* @param center Direction of the FOV center
* @param axis1 FOV dihedral axis 1
* @param halfAperture1 FOV dihedral half aperture angle 1
* @param axis2 FOV dihedral axis 2
* @param halfAperture2 FOV dihedral half aperture angle 2
* @since 6.1
*/
private DihedralFieldOfViewDetector(final double maxCheck, final double threshold,
final int maxIter, final EventHandler<DihedralFieldOfViewDetector> handler,
final PVCoordinatesProvider pvTarget, final Vector3D center,
final Vector3D axis1, final double halfAperture1,
final Vector3D axis2, final double halfAperture2) {
super(maxCheck, threshold, maxIter, handler);
this.targetPVProvider = pvTarget;
this.center = center;
// Computation of the center plane normal for dihedra 1
this.axis1 = axis1;
this.normalCenterPlane1 = Vector3D.crossProduct(axis1, center);
// Computation of the center plane normal for dihedra 2
this.axis2 = axis2;
this.normalCenterPlane2 = Vector3D.crossProduct(axis2, center);
this.halfAperture1 = halfAperture1;
this.halfAperture2 = halfAperture2;
}
/** {@inheritDoc} */
@Override
protected DihedralFieldOfViewDetector create(final double newMaxCheck, final double newThreshold,
final int newMaxIter,
final EventHandler<DihedralFieldOfViewDetector> newHandler) {
return new DihedralFieldOfViewDetector(newMaxCheck, newThreshold, newMaxIter, newHandler,
targetPVProvider, center,
axis1, halfAperture1,
axis2, halfAperture2);
}
/** Get the position/velocity provider of the target .
* @return the position/velocity provider of the target
*/
public PVCoordinatesProvider getPVTarget() {
return targetPVProvider;
}
/** Get the direction of FOV center.
* @return the direction of FOV center
*/
public Vector3D getCenter() {
return center;
}
/** Get the direction of FOV 1st dihedral axis.
* @return the direction of FOV 1st dihedral axis
*/
public Vector3D getAxis1() {
return axis1;
}
/** Get the half aperture angle of FOV 1st dihedra.
* @return the half aperture angle of FOV 1st dihedras
*/
public double getHalfAperture1() {
return halfAperture1;
}
/** Get the half aperture angle of FOV 2nd dihedra.
* @return the half aperture angle of FOV 2nd dihedras
*/
public double getHalfAperture2() {
return halfAperture2;
}
/** Get the direction of FOV 2nd dihedral axis.
* @return the direction of FOV 2nd dihedral axis
*/
public Vector3D getAxis2() {
return axis2;
}
/** {@inheritDoc}
* g function value is the target signed distance to the closest FOV boundary.
* It is positive inside the FOV, and negative outside. */
public double g(final SpacecraftState s) throws OrekitException {
// Get position of target at current date in spacecraft frame.
final Vector3D targetPosInert = new Vector3D(1, targetPVProvider.getPVCoordinates(s.getDate(), s.getFrame()).getPosition(),
-1, s.getPVCoordinates().getPosition());
final Vector3D targetPosSat = s.getAttitude().getRotation().applyTo(targetPosInert);
// Compute the four angles from the four FOV boundaries.
final double angle1 = FastMath.atan2(Vector3D.dotProduct(targetPosSat, normalCenterPlane1),
Vector3D.dotProduct(targetPosSat, center));
final double angle2 = FastMath.atan2(Vector3D.dotProduct(targetPosSat, normalCenterPlane2),
Vector3D.dotProduct(targetPosSat, center));
// g function value is distance to the FOV boundary, computed as a dihedral angle.
// It is positive inside the FOV, and negative outside.
return FastMath.min(halfAperture1 - FastMath.abs(angle1) , halfAperture2 - FastMath.abs(angle2));
}
}<|fim▁end|>
| |
<|file_name|>ProductionPanel.java<|end_file_name|><|fim▁begin|>package de.hub.cses.ces.jsf.bean.game.play;
/*
* #%L
* CES-Game
* %%
* Copyright (C) 2015 Humboldt-Universität zu Berlin,
* Department of Computer Science,
* Research Group "Computer Science Education / Computer Science and Society"
* Sebastian Gross <[email protected]>
* Sven Strickroth <[email protected]>
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
import de.hub.cses.ces.entity.production.Production;
import de.hub.cses.ces.entity.production.ProductionPlan;
import de.hub.cses.ces.jsf.bean.game.PlayBean;
import de.hub.cses.ces.jsf.config.GamePlayComponent;
import de.hub.cses.ces.service.persistence.production.ProductionFacade;
import de.hub.cses.ces.service.persistence.production.ProductionPlanFacade;
import de.hub.cses.ces.util.ComponentUpdateUtil;
import java.io.Serializable;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.PostConstruct;
import javax.ejb.EJB;
import javax.faces.view.ViewScoped;
import javax.inject.Inject;
import javax.inject.Named;
/**
*
* @author Sebastian Gross <[email protected]>
*/
@Named("ProductionPanel")
@ViewScoped
public class ProductionPanel implements Serializable {
@Inject
@SuppressWarnings("NonConstantLogger")
private transient Logger logger;
@Inject<|fim▁hole|>
@EJB
private ProductionFacade productionFacade;
@EJB
private ProductionPlanFacade productionPlanFacade;
/**
*
*/
@PostConstruct
public void init() {
}
/**
*
* @return
*/
public Production getProduction() {
return gamePlay.getCooperator().getCompany().getProduction();
}
/**
*
* @param event
* @throws Exception
*/
public void edit(org.primefaces.event.RowEditEvent event) throws Exception {
ProductionPlan productionPlan = (ProductionPlan) event.getObject();
try {
logger.log(Level.INFO, "edit production plan {0}", (productionPlan != null) ? productionPlan.getId() : null);
logger.log(Level.INFO, "workforce {0}", (productionPlan != null) ? productionPlan.getWorkforce() : null);
productionPlanFacade.edit(productionPlan);
} catch (Exception ex) {
Exception ne = (Exception) ex.getCause();
if ("org.eclipse.persistence.exceptions.OptimisticLockException".equals(ne.getClass().getName())
|| "javax.persistence.OptimisticLockException".equals(ne.getClass().getName())) {
throw new javax.persistence.OptimisticLockException("fehler...");
}
} finally {
componentUpdateUtil.companyUpdate(gamePlay.getCooperator().getCompany().getId(), GamePlayComponent.PRODUCTION);
}
}
/**
*
* @param event
*/
public void cancel(org.primefaces.event.RowEditEvent event) {
//gamePlay.updateData();
}
}<|fim▁end|>
|
private PlayBean gamePlay;
@Inject
private ComponentUpdateUtil componentUpdateUtil;
|
<|file_name|>InstantSearch-integration-test.ts<|end_file_name|><|fim▁begin|>/**
* @jest-environment jsdom
*/
import { getByText, fireEvent } from '@testing-library/dom';
import instantsearch from '../../index.es';
import { configure, searchBox } from '../../widgets';
import { connectConfigure } from '../../connectors';
import { createSearchClient } from '../../../test/mock/createSearchClient';
import type { MiddlewareDefinition } from '../../types';
import { wait } from '../../../test/utils/wait';
describe('configure', () => {
it('provides up-to-date uiState to onStateChange', () => {
const container = document.createElement('div');
const onStateChange = jest.fn();
const search = instantsearch({
indexName: 'instant_search',
searchClient: createSearchClient(),
onStateChange({ uiState, setUiState }) {
onStateChange(uiState);
setUiState(uiState);
},
});
const customComp = connectConfigure(({ refine }, isFirstRendering) => {
if (isFirstRendering) {
const button = document.createElement('button');
button.setAttribute('type', 'button');
button.textContent = 'click me';
container.appendChild(button);
container.querySelector('button')!.addEventListener('click', () => {
refine({ hitsPerPage: 4 });
});
}
});
search.addWidgets([
configure({
hitsPerPage: 10,
}),
customComp({ searchParameters: {} }),
]);
search.start();
expect(onStateChange).not.toHaveBeenCalled();
fireEvent.click(getByText(container, 'click me'));
expect(onStateChange).toHaveBeenCalledTimes(1);
expect(onStateChange).toHaveBeenCalledWith({
instant_search: { configure: { hitsPerPage: 4 } },
});
});
});
describe('middleware', () => {
it("runs middlewares' onStateChange when uiState changes", async () => {
const container = document.createElement('div');
const search = instantsearch({
indexName: 'instant_search',
searchClient: createSearchClient(),
});
const middlewareDefinition: MiddlewareDefinition = {
onStateChange: jest.fn(),
subscribe: jest.fn(),
unsubscribe: jest.fn(),
};
search.use(() => middlewareDefinition);
search.addWidgets([
searchBox({
container,
placeholder: 'search',
}),
]);
search.start();
fireEvent.input(container.querySelector('input')!, {
target: { value: 'q' },
});
await wait(0);
expect(middlewareDefinition.onStateChange).toHaveBeenCalledTimes(1);
});
it("runs middlewares' onStateChange when uiState changes with user-provided onStateChange param", async () => {
const container = document.createElement('div');
const search = instantsearch({
indexName: 'instant_search',
searchClient: createSearchClient(),
onStateChange({ uiState, setUiState }) {
setUiState(uiState);
},
});
const middlewareDefinition: MiddlewareDefinition = {
onStateChange: jest.fn(),
subscribe: jest.fn(),
unsubscribe: jest.fn(),
};
search.use(() => middlewareDefinition);
search.addWidgets([
searchBox({
container,
placeholder: 'search',
}),
]);
search.start();
fireEvent.input(container.querySelector('input')!, {
target: { value: 'q' },
});
await wait(0);
expect(middlewareDefinition.onStateChange).toHaveBeenCalledTimes(1);
});
});
describe('errors', () => {
it('client errors can be handled', () => {
const search = instantsearch({
searchClient: createSearchClient({
search() {
return Promise.reject(new Error('test!'));
},
}),
indexName: '123',
});
expect.assertions(4);
search.on('error', (error) => {
expect(error).toBeInstanceOf(Error);
expect(error.message).toBe('test!');<|fim▁hole|> expect(error.error.message).toBe('test!');
});
search.start();
});
});<|fim▁end|>
|
expect(error.error).toBeInstanceOf(Error);
|
<|file_name|>stlc.rs<|end_file_name|><|fim▁begin|>#[phase(plugin)]
extern crate peg_syntax_ext;
pub use self::peg::sym;
<|fim▁hole|>
#[pub]
sym
-> Box<syntax::sym::Sym>
= [0-9]+
{
box syntax::sym::Con(String::from_str(match_str))
}
"#)<|fim▁end|>
|
peg! peg(r#"
use core::lang::stlc::syntax;
|
<|file_name|>wallet_importmulti.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (c) 2014-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the importmulti RPC.
Test importmulti by generating keys on node0, importing the scriptPubKeys and
addresses on node1 and then testing the address info for the different address
variants.
- `get_key()` and `get_multisig()` are called to generate keys on node0 and
return the privkeys, pubkeys and all variants of scriptPubKey and address.
- `test_importmulti()` is called to send an importmulti call to node1, test
success, and (if unsuccessful) test the error code and error message returned.
- `test_address()` is called to call getaddressinfo for an address on node1
and test the values returned."""
from test_framework.descriptors import descsum_create
from test_framework.script import OP_NOP, CScript
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_greater_than,
assert_raises_rpc_error,
)
from test_framework.wallet_util import get_key, get_multisig, test_address
class ImportMultiTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = True
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def setup_network(self, split=False):
self.setup_nodes()
def test_importmulti(self, req, success, error_code=None,
error_message=None, warnings=None):
"""Run importmulti and assert success"""
if warnings is None:
warnings = []
result = self.nodes[1].importmulti([req])
observed_warnings = []
if 'warnings' in result[0]:
observed_warnings = result[0]['warnings']
assert_equal(
"\n".join(
sorted(warnings)), "\n".join(
sorted(observed_warnings)))
assert_equal(result[0]['success'], success)
if error_code is not None:
assert_equal(result[0]['error']['code'], error_code)
assert_equal(result[0]['error']['message'], error_message)
def run_test(self):
self.log.info("Mining blocks...")
self.nodes[0].generate(1)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(
self.nodes[1].getbestblockhash())['mediantime']
# Sync the timestamp to the wallet, so that importmulti works
self.nodes[1].syncwithvalidationinterfacequeue()
node0_address1 = self.nodes[0].getaddressinfo(
self.nodes[0].getnewaddress())
# Check only one address
assert_equal(node0_address1['ismine'], True)
# Node 1 sync test
assert_equal(self.nodes[1].getblockcount(), 1)
# Address Test - before import
address_info = self.nodes[1].getaddressinfo(node0_address1['address'])
assert_equal(address_info['iswatchonly'], False)
assert_equal(address_info['ismine'], False)
# RPC importmulti -----------------------------------------------
# Bitcoin Address (implicit non-internal)
self.log.info("Should import an address")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
"timestamp": "now"},
success=True)
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=True,
ismine=False,
timestamp=timestamp,
ischange=False)
watchonly_address = key.p2pkh_addr
watchonly_timestamp = timestamp
self.log.info("Should not import an invalid address")
self.test_importmulti({"scriptPubKey": {"address": "not valid address"},
"timestamp": "now"},
success=False,
error_code=-5,
error_message='Invalid address \"not valid address\"')
# ScriptPubKey + internal
self.log.info("Should import a scriptPubKey with internal flag")<|fim▁hole|> key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": key.p2pkh_script,
"timestamp": "now",
"internal": True},
success=True)
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=True,
ismine=False,
timestamp=timestamp,
ischange=True)
# ScriptPubKey + internal + label
self.log.info(
"Should not allow a label to be specified when internal is true")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": key.p2pkh_script,
"timestamp": "now",
"internal": True,
"label": "Unsuccessful labelling for internal addresses"},
success=False,
error_code=-8,
error_message='Internal addresses should not have a label')
# Nonstandard scriptPubKey + !internal
self.log.info(
"Should not import a nonstandard scriptPubKey without internal flag")
nonstandardScriptPubKey = key.p2pkh_script + CScript([OP_NOP]).hex()
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": nonstandardScriptPubKey,
"timestamp": "now"},
success=False,
error_code=-8,
error_message='Internal must be set to true for nonstandard scriptPubKey imports.')
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=False,
ismine=False,
timestamp=None)
# Address + Public key + !Internal(explicit)
self.log.info("Should import an address with public key")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
"timestamp": "now",
"pubkeys": [key.pubkey],
"internal": False},
success=True,
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=True,
ismine=False,
timestamp=timestamp)
# ScriptPubKey + Public key + internal
self.log.info(
"Should import a scriptPubKey with internal and with public key")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": key.p2pkh_script,
"timestamp": "now",
"pubkeys": [key.pubkey],
"internal": True},
success=True,
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=True,
ismine=False,
timestamp=timestamp)
# Nonstandard scriptPubKey + Public key + !internal
self.log.info(
"Should not import a nonstandard scriptPubKey without internal and with public key")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": nonstandardScriptPubKey,
"timestamp": "now",
"pubkeys": [key.pubkey]},
success=False,
error_code=-8,
error_message='Internal must be set to true for nonstandard scriptPubKey imports.')
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=False,
ismine=False,
timestamp=None)
# Address + Private key + !watchonly
self.log.info("Should import an address with private key")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
"timestamp": "now",
"keys": [key.privkey]},
success=True)
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=False,
ismine=True,
timestamp=timestamp)
self.log.info(
"Should not import an address with private key if is already imported")
self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
"timestamp": "now",
"keys": [key.privkey]},
success=False,
error_code=-4,
error_message='The wallet already contains the private key for this address or script ("' + key.p2pkh_script + '")')
# Address + Private key + watchonly
self.log.info(
"Should import an address with private key and with watchonly")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
"timestamp": "now",
"keys": [key.privkey],
"watchonly": True},
success=True,
warnings=["All private keys are provided, outputs will be considered spendable. If this is intentional, do not specify the watchonly flag."])
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=False,
ismine=True,
timestamp=timestamp)
# ScriptPubKey + Private key + internal
self.log.info(
"Should import a scriptPubKey with internal and with private key")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": key.p2pkh_script,
"timestamp": "now",
"keys": [key.privkey],
"internal": True},
success=True)
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=False,
ismine=True,
timestamp=timestamp)
# Nonstandard scriptPubKey + Private key + !internal
self.log.info(
"Should not import a nonstandard scriptPubKey without internal and with private key")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": nonstandardScriptPubKey,
"timestamp": "now",
"keys": [key.privkey]},
success=False,
error_code=-8,
error_message='Internal must be set to true for nonstandard scriptPubKey imports.')
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=False,
ismine=False,
timestamp=None)
# P2SH address
multisig = get_multisig(self.nodes[0])
self.nodes[1].generate(100)
self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(
self.nodes[1].getbestblockhash())['mediantime']
self.nodes[1].syncwithvalidationinterfacequeue()
self.log.info("Should import a p2sh")
self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr},
"timestamp": "now"},
success=True)
test_address(self.nodes[1],
multisig.p2sh_addr,
isscript=True,
iswatchonly=True,
timestamp=timestamp)
p2shunspent = self.nodes[1].listunspent(
0, 999999, [multisig.p2sh_addr])[0]
assert_equal(p2shunspent['spendable'], False)
assert_equal(p2shunspent['solvable'], False)
# P2SH + Redeem script
multisig = get_multisig(self.nodes[0])
self.nodes[1].generate(100)
self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(
self.nodes[1].getbestblockhash())['mediantime']
self.nodes[1].syncwithvalidationinterfacequeue()
self.log.info("Should import a p2sh with respective redeem script")
self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr},
"timestamp": "now",
"redeemscript": multisig.redeem_script},
success=True,
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(
self.nodes[1],
multisig.p2sh_addr,
timestamp=timestamp,
iswatchonly=True,
ismine=False,
solvable=True)
p2shunspent = self.nodes[1].listunspent(
0, 999999, [multisig.p2sh_addr])[0]
assert_equal(p2shunspent['spendable'], False)
assert_equal(p2shunspent['solvable'], True)
# P2SH + Redeem script + Private Keys + !Watchonly
multisig = get_multisig(self.nodes[0])
self.nodes[1].generate(100)
self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(
self.nodes[1].getbestblockhash())['mediantime']
self.nodes[1].syncwithvalidationinterfacequeue()
self.log.info(
"Should import a p2sh with respective redeem script and private keys")
self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr},
"timestamp": "now",
"redeemscript": multisig.redeem_script,
"keys": multisig.privkeys[0:2]},
success=True,
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(self.nodes[1],
multisig.p2sh_addr,
timestamp=timestamp,
ismine=False,
iswatchonly=True,
solvable=True)
p2shunspent = self.nodes[1].listunspent(
0, 999999, [multisig.p2sh_addr])[0]
assert_equal(p2shunspent['spendable'], False)
assert_equal(p2shunspent['solvable'], True)
# P2SH + Redeem script + Private Keys + Watchonly
multisig = get_multisig(self.nodes[0])
self.nodes[1].generate(100)
self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(
self.nodes[1].getbestblockhash())['mediantime']
self.nodes[1].syncwithvalidationinterfacequeue()
self.log.info(
"Should import a p2sh with respective redeem script and private keys")
self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr},
"timestamp": "now",
"redeemscript": multisig.redeem_script,
"keys": multisig.privkeys[0:2],
"watchonly": True},
success=True)
test_address(self.nodes[1],
multisig.p2sh_addr,
iswatchonly=True,
ismine=False,
solvable=True,
timestamp=timestamp)
# Address + Public key + !Internal + Wrong pubkey
self.log.info(
"Should not import an address with the wrong public key as non-solvable")
key = get_key(self.nodes[0])
wrong_key = get_key(self.nodes[0]).pubkey
self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
"timestamp": "now",
"pubkeys": [wrong_key]},
success=True,
warnings=["Importing as non-solvable: some required keys are missing. If this is intentional, don't provide any keys, pubkeys or redeemscript.", "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=True,
ismine=False,
solvable=False,
timestamp=timestamp)
# ScriptPubKey + Public key + internal + Wrong pubkey
self.log.info(
"Should import a scriptPubKey with internal and with a wrong public key as non-solvable")
key = get_key(self.nodes[0])
wrong_key = get_key(self.nodes[0]).pubkey
self.test_importmulti({"scriptPubKey": key.p2pkh_script,
"timestamp": "now",
"pubkeys": [wrong_key],
"internal": True},
success=True,
warnings=["Importing as non-solvable: some required keys are missing. If this is intentional, don't provide any keys, pubkeys or redeemscript.", "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=True,
ismine=False,
solvable=False,
timestamp=timestamp)
# Address + Private key + !watchonly + Wrong private key
self.log.info(
"Should import an address with a wrong private key as non-solvable")
key = get_key(self.nodes[0])
wrong_privkey = get_key(self.nodes[0]).privkey
self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
"timestamp": "now",
"keys": [wrong_privkey]},
success=True,
warnings=["Importing as non-solvable: some required keys are missing. If this is intentional, don't provide any keys, pubkeys or redeemscript.", "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=True,
ismine=False,
solvable=False,
timestamp=timestamp)
# ScriptPubKey + Private key + internal + Wrong private key
self.log.info(
"Should import a scriptPubKey with internal and with a wrong private key as non-solvable")
key = get_key(self.nodes[0])
wrong_privkey = get_key(self.nodes[0]).privkey
self.test_importmulti({"scriptPubKey": key.p2pkh_script,
"timestamp": "now",
"keys": [wrong_privkey],
"internal": True},
success=True,
warnings=["Importing as non-solvable: some required keys are missing. If this is intentional, don't provide any keys, pubkeys or redeemscript.", "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=True,
ismine=False,
solvable=False,
timestamp=timestamp)
# Importing existing watch only address with new timestamp should
# replace saved timestamp.
assert_greater_than(timestamp, watchonly_timestamp)
self.log.info("Should replace previously saved watch only timestamp.")
self.test_importmulti({"scriptPubKey": {"address": watchonly_address},
"timestamp": "now"},
success=True)
test_address(self.nodes[1],
watchonly_address,
iswatchonly=True,
ismine=False,
timestamp=timestamp)
watchonly_timestamp = timestamp
# restart nodes to check for proper serialization/deserialization of
# watch only address
self.stop_nodes()
self.start_nodes()
test_address(self.nodes[1],
watchonly_address,
iswatchonly=True,
ismine=False,
timestamp=watchonly_timestamp)
# Bad or missing timestamps
self.log.info("Should throw on invalid or missing timestamp values")
assert_raises_rpc_error(-3, 'Missing required timestamp field for key',
self.nodes[1].importmulti, [{"scriptPubKey": key.p2pkh_script}])
assert_raises_rpc_error(-3, 'Expected number or "now" timestamp value for key. got type string',
self.nodes[1].importmulti, [{
"scriptPubKey": key.p2pkh_script,
"timestamp": ""
}])
# Test that importing of a P2PKH address via descriptor without
# checksum fails
key = get_key(self.nodes[0])
self.log.info(
"Should fail to import a p2pkh address from descriptor with no checksum")
self.test_importmulti({"desc": "pkh(" + key.pubkey + ")",
"timestamp": "now",
"label": "Descriptor import test"},
success=False,
error_code=-5,
error_message='Missing checksum')
# Test ranged descriptor fails if range is not specified
xpriv = "tprv8ZgxMBicQKsPeuVhWwi6wuMQGfPKi9Li5GtX35jVNknACgqe3CY4g5xgkfDDJcmtF7o1QnxWDRYw4H5P26PXq7sbcUkEqeR4fg3Kxp2tigg"
# hdkeypath=m/0'/0'/0' and 1'
addresses = [
"ecregtest:prvn9ycvgr5atuyh49sua3mapskh2mnnzg7t9yp6dt",
"ecregtest:pp3n087yx0njv2e5wcvltahfxqst7l66rutz8ceeat"]
# pkh subscripts corresponding to the above addresses
addresses += [
"ecregtest:qqdkxd2xnzftq2p8wr3sqqyw8lntap7tncs546s6pr",
"ecregtest:qpyryy83jfaec5u0gpzldk6teadsuq8zlyqh5l30uq",
]
desc = "sh(pkh(" + xpriv + "/0'/0'/*'" + "))"
self.log.info(
"Ranged descriptor import should fail without a specified range")
self.test_importmulti({"desc": descsum_create(desc),
"timestamp": "now"},
success=False,
error_code=-8,
error_message='Descriptor is ranged, please specify the range')
# Test importing of a ranged descriptor with xpriv
self.log.info(
"Should import the ranged descriptor with specified range as solvable")
self.test_importmulti({"desc": descsum_create(desc),
"timestamp": "now",
"range": 1},
success=True)
for address in addresses:
test_address(self.nodes[1], address, solvable=True, ismine=True)
self.test_importmulti({"desc": descsum_create(desc), "timestamp": "now", "range": -1},
success=False, error_code=-8, error_message='End of range is too high')
self.test_importmulti({"desc": descsum_create(desc), "timestamp": "now", "range": [-1, 10]},
success=False, error_code=-8, error_message='Range should be greater or equal than 0')
self.test_importmulti({"desc": descsum_create(desc), "timestamp": "now", "range": [(2 << 31 + 1) - 1000000, (2 << 31 + 1)]},
success=False, error_code=-8, error_message='End of range is too high')
self.test_importmulti({"desc": descsum_create(desc), "timestamp": "now", "range": [2, 1]},
success=False, error_code=-8, error_message='Range specified as [begin,end] must not have begin after end')
self.test_importmulti({"desc": descsum_create(desc), "timestamp": "now", "range": [0, 1000001]},
success=False, error_code=-8, error_message='Range is too large')
# Test importing a descriptor containing a WIF private key
wif_priv = "cTe1f5rdT8A8DFgVWTjyPwACsDPJM9ff4QngFxUixCSvvbg1x6sh"
# Note: in Core's test, this address refers to the sh(wpkh()) address.
# For a sh(pkh()) this does not refer to a key, so we use the subscript
# address instead, which returns the same privkey.
address = "ecregtest:qzh6rch6st3wjvp0h2ud87gn7xnxvf6h8yrk8gcg8t"
desc = "sh(pkh(" + wif_priv + "))"
self.log.info(
"Should import a descriptor with a WIF private key as spendable")
self.test_importmulti({"desc": descsum_create(desc),
"timestamp": "now"},
success=True)
test_address(self.nodes[1],
address,
solvable=True,
ismine=True)
# dump the private key to ensure it matches what was imported
privkey = self.nodes[1].dumpprivkey(address)
assert_equal(privkey, wif_priv)
# Test importing of a P2PKH address via descriptor
key = get_key(self.nodes[0])
p2pkh_label = "P2PKH descriptor import"
self.log.info("Should import a p2pkh address from descriptor")
self.test_importmulti({"desc": descsum_create("pkh(" + key.pubkey + ")"),
"timestamp": "now",
"label": p2pkh_label},
success=True,
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(self.nodes[1],
key.p2pkh_addr,
solvable=True,
ismine=False,
labels=[p2pkh_label])
# Test import fails if both desc and scriptPubKey are provided
key = get_key(self.nodes[0])
self.log.info(
"Import should fail if both scriptPubKey and desc are provided")
self.test_importmulti({"desc": descsum_create("pkh(" + key.pubkey + ")"),
"scriptPubKey": {"address": key.p2pkh_addr},
"timestamp": "now"},
success=False,
error_code=-8,
error_message='Both a descriptor and a scriptPubKey should not be provided.')
# Test import fails if neither desc nor scriptPubKey are present
key = get_key(self.nodes[0])
self.log.info(
"Import should fail if neither a descriptor nor a scriptPubKey are provided")
self.test_importmulti({"timestamp": "now"},
success=False,
error_code=-8,
error_message='Either a descriptor or scriptPubKey must be provided.')
# Test importing of a multisig via descriptor
key1 = get_key(self.nodes[0])
key2 = get_key(self.nodes[0])
self.log.info("Should import a 1-of-2 bare multisig from descriptor")
self.test_importmulti({"desc": descsum_create("multi(1," + key1.pubkey + "," + key2.pubkey + ")"),
"timestamp": "now"},
success=True,
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
self.log.info(
"Should not treat individual keys from the imported bare multisig as watchonly")
test_address(self.nodes[1],
key1.p2pkh_addr,
ismine=False,
iswatchonly=False)
# Import pubkeys with key origin info
self.log.info(
"Addresses should have hd keypath and master key id after import with key origin")
pub_addr = self.nodes[1].getnewaddress()
pub_addr = self.nodes[1].getnewaddress()
info = self.nodes[1].getaddressinfo(pub_addr)
pub = info['pubkey']
pub_keypath = info['hdkeypath']
pub_fpr = info['hdmasterfingerprint']
result = self.nodes[0].importmulti(
[{
'desc': descsum_create("pkh([" + pub_fpr + pub_keypath[1:] + "]" + pub + ")"),
"timestamp": "now",
}]
)
assert result[0]['success']
pub_import_info = self.nodes[0].getaddressinfo(pub_addr)
assert_equal(pub_import_info['hdmasterfingerprint'], pub_fpr)
assert_equal(pub_import_info['pubkey'], pub)
assert_equal(pub_import_info['hdkeypath'], pub_keypath)
# Import privkeys with key origin info
priv_addr = self.nodes[1].getnewaddress()
info = self.nodes[1].getaddressinfo(priv_addr)
priv = self.nodes[1].dumpprivkey(priv_addr)
priv_keypath = info['hdkeypath']
priv_fpr = info['hdmasterfingerprint']
result = self.nodes[0].importmulti(
[{
'desc': descsum_create("pkh([" + priv_fpr + priv_keypath[1:] + "]" + priv + ")"),
"timestamp": "now",
}]
)
assert result[0]['success']
priv_import_info = self.nodes[0].getaddressinfo(priv_addr)
assert_equal(priv_import_info['hdmasterfingerprint'], priv_fpr)
assert_equal(priv_import_info['hdkeypath'], priv_keypath)
# Make sure the key origin info are still there after a restart
self.stop_nodes()
self.start_nodes()
import_info = self.nodes[0].getaddressinfo(pub_addr)
assert_equal(import_info['hdmasterfingerprint'], pub_fpr)
assert_equal(import_info['hdkeypath'], pub_keypath)
import_info = self.nodes[0].getaddressinfo(priv_addr)
assert_equal(import_info['hdmasterfingerprint'], priv_fpr)
assert_equal(import_info['hdkeypath'], priv_keypath)
# Check legacy import does not import key origin info
self.log.info("Legacy imports don't have key origin info")
pub_addr = self.nodes[1].getnewaddress()
info = self.nodes[1].getaddressinfo(pub_addr)
pub = info['pubkey']
result = self.nodes[0].importmulti(
[{
'scriptPubKey': {'address': pub_addr},
'pubkeys': [pub],
"timestamp": "now",
}]
)
assert result[0]['success']
pub_import_info = self.nodes[0].getaddressinfo(pub_addr)
assert_equal(pub_import_info['pubkey'], pub)
assert 'hdmasterfingerprint' not in pub_import_info
assert 'hdkeypath' not in pub_import_info
# Import some public keys to the keypool of a no privkey wallet
self.log.info("Adding pubkey to keypool of disableprivkey wallet")
self.nodes[1].createwallet(
wallet_name="noprivkeys",
disable_private_keys=True)
wrpc = self.nodes[1].get_wallet_rpc("noprivkeys")
addr1 = self.nodes[0].getnewaddress()
addr2 = self.nodes[0].getnewaddress()
pub1 = self.nodes[0].getaddressinfo(addr1)['pubkey']
pub2 = self.nodes[0].getaddressinfo(addr2)['pubkey']
result = wrpc.importmulti(
[{
'desc': descsum_create('pkh(' + pub1 + ')'),
'keypool': True,
"timestamp": "now",
},
{
'desc': descsum_create('pkh(' + pub2 + ')'),
'keypool': True,
"timestamp": "now",
}]
)
assert result[0]['success']
assert result[1]['success']
assert_equal(wrpc.getwalletinfo()["keypoolsize"], 2)
newaddr1 = wrpc.getnewaddress()
assert_equal(addr1, newaddr1)
newaddr2 = wrpc.getnewaddress()
assert_equal(addr2, newaddr2)
# Import some public keys to the internal keypool of a no privkey
# wallet
self.log.info(
"Adding pubkey to internal keypool of disableprivkey wallet")
addr1 = self.nodes[0].getnewaddress()
addr2 = self.nodes[0].getnewaddress()
pub1 = self.nodes[0].getaddressinfo(addr1)['pubkey']
pub2 = self.nodes[0].getaddressinfo(addr2)['pubkey']
result = wrpc.importmulti(
[{
'desc': descsum_create('pkh(' + pub1 + ')'),
'keypool': True,
'internal': True,
"timestamp": "now",
},
{
'desc': descsum_create('pkh(' + pub2 + ')'),
'keypool': True,
'internal': True,
"timestamp": "now",
}]
)
assert result[0]['success']
assert result[1]['success']
assert_equal(wrpc.getwalletinfo()["keypoolsize_hd_internal"], 2)
newaddr1 = wrpc.getrawchangeaddress()
assert_equal(addr1, newaddr1)
newaddr2 = wrpc.getrawchangeaddress()
assert_equal(addr2, newaddr2)
# Import a multisig and make sure the keys don't go into the keypool
self.log.info(
'Imported scripts with pubkeys shoud not have their pubkeys go into the keypool')
addr1 = self.nodes[0].getnewaddress()
addr2 = self.nodes[0].getnewaddress()
pub1 = self.nodes[0].getaddressinfo(addr1)['pubkey']
pub2 = self.nodes[0].getaddressinfo(addr2)['pubkey']
result = wrpc.importmulti(
[{
'desc': descsum_create('sh(multi(2,' + pub1 + ',' + pub2 + '))'),
'keypool': True,
"timestamp": "now",
}]
)
assert result[0]['success']
assert_equal(wrpc.getwalletinfo()["keypoolsize"], 0)
# Cannot import those pubkeys to keypool of wallet with privkeys
self.log.info(
"Pubkeys cannot be added to the keypool of a wallet with private keys")
wrpc = self.nodes[1].get_wallet_rpc(self.default_wallet_name)
assert wrpc.getwalletinfo()['private_keys_enabled']
result = wrpc.importmulti(
[{
'desc': descsum_create('pkh(' + pub1 + ')'),
'keypool': True,
"timestamp": "now",
}]
)
assert_equal(result[0]['error']['code'], -8)
assert_equal(
result[0]['error']['message'],
"Keys can only be imported to the keypool when private keys are disabled")
# Make sure ranged imports import keys in order
self.log.info('Key ranges should be imported in order')
wrpc = self.nodes[1].get_wallet_rpc("noprivkeys")
assert_equal(wrpc.getwalletinfo()["keypoolsize"], 0)
assert_equal(wrpc.getwalletinfo()["private_keys_enabled"], False)
xpub = "tpubDAXcJ7s7ZwicqjprRaEWdPoHKrCS215qxGYxpusRLLmJuT69ZSicuGdSfyvyKpvUNYBW1s2U3NSrT6vrCYB9e6nZUEvrqnwXPF8ArTCRXMY"
addresses = [
'ecregtest:qp0v86h53rc92hjrlpwzpjtdlgzsxu25svv6g40fpl', # m/0'/0'/0
'ecregtest:qqasy0zlkdleqt4pkn8fs4ehm5gnnz6qpgdcpt90fq', # m/0'/0'/1
'ecregtest:qp0sp4wlhctvprqvdt2dgvqcfdjssu04xgey0l3syw', # m/0'/0'/2
'ecregtest:qrhn24tegn04cptfv4ldhtkduxq55zcwrycjfdj9vr', # m/0'/0'/3
'ecregtest:qzpqhett2uwltq803vrxv7zkqhft5vsnmcjeh50v0p', # m/0'/0'/4
]
result = wrpc.importmulti(
[{
'desc': descsum_create('pkh([80002067/0h/0h]' + xpub + '/*)'),
'keypool': True,
'timestamp': 'now',
'range': [0, 4],
}]
)
self.log.info(result)
for i in range(0, 5):
addr = wrpc.getnewaddress('')
assert_equal(addr, addresses[i])
if __name__ == '__main__':
ImportMultiTest().main()<|fim▁end|>
| |
<|file_name|>duration_pb2.py<|end_file_name|><|fim▁begin|># Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/duration.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/duration.proto',
package='google.protobuf',
syntax='proto3',
serialized_pb=_b('\n\x1egoogle/protobuf/duration.proto\x12\x0fgoogle.protobuf\"*\n\x08\x44uration\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42|\n\x13\x63om.google.protobufB\rDurationProtoP\x01Z*github.com/golang/protobuf/ptypes/duration\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
)<|fim▁hole|>_DURATION = _descriptor.Descriptor(
name='Duration',
full_name='google.protobuf.Duration',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='seconds', full_name='google.protobuf.Duration.seconds', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nanos', full_name='google.protobuf.Duration.nanos', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=51,
serialized_end=93,
)
DESCRIPTOR.message_types_by_name['Duration'] = _DURATION
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Duration = _reflection.GeneratedProtocolMessageType('Duration', (_message.Message,), dict(
DESCRIPTOR = _DURATION,
__module__ = 'google.protobuf.duration_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.Duration)
))
_sym_db.RegisterMessage(Duration)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.google.protobufB\rDurationProtoP\001Z*github.com/golang/protobuf/ptypes/duration\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'))
# @@protoc_insertion_point(module_scope)<|fim▁end|>
| |
<|file_name|>PojoPrefab.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2013 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.entitySystem.prefab.internal;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import org.terasology.assets.AssetType;
import org.terasology.assets.ResourceUrn;
import org.terasology.entitySystem.Component;<|fim▁hole|>import org.terasology.entitySystem.prefab.PrefabData;
import java.util.List;
import java.util.Map;
/**
* @author Immortius
*/
public class PojoPrefab extends Prefab {
private Prefab parent;
private Map<Class<? extends Component>, Component> componentMap;
private List<Prefab> children = Lists.newArrayList();
private boolean persisted;
private boolean alwaysRelevant = true;
public PojoPrefab(ResourceUrn urn, AssetType<?, PrefabData> assetType, PrefabData data) {
super(urn, assetType);
reload(data);
}
@Override
public Prefab getParent() {
return parent;
}
@Override
public List<Prefab> getChildren() {
return ImmutableList.copyOf(children);
}
@Override
public boolean isPersisted() {
return persisted;
}
@Override
public boolean isAlwaysRelevant() {
return alwaysRelevant;
}
@Override
public boolean exists() {
return true;
}
@Override
public boolean hasComponent(Class<? extends Component> component) {
return componentMap.containsKey(component);
}
@Override
public <T extends Component> T getComponent(Class<T> componentClass) {
return componentClass.cast(componentMap.get(componentClass));
}
@Override
public Iterable<Component> iterateComponents() {
return ImmutableList.copyOf(componentMap.values());
}
@Override
protected void doDispose() {
}
@Override
protected void doReload(PrefabData data) {
this.componentMap = ImmutableMap.copyOf(data.getComponents());
this.persisted = data.isPersisted();
this.alwaysRelevant = data.isAlwaysRelevant();
this.parent = data.getParent();
if (parent != null && parent instanceof PojoPrefab) {
((PojoPrefab) parent).children.add(this);
}
}
}<|fim▁end|>
|
import org.terasology.entitySystem.prefab.Prefab;
|
<|file_name|>LoteRpsConsultaTest.java<|end_file_name|><|fim▁begin|>package com.pablodomingos.classes.rps.servicos;
import java.io.IOException;
import org.apache.commons.io.IOUtils;
import org.junit.Assert;
import org.junit.Test;
import com.pablodomingos.classes.FabricaDeObjetosFake;
public class LoteRpsConsultaTest {
@Test
public void xmlDeveSerGeradoCorretamente() throws IOException{
String xmlTest = IOUtils.toString(getClass().getClassLoader().getResourceAsStream("loteRPSConsulta.xml"));
LoteRpsConsulta consultaLote = new LoteRpsConsulta("AP1057893n16X103sfhF4RPm", FabricaDeObjetosFake.getRpsPrestador());
String xml = consultaLote.converterParaXml();
Assert.assertEquals(xml, xmlTest);
}
<|fim▁hole|>
}<|fim▁end|>
| |
<|file_name|>IPostComponentState.ts<|end_file_name|><|fim▁begin|>export interface IPostComponentState {
/**
* Post text
*/
text: string
/**
* It's true if whole the text post is visible
*/
readMoreState: boolean
/**
* Handle open comment from parent component
*/
openComments: boolean
/**
* If it's true, share dialog will be open
*/
shareOpen: boolean
/**
* If it's true comment will be disabled on post
*/
disableComments: boolean
/**
* If it's true share will be disabled on post
*/
disableSharing: boolean
/**
* Title of share post
*/
shareTitle: string
/**
* If it's true, post link will be visible in share post dialog
*/
openCopyLink: boolean
/**
* If it's true, post write will be open
*/
openPostWrite: boolean
/**
* Open the comment group
*/
openCommentGroup?: () => void
/**
* Post menu anchor element
*/
postMenuAnchorEl?: any
<|fim▁hole|> /**
* Whether post menu open
*/
isPostMenuOpen?: boolean
}<|fim▁end|>
| |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use std::cell::Cell;
use std::rt::io::{Reader, Writer, Listener};
use std::rt::io::net::tcp::*;
use std::rt::io::net::ip::*;
fn allocate_buffer(buffer_size: uint) -> ~[u8] {
let mut buffer: ~[u8] = std::vec::with_capacity(buffer_size);
unsafe { std::vec::raw::set_len(&mut buffer, buffer_size); }
return buffer<|fim▁hole|>
fn main() {
let listen_address = Ipv4(127, 0, 0, 1, 8000);
let mut listener = match TcpListener::bind(listen_address) {
Some(x) => x,
None => {
fail!("Unable to bind to " + listen_address.to_str());
}
};
loop {
let stream = Cell::new(listener.accept().unwrap());
do spawn {
let mut dest_buffer = allocate_buffer(512);
let mut stream = stream.take();
loop {
match stream.read(dest_buffer) {
Some(x) => stream.write(dest_buffer.slice_to(x)),
None => {
break
},
};
}
}
}
}<|fim▁end|>
|
}
|
<|file_name|>tile.rs<|end_file_name|><|fim▁begin|>use ndarray::{ArrayBase, DataMut, NdIndex, IntoDimension};
#[derive(Copy, Clone, Debug)]
#[cfg_attr(
feature = "serde",
derive(Serialize, Deserialize),
serde(crate = "serde_crate")
)]
pub struct Tile<D: ndarray::Dimension, I: NdIndex<D>> {
dim: D,
active: Option<(I, f64)>,
}
impl<D: ndarray::Dimension, I: NdIndex<D>> Tile<D, I> {
pub fn new<T: IntoDimension<Dim = D>>(dim: T, active: Option<(I, f64)>) -> Self {
Tile {
dim: dim.into_dimension(),
active,
}
}
}
impl<D: ndarray::Dimension, I: NdIndex<D> + Clone> crate::params::Buffer for Tile<D, I> {
type Dim = D;<|fim▁hole|> fn addto<E: DataMut<Elem = f64>>(&self, arr: &mut ArrayBase<E, Self::Dim>) {
if let Some((idx, activation)) = &self.active {
arr[idx.clone()] += activation;
}
}
fn scaled_addto<E: DataMut<Elem = f64>>(&self, alpha: f64, arr: &mut ArrayBase<E, Self::Dim>) {
if let Some((idx, activation)) = &self.active {
arr[idx.clone()] += alpha * activation;
}
}
}
impl<D, I> crate::params::BufferMut for Tile<D, I>
where
D: ndarray::Dimension,
I: NdIndex<D> + PartialEq + Clone,
{
fn zeros<T: IntoDimension<Dim = D>>(dim: T) -> Self { Tile::new(dim, None) }
fn map(&self, f: impl Fn(f64) -> f64) -> Self {
self.clone().map_into(f)
}
fn map_into(self, f: impl Fn(f64) -> f64) -> Self {
Tile {
dim: self.dim,
active: self.active.map(|(idx, a)| (idx, f(a))),
}
}
fn map_inplace(&mut self, f: impl Fn(f64) -> f64) {
if let Some((_, x)) = &mut self.active {
*x = f(*x);
}
}
fn merge(&self, other: &Self, f: impl Fn(f64, f64) -> f64) -> Self {
self.clone().merge_into(other, f)
}
fn merge_into(mut self, other: &Self, f: impl Fn(f64, f64) -> f64) -> Self {
self.merge_inplace(other, f);
self
}
fn merge_inplace(&mut self, other: &Self, f: impl Fn(f64, f64) -> f64) {
if self.dim != other.dim {
panic!("Incompatible buffers shapes.")
}
match (&mut self.active, &other.active) {
(Some((i, x)), Some((j, y))) if i == j => *x = f(*x, *y),
_ => panic!("Incompatible buffer indices."),
}
}
}<|fim▁end|>
|
fn raw_dim(&self) -> D { self.dim.clone() }
|
<|file_name|>random_search.py<|end_file_name|><|fim▁begin|>"""
Class for parallelizing RandomizedSearchCV jobs in scikit-learn
"""
from sklearn.model_selection import ParameterSampler
from spark_sklearn.base_search import SparkBaseSearchCV
class RandomizedSearchCV(SparkBaseSearchCV):
"""Randomized search on hyper parameters.
RandomizedSearchCV implements a "fit" and a "score" method.
It also implements "predict", "predict_proba", "decision_function",
"transform" and "inverse_transform" if they are implemented in the
estimator used.
The parameters of the estimator used to apply these methods are optimized
by cross-validated search over parameter settings.
In contrast to GridSearchCV, not all combinations of parameter values are tried
out, but rather a fixed number of parameter settings is sampled from the specified
distributions. The number of parameter settings that are tried is
given by n_iter.
If all parameters are presented as a list,
sampling without replacement is performed. If at least one parameter
is given as a distribution, sampling with replacement is used for all parameters.
It is highly recommended to use continuous distributions for continuous
parameters.
Parameters
----------
estimator : estimator object.
A object of that type is instantiated for each grid point.
This is assumed to implement the scikit-learn estimator interface.
Either estimator needs to provide a ``score`` function,
or ``scoring`` must be passed.
param_distributions : dict
Dictionary with parameters names (string) as keys and distributions
or lists of parameters to try. Distributions must provide a ``rvs``
method for sampling (such as those from scipy.stats.distributions).
If a list is given, it is sampled uniformly.
n_iter : int, default=10
Number of parameter settings that are sampled. n_iter trades
off runtime vs quality of the solution.
scoring : string, callable or None, default=None
A string (see model evaluation documentation) or
a scorer callable object / function with signature
``scorer(estimator, X, y)``.
If ``None``, the ``score`` method of the estimator is used.
fit_params : dict, optional
Parameters to pass to the fit method.
n_jobs : int, default=1
Number of jobs to run in parallel.
pre_dispatch : int, or string, optional
Not used; exists for scikit-learn compatibility.
iid : boolean, default=True
If True, the data is assumed to be identically distributed across
the folds, and the loss minimized is the total loss per sample,
and not the mean loss across the folds.
cv : int, cross-validation generator or an iterable, optional
Determines the cross-validation splitting strategy.
Possible inputs for cv are:
- None, to use the default 3-fold cross-validation,
- integer, to specify the number of folds.
- An object to be used as a cross-validation generator.
- An iterable yielding train/test splits.
For integer/None inputs, if the estimator is a classifier and ``y`` is
either binary or multiclass,
:class:`sklearn.model_selection.StratifiedKFold` is used. In all
other cases, :class:`sklearn.model_selection.KFold` is used.
refit : boolean, default=True
Refit the best estimator with the entire dataset.
If "False", it is impossible to make predictions using
this RandomizedSearchCV instance after fitting.
verbose : integer
Controls the verbosity: the higher, the more messages.
random_state : int, RandomState instance or None, optional, default=None
Pseudo random number generator state used for random uniform sampling
from lists of possible values instead of scipy.stats distributions.
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
error_score : 'raise' (default) or numeric
Value to assign to the score if an error occurs in estimator fitting.
If set to 'raise', the error is raised. If a numeric value is given,
FitFailedWarning is raised. This parameter does not affect the refit
step, which will always raise the error.
Attributes
----------
cv_results_ : dict of numpy (masked) ndarrays
A dict with keys as column headers and values as columns, that can be
imported into a pandas ``DataFrame``.
For instance the below given table
+------------+-----------+------------+-----------------+---+---------+
|param_kernel|param_gamma|param_degree|split0_test_score|...|rank_....|
+============+===========+============+=================+===+=========+
| 'poly' | -- | 2 | 0.8 |...| 2 |
+------------+-----------+------------+-----------------+---+---------+
| 'poly' | -- | 3 | 0.7 |...| 4 |
+------------+-----------+------------+-----------------+---+---------+
| 'rbf' | 0.1 | -- | 0.8 |...| 3 |
+------------+-----------+------------+-----------------+---+---------+
| 'rbf' | 0.2 | -- | 0.9 |...| 1 |
+------------+-----------+------------+-----------------+---+---------+
will be represented by a ``cv_results_`` dict of::
{
'param_kernel': masked_array(data = ['poly', 'poly', 'rbf', 'rbf'],
mask = [False False False False]...)<|fim▁hole|> 'param_degree': masked_array(data = [2.0 3.0 -- --],
mask = [False False True True]...),
'split0_test_score' : [0.8, 0.7, 0.8, 0.9],
'split1_test_score' : [0.82, 0.5, 0.7, 0.78],
'mean_test_score' : [0.81, 0.60, 0.75, 0.82],
'std_test_score' : [0.02, 0.01, 0.03, 0.03],
'rank_test_score' : [2, 4, 3, 1],
'split0_train_score' : [0.8, 0.9, 0.7],
'split1_train_score' : [0.82, 0.5, 0.7],
'mean_train_score' : [0.81, 0.7, 0.7],
'std_train_score' : [0.03, 0.03, 0.04],
'mean_fit_time' : [0.73, 0.63, 0.43, 0.49],
'std_fit_time' : [0.01, 0.02, 0.01, 0.01],
'mean_score_time' : [0.007, 0.06, 0.04, 0.04],
'std_score_time' : [0.001, 0.002, 0.003, 0.005],
'params' : [{'kernel': 'poly', 'degree': 2}, ...],
}
NOTE that the key ``'params'`` is used to store a list of parameter
settings dict for all the parameter candidates.
The ``mean_fit_time``, ``std_fit_time``, ``mean_score_time`` and
``std_score_time`` are all in seconds.
best_estimator_ : estimator
Estimator that was chosen by the search, i.e. estimator
which gave highest score (or smallest loss if specified)
on the left out data. Not available if refit=False.
best_score_ : float
Score of best_estimator on the left out data.
best_params_ : dict
Parameter setting that gave the best results on the hold out data.
best_index_ : int
The index (of the ``cv_results_`` arrays) which corresponds to the best
candidate parameter setting.
The dict at ``search.cv_results_['params'][search.best_index_]`` gives
the parameter setting for the best model, that gives the highest
mean score (``search.best_score_``).
Notes
-----
The parameters selected are those that maximize the score of the held-out
data, according to the scoring parameter.
See Also
--------
:class:`GridSearchCV`:
Does exhaustive search over a grid of parameters.
:class:`ParameterSampler`:
A generator over parameter settings, constructed from
param_distributions.
"""
def __init__(self, sc, estimator, param_distributions, n_iter=10, scoring=None,
fit_params=None, n_jobs=1, iid=True, refit=True, cv=None,
verbose=0, pre_dispatch='2*n_jobs', random_state=None,
error_score='raise'):
self.param_distributions = param_distributions
self.n_iter = n_iter
self.random_state = random_state
super(RandomizedSearchCV, self).__init__(
estimator=estimator, scoring=scoring, fit_params=fit_params,
n_jobs=n_jobs, iid=iid, refit=refit, cv=cv, verbose=verbose,
pre_dispatch=pre_dispatch, error_score=error_score)
self.fit_params = fit_params if fit_params is not None else {}
self.sc = sc
self.cv_results_ = None
def fit(self, X, y=None, groups=None):
"""Run fit on the estimator with randomly drawn parameters.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Training vector, where n_samples in the number of samples and
n_features is the number of features.
y : array-like, shape = [n_samples] or [n_samples, n_output], optional
Target relative to X for classification or regression;
None for unsupervised learning.
groups : array-like, with shape (n_samples,), optional
Group labels for the samples used while splitting the dataset into
train/test set.
"""
sampled_params = ParameterSampler(self.param_distributions,
self.n_iter,
random_state=self.random_state)
return self._fit(X, y, groups, sampled_params)<|fim▁end|>
|
'param_gamma': masked_array(data = [-- -- 0.1 0.2],
mask = [ True True False False]...),
|
<|file_name|>test_h7PolarRead.py<|end_file_name|><|fim▁begin|>import time
import bluetooth
from h7PolarDataPoints import h7PolarDataPoint
from h7PolarDataPointReader import h7PolarDataPointReader
if __name__ == '__main__':
h7PolarDataPointReader = h7PolarDataPointReader()
h7PolarDataPointReader.start()<|fim▁hole|><|fim▁end|>
|
while(True):
dataPoint = h7PolarDataPointReader.readNextDataPoint()
print (dataPoint)
|
<|file_name|>augmentation_transforms.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright 2019 The Google UDA Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Transforms used in the Augmentation Policies.
Copied from AutoAugment: https://github.com/tensorflow/models/blob/master/research/autoaugment/
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import random
import numpy as np
# pylint:disable=g-multiple-import
from PIL import ImageOps, ImageEnhance, ImageFilter, Image
# pylint:enable=g-multiple-import
import tensorflow as tf
FLAGS = tf.flags.FLAGS
IMAGE_SIZE = 32
# What is the dataset mean and std of the images on the training set
PARAMETER_MAX = 10 # What is the max 'level' a transform could be predicted
def get_mean_and_std():
if FLAGS.task_name == "cifar10":
means = [0.49139968, 0.48215841, 0.44653091]
stds = [0.24703223, 0.24348513, 0.26158784]
elif FLAGS.task_name == "svhn":
means = [0.4376821, 0.4437697, 0.47280442]
stds = [0.19803012, 0.20101562, 0.19703614]
else:
assert False
return means, stds
def _width_height_from_img_shape(img_shape):
"""`img_shape` in autoaugment is (height, width)."""
return (img_shape[1], img_shape[0])
def random_flip(x):
"""Flip the input x horizontally with 50% probability."""
if np.random.rand(1)[0] > 0.5:
return np.fliplr(x)
return x
def zero_pad_and_crop(img, amount=4):
"""Zero pad by `amount` zero pixels on each side then take a random crop.
Args:
img: numpy image that will be zero padded and cropped.
amount: amount of zeros to pad `img` with horizontally and verically.
Returns:
The cropped zero padded img. The returned numpy array will be of the same
shape as `img`.
"""
padded_img = np.zeros((img.shape[0] + amount * 2, img.shape[1] + amount * 2,
img.shape[2]))
padded_img[amount:img.shape[0] + amount, amount:
img.shape[1] + amount, :] = img
top = np.random.randint(low=0, high=2 * amount)
left = np.random.randint(low=0, high=2 * amount)
new_img = padded_img[top:top + img.shape[0], left:left + img.shape[1], :]
return new_img
def create_cutout_mask(img_height, img_width, num_channels, size):
"""Creates a zero mask used for cutout of shape `img_height` x `img_width`.
Args:
img_height: Height of image cutout mask will be applied to.
img_width: Width of image cutout mask will be applied to.
num_channels: Number of channels in the image.
size: Size of the zeros mask.
Returns:
A mask of shape `img_height` x `img_width` with all ones except for a
square of zeros of shape `size` x `size`. This mask is meant to be
elementwise multiplied with the original image. Additionally returns
the `upper_coord` and `lower_coord` which specify where the cutout mask
will be applied.
"""
assert img_height == img_width
# Sample center where cutout mask will be applied
height_loc = np.random.randint(low=0, high=img_height)
width_loc = np.random.randint(low=0, high=img_width)
# Determine upper right and lower left corners of patch
upper_coord = (max(0, height_loc - size // 2), max(0, width_loc - size // 2))
lower_coord = (min(img_height, height_loc + size // 2),
min(img_width, width_loc + size // 2))
mask_height = lower_coord[0] - upper_coord[0]
mask_width = lower_coord[1] - upper_coord[1]
assert mask_height > 0
assert mask_width > 0
mask = np.ones((img_height, img_width, num_channels))
zeros = np.zeros((mask_height, mask_width, num_channels))
mask[upper_coord[0]:lower_coord[0], upper_coord[1]:lower_coord[1], :] = (
zeros)
return mask, upper_coord, lower_coord
def cutout_numpy(img, size=16):
"""Apply cutout with mask of shape `size` x `size` to `img`.
The cutout operation is from the paper https://arxiv.org/abs/1708.04552.
This operation applies a `size`x`size` mask of zeros to a random location
within `img`.
Args:
img: Numpy image that cutout will be applied to.
size: Height/width of the cutout mask that will be
Returns:
A numpy tensor that is the result of applying the cutout mask to `img`.
"""
img_height, img_width, num_channels = (img.shape[0], img.shape[1],
img.shape[2])
assert len(img.shape) == 3
mask, _, _ = create_cutout_mask(img_height, img_width, num_channels, size)
return img * mask
def float_parameter(level, maxval):
"""Helper function to scale `val` between 0 and maxval .
Args:
level: Level of the operation that will be between [0, `PARAMETER_MAX`].
maxval: Maximum value that the operation can have. This will be scaled
to level/PARAMETER_MAX.
Returns:
A float that results from scaling `maxval` according to `level`.
"""
return float(level) * maxval / PARAMETER_MAX
def int_parameter(level, maxval):
"""Helper function to scale `val` between 0 and maxval .
Args:
level: Level of the operation that will be between [0, `PARAMETER_MAX`].
maxval: Maximum value that the operation can have. This will be scaled
to level/PARAMETER_MAX.
Returns:
An int that results from scaling `maxval` according to `level`.
"""
return int(level * maxval / PARAMETER_MAX)
def pil_wrap(img, use_mean_std):
"""Convert the `img` numpy tensor to a PIL Image."""
if use_mean_std:
MEANS, STDS = get_mean_and_std()
else:
MEANS = [0, 0, 0]
STDS = [1, 1, 1]
img_ori = (img * STDS + MEANS) * 255
return Image.fromarray(
np.uint8((img * STDS + MEANS) * 255.0)).convert('RGBA')
def pil_unwrap(pil_img, use_mean_std, img_shape):
"""Converts the PIL img to a numpy array."""
if use_mean_std:
MEANS, STDS = get_mean_and_std()
else:
MEANS = [0, 0, 0]
STDS = [1, 1, 1]
pic_array = np.array(pil_img.getdata()).reshape((img_shape[0], img_shape[1], 4)) / 255.0
i1, i2 = np.where(pic_array[:, :, 3] == 0)
pic_array = (pic_array[:, :, :3] - MEANS) / STDS
pic_array[i1, i2] = [0, 0, 0]
return pic_array
def apply_policy(policy, img, use_mean_std=True):
"""Apply the `policy` to the numpy `img`.
Args:
policy: A list of tuples with the form (name, probability, level) where
`name` is the name of the augmentation operation to apply, `probability`
is the probability of applying the operation and `level` is what strength
the operation to apply.
img: Numpy image that will have `policy` applied to it.
Returns:
The result of applying `policy` to `img`.
"""
img_shape = img.shape
pil_img = pil_wrap(img, use_mean_std)
for xform in policy:
assert len(xform) == 3
name, probability, level = xform
xform_fn = NAME_TO_TRANSFORM[name].pil_transformer(
probability, level, img_shape)
pil_img = xform_fn(pil_img)
return pil_unwrap(pil_img, use_mean_std, img_shape)
class TransformFunction(object):
"""Wraps the Transform function for pretty printing options."""
def __init__(self, func, name):
self.f = func
self.name = name
def __repr__(self):
return '<' + self.name + '>'
def __call__(self, pil_img):
return self.f(pil_img)
class TransformT(object):
"""Each instance of this class represents a specific transform."""
def __init__(self, name, xform_fn):
self.name = name
self.xform = xform_fn
def pil_transformer(self, probability, level, img_shape):
def return_function(im):
if random.random() < probability:
im = self.xform(im, level, img_shape)
return im
name = self.name + '({:.1f},{})'.format(probability, level)
return TransformFunction(return_function, name)
################## Transform Functions ##################
identity = TransformT('identity', lambda pil_img, level, _: pil_img)
flip_lr = TransformT(
'FlipLR',
lambda pil_img, level, _: pil_img.transpose(Image.FLIP_LEFT_RIGHT))
flip_ud = TransformT(
'FlipUD',
lambda pil_img, level, _: pil_img.transpose(Image.FLIP_TOP_BOTTOM))
# pylint:disable=g-long-lambda
auto_contrast = TransformT(
'AutoContrast',
lambda pil_img, level, _: ImageOps.autocontrast(
pil_img.convert('RGB')).convert('RGBA'))
equalize = TransformT(
'Equalize',
lambda pil_img, level, _: ImageOps.equalize(
pil_img.convert('RGB')).convert('RGBA'))
invert = TransformT(
'Invert',
lambda pil_img, level, _: ImageOps.invert(
pil_img.convert('RGB')).convert('RGBA'))
# pylint:enable=g-long-lambda
blur = TransformT(
'Blur', lambda pil_img, level, _: pil_img.filter(ImageFilter.BLUR))
smooth = TransformT(
'Smooth',
lambda pil_img, level, _: pil_img.filter(ImageFilter.SMOOTH))
def _rotate_impl(pil_img, level, _):
"""Rotates `pil_img` from -30 to 30 degrees depending on `level`."""
degrees = int_parameter(level, 30)
if random.random() > 0.5:
degrees = -degrees
return pil_img.rotate(degrees)
rotate = TransformT('Rotate', _rotate_impl)
def _posterize_impl(pil_img, level, _):
"""Applies PIL Posterize to `pil_img`."""
level = int_parameter(level, 4)
return ImageOps.posterize(pil_img.convert('RGB'), 4 - level).convert('RGBA')
posterize = TransformT('Posterize', _posterize_impl)
def _shear_x_impl(pil_img, level, img_shape):
"""Applies PIL ShearX to `pil_img`.
The ShearX operation shears the image along the horizontal axis with `level`
magnitude.
Args:
pil_img: Image in PIL object.
level: Strength of the operation specified as an Integer from
[0, `PARAMETER_MAX`].
Returns:
A PIL Image that has had ShearX applied to it.
"""
level = float_parameter(level, 0.3)
if random.random() > 0.5:
level = -level
return pil_img.transform(
_width_height_from_img_shape(img_shape),
Image.AFFINE,
(1, level, 0, 0, 1, 0))
shear_x = TransformT('ShearX', _shear_x_impl)
def _shear_y_impl(pil_img, level, img_shape):
"""Applies PIL ShearY to `pil_img`.
The ShearY operation shears the image along the vertical axis with `level`
magnitude.
Args:
pil_img: Image in PIL object.
level: Strength of the operation specified as an Integer from
[0, `PARAMETER_MAX`].
Returns:
A PIL Image that has had ShearX applied to it.
"""
level = float_parameter(level, 0.3)
if random.random() > 0.5:
level = -level
return pil_img.transform(
_width_height_from_img_shape(img_shape),
Image.AFFINE,
(1, 0, 0, level, 1, 0))
shear_y = TransformT('ShearY', _shear_y_impl)
def _translate_x_impl(pil_img, level, img_shape):
"""Applies PIL TranslateX to `pil_img`.
Translate the image in the horizontal direction by `level`
number of pixels.
Args:
pil_img: Image in PIL object.
level: Strength of the operation specified as an Integer from
[0, `PARAMETER_MAX`].
Returns:
A PIL Image that has had TranslateX applied to it.
"""
level = int_parameter(level, 10)
if random.random() > 0.5:
level = -level
return pil_img.transform(
_width_height_from_img_shape(img_shape),
Image.AFFINE,
(1, 0, level, 0, 1, 0))
translate_x = TransformT('TranslateX', _translate_x_impl)
def _translate_y_impl(pil_img, level, img_shape):
"""Applies PIL TranslateY to `pil_img`.
Translate the image in the vertical direction by `level`
number of pixels.
Args:<|fim▁hole|>
Returns:
A PIL Image that has had TranslateY applied to it.
"""
level = int_parameter(level, 10)
if random.random() > 0.5:
level = -level
return pil_img.transform(
_width_height_from_img_shape(img_shape),
Image.AFFINE,
(1, 0, 0, 0, 1, level))
translate_y = TransformT('TranslateY', _translate_y_impl)
def _crop_impl(pil_img, level, img_shape, interpolation=Image.BILINEAR):
"""Applies a crop to `pil_img` with the size depending on the `level`."""
cropped = pil_img.crop((level, level, img_shape[0] - level, img_shape[1] - level))
resized = cropped.resize((img_shape[0], img_shape[1]), interpolation)
return resized
crop_bilinear = TransformT('CropBilinear', _crop_impl)
def _solarize_impl(pil_img, level, _):
"""Applies PIL Solarize to `pil_img`.
Translate the image in the vertical direction by `level`
number of pixels.
Args:
pil_img: Image in PIL object.
level: Strength of the operation specified as an Integer from
[0, `PARAMETER_MAX`].
Returns:
A PIL Image that has had Solarize applied to it.
"""
level = int_parameter(level, 256)
return ImageOps.solarize(pil_img.convert('RGB'), 256 - level).convert('RGBA')
solarize = TransformT('Solarize', _solarize_impl)
def _cutout_pil_impl(pil_img, level, img_shape):
"""Apply cutout to pil_img at the specified level."""
size = int_parameter(level, 20)
if size <= 0:
return pil_img
img_height, img_width, num_channels = (img_shape[0], img_shape[1], 3)
_, upper_coord, lower_coord = (
create_cutout_mask(img_height, img_width, num_channels, size))
pixels = pil_img.load() # create the pixel map
for i in range(upper_coord[0], lower_coord[0]): # for every col:
for j in range(upper_coord[1], lower_coord[1]): # For every row
pixels[i, j] = (125, 122, 113, 0) # set the colour accordingly
return pil_img
cutout = TransformT('Cutout', _cutout_pil_impl)
def _enhancer_impl(enhancer):
"""Sets level to be between 0.1 and 1.8 for ImageEnhance transforms of PIL."""
def impl(pil_img, level, _):
v = float_parameter(level, 1.8) + .1 # going to 0 just destroys it
return enhancer(pil_img).enhance(v)
return impl
color = TransformT('Color', _enhancer_impl(ImageEnhance.Color))
contrast = TransformT('Contrast', _enhancer_impl(ImageEnhance.Contrast))
brightness = TransformT('Brightness', _enhancer_impl(
ImageEnhance.Brightness))
sharpness = TransformT('Sharpness', _enhancer_impl(ImageEnhance.Sharpness))
ALL_TRANSFORMS = [
flip_lr,
flip_ud,
auto_contrast,
equalize,
invert,
rotate,
posterize,
crop_bilinear,
solarize,
color,
contrast,
brightness,
sharpness,
shear_x,
shear_y,
translate_x,
translate_y,
cutout,
blur,
smooth
]
NAME_TO_TRANSFORM = {t.name: t for t in ALL_TRANSFORMS}
TRANSFORM_NAMES = NAME_TO_TRANSFORM.keys()<|fim▁end|>
|
pil_img: Image in PIL object.
level: Strength of the operation specified as an Integer from
[0, `PARAMETER_MAX`].
|
<|file_name|>test_unit_store.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import unittest
import zipfile
import config
import store
import common
from db import db_session, Source
import crypto_util
# Set environment variable so config.py uses a test environment
os.environ['SECUREDROP_ENV'] = 'test'
class TestStore(unittest.TestCase):
"""The set of tests for store.py."""
def setUp(self):
common.shared_setup()
def tearDown(self):
common.shared_teardown()
def test_verify(self):
with self.assertRaises(store.PathException):
store.verify(os.path.join(config.STORE_DIR, '..', 'etc', 'passwd'))<|fim▁hole|>
def test_get_zip(self):
sid = 'EQZGCJBRGISGOTC2NZVWG6LILJBHEV3CINNEWSCLLFTUWZJPKJFECLS2NZ4G4U3QOZCFKTTPNZMVIWDCJBBHMUDBGFHXCQ3R'
source = Source(sid, crypto_util.display_id())
db_session.add(source)
db_session.commit()
files = ['1-abc1-msg.gpg', '2-abc2-msg.gpg']
filenames = common.setup_test_docs(sid, files)
archive = zipfile.ZipFile(store.get_bulk_archive(filenames))
archivefile_contents = archive.namelist()
for archived_file, actual_file in zip(archivefile_contents, filenames):
actual_file_content = open(actual_file).read()
zipped_file_content = archive.read(archived_file)
self.assertEquals(zipped_file_content, actual_file_content)
if __name__ == "__main__":
unittest.main(verbosity=2)<|fim▁end|>
|
with self.assertRaises(store.PathException):
store.verify(config.STORE_DIR + "_backup")
|
<|file_name|>ListAction.java<|end_file_name|><|fim▁begin|>/*
* SonarQube
* Copyright (C) 2009-2017 SonarSource SA
* mailto:info AT sonarsource DOT com
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package org.sonar.server.language.ws;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.io.Resources;
import org.sonar.api.resources.Language;
import org.sonar.api.resources.Languages;
import org.sonar.api.server.ws.Request;
import org.sonar.api.server.ws.RequestHandler;
import org.sonar.api.server.ws.Response;
import org.sonar.api.server.ws.WebService;
import org.sonar.api.server.ws.WebService.NewAction;
import org.sonar.api.server.ws.WebService.Param;
import org.sonar.api.utils.text.JsonWriter;
import javax.annotation.Nullable;
import java.util.Collection;
import java.util.List;
import java.util.SortedMap;
import java.util.regex.Pattern;
/**
* @since 5.1
*/
public class ListAction implements RequestHandler {<|fim▁hole|>
public ListAction(Languages languages) {
this.languages = languages;
}
@Override
public void handle(Request request, Response response) throws Exception {
String query = request.param(Param.TEXT_QUERY);
int pageSize = request.mandatoryParamAsInt("ps");
JsonWriter json = response.newJsonWriter().beginObject().name("languages").beginArray();
for (Language language : listMatchingLanguages(query, pageSize)) {
json.beginObject().prop("key", language.getKey()).prop("name", language.getName()).endObject();
}
json.endArray().endObject().close();
}
void define(WebService.NewController controller) {
NewAction action = controller.createAction("list")
.setDescription("List supported programming languages")
.setSince("5.1")
.setHandler(this)
.setResponseExample(Resources.getResource(getClass(), "example-list.json"));
action.createParam(Param.TEXT_QUERY)
.setDescription("A pattern to match language keys/names against")
.setExampleValue("java");
action.createParam("ps")
.setDescription("The size of the list to return, 0 for all languages")
.setExampleValue("25")
.setDefaultValue("0");
}
private Collection<Language> listMatchingLanguages(@Nullable String query, int pageSize) {
Pattern pattern = Pattern.compile(query == null ? MATCH_ALL : MATCH_ALL + Pattern.quote(query) + MATCH_ALL, Pattern.CASE_INSENSITIVE);
SortedMap<String, Language> languagesByName = Maps.newTreeMap();
for (Language lang : languages.all()) {
if (pattern.matcher(lang.getKey()).matches() || pattern.matcher(lang.getName()).matches()) {
languagesByName.put(lang.getName(), lang);
}
}
List<Language> result = Lists.newArrayList(languagesByName.values());
if (pageSize > 0 && pageSize < result.size()) {
result = result.subList(0, pageSize);
}
return result;
}
}<|fim▁end|>
|
private static final String MATCH_ALL = ".*";
private final Languages languages;
|
<|file_name|>PreviewResponseParserTest.java<|end_file_name|><|fim▁begin|>package com.box.boxjavalibv2.responseparsers;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import junit.framework.Assert;
import org.apache.commons.io.IOUtils;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.entity.StringEntity;
import org.apache.http.message.BasicHeader;
import org.apache.http.message.BasicHttpResponse;
import org.easymock.EasyMock;
import org.junit.Before;
import org.junit.Test;
import com.box.boxjavalibv2.dao.BoxPreview;
import com.box.restclientv2.exceptions.BoxRestException;
import com.box.restclientv2.responses.DefaultBoxResponse;
public class PreviewResponseParserTest {
private final static String PREVIEW_MOCK_CONTENT = "arbitrary string";
private final static String LINK_VALUE = "<https://api.box.com/2.0/files/5000369410/preview.png?page=%d>; rel=\"first\", <https://api.box.com/2.0/files/5000369410/preview.png?page=%d>; rel=\"last\"";
private final static String LINK_NAME = "Link";
private final static int firstPage = 1;
private final static int lastPage = 2;
private final static double length = 213;
private BoxPreview preview;
private DefaultBoxResponse boxResponse;
private HttpResponse response;
private HttpEntity entity;
private InputStream inputStream;
private Header header;
@Before
public void setUp() {
preview = new BoxPreview();
preview.setFirstPage(firstPage);
preview.setLastPage(lastPage);
boxResponse = EasyMock.createMock(DefaultBoxResponse.class);
response = EasyMock.createMock(BasicHttpResponse.class);
entity = EasyMock.createMock(StringEntity.class);
header = new BasicHeader("Link", String.format(LINK_VALUE, firstPage, lastPage));
}
@Test
public void testCanParsePreview() throws IllegalStateException, IOException, BoxRestException {
EasyMock.reset(boxResponse, response, entity);
inputStream = new ByteArrayInputStream(PREVIEW_MOCK_CONTENT.getBytes());
EasyMock.expect(boxResponse.getHttpResponse()).andReturn(response);
EasyMock.expect(boxResponse.getContentLength()).andReturn(length);
EasyMock.expect(response.getEntity()).andReturn(entity);
EasyMock.expect(entity.getContent()).andReturn(inputStream);
EasyMock.expect(boxResponse.getHttpResponse()).andReturn(response);
EasyMock.expect(response.getFirstHeader("Link")).andReturn(header);
EasyMock.replay(boxResponse, response, entity);
PreviewResponseParser parser = new PreviewResponseParser();
Object object = parser.parse(boxResponse);
Assert.assertEquals(BoxPreview.class, object.getClass());
<|fim▁hole|> Assert.assertEquals(length, parsed.getContentLength());
Assert.assertEquals(firstPage, parsed.getFirstPage().intValue());
Assert.assertEquals(lastPage, parsed.getLastPage().intValue());
Assert.assertEquals(PREVIEW_MOCK_CONTENT, IOUtils.toString(parsed.getContent()));
EasyMock.verify(boxResponse, response, entity);
}
}<|fim▁end|>
|
BoxPreview parsed = (BoxPreview) object;
|
<|file_name|>JohnGLRenderer.java<|end_file_name|><|fim▁begin|>package com.johnsoft.library.swing.component.gl;
import javax.media.opengl.GL;
import javax.media.opengl.GL2;
import javax.media.opengl.GLAutoDrawable;
import javax.media.opengl.GLEventListener;
import javax.media.opengl.fixedfunc.GLMatrixFunc;<|fim▁hole|>public class JohnGLRenderer implements GLEventListener
{
private GLU glu = new GLU();
private JohnGLPane pane;
public final void setGLPane(JohnGLPane pane)
{
this.pane = pane;
}
protected JohnGLPane getGLPane()
{
return pane;
}
protected GLU getGLU()
{
return glu;
}
protected void defaultReshape(GLAutoDrawable drawable, int w, int h, float fovy, float zNear, float zFar)
{
GL2 gl = drawable.getGL().getGL2();
gl.glViewport(0, 0, w, h);
gl.glMatrixMode(GLMatrixFunc.GL_PROJECTION);
gl.glLoadIdentity();
glu.gluPerspective(fovy, (float)w/h, zNear, zFar);
}
protected GL2 optionalDisposeMethodInitialize(GLAutoDrawable drawable)
{
GL2 gl = drawable.getGL().getGL2();
gl.glClear(GL.GL_COLOR_BUFFER_BIT);
gl.glMatrixMode(GLMatrixFunc.GL_MODELVIEW);
gl.glLoadIdentity();
return gl;
}
@Override
public void display(GLAutoDrawable drawable)
{
}
@Override
public void dispose(GLAutoDrawable drawable)
{
}
@Override
public void init(GLAutoDrawable drawable)
{
}
@Override
public void reshape(GLAutoDrawable drawable, int x, int y, int w, int h)
{
}
}<|fim▁end|>
|
import javax.media.opengl.glu.GLU;
|
<|file_name|>render_from_log.cc<|end_file_name|><|fim▁begin|>#include <sys/time.h>
#include <pangomm/init.h>
#include "pbd/compose.h"
#include "pbd/xml++.h"
#include "canvas/group.h"
#include "canvas/canvas.h"
#include "canvas/root_group.h"
#include "canvas/rectangle.h"
#include "benchmark.h"
using namespace std;
using namespace Canvas;
class RenderFromLog : public Benchmark
{
public:<|fim▁hole|> {
canvas.set_log_renders (false);
list<Rect> const & renders = canvas.renders ();
for (list<Rect>::const_iterator i = renders.begin(); i != renders.end(); ++i) {
canvas.render_to_image (*i);
}
}
};
int main (int argc, char* argv[])
{
if (argc < 2) {
cerr << "Syntax: render_parts <session>\n";
exit (EXIT_FAILURE);
}
Pango::init ();
RenderFromLog render_from_log (argv[1]);
cout << render_from_log.run () << "\n";
return 0;
}<|fim▁end|>
|
RenderFromLog (string const & session) : Benchmark (session) {}
void do_run (ImageCanvas& canvas)
|
<|file_name|>pyconsole.py<|end_file_name|><|fim▁begin|>#
# pyconsole.py
#
# Copyright (C) 2004-2006 by Yevgen Muntyan <[email protected]>
# Portions of code by Geoffrey French.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public version 2.1 as
# published by the Free Software Foundation.
#
# See COPYING.lib file that comes with this distribution for full text
# of the license.
#
# This module 'runs' python interpreter in a TextView widget.
# The main class is Console, usage is:
# Console(locals=None, banner=None, completer=None, use_rlcompleter=True, start_script='') -
# it creates the widget and 'starts' interactive session; see the end
# of this file. If start_script is not empty, it pastes it as it was
# entered from keyboard.
#
# Console has "command" signal which is emitted when code is about to
# be executed. You may connect to it using console.connect or
# console.connect_after to get your callback ran before or after the
# code is executed.
#
# To modify output appearance, set attributes of console.stdout_tag and
# console.stderr_tag.
#
# Console may subclass a type other than gtk.TextView, to allow syntax
# highlighting and stuff,
# e.g.:
# console_type = pyconsole.ConsoleType(moo.edit.TextView)
# console = console_type(use_rlcompleter=False, start_script="import moo\nimport gtk\n")
#
# This widget is not a replacement for real terminal with python running
# inside: GtkTextView is not a terminal.
# The use case is: you have a python program, you create this widget,
# and inspect your program interiors.
import gtk
import gtk.gdk as gdk
import gobject
import pango
import gtk.keysyms as _keys
import code
import sys
import keyword
import re
# commonprefix() from posixpath
def _commonprefix(m):
"Given a list of pathnames, returns the longest common leading component"
if not m: return ''
prefix = m[0]
for item in m:
for i in range(len(prefix)):
if prefix[:i+1] != item[:i+1]:
prefix = prefix[:i]
if i == 0:<|fim▁hole|>
class _ReadLine(object):
class Output(object):
def __init__(self, console, tag_name):
object.__init__(self)
self.buffer = console.get_buffer()
self.tag_name = tag_name
def write(self, text):
pos = self.buffer.get_iter_at_mark(self.buffer.get_insert())
self.buffer.insert_with_tags_by_name(pos, text, self.tag_name)
class History(object):
def __init__(self):
object.__init__(self)
self.items = ['']
self.ptr = 0
self.edited = {}
def commit(self, text):
if text and self.items[-1] != text:
self.items.append(text)
self.ptr = 0
self.edited = {}
def get(self, dir, text):
if len(self.items) == 1:
return None
if text != self.items[self.ptr]:
self.edited[self.ptr] = text
elif self.edited.has_key(self.ptr):
del self.edited[self.ptr]
self.ptr = self.ptr + dir
if self.ptr >= len(self.items):
self.ptr = 0
elif self.ptr < 0:
self.ptr = len(self.items) - 1
try:
return self.edited[self.ptr]
except KeyError:
return self.items[self.ptr]
def __init__(self, quit_func=None):
object.__init__(self)
self.quit_func = quit_func
self.set_wrap_mode(gtk.WRAP_CHAR)
self.modify_font(pango.FontDescription("Monospace"))
self.buffer = self.get_buffer()
self.buffer.connect("insert-text", self.on_buf_insert)
self.buffer.connect("delete-range", self.on_buf_delete)
self.buffer.connect("mark-set", self.on_buf_mark_set)
self.do_insert = False
self.do_delete = False
self.stdout_tag = self.buffer.create_tag("stdout", foreground="#006000")
self.stderr_tag = self.buffer.create_tag("stderr", foreground="#B00000")
self._stdout = _ReadLine.Output(self, "stdout")
self._stderr = _ReadLine.Output(self, "stderr")
self.cursor = self.buffer.create_mark("cursor",
self.buffer.get_start_iter(),
False)
insert = self.buffer.get_insert()
self.cursor.set_visible(True)
insert.set_visible(False)
self.ps = ''
self.in_raw_input = False
self.run_on_raw_input = None
self.tab_pressed = 0
self.history = _ReadLine.History()
self.nonword_re = re.compile("[^\w\._]")
def freeze_undo(self):
try: self.begin_not_undoable_action()
except: pass
def thaw_undo(self):
try: self.end_not_undoable_action()
except: pass
def raw_input(self, ps=None):
if ps:
self.ps = ps
else:
self.ps = ''
iter = self.buffer.get_iter_at_mark(self.buffer.get_insert())
if ps:
self.freeze_undo()
self.buffer.insert(iter, self.ps)
self.thaw_undo()
self.__move_cursor_to(iter)
self.scroll_to_mark(self.cursor, 0.2)
self.in_raw_input = True
if self.run_on_raw_input:
run_now = self.run_on_raw_input
self.run_on_raw_input = None
self.buffer.insert_at_cursor(run_now + '\n')
def on_buf_mark_set(self, buffer, iter, mark):
if mark is not buffer.get_insert():
return
start = self.__get_start()
end = self.__get_end()
if iter.compare(self.__get_start()) >= 0 and \
iter.compare(self.__get_end()) <= 0:
buffer.move_mark_by_name("cursor", iter)
self.scroll_to_mark(self.cursor, 0.2)
def __insert(self, iter, text):
self.do_insert = True
self.buffer.insert(iter, text)
self.do_insert = False
def on_buf_insert(self, buf, iter, text, len):
if not self.in_raw_input or self.do_insert or not len:
return
buf.stop_emission("insert-text")
lines = text.splitlines()
need_eol = False
for l in lines:
if need_eol:
self._commit()
iter = self.__get_cursor()
else:
cursor = self.__get_cursor()
if iter.compare(self.__get_start()) < 0:
iter = cursor
elif iter.compare(self.__get_end()) > 0:
iter = cursor
else:
self.__move_cursor_to(iter)
need_eol = True
self.__insert(iter, l)
self.__move_cursor(0)
def __delete(self, start, end):
self.do_delete = True
self.buffer.delete(start, end)
self.do_delete = False
def on_buf_delete(self, buf, start, end):
if not self.in_raw_input or self.do_delete:
return
buf.stop_emission("delete-range")
start.order(end)
line_start = self.__get_start()
line_end = self.__get_end()
if start.compare(line_end) > 0:
return
if end.compare(line_start) < 0:
return
self.__move_cursor(0)
if start.compare(line_start) < 0:
start = line_start
if end.compare(line_end) > 0:
end = line_end
self.__delete(start, end)
def do_key_press_event(self, event, parent_type):
if not self.in_raw_input:
return parent_type.do_key_press_event(self, event)
tab_pressed = self.tab_pressed
self.tab_pressed = 0
handled = True
state = event.state & (gdk.SHIFT_MASK |
gdk.CONTROL_MASK |
gdk.MOD1_MASK)
keyval = event.keyval
if not state:
if keyval == _keys.Return:
self._commit()
elif keyval == _keys.Up:
self.__history(-1)
elif keyval == _keys.Down:
self.__history(1)
elif keyval == _keys.Left:
self.__move_cursor(-1)
elif keyval == _keys.Right:
self.__move_cursor(1)
elif keyval == _keys.Home:
self.__move_cursor(-10000)
elif keyval == _keys.End:
self.__move_cursor(10000)
elif keyval == _keys.Tab:
cursor = self.__get_cursor()
if cursor.starts_line():
handled = False
else:
cursor.backward_char()
if cursor.get_char().isspace():
handled = False
else:
self.tab_pressed = tab_pressed + 1
self.__complete()
else:
handled = False
elif state == gdk.CONTROL_MASK:
if keyval == _keys.u:
start = self.__get_start()
end = self.__get_cursor()
self.__delete(start, end)
elif keyval == _keys.d:
if self.quit_func:
self.quit_func()
else:
handled = False
else:
handled = False
if not handled:
return parent_type.do_key_press_event(self, event)
else:
return True
def __history(self, dir):
text = self._get_line()
new_text = self.history.get(dir, text)
if not new_text is None:
self.__replace_line(new_text)
self.__move_cursor(0)
self.scroll_to_mark(self.cursor, 0.2)
def __get_cursor(self):
return self.buffer.get_iter_at_mark(self.cursor)
def __get_start(self):
iter = self.__get_cursor()
iter.set_line_offset(len(self.ps))
return iter
def __get_end(self):
iter = self.__get_cursor()
if not iter.ends_line():
iter.forward_to_line_end()
return iter
def __get_text(self, start, end):
return self.buffer.get_text(start, end, False)
def __move_cursor_to(self, iter):
self.buffer.place_cursor(iter)
self.buffer.move_mark_by_name("cursor", iter)
def __move_cursor(self, howmany):
iter = self.__get_cursor()
end = self.__get_cursor()
if not end.ends_line():
end.forward_to_line_end()
line_len = end.get_line_offset()
move_to = iter.get_line_offset() + howmany
move_to = min(max(move_to, len(self.ps)), line_len)
iter.set_line_offset(move_to)
self.__move_cursor_to(iter)
def __delete_at_cursor(self, howmany):
iter = self.__get_cursor()
end = self.__get_cursor()
if not end.ends_line():
end.forward_to_line_end()
line_len = end.get_line_offset()
erase_to = iter.get_line_offset() + howmany
if erase_to > line_len:
erase_to = line_len
elif erase_to < len(self.ps):
erase_to = len(self.ps)
end.set_line_offset(erase_to)
self.__delete(iter, end)
def __get_width(self):
if not (self.flags() & gtk.REALIZED):
return 80
layout = pango.Layout(self.get_pango_context())
letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
layout.set_text(letters)
pix_width = layout.get_pixel_size()[0]
return self.allocation.width * len(letters) / pix_width
def __print_completions(self, completions):
line_start = self.__get_text(self.__get_start(), self.__get_cursor())
line_end = self.__get_text(self.__get_cursor(), self.__get_end())
iter = self.buffer.get_end_iter()
self.__move_cursor_to(iter)
self.__insert(iter, "\n")
width = max(self.__get_width(), 4)
max_width = max([len(s) for s in completions])
n_columns = max(int(width / (max_width + 1)), 1)
col_width = int(width / n_columns)
total = len(completions)
col_length = total / n_columns
if total % n_columns:
col_length = col_length + 1
col_length = max(col_length, 1)
if col_length == 1:
n_columns = total
col_width = width / total
for i in range(col_length):
for j in range(n_columns):
ind = i + j*col_length
if ind < total:
if j == n_columns - 1:
n_spaces = 0
else:
n_spaces = col_width - len(completions[ind])
self.__insert(iter, completions[ind] + " " * n_spaces)
self.__insert(iter, "\n")
self.__insert(iter, "%s%s%s" % (self.ps, line_start, line_end))
iter.set_line_offset(len(self.ps) + len(line_start))
self.__move_cursor_to(iter)
self.scroll_to_mark(self.cursor, 0.2)
def __complete(self):
text = self.__get_text(self.__get_start(), self.__get_cursor())
start = ''
word = text
nonwords = self.nonword_re.findall(text)
if nonwords:
last = text.rfind(nonwords[-1]) + len(nonwords[-1])
start = text[:last]
word = text[last:]
completions = self.complete(word)
if completions:
prefix = _commonprefix(completions)
if prefix != word:
start_iter = self.__get_start()
start_iter.forward_chars(len(start))
end_iter = start_iter.copy()
end_iter.forward_chars(len(word))
self.__delete(start_iter, end_iter)
self.__insert(end_iter, prefix)
elif self.tab_pressed > 1:
self.freeze_undo()
self.__print_completions(completions)
self.thaw_undo()
self.tab_pressed = 0
def complete(self, text):
return None
def _get_line(self):
start = self.__get_start()
end = self.__get_end()
return self.buffer.get_text(start, end, False)
def __replace_line(self, new_text):
start = self.__get_start()
end = self.__get_end()
self.__delete(start, end)
self.__insert(end, new_text)
def _commit(self):
end = self.__get_cursor()
if not end.ends_line():
end.forward_to_line_end()
text = self._get_line()
self.__move_cursor_to(end)
self.freeze_undo()
self.__insert(end, "\n")
self.in_raw_input = False
self.history.commit(text)
self.do_raw_input(text)
self.thaw_undo()
def do_raw_input(self, text):
pass
class _Console(_ReadLine, code.InteractiveInterpreter):
def __init__(self, locals=None, banner=None,
completer=None, use_rlcompleter=True,
start_script=None, quit_func=None):
_ReadLine.__init__(self, quit_func)
code.InteractiveInterpreter.__init__(self, locals)
self.locals["__console__"] = self
self.start_script = start_script
self.completer = completer
self.banner = banner
if not self.completer and use_rlcompleter:
try:
import rlcompleter
self.completer = rlcompleter.Completer()
except ImportError:
pass
self.ps1 = ">>> "
self.ps2 = "... "
self.__start()
self.run_on_raw_input = start_script
self.raw_input(self.ps1)
def __start(self):
self.cmd_buffer = ""
self.freeze_undo()
self.thaw_undo()
self.do_delete = True
self.buffer.set_text("")
self.do_delete = False
if self.banner:
iter = self.buffer.get_start_iter()
self.buffer.insert_with_tags_by_name(iter, self.banner, "stdout")
if not iter.starts_line():
self.buffer.insert(iter, "\n")
def clear(self, start_script=None):
if start_script is None:
start_script = self.start_script
else:
self.start_script = start_script
self.__start()
self.run_on_raw_input = start_script
def do_raw_input(self, text):
if self.cmd_buffer:
cmd = self.cmd_buffer + "\n" + text
else:
cmd = text
saved_stdout, saved_stderr = sys.stdout, sys.stderr
sys.stdout, sys.stderr = self._stdout, self._stderr
if self.runsource(cmd):
self.cmd_buffer = cmd
ps = self.ps2
else:
self.cmd_buffer = ''
ps = self.ps1
sys.stdout, sys.stderr = saved_stdout, saved_stderr
self.raw_input(ps)
def do_command(self, code):
try:
eval(code, self.locals)
except SystemExit:
raise
except:
self.showtraceback()
def runcode(self, code):
if gtk.pygtk_version[1] < 8:
self.do_command(code)
else:
self.emit("command", code)
def exec_command(self, command):
if self._get_line():
self._commit()
self.buffer.insert_at_cursor(command)
self._commit()
def complete_attr(self, start, end):
try:
obj = eval(start, self.locals)
strings = dir(obj)
if end:
completions = {}
for s in strings:
if s.startswith(end):
completions[s] = None
completions = completions.keys()
else:
completions = strings
completions.sort()
return [start + "." + s for s in completions]
except:
return None
def complete(self, text):
if self.completer:
completions = []
i = 0
try:
while 1:
s = self.completer.complete(text, i)
if s:
completions.append(s)
i = i + 1
else:
completions.sort()
return completions
except NameError:
return None
dot = text.rfind(".")
if dot >= 0:
return self.complete_attr(text[:dot], text[dot+1:])
completions = {}
strings = keyword.kwlist
if self.locals:
strings.extend(self.locals.keys())
try: strings.extend(eval("globals()", self.locals).keys())
except: pass
try:
exec "import __builtin__" in self.locals
strings.extend(eval("dir(__builtin__)", self.locals))
except:
pass
for s in strings:
if s.startswith(text):
completions[s] = None
completions = completions.keys()
completions.sort()
return completions
def ReadLineType(t=gtk.TextView):
class readline(t, _ReadLine):
def __init__(self, *args, **kwargs):
t.__init__(self)
_ReadLine.__init__(self, *args, **kwargs)
def do_key_press_event(self, event):
return _ReadLine.do_key_press_event(self, event, t)
gobject.type_register(readline)
return readline
def ConsoleType(t=gtk.TextView):
class console_type(t, _Console):
__gsignals__ = {
'command' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, (object,)),
'key-press-event' : 'override'
}
def __init__(self, *args, **kwargs):
if gtk.pygtk_version[1] < 8:
gobject.GObject.__init__(self)
else:
t.__init__(self)
_Console.__init__(self, *args, **kwargs)
def do_command(self, code):
return _Console.do_command(self, code)
def do_key_press_event(self, event):
return _Console.do_key_press_event(self, event, t)
if gtk.pygtk_version[1] < 8:
gobject.type_register(console_type)
return console_type
ReadLine = ReadLineType()
Console = ConsoleType()
def _make_window():
window = gtk.Window()
window.set_title("pyconsole.py")
swin = gtk.ScrolledWindow()
swin.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_ALWAYS)
window.add(swin)
console = Console(banner="Hello there!",
use_rlcompleter=False,
start_script="from gtk import *\n")
swin.add(console)
window.set_default_size(500, 400)
window.show_all()
if not gtk.main_level():
window.connect("destroy", gtk.main_quit)
gtk.main()
return console
if __name__ == '__main__':
if len(sys.argv) < 2 or sys.argv[1] != '-gimp':
_make_window()<|fim▁end|>
|
return ''
break
return prefix
|
<|file_name|>bootstrap-modal.js<|end_file_name|><|fim▁begin|>/* ===========================================================
* bootstrap-modal.js v2.1
* ===========================================================
* Copyright 2012 Jordan Schroter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ========================================================== */
!function ($) {
"use strict"; // jshint ;_;
/* MODAL CLASS DEFINITION
* ====================== */
var Modal = function (element, options) {
this.init(element, options);
};
Modal.prototype = {
constructor: Modal,
init: function (element, options) {
this.options = options;
this.$element = $(element)
.delegate('[data-dismiss="modal"]', 'click.dismiss.modal', $.proxy(this.hide, this));
this.options.remote && this.$element.find('.modal-body').load(this.options.remote);
var manager = typeof this.options.manager === 'function' ?
this.options.manager.call(this) : this.options.manager;
manager = manager.appendModal ?
manager : $(manager).modalmanager().data('modalmanager');
manager.appendModal(this);
},
toggle: function () {
return this[!this.isShown ? 'show' : 'hide']();
},
show: function () {
var e = $.Event('show');
if (this.isShown) return;
this.$element.triggerHandler(e);
if (e.isDefaultPrevented()) return;
this.escape();
this.tab();
this.options.loading && this.loading();
},
hide: function (e) {
e && e.preventDefault();
e = $.Event('hide');
this.$element.triggerHandler(e);
if (!this.isShown || e.isDefaultPrevented()) return (this.isShown = false);
this.isShown = false;
this.escape();
this.tab();
this.isLoading && this.loading();
$(document).off('focusin.modal');
this.$element
.removeClass('in')<|fim▁hole|> .removeClass('modal-overflow')
.attr('aria-hidden', true);
$.support.transition && this.$element.hasClass('fade') ?
this.hideWithTransition() :
this.hideModal();
},
layout: function () {
var prop = this.options.height ? 'height' : 'max-height',
value = this.options.height || this.options.maxHeight;
if (this.options.width){
this.$element.css('width', this.options.width);
var that = this;
this.$element.css('margin-left', function () {
if (/%/ig.test(that.options.width)){
return -(parseInt(that.options.width) / 2) + '%';
} else {
return -($(this).width() / 2) + 'px';
}
});
} else {
this.$element.css('width', '');
this.$element.css('margin-left', '');
}
this.$element.find('.modal-body')
.css('overflow', '')
.css(prop, '');
var modalOverflow = $(window).height() - 10 < this.$element.height();
if (value){
this.$element.find('.modal-body')
.css('overflow', 'auto')
.css(prop, value);
}
if (modalOverflow || this.options.modalOverflow) {
this.$element
.css('margin-top', 0)
.addClass('modal-overflow');
} else {
this.$element
.css('margin-top', 0 - this.$element.height() / 2)
.removeClass('modal-overflow');
}
},
tab: function () {
var that = this;
if (this.isShown && this.options.consumeTab) {
this.$element.on('keydown.tabindex.modal', '[data-tabindex]', function (e) {
if (e.keyCode && e.keyCode == 9){
var $next = $(this),
$rollover = $(this);
that.$element.find('[data-tabindex]:enabled:not([readonly])').each(function (e) {
if (!e.shiftKey){
$next = $next.data('tabindex') < $(this).data('tabindex') ?
$next = $(this) :
$rollover = $(this);
} else {
$next = $next.data('tabindex') > $(this).data('tabindex') ?
$next = $(this) :
$rollover = $(this);
}
});
$next[0] !== $(this)[0] ?
$next.focus() : $rollover.focus();
e.preventDefault();
}
});
} else if (!this.isShown) {
this.$element.off('keydown.tabindex.modal');
}
},
escape: function () {
var that = this;
if (this.isShown && this.options.keyboard) {
if (!this.$element.attr('tabindex')) this.$element.attr('tabindex', -1);
this.$element.on('keyup.dismiss.modal', function (e) {
e.which == 27 && that.hide();
});
} else if (!this.isShown) {
this.$element.off('keyup.dismiss.modal')
}
},
hideWithTransition: function () {
var that = this
, timeout = setTimeout(function () {
that.$element.off($.support.transition.end);
that.hideModal();
}, 500);
this.$element.one($.support.transition.end, function () {
clearTimeout(timeout);
that.hideModal();
});
},
hideModal: function () {
this.$element
.hide()
.triggerHandler('hidden');
var prop = this.options.height ? 'height' : 'max-height';
var value = this.options.height || this.options.maxHeight;
if (value){
this.$element.find('.modal-body')
.css('overflow', '')
.css(prop, '');
}
},
removeLoading: function () {
this.$loading.remove();
this.$loading = null;
this.isLoading = false;
},
loading: function (callback) {
callback = callback || function () {};
var animate = this.$element.hasClass('fade') ? 'fade' : '';
if (!this.isLoading) {
var doAnimate = $.support.transition && animate;
this.$loading = $('<div class="loading-mask ' + animate + '">')
.append(this.options.spinner)
.appendTo(this.$element);
if (doAnimate) this.$loading[0].offsetWidth; // force reflow
this.$loading.addClass('in');
this.isLoading = true;
doAnimate ?
this.$loading.one($.support.transition.end, callback) :
callback();
} else if (this.isLoading && this.$loading) {
this.$loading.removeClass('in');
var that = this;
$.support.transition && this.$element.hasClass('fade')?
this.$loading.one($.support.transition.end, function () { that.removeLoading() }) :
that.removeLoading();
} else if (callback) {
callback(this.isLoading);
}
},
focus: function () {
var $focusElem = this.$element.find(this.options.focusOn);
$focusElem = $focusElem.length ? $focusElem : this.$element;
$focusElem.focus();
},
attention: function (){
// NOTE: transitionEnd with keyframes causes odd behaviour
if (this.options.attentionAnimation){
this.$element
.removeClass('animated')
.removeClass(this.options.attentionAnimation);
var that = this;
setTimeout(function () {
that.$element
.addClass('animated')
.addClass(that.options.attentionAnimation);
}, 0);
}
this.focus();
},
destroy: function () {
var e = $.Event('destroy');
this.$element.triggerHandler(e);
if (e.isDefaultPrevented()) return;
this.teardown();
},
teardown: function () {
if (!this.$parent.length){
this.$element.remove();
this.$element = null;
return;
}
if (this.$parent !== this.$element.parent()){
this.$element.appendTo(this.$parent);
}
this.$element.off('.modal');
this.$element.removeData('modal');
this.$element
.removeClass('in')
.attr('aria-hidden', true);
}
};
/* MODAL PLUGIN DEFINITION
* ======================= */
$.fn.modal = function (option, args) {
return this.each(function () {
var $this = $(this),
data = $this.data('modal'),
options = $.extend({}, $.fn.modal.defaults, $this.data(), typeof option == 'object' && option);
if (!data) $this.data('modal', (data = new Modal(this, options)));
if (typeof option == 'string') data[option].apply(data, [].concat(args));
else if (options.show) data.show()
})
};
$.fn.modal.defaults = {
keyboard: true,
backdrop: true,
loading: false,
show: true,
width: null,
height: null,
maxHeight: null,
modalOverflow: false,
consumeTab: true,
focusOn: null,
replace: false,
resize: false,
attentionAnimation: 'shake',
manager: 'body',
spinner: '<div class="loading-spinner" style="width: 200px; margin-left: -100px;"><div class="progress progress-striped active"><div class="bar" style="width: 100%;"></div></div></div>'
};
$.fn.modal.Constructor = Modal;
/* MODAL DATA-API
* ============== */
$(function () {
$(document).off('.modal').on('click.modal.data-api', '[data-toggle="modal"]', function (e) {
var $this = $(this),
href = $this.attr('href'),
$target = $($this.attr('data-target') || (href && href.replace(/.*(?=#[^\s]+$)/, ''))), //strip for ie7
option = $target.data('modal') ? 'toggle' : $.extend({ remote: !/#/.test(href) && href }, $target.data(), $this.data());
e.preventDefault();
$target
.modal(option)
.one('hide', function () {
$this.focus();
})
});
});
}(window.jQuery);<|fim▁end|>
|
.removeClass('animated')
.removeClass(this.options.attentionAnimation)
|
<|file_name|>command.isClean.spec.js<|end_file_name|><|fim▁begin|>'use strict';
var Promise = require('bluebird'),
git = require('../index');
describe('isClean command', function() {
var command = git.isClean;
it('should exists', function(){
expect(command).to.be.a('function');
});
it('should issue a git diff-index', function() {<|fim▁hole|> var call = mockSpawn.calls.pop();
expect(call.args).to.be.eql([
'diff-index',
'--quiet',
'HEAD',
'.'
]);
});
});
it('should return true when is clean', function() {
mockSpawn.sequence.add(mockSpawn.simple(0));
return command()
.tap(function(isClean) {
expect(isClean).to.be.true;
});
});
it('should return true when is dirty', function() {
mockSpawn.sequence.add(mockSpawn.simple(1));
return command()
.tap(function(isClean) {
expect(isClean).to.be.false;
});
});
it('should fail when git does', function() {
mockSpawn.sequence.add(mockSpawn.simple(128));
return command()
.then(Promise.reject)
.catch(Promise.resolve);
});
});<|fim▁end|>
|
return command()
.tap(function() {
|
<|file_name|>search.js<|end_file_name|><|fim▁begin|>Search = function(data, input, result) {
this.data = data;
this.$input = $(input);
this.$result = $(result);
this.$current = null;
this.$view = this.$result.parent();
this.searcher = new Searcher(data.index);
this.init();
};
Search.prototype = $.extend({}, Navigation, new function() {
var suid = 1;
this.init = function() {
var _this = this;
var observer = function(e) {
switch(e.originalEvent.keyCode) {
case 38: // Event.KEY_UP
case 40: // Event.KEY_DOWN
return;
}
_this.search(_this.$input[0].value);
};
this.$input.keyup(observer);
this.$input.click(observer); // mac's clear field
this.searcher.ready(function(results, isLast) {<|fim▁hole|> _this.addResults(results, isLast);
});
this.initNavigation();
this.setNavigationActive(false);
};
this.search = function(value, selectFirstMatch) {
value = jQuery.trim(value).toLowerCase();
if (value) {
this.setNavigationActive(true);
} else {
this.setNavigationActive(false);
}
if (value == '') {
this.lastQuery = value;
this.$result.empty();
this.$result.attr('aria-expanded', 'false');
this.setNavigationActive(false);
} else if (value != this.lastQuery) {
this.lastQuery = value;
this.$result.attr('aria-busy', 'true');
this.$result.attr('aria-expanded', 'true');
this.firstRun = true;
this.searcher.find(value);
}
};
this.addResults = function(results, isLast) {
var target = this.$result.get(0);
if (this.firstRun && (results.length > 0 || isLast)) {
this.$current = null;
this.$result.empty();
}
for (var i=0, l = results.length; i < l; i++) {
var item = this.renderItem.call(this, results[i]);
item.setAttribute('id', 'search-result-' + target.childElementCount);
target.appendChild(item);
}
if (this.firstRun && results.length > 0) {
this.firstRun = false;
this.$current = $(target.firstChild);
this.$current.addClass('search-selected');
}
if (jQuery.browser.msie) this.$element[0].className += '';
if (isLast) this.$result.attr('aria-busy', 'false');
};
this.move = function(isDown) {
if (!this.$current) return;
var $next = this.$current[isDown ? 'next' : 'prev']();
if ($next.length) {
this.$current.removeClass('search-selected');
$next.addClass('search-selected');
this.$input.attr('aria-activedescendant', $next.attr('id'));
this.scrollIntoView($next[0], this.$view[0]);
this.$current = $next;
this.$input.val($next[0].firstChild.firstChild.text);
this.$input.select();
}
return true;
};
this.hlt = function(html) {
return this.escapeHTML(html).
replace(/\u0001/g, '<em>').
replace(/\u0002/g, '</em>');
};
this.escapeHTML = function(html) {
return html.replace(/[&<>]/g, function(c) {
return '&#' + c.charCodeAt(0) + ';';
});
}
});<|fim▁end|>
| |
<|file_name|>resolve.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Resolution is the process of removing type variables and replacing
// them with their inferred values. Unfortunately our inference has
// become fairly complex and so there are a number of options to
// control *just how much* you want to resolve and how you want to do
// it.
//
// # Controlling the scope of resolution
//
// The options resolve_* determine what kinds of variables get
// resolved. Generally resolution starts with a top-level type
// variable; we will always resolve this. However, once we have
// resolved that variable, we may end up with a type that still
// contains type variables. For example, if we resolve `<T0>` we may
// end up with something like `[<T1>]`. If the option
// `resolve_nested_tvar` is passed, we will then go and recursively
// resolve `<T1>`.
//
// The options `resolve_rvar` controls whether we resolve region
// variables. The options `resolve_fvar` and `resolve_ivar` control
// whether we resolve floating point and integral variables,
// respectively.
//
// # What do if things are unconstrained
//
// Sometimes we will encounter a variable that has no constraints, and
// therefore cannot sensibly be mapped to any particular result. By
// default, we will leave such variables as is (so you will get back a
// variable in your result). The options force_* will cause the
// resolution to fail in this case intead, except for the case of
// integral variables, which resolve to `int` if forced.
//
// # resolve_all and force_all
//
// The options are a bit set, so you can use the *_all to resolve or
// force all kinds of variables (including those we may add in the
// future). If you want to resolve everything but one type, you are
// probably better off writing `resolve_all - resolve_ivar`.
use middle::ty::{FloatVar, FloatVid, IntVar, IntVid, RegionVid, TyVar, TyVid};
use middle::ty::{type_is_bot, IntType, UintType};
use middle::ty;
use middle::ty_fold;
use middle::typeck::infer::{Bounds, cyclic_ty, fixup_err, fres, InferCtxt};
use middle::typeck::infer::{region_var_bound_by_region_var, unresolved_ty};
use middle::typeck::infer::to_str::InferStr;
use middle::typeck::infer::unify::{Root, UnifyInferCtxtMethods};
use util::common::{indent, indenter};
use util::ppaux::ty_to_str;
use syntax::ast;
pub static resolve_nested_tvar: uint = 0b0000000001;
pub static resolve_rvar: uint = 0b0000000010;
pub static resolve_ivar: uint = 0b0000000100;
pub static resolve_fvar: uint = 0b0000001000;
pub static resolve_fnvar: uint = 0b0000010000;
pub static resolve_all: uint = 0b0000011111;
pub static force_tvar: uint = 0b0000100000;
pub static force_rvar: uint = 0b0001000000;
pub static force_ivar: uint = 0b0010000000;
pub static force_fvar: uint = 0b0100000000;
pub static force_fnvar: uint = 0b1000000000;
pub static force_all: uint = 0b1111100000;
pub static not_regions: uint = !(force_rvar | resolve_rvar);
pub static try_resolve_tvar_shallow: uint = 0;
pub static resolve_and_force_all_but_regions: uint =
(resolve_all | force_all) & not_regions;
pub struct ResolveState {
infcx: @InferCtxt,
modes: uint,
err: Option<fixup_err>,
v_seen: ~[TyVid],
type_depth: uint
}
pub fn resolver(infcx: @InferCtxt, modes: uint) -> ResolveState {
ResolveState {
infcx: infcx,<|fim▁hole|> v_seen: ~[],
type_depth: 0
}
}
impl ty_fold::TypeFolder for ResolveState {
fn tcx(&self) -> ty::ctxt {
self.infcx.tcx
}
fn fold_ty(&mut self, t: ty::t) -> ty::t {
self.resolve_type(t)
}
fn fold_region(&mut self, r: ty::Region) -> ty::Region {
self.resolve_region(r)
}
}
impl ResolveState {
pub fn should(&mut self, mode: uint) -> bool {
(self.modes & mode) == mode
}
pub fn resolve_type_chk(&mut self, typ: ty::t) -> fres<ty::t> {
self.err = None;
debug!("Resolving {} (modes={:x})",
ty_to_str(self.infcx.tcx, typ),
self.modes);
// n.b. This is a hokey mess because the current fold doesn't
// allow us to pass back errors in any useful way.
assert!(self.v_seen.is_empty());
let rty = indent(|| self.resolve_type(typ) );
assert!(self.v_seen.is_empty());
match self.err {
None => {
debug!("Resolved to {} + {} (modes={:x})",
ty_to_str(self.infcx.tcx, rty),
ty_to_str(self.infcx.tcx, rty),
self.modes);
return Ok(rty);
}
Some(e) => return Err(e)
}
}
pub fn resolve_region_chk(&mut self, orig: ty::Region)
-> fres<ty::Region> {
self.err = None;
let resolved = indent(|| self.resolve_region(orig) );
match self.err {
None => Ok(resolved),
Some(e) => Err(e)
}
}
pub fn resolve_type(&mut self, typ: ty::t) -> ty::t {
debug!("resolve_type({})", typ.inf_str(self.infcx));
let _i = indenter();
if !ty::type_needs_infer(typ) {
return typ;
}
if self.type_depth > 0 && !self.should(resolve_nested_tvar) {
return typ;
}
match ty::get(typ).sty {
ty::ty_infer(TyVar(vid)) => {
self.resolve_ty_var(vid)
}
ty::ty_infer(IntVar(vid)) => {
self.resolve_int_var(vid)
}
ty::ty_infer(FloatVar(vid)) => {
self.resolve_float_var(vid)
}
_ => {
if self.modes & resolve_all == 0 {
// if we are only resolving top-level type
// variables, and this is not a top-level type
// variable, then shortcircuit for efficiency
typ
} else {
self.type_depth += 1;
let result = ty_fold::super_fold_ty(self, typ);
self.type_depth -= 1;
result
}
}
}
}
pub fn resolve_region(&mut self, orig: ty::Region) -> ty::Region {
debug!("Resolve_region({})", orig.inf_str(self.infcx));
match orig {
ty::ReInfer(ty::ReVar(rid)) => self.resolve_region_var(rid),
_ => orig
}
}
pub fn resolve_region_var(&mut self, rid: RegionVid) -> ty::Region {
if !self.should(resolve_rvar) {
return ty::ReInfer(ty::ReVar(rid));
}
self.infcx.region_vars.resolve_var(rid)
}
pub fn assert_not_rvar(&mut self, rid: RegionVid, r: ty::Region) {
match r {
ty::ReInfer(ty::ReVar(rid2)) => {
self.err = Some(region_var_bound_by_region_var(rid, rid2));
}
_ => { }
}
}
pub fn resolve_ty_var(&mut self, vid: TyVid) -> ty::t {
if self.v_seen.contains(&vid) {
self.err = Some(cyclic_ty(vid));
return ty::mk_var(self.infcx.tcx, vid);
} else {
self.v_seen.push(vid);
let tcx = self.infcx.tcx;
// Nonobvious: prefer the most specific type
// (i.e., the lower bound) to the more general
// one. More general types in Rust (e.g., fn())
// tend to carry more restrictions or higher
// perf. penalties, so it pays to know more.
let nde = self.infcx.get(vid);
let bounds = nde.possible_types;
let t1 = match bounds {
Bounds { ub:_, lb:Some(t) } if !type_is_bot(t)
=> self.resolve_type(t),
Bounds { ub:Some(t), lb:_ } => self.resolve_type(t),
Bounds { ub:_, lb:Some(t) } => self.resolve_type(t),
Bounds { ub:None, lb:None } => {
if self.should(force_tvar) {
self.err = Some(unresolved_ty(vid));
}
ty::mk_var(tcx, vid)
}
};
self.v_seen.pop();
return t1;
}
}
pub fn resolve_int_var(&mut self, vid: IntVid) -> ty::t {
if !self.should(resolve_ivar) {
return ty::mk_int_var(self.infcx.tcx, vid);
}
let node = self.infcx.get(vid);
match node.possible_types {
Some(IntType(t)) => ty::mk_mach_int(t),
Some(UintType(t)) => ty::mk_mach_uint(t),
None => {
if self.should(force_ivar) {
// As a last resort, default to int.
let ty = ty::mk_int();
self.infcx.set(vid, Root(Some(IntType(ast::TyI)), node.rank));
ty
} else {
ty::mk_int_var(self.infcx.tcx, vid)
}
}
}
}
pub fn resolve_float_var(&mut self, vid: FloatVid) -> ty::t {
if !self.should(resolve_fvar) {
return ty::mk_float_var(self.infcx.tcx, vid);
}
let node = self.infcx.get(vid);
match node.possible_types {
Some(t) => ty::mk_mach_float(t),
None => {
if self.should(force_fvar) {
// As a last resort, default to f64.
let ty = ty::mk_f64();
self.infcx.set(vid, Root(Some(ast::TyF64), node.rank));
ty
} else {
ty::mk_float_var(self.infcx.tcx, vid)
}
}
}
}
}<|fim▁end|>
|
modes: modes,
err: None,
|
<|file_name|>sample.py<|end_file_name|><|fim▁begin|>'''
Created on 2015年1月19日
@author: Guan-yu Willie Chen
'''
# -*- coding: utf-8 -*-
from selenium import webdriver
from selenium.webdriver.support.select import Select
from selenium.webdriver.common.keys import Keys
import time
#browser = webdriver.Firefox()
#browser = webdriver.Ie()
browser = webdriver.Chrome("chromedriver.exe")
URL = ""
browser.get(URL+"/insurance/gs/sp/spLogin")
# 登入
browser.find_element_by_xpath("//input[@id='login:userName']").send_keys('')
browser.find_element_by_xpath("//input[@id='login:password']").send_keys('' + Keys.RETURN)
#進入前台
browser.find_element_by_xpath("//img[@name='Adminstration']").click()
#進入條碼列印作業
browser.get(URL+"insurance/eclaim/qrcodePrint.do")
# 選擇賠案號碼起
claimStartNo = browser.find_element_by_name("claimStartNo").send_keys("CLBR14V000000")
# 選擇文件名稱
docId = browser.find_element_by_name("docId")
for n in enumerate(docId.text.split("\n")):
<|fim▁hole|># 查詢
browser.find_element_by_xpath("//input[@name='queryBtn']").click()
# 分頁
browser.find_element_by_xpath("//input[@id='gotoPageNo']").send_keys(Keys.BACKSPACE)
browser.find_element_by_xpath("//input[@id='gotoPageNo']").send_keys("3")
browser.find_element_by_xpath("//div[@id='turnpage']/table/tbody/tr/td/input[@value='跳至']").click()<|fim▁end|>
|
print(n)
select = Select(docId)
select.select_by_index(1)
|
<|file_name|>delete_storage_metadata.py<|end_file_name|><|fim▁begin|>from geobricks_data_scripts.dev.utils.data_manager_util import get_data_manager
data_manager = get_data_manager()
# TODO How to handle the fact that is in storage?<|fim▁hole|><|fim▁end|>
|
data_manager.delete("mod13a2", True, False, False)
|
<|file_name|>tweeter.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>extern crate irc;
use std::default::Default;
use std::thread;
use std::time::Duration;
use irc::client::prelude::*;
fn main() {
let config = Config {
nickname: Some("pickles".to_owned()),
server: Some("irc.fyrechat.net".to_owned()),
channels: Some(vec!["#irc-crate".to_owned()]),
..Default::default()
};
let server = IrcServer::from_config(config).unwrap();
server.identify().unwrap();
let server2 = server.clone();
// Let's set up a loop that just prints the messages.
thread::spawn(move || {
server2.stream().map(|m| print!("{}", m)).wait().count();
});
loop {
server.send_privmsg("#irc-crate", "TWEET TWEET").unwrap();
thread::sleep(Duration::new(10, 0));
}
}<|fim▁end|>
| |
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>extern crate cbindgen;
use std::env;
fn write_headers() {
let crate_dir = env::var("CARGO_MANIFEST_DIR").unwrap();
let config = cbindgen::Config::from_file("cbindgen.toml").unwrap();
cbindgen::generate_with_config(&crate_dir, config)
.expect("Unable to generate bindings")
.write_to_file("include/header.h");<|fim▁hole|>}
fn main() {
let headers_enabled = env::var_os("CARGO_FEATURE_HEADERS").is_some();
if headers_enabled {
write_headers();
}
}<|fim▁end|>
| |
<|file_name|>LSMRTreeWithAntiMatterTuplesDataflowHelperFactory.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.storage.am.lsm.rtree.dataflow;
import java.util.Map;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
import org.apache.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
import org.apache.hyracks.storage.am.common.dataflow.IndexDataflowHelper;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackFactory;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationSchedulerProvider;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
import org.apache.hyracks.storage.am.lsm.common.api.IVirtualBufferCacheProvider;
import org.apache.hyracks.storage.am.lsm.common.dataflow.AbstractLSMIndexDataflowHelperFactory;
import org.apache.hyracks.storage.am.rtree.frames.RTreePolicyType;
public class LSMRTreeWithAntiMatterTuplesDataflowHelperFactory extends AbstractLSMIndexDataflowHelperFactory {
private static final long serialVersionUID = 1L;
private final IBinaryComparatorFactory[] btreeComparatorFactories;
private final IPrimitiveValueProviderFactory[] valueProviderFactories;
private final RTreePolicyType rtreePolicyType;
private final ILinearizeComparatorFactory linearizeCmpFactory;
private final int[] rtreeFields;
protected final boolean isPointMBR;
public LSMRTreeWithAntiMatterTuplesDataflowHelperFactory(IPrimitiveValueProviderFactory[] valueProviderFactories,
RTreePolicyType rtreePolicyType, IBinaryComparatorFactory[] btreeComparatorFactories,
IVirtualBufferCacheProvider virtualBufferCacheProvider, ILSMMergePolicyFactory mergePolicyFactory,
Map<String, String> mergePolicyProperties, ILSMOperationTrackerFactory opTrackerFactory,
ILSMIOOperationSchedulerProvider ioSchedulerProvider, ILSMIOOperationCallbackFactory ioOpCallbackFactory,
ILinearizeComparatorFactory linearizeCmpFactory, int[] rtreeFields, ITypeTraits[] filterTypeTraits,
IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields, boolean durable, boolean isPointMBR) {
super(virtualBufferCacheProvider, mergePolicyFactory, mergePolicyProperties, opTrackerFactory,
ioSchedulerProvider, ioOpCallbackFactory, 1.0, filterTypeTraits, filterCmpFactories, filterFields,
durable);
this.btreeComparatorFactories = btreeComparatorFactories;
this.valueProviderFactories = valueProviderFactories;
this.rtreePolicyType = rtreePolicyType;
this.linearizeCmpFactory = linearizeCmpFactory;
this.rtreeFields = rtreeFields;
this.isPointMBR = isPointMBR;
}
@Override
public IndexDataflowHelper createIndexDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
int partition) throws HyracksDataException {<|fim▁hole|> ioSchedulerProvider.getIOScheduler(ctx), ioOpCallbackFactory, linearizeCmpFactory, rtreeFields,
filterTypeTraits, filterCmpFactories, filterFields, durable, isPointMBR);
}
}<|fim▁end|>
|
return new LSMRTreeWithAntiMatterTuplesDataflowHelper(opDesc, ctx, partition,
virtualBufferCacheProvider.getVirtualBufferCaches(ctx, opDesc.getFileSplitProvider()),
btreeComparatorFactories, valueProviderFactories, rtreePolicyType,
mergePolicyFactory.createMergePolicy(mergePolicyProperties, ctx), opTrackerFactory,
|
<|file_name|>NettyComponent.java<|end_file_name|><|fim▁begin|>/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and<|fim▁hole|> */
package org.apache.camel.component.netty4;
import java.net.URI;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ThreadFactory;
import io.netty.util.concurrent.DefaultEventExecutorGroup;
import io.netty.util.concurrent.EventExecutorGroup;
import org.apache.camel.CamelContext;
import org.apache.camel.Endpoint;
import org.apache.camel.impl.UriEndpointComponent;
import org.apache.camel.util.IntrospectionSupport;
import org.apache.camel.util.concurrent.CamelThreadFactory;
public class NettyComponent extends UriEndpointComponent {
private NettyConfiguration configuration;
private volatile EventExecutorGroup executorService;
public NettyComponent() {
super(NettyEndpoint.class);
}
public NettyComponent(Class<? extends Endpoint> endpointClass) {
super(endpointClass);
}
public NettyComponent(CamelContext context) {
super(context, NettyEndpoint.class);
}
@Override
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
NettyConfiguration config;
if (configuration != null) {
config = configuration.copy();
} else {
config = new NettyConfiguration();
}
config = parseConfiguration(config, remaining, parameters);
// merge any custom bootstrap configuration on the config
NettyServerBootstrapConfiguration bootstrapConfiguration = resolveAndRemoveReferenceParameter(parameters, "bootstrapConfiguration", NettyServerBootstrapConfiguration.class);
if (bootstrapConfiguration != null) {
Map<String, Object> options = new HashMap<String, Object>();
if (IntrospectionSupport.getProperties(bootstrapConfiguration, options, null, false)) {
IntrospectionSupport.setProperties(getCamelContext().getTypeConverter(), config, options);
}
}
// validate config
config.validateConfiguration();
NettyEndpoint nettyEndpoint = new NettyEndpoint(remaining, this, config);
setProperties(nettyEndpoint.getConfiguration(), parameters);
return nettyEndpoint;
}
/**
* Parses the configuration
*
* @return the parsed and valid configuration to use
*/
protected NettyConfiguration parseConfiguration(NettyConfiguration configuration, String remaining, Map<String, Object> parameters) throws Exception {
configuration.parseURI(new URI(remaining), parameters, this, "tcp", "udp");
return configuration;
}
public NettyConfiguration getConfiguration() {
return configuration;
}
public void setConfiguration(NettyConfiguration configuration) {
this.configuration = configuration;
}
public void setExecutorService(EventExecutorGroup executorService) {
this.executorService = executorService;
}
public synchronized EventExecutorGroup getExecutorService() {
if (executorService == null) {
executorService = createExecutorService();
}
return executorService;
}
@Override
protected void doStart() throws Exception {
if (configuration == null) {
configuration = new NettyConfiguration();
}
if (configuration.isUsingExecutorService() && executorService == null) {
executorService = createExecutorService();
}
super.doStart();
}
protected EventExecutorGroup createExecutorService() {
// Provide the executor service for the application
// and use a Camel thread factory so we have consistent thread namings
// we should use a shared thread pool as recommended by Netty
String pattern = getCamelContext().getExecutorServiceManager().getThreadNamePattern();
ThreadFactory factory = new CamelThreadFactory(pattern, "NettyEventExecutorGroup", true);
return new DefaultEventExecutorGroup(configuration.getMaximumPoolSize(), factory);
}
@Override
protected void doStop() throws Exception {
if (executorService != null) {
getCamelContext().getExecutorServiceManager().shutdownNow(executorService);
executorService = null;
}
super.doStop();
}
}<|fim▁end|>
|
* limitations under the License.
|
<|file_name|>sc_tracebox.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Program: $Id: $
# Author: Robert Beverly <[email protected]>
# Description: Experimental tracebox warts parser
import sys
import struct
import dpkt
from sc_warts import *
if dpkt.__version__ == '1.8':
print "Upgrade dpkt"
sys.exit(-1)
TRACEBOXTYPE = 0x0c
def dict_diff(a, b):
diff = dict()
for k in a:
if k in b:
if b[k] != a[k]:
diff[k] = (a[k],b[k])
return diff
# return set(a.items()) ^ set(b.items())
class WartsTraceBoxReader(WartsReader):
def __init__(self, wartsfile, verbose=False):
super(WartsTraceBoxReader, self).__init__(wartsfile, verbose)
def next(self):
while True:
obj = self.next_object()
if not obj:
return (False, False)
if (obj.typ == TRACEBOXTYPE):
return (obj.flags, obj.pkts)
def next_object(self):
# read warts object header
self.header = self.fd.read(8)
# sanity check
if len(self.header) != 8:
return None
(magic, typ, length) = struct.unpack('!HHI', self.header)
if self.verbose:
print "Magic: %02X Obj: %02X Len: %02x" % (magic, typ, length)
assert(magic == obj_type['MAGIC'])
# read remainder of object
data = self.fd.read(length)
if typ == obj_type['LIST']:
return WartsList(data, verbose=self.verbose)
elif typ == obj_type['CYCLESTART']:
return WartsCycle(data, verbose=self.verbose)
elif typ == obj_type['CYCLE']:
return WartsCycle(data, verbose=self.verbose)
elif typ == obj_type['CYCLE_STOP']:
return WartsCycleStop(data, verbose=self.verbose)
elif typ == TRACEBOXTYPE:
return WartsTraceBox(data, verbose=self.verbose)
else:
print "Unsupported object: %02x Len: %d" % (typ, length)
assert False
class WartsTraceBox(WartsBaseObject):
def __init__(self, data, verbose=False):
super(WartsTraceBox, self).__init__(TRACEBOXTYPE, verbose)
self.data = data
self.flagdata = data
self.pkts = []
self.flag_defines = [
('listid', unpack_uint32_t),
('cycleid', unpack_uint32_t),
('userid', unpack_uint32_t),
('srcaddr', self.unpack_address),
('dstaddr', self.unpack_address),
('sport', unpack_uint16_t),
('dport', unpack_uint16_t),
('start', read_timeval),
('result', unpack_uint16_t),
('rtt', unpack_uint8_t),
('qtype', unpack_uint8_t),
('udp', unpack_uint8_t),
('printmode', unpack_uint8_t),
('pktc16', unpack_uint16_t),
('pktc', unpack_uint32_t),
]
flag_bytes = self.read_flags()
if self.verbose:
print "TB Params:", self.flags
offset = flag_bytes
for i in range(self.flags['pktc']):
pkt = WartsTraceBoxPkt(data[offset:], self.referenced_address, self.verbose)
self.pkts.append(pkt.flags)
offset+=pkt.flag_bytes
if self.verbose: print "Pkt %d: %s" % (i+1, pkt.flags)
class WartsTraceBoxPkt(WartsBaseObject):
def __init__(self, data, refs, verbose=False):
super(WartsTraceBoxPkt, self).__init__(TRACEBOXTYPE, verbose)
self.update_ref(refs)
self.flagdata = data
self.flag_defines = [
('dir', unpack_uint8_t),
('time', read_timeval),
('len', unpack_uint16_t),
('data', self.read_pass),
]
self.flag_bytes = self.read_flags()
datalen = self.flags['len']
self.flags['data'] = self.read_tracebox_pkt(data[self.flag_bytes:self.flag_bytes+datalen])
self.flag_bytes += self.flags['len']
def read_pass(self, b):
return ("pass", 0)
def read_tracebox_pkt(self, data):
fields = dict()
ip = dpkt.ip.IP(data)
fields['hop'] = socket.inet_ntoa(ip.src)
if ip.p == dpkt.ip.IP_PROTO_ICMP:
# This is a reply from a hop
fields['hop'] = socket.inet_ntoa(ip.src)
icmp = ip.data
#print "ICMP quote:", icmp.type, icmp.code, "LEN:", len(icmp.data.data)
# icmp.data is type dpkt.icmp.TimeExceed
# so, icmp.data.data is a dpkt.ip.IP
ip = icmp.data.data
fields['IP::Version'] = ip.v
fields['IP::IHL'] = ip.hl
dscp = (ip.tos & 0xFC) >> 2
ecn = (ip.tos & 0x03)
fields['IP::DiffServicesCP'] = hex(dscp)
fields['IP::ECN'] = hex(ecn)
fields['IP:Length'] = hex(ip.len)
fields['IP:ID'] = ip.id
flags = (ip.df >> 1) + ip.mf
fields['IP:Flags'] = hex(flags)
fields['IP:FragmentOffset'] = ip.offset
fields['IP:TTL'] = ip.ttl<|fim▁hole|> fields['IP::DestAddr'] = socket.inet_ntoa(ip.dst)
if ip.p == dpkt.ip.IP_PROTO_TCP:
tcp = ip.data
if not isinstance(tcp, dpkt.tcp.TCP):
#print "Partial quote!"
z = struct.pack('12sB',ip.data,0x50) + struct.pack('7B',*([0]*7))
tcp = dpkt.tcp.TCP(z)
#print type(tcp)
if len(ip.data) >= 4:
fields['TCP::SPort'] = hex(tcp.sport)
fields['TCP::DPort'] = hex(tcp.dport)
if len(ip.data) >= 8:
fields['TCP::SeqNumber'] = hex(tcp.seq)
if len(ip.data) >= 12:
fields['TCP::AckNumber'] = hex(tcp.ack)
if len(ip.data) >= 16:
fields['TCP::Offset'] = hex(tcp.off)
fields['TCP::Flags'] = hex(tcp.flags)
fields['TCP::Window'] = hex(tcp.win)
if len(ip.data) == 20:
fields['TCP::Checksum'] = hex(tcp.sum)
fields['TCP::UrgentPtr'] = hex(tcp.urp)
if len(ip.data) >= 20:
if len(tcp.opts) > 0:
opts = dpkt.tcp.parse_opts(tcp.opts)
for o,d in opts:
if o == dpkt.tcp.TCP_OPT_EOL:
fields['TCP::OPT_EOL'] = d
elif o == dpkt.tcp.TCP_OPT_NOP:
fields['TCP::OPT_NOP'] = d
elif o == dpkt.tcp.TCP_OPT_MSS:
fields['TCP::OPT_MSS'] = d
elif o == dpkt.tcp.TCP_OPT_WSCALE:
fields['TCP::OPT_WSCALE'] = d
elif o == dpkt.tcp.TCP_OPT_SACKOK:
fields['TCP::OPT_SACKOK'] = d
elif o == dpkt.tcp.TCP_OPT_SACK:
fields['TCP::OPT_SACK'] = d
elif o == dpkt.tcp.TCP_OPT_TIMESTAMP:
fields['TCP::OPT_TIMESTAMP'] = d
return fields
if __name__ == "__main__":
assert len(sys.argv) == 2
w = WartsTraceBoxReader(sys.argv[1], verbose=False)
while True:
(flags, pkts) = w.next()
if flags == False: break
print "tracebox from %s to %s (result: %d)" % (flags['srcaddr'], flags['dstaddr'], flags['result'])
last_tx = None
last_tx_ts = 0
i = 0
for pkt in pkts:
ts = pkt['time'] - flags['start']
if pkt['dir'] == 1: #TX
#print " TX at %1.3f:" % (ts)
if last_tx != None:
i+=1
print " %d: *" % (i)
last_tx = pkt['data']
last_tx_ts = pkt['time']
else: #RX
#print " RX at %1.3f:" % (ts)
i+=1
rtt = (pkt['time'] - last_tx_ts)*1000.0
if last_tx:
diff = dict_diff(last_tx, pkt['data'])
print " %d: %s RTT:%1.3f: %s" % (i, pkt['data']['hop'], rtt, " ".join(diff.keys()))
last_tx = None<|fim▁end|>
|
fields['IP::Protocol'] = ip.p
fields['IP::Checksum'] = hex(ip.sum)
fields['IP::SourceAddr'] = socket.inet_ntoa(ip.src)
|
<|file_name|>je_ss_smcrps_nrmsc_dropoutput.py<|end_file_name|><|fim▁begin|>"""Single slice vgg with normalised scale.
"""
import functools
import lasagne as nn
import numpy as np
import theano
import theano.tensor as T
import data_loader
import deep_learning_layers
import layers
import preprocess
import postprocess
import objectives
import theano_printer
import updates
# Random params
rng = np.random
take_a_dump = False # dump a lot of data in a pkl-dump file. (for debugging)
dump_network_loaded_data = False # dump the outputs from the dataloader (for debugging)
# Memory usage scheme
caching = None
# Save and validation frequency
validate_every = 10
validate_train_set = True
save_every = 10
restart_from_save = False
dump_network_loaded_data = False
# Training (schedule) parameters
# - batch sizes
batch_size = 32
sunny_batch_size = 4
batches_per_chunk = 16
AV_SLICE_PER_PAT = 11
num_epochs_train = 50 * AV_SLICE_PER_PAT
# - learning rate and method
base_lr = .0001
learning_rate_schedule = {
0: base_lr,
4*num_epochs_train/5: base_lr/10,
}
momentum = 0.9
build_updates = updates.build_adam_updates
# Preprocessing stuff
cleaning_processes = [
preprocess.set_upside_up,]
cleaning_processes_post = [<|fim▁hole|> functools.partial(preprocess.normalize_contrast_zmuv, z=2)]
augmentation_params = {
"rotation": (-16, 16),
"shear": (0, 0),
"translation": (-8, 8),
"flip_vert": (0, 1),
"roll_time": (0, 0),
"flip_time": (0, 0),
}
preprocess_train = preprocess.preprocess_normscale
preprocess_validation = functools.partial(preprocess_train, augment=False)
preprocess_test = preprocess_train
sunny_preprocess_train = preprocess.sunny_preprocess_with_augmentation
sunny_preprocess_validation = preprocess.sunny_preprocess_validation
sunny_preprocess_test = preprocess.sunny_preprocess_validation
# Data generators
create_train_gen = data_loader.generate_train_batch
create_eval_valid_gen = functools.partial(data_loader.generate_validation_batch, set="validation")
create_eval_train_gen = functools.partial(data_loader.generate_validation_batch, set="train")
create_test_gen = functools.partial(data_loader.generate_test_batch, set=["validation", "test"])
# Input sizes
image_size = 128
data_sizes = {
"sliced:data:singleslice:difference:middle": (batch_size, 29, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
"sliced:data:singleslice:difference": (batch_size, 29, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
"sliced:data:singleslice": (batch_size, 30, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
"sliced:data:ax": (batch_size, 30, 15, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
"sliced:data:shape": (batch_size, 2,),
"sunny": (sunny_batch_size, 1, image_size, image_size)
# TBC with the metadata
}
# Objective
l2_weight = 0.000
l2_weight_out = 0.000
def build_objective(interface_layers):
# l2 regu on certain layers
l2_penalty = nn.regularization.regularize_layer_params_weighted(
interface_layers["regularizable"], nn.regularization.l2)
# build objective
return objectives.KaggleObjective(interface_layers["outputs"], penalty=l2_penalty)
# Testing
postprocess = postprocess.postprocess
test_time_augmentations = 100 * AV_SLICE_PER_PAT # More augmentations since a we only use single slices
# Architecture
def build_model():
#################
# Regular model #
#################
input_size = data_sizes["sliced:data:singleslice"]
l0 = nn.layers.InputLayer(input_size)
l1a = nn.layers.dnn.Conv2DDNNLayer(l0, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=64, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l1b = nn.layers.dnn.Conv2DDNNLayer(l1a, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=64, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l1 = nn.layers.dnn.MaxPool2DDNNLayer(l1b, pool_size=(2,2), stride=(2,2))
l2a = nn.layers.dnn.Conv2DDNNLayer(l1, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=128, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l2b = nn.layers.dnn.Conv2DDNNLayer(l2a, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=128, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l2 = nn.layers.dnn.MaxPool2DDNNLayer(l2b, pool_size=(2,2), stride=(2,2))
l3a = nn.layers.dnn.Conv2DDNNLayer(l2, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=256, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l3b = nn.layers.dnn.Conv2DDNNLayer(l3a, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=256, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l3c = nn.layers.dnn.Conv2DDNNLayer(l3b, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=256, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l3 = nn.layers.dnn.MaxPool2DDNNLayer(l3c, pool_size=(2,2), stride=(2,2))
l4a = nn.layers.dnn.Conv2DDNNLayer(l3, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l4b = nn.layers.dnn.Conv2DDNNLayer(l4a, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l4c = nn.layers.dnn.Conv2DDNNLayer(l4b, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l4 = nn.layers.dnn.MaxPool2DDNNLayer(l4c, pool_size=(2,2), stride=(2,2))
l5a = nn.layers.dnn.Conv2DDNNLayer(l4, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l5b = nn.layers.dnn.Conv2DDNNLayer(l5a, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l5c = nn.layers.dnn.Conv2DDNNLayer(l5b, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l5 = nn.layers.dnn.MaxPool2DDNNLayer(l5c, pool_size=(2,2), stride=(2,2))
# Systole Dense layers
ldsys1 = nn.layers.DenseLayer(l5, num_units=1024, W=nn.init.Orthogonal("relu"), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
ldsys1drop = nn.layers.dropout(ldsys1, p=0.5)
ldsys2 = nn.layers.DenseLayer(ldsys1drop, num_units=1024, W=nn.init.Orthogonal("relu"),b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
ldsys2drop = nn.layers.dropout(ldsys2, p=0.5)
ldsys3 = nn.layers.DenseLayer(ldsys2drop, num_units=600, W=nn.init.Orthogonal("relu"), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.softmax)
ldsys3drop = nn.layers.dropout(ldsys3, p=0.5) # dropout at the output might encourage adjacent neurons to correllate
l_systole = layers.CumSumLayer(ldsys3)
# Diastole Dense layers
lddia1 = nn.layers.DenseLayer(l5, num_units=1024, W=nn.init.Orthogonal("relu"), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
lddia1drop = nn.layers.dropout(lddia1, p=0.5)
lddia2 = nn.layers.DenseLayer(lddia1drop, num_units=1024, W=nn.init.Orthogonal("relu"),b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
lddia2drop = nn.layers.dropout(lddia2, p=0.5)
lddia3 = nn.layers.DenseLayer(lddia2drop, num_units=600, W=nn.init.Orthogonal("relu"), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.softmax)
lddia3drop = nn.layers.dropout(lddia3, p=0.5) # dropout at the output might encourage adjacent neurons to correllate
l_diastole = layers.CumSumLayer(lddia3drop)
return {
"inputs":{
"sliced:data:singleslice": l0
},
"outputs": {
"systole": l_systole,
"diastole": l_diastole,
},
"regularizable": {
ldsys1: l2_weight,
ldsys2: l2_weight,
ldsys3: l2_weight_out,
lddia1: l2_weight,
lddia2: l2_weight,
lddia3: l2_weight_out,
},
}<|fim▁end|>
| |
<|file_name|>RandVar.java<|end_file_name|><|fim▁begin|>package aima.core.probability.util;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import aima.core.probability.RandomVariable;
import aima.core.probability.domain.Domain;
import aima.core.probability.proposition.TermProposition;
/**
* Default implementation of the RandomVariable interface.
*
* Note: Also implements the TermProposition interface so its easy to use
* RandomVariables in conjunction with propositions about them in the
* Probability Model APIs.
*
* @author Ciaran O'Reilly
*/
public class RandVar implements RandomVariable, TermProposition {
private String name = null;
private Domain domain = null;
private Set<RandomVariable> scope = new HashSet<RandomVariable>();
public RandVar(String name, Domain domain) {
ProbUtil.checkValidRandomVariableName(name);
if (null == domain) {
throw new IllegalArgumentException(
"Domain of RandomVariable must be specified.");
}
this.name = name;
this.domain = domain;
this.scope.add(this);
}
//
// START-RandomVariable
@Override
public String getName() {
return name;
}
@Override
public Domain getDomain() {
return domain;
}
// END-RandomVariable
//
//
// START-TermProposition
@Override
public RandomVariable getTermVariable() {
return this;
}
@Override
public Set<RandomVariable> getScope() {
return scope;
}
@Override
public Set<RandomVariable> getUnboundScope() {
return scope;
}
@Override
public boolean holds(Map<RandomVariable, Object> possibleWorld) {
return possibleWorld.containsKey(getTermVariable());
}
// END-TermProposition
//
<|fim▁hole|> public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof RandomVariable)) {
return false;
}
// The name (not the name:domain combination) uniquely identifies a
// Random Variable
RandomVariable other = (RandomVariable) o;
return this.name.equals(other.getName());
}
@Override
public int hashCode() {
return name.hashCode();
}
@Override
public String toString() {
return getName();
}
}<|fim▁end|>
|
@Override
|
<|file_name|>pgwmodal-test.ts<|end_file_name|><|fim▁begin|>/// <reference path="pgwmodal.d.ts" />
var $j: JQueryStatic;
var $z: ZeptoStatic;
function test_open(): void {
var r: boolean = $j.pgwModal({
content: 'Modal Example 1'
});<|fim▁hole|> maxWidth: 800
});
$j.pgwModal({
url: 'modal-test.php',
loadingContent: '<span style="text-align:center">Loading in progress</span>',
closable: false,
titleBar: false
});
}
function test_action(): void {
var r1: boolean = $j.pgwModal('close');
var r2: boolean = $j.pgwModal('reposition');
var r3: boolean = $j.pgwModal('isOpen');
var r4: any = $j.pgwModal('getData');
}
function test_equal(): void {
$j.pgwModal == $z.pgwModal;
}<|fim▁end|>
|
$j.pgwModal({
target: '#modalContent',
title: 'Modal title 2',
|
<|file_name|>rx-sequence_equal.hpp<|end_file_name|><|fim▁begin|>// Copyright (c) Microsoft Open Technologies, Inc. All rights reserved. See License.txt in the project root for license information.
#pragma once
#if !defined(RXCPP_OPERATORS_RX_SEQUENCE_EQUAL_HPP)
#define RXCPP_OPERATORS_RX_SEQUENCE_EQUAL_HPP
#include "../rx-includes.hpp"
namespace rxcpp {
namespace operators {
namespace detail {
template<class T, class Observable, class OtherObservable, class BinaryPredicate, class Coordination>
struct sequence_equal : public operator_base<bool>
{
typedef rxu::decay_t<Observable> source_type;
typedef rxu::decay_t<T> source_value_type;
typedef rxu::decay_t<OtherObservable> other_source_type;
typedef typename other_source_type::value_type other_source_value_type;
typedef rxu::decay_t<BinaryPredicate> predicate_type;
typedef rxu::decay_t<Coordination> coordination_type;
typedef typename coordination_type::coordinator_type coordinator_type;
struct values {
values(source_type s, other_source_type t, predicate_type pred, coordination_type sf)
: source(std::move(s))
, other(std::move(t))
, pred(std::move(pred))
, coordination(std::move(sf))
{
}
source_type source;
other_source_type other;
predicate_type pred;
coordination_type coordination;
};
values initial;
sequence_equal(source_type s, other_source_type t, predicate_type pred, coordination_type sf)
: initial(std::move(s), std::move(t), std::move(pred), std::move(sf))
{
}
template<class Subscriber>
void on_subscribe(Subscriber s) const {
typedef Subscriber output_type;
struct state_type
: public std::enable_shared_from_this<state_type>
, public values
{
state_type(const values& vals, coordinator_type coor, const output_type& o)
: values(vals)
, coordinator(std::move(coor))
, out(o)
, source_completed(false)
, other_completed(false)
{
out.add(other_lifetime);
out.add(source_lifetime);
}
composite_subscription other_lifetime;
composite_subscription source_lifetime;
coordinator_type coordinator;
output_type out;
mutable std::list<source_value_type> source_values;<|fim▁hole|> mutable bool source_completed;
mutable bool other_completed;
};
auto coordinator = initial.coordination.create_coordinator();
auto state = std::make_shared<state_type>(initial, std::move(coordinator), std::move(s));
auto other = on_exception(
[&](){ return state->coordinator.in(state->other); },
state->out);
if (other.empty()) {
return;
}
auto source = on_exception(
[&](){ return state->coordinator.in(state->source); },
state->out);
if (source.empty()) {
return;
}
auto check_equal = [state]() {
if(!state->source_values.empty() && !state->other_values.empty()) {
auto x = std::move(state->source_values.front());
state->source_values.pop_front();
auto y = std::move(state->other_values.front());
state->other_values.pop_front();
if (!state->pred(x, y)) {
state->out.on_next(false);
state->out.on_completed();
}
} else {
if((!state->source_values.empty() && state->other_completed) ||
(!state->other_values.empty() && state->source_completed)) {
state->out.on_next(false);
state->out.on_completed();
}
}
};
auto check_complete = [state]() {
if(state->source_completed && state->other_completed) {
state->out.on_next(state->source_values.empty() && state->other_values.empty());
state->out.on_completed();
}
};
auto sinkOther = make_subscriber<other_source_value_type>(
state->out,
state->other_lifetime,
// on_next
[state, check_equal](other_source_value_type t) {
auto& values = state->other_values;
values.push_back(t);
check_equal();
},
// on_error
[state](std::exception_ptr e) {
state->out.on_error(e);
},
// on_completed
[state, check_complete]() {
auto& completed = state->other_completed;
completed = true;
check_complete();
}
);
auto selectedSinkOther = on_exception(
[&](){ return state->coordinator.out(sinkOther); },
state->out);
if (selectedSinkOther.empty()) {
return;
}
other->subscribe(std::move(selectedSinkOther.get()));
source.get().subscribe(
state->source_lifetime,
// on_next
[state, check_equal](source_value_type t) {
auto& values = state->source_values;
values.push_back(t);
check_equal();
},
// on_error
[state](std::exception_ptr e) {
state->out.on_error(e);
},
// on_completed
[state, check_complete]() {
auto& completed = state->source_completed;
completed = true;
check_complete();
}
);
}
};
template<class OtherObservable, class BinaryPredicate, class Coordination>
class sequence_equal_factory
{
typedef rxu::decay_t<OtherObservable> other_source_type;
typedef rxu::decay_t<Coordination> coordination_type;
typedef rxu::decay_t<BinaryPredicate> predicate_type;
other_source_type other_source;
coordination_type coordination;
predicate_type pred;
public:
sequence_equal_factory(other_source_type t, predicate_type p, coordination_type sf)
: other_source(std::move(t))
, coordination(std::move(sf))
, pred(std::move(p))
{
}
template<class Observable>
auto operator()(Observable&& source)
-> observable<bool, sequence_equal<rxu::value_type_t<rxu::decay_t<Observable>>, Observable, other_source_type, BinaryPredicate, Coordination>> {
return observable<bool, sequence_equal<rxu::value_type_t<rxu::decay_t<Observable>>, Observable, other_source_type, BinaryPredicate, Coordination>>(
sequence_equal<rxu::value_type_t<rxu::decay_t<Observable>>, Observable, other_source_type, BinaryPredicate, Coordination>(std::forward<Observable>(source), other_source, pred, coordination));
}
};
}
template<class OtherObservable>
inline auto sequence_equal(OtherObservable&& t)
-> detail::sequence_equal_factory<OtherObservable, rxu::equal_to<>, identity_one_worker> {
return detail::sequence_equal_factory<OtherObservable, rxu::equal_to<>, identity_one_worker>(std::forward<OtherObservable>(t), rxu::equal_to<>(), identity_current_thread());
}
template<class OtherObservable, class BinaryPredicate, class Check = typename std::enable_if<!is_coordination<BinaryPredicate>::value>::type>
inline auto sequence_equal(OtherObservable&& t, BinaryPredicate&& pred)
-> detail::sequence_equal_factory<OtherObservable, BinaryPredicate, identity_one_worker> {
return detail::sequence_equal_factory<OtherObservable, BinaryPredicate, identity_one_worker>(std::forward<OtherObservable>(t), std::forward<BinaryPredicate>(pred), identity_current_thread());
}
template<class OtherObservable, class Coordination, class Check = typename std::enable_if<is_coordination<Coordination>::value>::type>
inline auto sequence_equal(OtherObservable&& t, Coordination&& cn)
-> detail::sequence_equal_factory<OtherObservable, rxu::equal_to<>, Coordination> {
return detail::sequence_equal_factory<OtherObservable, rxu::equal_to<>, Coordination>(std::forward<OtherObservable>(t), rxu::equal_to<>(), std::forward<Coordination>(cn));
}
template<class OtherObservable, class BinaryPredicate, class Coordination>
inline auto sequence_equal(OtherObservable&& t, BinaryPredicate&& pred, Coordination&& cn)
-> detail::sequence_equal_factory<OtherObservable, BinaryPredicate, Coordination> {
return detail::sequence_equal_factory<OtherObservable, BinaryPredicate, Coordination>(std::forward<OtherObservable>(t), std::forward<BinaryPredicate>(pred), std::forward<Coordination>(cn));
}
}
}
#endif<|fim▁end|>
|
mutable std::list<other_source_value_type> other_values;
|
<|file_name|>test_pep425tags.py<|end_file_name|><|fim▁begin|>import sys
from mock import patch
from pip import pep425tags
class TestPEP425Tags(object):
def mock_get_config_var(self, **kwd):
"""
Patch sysconfig.get_config_var for arbitrary keys.
"""
import pip.pep425tags
get_config_var = pip.pep425tags.sysconfig.get_config_var
def _mock_get_config_var(var):
if var in kwd:
return kwd[var]
return get_config_var(var)
return _mock_get_config_var
def abi_tag_unicode(self, flags, config_vars):
"""
Used to test ABI tags, verify correct use of the `u` flag
"""
import pip.pep425tags
config_vars.update({'SOABI': None})
base = pip.pep425tags.get_abbr_impl() + pip.pep425tags.get_impl_ver()
if sys.version_info < (3, 3):
config_vars.update({'Py_UNICODE_SIZE': 2})
mock_gcf = self.mock_get_config_var(**config_vars)
with patch('pip.pep425tags.sysconfig.get_config_var', mock_gcf):
abi_tag = pip.pep425tags.get_abi_tag()
assert abi_tag == base + flags
config_vars.update({'Py_UNICODE_SIZE': 4})
mock_gcf = self.mock_get_config_var(**config_vars)
with patch('pip.pep425tags.sysconfig.get_config_var', mock_gcf):
abi_tag = pip.pep425tags.get_abi_tag()
assert abi_tag == base + flags + 'u'
else:
# On Python >= 3.3, UCS-4 is essentially permanently enabled, and
# Py_UNICODE_SIZE is None. SOABI on these builds does not include
# the 'u' so manual SOABI detection should not do so either.
config_vars.update({'Py_UNICODE_SIZE': None})
mock_gcf = self.mock_get_config_var(**config_vars)
with patch('pip.pep425tags.sysconfig.get_config_var', mock_gcf):
abi_tag = pip.pep425tags.get_abi_tag()
assert abi_tag == base + flags
def test_broken_sysconfig(self):
"""
Test that pep425tags still works when sysconfig is broken.
Can be a problem on Python 2.7
Issue #1074.
"""
import pip.pep425tags
def raises_ioerror(var):
raise IOError("I have the wrong path!")
with patch('pip.pep425tags.sysconfig.get_config_var', raises_ioerror):
assert len(pip.pep425tags.get_supported())
def test_no_hyphen_tag(self):
"""
Test that no tag contains a hyphen.
"""
import pip.pep425tags
mock_gcf = self.mock_get_config_var(SOABI='cpython-35m-darwin')
with patch('pip.pep425tags.sysconfig.get_config_var', mock_gcf):
supported = pip.pep425tags.get_supported()
for (py, abi, plat) in supported:
assert '-' not in py
assert '-' not in abi
assert '-' not in plat
def test_manual_abi_noflags(self):
"""
Test that no flags are set on a non-PyDebug, non-Pymalloc ABI tag.
"""
self.abi_tag_unicode('', {'Py_DEBUG': False, 'WITH_PYMALLOC': False})
def test_manual_abi_d_flag(self):
"""<|fim▁hole|> def test_manual_abi_m_flag(self):
"""
Test that the `m` flag is set on a non-PyDebug, Pymalloc ABI tag.
"""
self.abi_tag_unicode('m', {'Py_DEBUG': False, 'WITH_PYMALLOC': True})
def test_manual_abi_dm_flags(self):
"""
Test that the `dm` flags are set on a PyDebug, Pymalloc ABI tag.
"""
self.abi_tag_unicode('dm', {'Py_DEBUG': True, 'WITH_PYMALLOC': True})
class TestManylinux1Tags(object):
@patch('pip.pep425tags.get_platform', lambda: 'linux_x86_64')
@patch('pip.utils.glibc.have_compatible_glibc', lambda major, minor: True)
def test_manylinux1_compatible_on_linux_x86_64(self):
"""
Test that manylinux1 is enabled on linux_x86_64
"""
assert pep425tags.is_manylinux1_compatible()
@patch('pip.pep425tags.get_platform', lambda: 'linux_i686')
@patch('pip.utils.glibc.have_compatible_glibc', lambda major, minor: True)
def test_manylinux1_compatible_on_linux_i686(self):
"""
Test that manylinux1 is enabled on linux_i686
"""
assert pep425tags.is_manylinux1_compatible()
@patch('pip.pep425tags.get_platform', lambda: 'linux_x86_64')
@patch('pip.utils.glibc.have_compatible_glibc', lambda major, minor: False)
def test_manylinux1_2(self):
"""
Test that manylinux1 is disabled with incompatible glibc
"""
assert not pep425tags.is_manylinux1_compatible()
@patch('pip.pep425tags.get_platform', lambda: 'arm6vl')
@patch('pip.utils.glibc.have_compatible_glibc', lambda major, minor: True)
def test_manylinux1_3(self):
"""
Test that manylinux1 is disabled on arm6vl
"""
assert not pep425tags.is_manylinux1_compatible()
@patch('pip.pep425tags.get_platform', lambda: 'linux_x86_64')
@patch('pip.utils.glibc.have_compatible_glibc', lambda major, minor: True)
@patch('sys.platform', 'linux2')
def test_manylinux1_tag_is_first(self):
"""
Test that the more specific tag manylinux1 comes first.
"""
groups = {}
for pyimpl, abi, arch in pep425tags.get_supported():
groups.setdefault((pyimpl, abi), []).append(arch)
for arches in groups.values():
if arches == ['any']:
continue
# Expect the most specific arch first:
if len(arches) == 3:
assert arches == ['manylinux1_x86_64', 'linux_x86_64', 'any']
else:
assert arches == ['manylinux1_x86_64', 'linux_x86_64']<|fim▁end|>
|
Test that the `d` flag is set on a PyDebug, non-Pymalloc ABI tag.
"""
self.abi_tag_unicode('d', {'Py_DEBUG': True, 'WITH_PYMALLOC': False})
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use std::fmt;
use ::Memory;
pub mod opcodes;
pub mod opcode;
#[derive(Clone, Copy)]
pub enum Register8 {
A,
F,
B,
C,
D,
E,
H,
L,
}
#[derive(Copy, Clone)]
pub enum Register16 {
AF,
BC,
DE,
HL,
SP,
PC,
}
pub enum Flag {
Z,
NZ,
N,
H,
C,
NC,
}
pub struct Cpu {
/// Accumulator register
a: u8,
/// Flags register
f: u8,
b: u8,
c: u8,
d: u8,
e: u8,
h: u8,
l: u8,
sp: u16,
pc: u16,
timer: u8,
}
impl Cpu {
/// Init a new Cpu instance
pub fn new() -> Cpu {
Cpu {
a: 0x0,
f: 0x0,
b: 0x0,
c: 0x0,
d: 0x0,
e: 0x0,
h: 0x0,
l: 0x0,
sp: 0x00,
pc: 0x00,
timer: 0x0,
}
}
/// Get a 8bit value from register
pub fn get_8(&self, reg: Register8) -> u8 {
match reg {
Register8::A => self.a,
Register8::F => self.f,
Register8::B => self.b,
Register8::C => self.c,
Register8::D => self.d,
Register8::E => self.e,
Register8::H => self.h,
Register8::L => self.l,
}
}
/// Set a 8bit value to a register
pub fn set_8(&mut self, reg: Register8, v: u8) {
match reg {
Register8::A => self.a = v,
Register8::F => self.f = v,
Register8::B => self.b = v,
Register8::C => self.c = v,
Register8::D => self.d = v,
Register8::E => self.e = v,
Register8::H => self.h = v,
Register8::L => self.l = v,
};
}
/// Get a 16bit value from a register
pub fn get_16(&self, reg: Register16) -> u16 {
match reg {
Register16::AF => join_bytes!(self.a, self.f),
Register16::BC => join_bytes!(self.b, self.c),
Register16::DE => join_bytes!(self.d, self.e),
Register16::HL => join_bytes!(self.h, self.l),
Register16::SP => self.sp,
Register16::PC => self.pc,
}
}
/// Set a 16bit value to a register
pub fn set_16(&mut self, reg: Register16, v: u16) {
match reg {
Register16::AF => {
self.a = high_byte!(v);
self.f = low_byte!(v);
}
Register16::BC => {
self.b = high_byte!(v);
self.c = low_byte!(v);
}
Register16::DE => {
self.d = high_byte!(v);
self.e = low_byte!(v);
}
Register16::HL => {
self.h = high_byte!(v);
self.l = low_byte!(v);
}
Register16::SP => self.sp = v,
Register16::PC => self.pc = v,
}
}
/// Set a flags
pub fn set_flag(&mut self, flag: Flag, set: bool) {
let f = self.get_8(Register8::F);
match (flag, set) {
(Flag::Z, true) => self.set_8(Register8::F, f | 0b1000_0000),
(Flag::N, true) => self.set_8(Register8::F, f | 0b0100_0000),
(Flag::H, true) => self.set_8(Register8::F, f | 0b0010_0000),
(Flag::C, true) => self.set_8(Register8::F, f | 0b0001_0000),
(Flag::Z, false) => self.set_8(Register8::F, f & 0b0111_1111),
(Flag::N, false) => self.set_8(Register8::F, f & 0b1011_1111),
(Flag::H, false) => self.set_8(Register8::F, f & 0b1101_1111),
(Flag::C, false) => self.set_8(Register8::F, f & 0b1110_1111),
_ => unreachable!(),
}
}
pub fn get_flag(&self, flag: Flag) -> bool {
let f = self.get_8(Register8::F);
match flag {
Flag::C => ((f & 0b0001_0000) as u8 >> 4 ) == 1,
Flag::H => ((f & 0b0010_0000) as u8 >> 5 ) == 1,
Flag::N => ((f & 0b0100_0000) as u8 >> 6 ) == 1,
Flag::Z => ((f & 0b1000_0000) as u8 >> 7 ) == 1,
Flag::NC => ((f & 0b0001_0000) as u8 >> 4 ) == 0,
Flag::NZ => ((f & 0b1000_0000) as u8 >> 7 ) == 0,
}
}
/// Set all the flags return to false
pub fn reset_flags(&mut self) {
self.set_flag(Flag::Z, false);
self.set_flag(Flag::N, false);
self.set_flag(Flag::H, false);
self.set_flag(Flag::C, false);
}
/// Inc 8bit register by 1
pub fn inc_8(&mut self, reg: Register8) {
match reg {
Register8::A => self.a += 1,
Register8::F => self.f += 1,
Register8::B => self.b += 1,
Register8::C => self.c += 1,
Register8::D => self.d += 1,
Register8::E => self.e += 1,
Register8::H => self.h += 1,
Register8::L => self.l += 1,
};
}
/// Inc 16bit register by 1
pub fn inc_16(&mut self, reg: Register16) {
let v: u16 = self.get_16(reg) + 1;
match reg {
Register16::AF => {<|fim▁hole|> Register16::BC => {
self.b = high_byte!(v);
self.c = low_byte!(v);
}
Register16::DE => {
self.d = high_byte!(v);
self.e = low_byte!(v);
}
Register16::HL => {
self.h = high_byte!(v);
self.l = low_byte!(v);
}
Register16::SP => self.sp = v,
Register16::PC => self.pc = v,
}
}
/// Inc PC by x
pub fn inc_pc(&mut self, inc: u8) {
self.pc += inc as u16;
}
}
impl fmt::Debug for Cpu {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f,
"=== CPU DEBUG ===\n\
> A <{:#x}> ({0})\n\
> F (flag) <{:#x}> ({1})\n\
> B <{:#x}> ({2})\n\
> C <{:#x}> ({3})\n\
> D <{:#x}> ({4})\n\
> E <{:#x}> ({5})\n\
> H <{:#x}> ({6})\n\
> L <{:#x}> ({7})\n\
> SP <{:#x}> ({8})\n\
> PC <{:#x}> ({9})\n\
> C: {} H: {} N: {} Z: {} NC: {}, NZ: {}",
self.get_8(Register8::A),
self.get_8(Register8::F),
self.get_8(Register8::B),
self.get_8(Register8::C),
self.get_8(Register8::D),
self.get_8(Register8::E),
self.get_8(Register8::H),
self.get_8(Register8::L),
self.get_16(Register16::SP),
self.get_16(Register16::PC),
self.get_flag(Flag::C),
self.get_flag(Flag::H),
self.get_flag(Flag::N),
self.get_flag(Flag::Z),
self.get_flag(Flag::NC),
self.get_flag(Flag::NZ))
}
}
#[test]
fn set_get() {
let mut cpu = Cpu::new();
cpu.set_8(Register8::A, 50);
cpu.set_8(Register8::F, 51);
cpu.set_8(Register8::B, 52);
cpu.set_8(Register8::C, 53);
cpu.set_8(Register8::D, 54);
cpu.set_8(Register8::E, 55);
cpu.set_8(Register8::H, 56);
cpu.set_8(Register8::L, 57);
assert_eq!(cpu.get_8(Register8::A), 50);
assert_eq!(cpu.get_8(Register8::F), 51);
assert_eq!(cpu.get_8(Register8::B), 52);
assert_eq!(cpu.get_8(Register8::C), 53);
assert_eq!(cpu.get_8(Register8::D), 54);
assert_eq!(cpu.get_8(Register8::E), 55);
assert_eq!(cpu.get_8(Register8::H), 56);
assert_eq!(cpu.get_8(Register8::L), 57);
cpu.set_16(Register16::AF, 50_000);
cpu.set_16(Register16::BC, 51_000);
cpu.set_16(Register16::DE, 52_000);
cpu.set_16(Register16::HL, 53_000);
cpu.set_16(Register16::SP, 54_000);
cpu.set_16(Register16::PC, 55_000);
assert_eq!(cpu.get_16(Register16::AF), 50_000);
assert_eq!(cpu.get_16(Register16::BC), 51_000);
assert_eq!(cpu.get_16(Register16::DE), 52_000);
assert_eq!(cpu.get_16(Register16::HL), 53_000);
assert_eq!(cpu.get_16(Register16::SP), 54_000);
assert_eq!(cpu.get_16(Register16::PC), 55_000);
cpu.inc_16(Register16::AF);
cpu.inc_16(Register16::BC);
cpu.inc_16(Register16::DE);
cpu.inc_16(Register16::HL);
cpu.inc_16(Register16::SP);
cpu.inc_16(Register16::PC);
assert_eq!(cpu.get_16(Register16::AF), 50_001);
assert_eq!(cpu.get_16(Register16::BC), 51_001);
assert_eq!(cpu.get_16(Register16::DE), 52_001);
assert_eq!(cpu.get_16(Register16::HL), 53_001);
assert_eq!(cpu.get_16(Register16::SP), 54_001);
assert_eq!(cpu.get_16(Register16::PC), 55_001);
}
#[test]
fn flags() {
let mut cpu = Cpu::new();
cpu.set_flag(Flag::Z, true);
assert_eq!(cpu.get_8(Register8::F), 0b10000000);
cpu.set_flag(Flag::N, true);
assert_eq!(cpu.get_8(Register8::F), 0b11000000);
cpu.set_flag(Flag::H, true);
assert_eq!(cpu.get_8(Register8::F), 0b11100000);
cpu.set_flag(Flag::C, true);
assert_eq!(cpu.get_8(Register8::F), 0b11110000);
cpu.set_flag(Flag::Z, false);
assert_eq!(cpu.get_8(Register8::F), 0b01110000);
cpu.set_flag(Flag::N, false);
assert_eq!(cpu.get_8(Register8::F), 0b00110000);
cpu.set_flag(Flag::H, false);
assert_eq!(cpu.get_8(Register8::F), 0b00010000);
cpu.set_flag(Flag::C, false);
assert_eq!(cpu.get_8(Register8::F), 0b00000000);
}<|fim▁end|>
|
self.a = high_byte!(v);
self.f = low_byte!(v);
}
|
<|file_name|>generate_docs.py<|end_file_name|><|fim▁begin|>import os
import pydoc
import sys
class DocTree:
def __init__(self, src, dest):
self.basepath = os.getcwd()
sys.path.append(os.path.join(self.basepath, src))
self.src = src
self.dest = dest
self._make_dest(dest)
self._make_docs(src)<|fim▁hole|> def _make_dest(self, dest):
path = os.path.join(self.basepath, dest)
if os.path.isdir(path):
os.rmdir(path)
os.makedirs(path)
def _make_docs(self, src):
print('making htmls for ' + src)
pydoc.writedocs(src)
print(os.listdir())
def _move_docs(self, dest):
for f in os.listdir():
if f.endswith('.html'):
_dest = os.path.join(dest, f)
os.rename(f, _dest)
def main():
dest = 'docs'
src = 'vcx/api'
src = os.path.join(os.getcwd(), src)
DocTree(src, dest)
if __name__ == '__main__':
main()<|fim▁end|>
|
self._move_docs(dest)
|
<|file_name|>scratchpad.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Display number of scratchpad windows and urgency hints.
Configuration parameters:
cache_timeout: refresh interval for i3-msg or swaymsg (default 5)
format: display format for this module
(default "\u232b [\?color=scratchpad {scratchpad}]")
thresholds: specify color thresholds to use
(default [(0, "darkgray"), (1, "violet")])
Format placeholders:
{scratchpad} number of scratchpads
{urgent} number of urgent scratchpads
Color thresholds:
xxx: print a color based on the value of `xxx` placeholder
Optional:
i3ipc: an improved python library to control i3wm and sway
Examples:
```
# hide zero scratchpad
scratchpad {
format = '[\?not_zero \u232b [\?color=scratchpad {scratchpad}]]'
}
# hide non-urgent scratchpad
scratchpad {
format = '[\?not_zero \u232b {urgent}]'
}
# bring up scratchpads on clicks
scratchpad {
on_click 1 = 'scratchpad show'
}
# add more colors
scratchpad {
thresholds = [
(0, "darkgray"), (1, "violet"), (2, "deepskyblue"), (3, "lime"),
(4, "yellow"), (5, "orange"), (6, "red"), (7, "tomato"),
]
}
```
@author shadowprince (counter), cornerman (async)
@license Eclipse Public License (counter), BSD (async)
SAMPLE OUTPUT
[{'full_text': '\u232b '}, {'full_text': u'0', 'color': '#a9a9a9'}]
violet
[{'full_text': '\u232b '}, {'full_text': u'5', 'color': '#ee82ee'}]
urgent
[{'full_text': '\u232b URGENT 1', 'urgent': True}]
"""
STRING_ERROR = "invalid ipc `{}`"
class Ipc:
"""
"""
def __init__(self, parent):
self.parent = parent
self.setup(parent)
class I3ipc(Ipc):
"""
i3ipc - an improved python library to control i3wm and sway
"""
def setup(self, parent):
from threading import Thread
self.parent.cache_timeout = self.parent.py3.CACHE_FOREVER
self.scratchpad_data = {"scratchpad": 0, "urgent": 0}
t = Thread(target=self.start)
t.daemon = True
t.start()
def start(self):
from i3ipc import Connection
i3 = Connection()
self.update(i3)
for event in ["window::move", "window::urgent"]:
i3.on(event, self.update)
i3.main()
def update(self, i3, event=None):
leaves = i3.get_tree().scratchpad().leaves()
temporary = {
"ipc": self.parent.ipc,
"scratchpad": len(leaves),
"urgent": sum(window.urgent for window in leaves),
}
if self.scratchpad_data != temporary:
self.scratchpad_data = temporary
self.parent.py3.update()
def get_scratchpad_data(self):
return self.scratchpad_data
class Msg(Ipc):
"""
i3-msg - send messages to i3 window manager
swaymsg - send messages to sway window manager
"""
def setup(self, parent):
from json import loads
self.json_loads = loads
wm_msg = {"i3msg": "i3-msg"}.get(parent.ipc, parent.ipc)
self.tree_command = [wm_msg, "-t", "get_tree"]
def get_scratchpad_data(self):
tree = self.json_loads(self.parent.py3.command_output(self.tree_command))
leaves = self.find_scratchpad(tree).get("floating_nodes", [])
return {
"ipc": self.parent.ipc,
"scratchpad": len(leaves),
"urgent": sum([window["urgent"] for window in leaves]),
}
def find_scratchpad(self, tree):
if tree.get("name") == "__i3_scratch":
return tree
for x in tree.get("nodes", []):
result = self.find_scratchpad(x)
if result:
return result
return {}
class Py3status:
"""
"""
# available configuration parameters
cache_timeout = 5
format = "\u232b [\?color=scratchpad {scratchpad}]"
thresholds = [(0, "darkgray"), (1, "violet")]
def post_config_hook(self):
# ipc: specify i3ipc, i3-msg, or swaymsg, otherwise auto
self.ipc = getattr(self, "ipc", "")
if self.ipc in ["", "i3ipc"]:
try:
from i3ipc import Connection # noqa f401
self.ipc = "i3ipc"
except Exception:
if self.ipc:
raise # module not found
self.ipc = (self.ipc or self.py3.get_wm_msg()).replace("-", "")
if self.ipc in ["i3ipc"]:
self.backend = I3ipc(self)
elif self.ipc in ["i3msg", "swaymsg"]:
self.backend = Msg(self)
else:
raise Exception(STRING_ERROR.format(self.ipc))
<|fim▁hole|> self.thresholds_init = self.py3.get_color_names_list(self.format)
def scratchpad(self):
scratchpad_data = self.backend.get_scratchpad_data()
for x in self.thresholds_init:
if x in scratchpad_data:
self.py3.threshold_get_color(scratchpad_data[x], x)
response = {
"cached_until": self.py3.time_in(self.cache_timeout),
"full_text": self.py3.safe_format(self.format, scratchpad_data),
}
if scratchpad_data["urgent"]:
response["urgent"] = True
return response
if __name__ == "__main__":
"""
Run module in test mode.
"""
from py3status.module_test import module_test
config = {"format": "\[{ipc}\] [\?color=scratchpad {scratchpad}]"}
module_test(Py3status, config=config)<|fim▁end|>
| |
<|file_name|>rtnl-route-event.rs<|end_file_name|><|fim▁begin|>use std::io::Write;
use std::mem::size_of;
use std::ffi::CStr;
extern crate libc;
extern crate crslmnl as mnl;
use libc::{ c_int, c_char, c_void, socklen_t };
use mnl::linux::netlink as netlink;
use mnl::linux::rtnetlink;
extern {
// const char *inet_ntop(int af, const void *src,
// char *dst, socklen_t size);
fn inet_ntop(af: c_int, src: *const c_void, dst: *mut c_char, size: socklen_t) -> *const c_char;
}
pub const INET_ADDRSTRLEN: usize = 16;
pub const INET6_ADDRSTRLEN: usize = 46;
trait AddrFamily {
fn family(&self) -> c_int;
}
impl AddrFamily for libc::in_addr {
fn family(&self) -> c_int { libc::AF_INET }
}
impl AddrFamily for libc::in6_addr {
fn family(&self) -> c_int { libc::AF_INET6 }
}
fn _inet_ntoa<T: AddrFamily>(addr: &T) -> String {
let mut buf = [0u8; INET6_ADDRSTRLEN];
unsafe {
let rs = inet_ntop(addr.family(), addr as *const _ as *const c_void,
buf.as_mut_ptr() as *mut c_char, INET6_ADDRSTRLEN as socklen_t);
CStr::from_ptr(rs).to_string_lossy().into_owned()
}
}
macro_rules! println_stderr(
($($arg:tt)*) => { {
let r = writeln!(&mut ::std::io::stderr(), $($arg)*);
r.expect("failed printing to stderr");
} }
);
fn data_attr_cb2<'a>(attr: &'a mnl::Attr, tb: &mut [Option<&'a mnl::Attr>]) -> mnl::CbRet {
// skip unsupported attribute in user-space
if let Err(_) = attr.type_valid(rtnetlink::RTAX_MAX as u16) {
return mnl::CbRet::OK;
}
if let Err(errno) = attr.validate(mnl::AttrDataType::U32) {
println_stderr!("mnl_attr_validate: {}", errno);
return mnl::CbRet::ERROR;
}
tb[attr.atype() as usize] = Some(attr);
mnl::CbRet::OK
}
fn attributes_show_ip<T: AddrFamily>(tb: &[Option<&mnl::Attr>]) {
tb[rtnetlink::RTA_TABLE as usize]
.map(|attr| print!("table={} ", attr.u32()));
tb[rtnetlink::RTA_DST as usize]
.map(|attr| print!("dst={} ", _inet_ntoa::<T>(attr.payload())));
tb[rtnetlink::RTA_SRC as usize]
.map(|attr| print!("src={} ", _inet_ntoa::<T>(attr.payload())));
tb[rtnetlink::RTA_OIF as usize]
.map(|attr| print!("oif={} ", attr.u32()));
tb[rtnetlink::RTA_FLOW as usize]
.map(|attr| print!("flow={} ", attr.u32()));
tb[rtnetlink::RTA_PREFSRC as usize]
.map(|attr| print!("prefsrc={} ", _inet_ntoa::<T>(attr.payload())));
tb[rtnetlink::RTA_GATEWAY as usize]
.map(|attr| print!("gw={} ", _inet_ntoa::<T>(attr.payload())));
tb[rtnetlink::RTA_PRIORITY as usize]
.map(|attr| print!("prio={} ", attr.u32()));
tb[rtnetlink::RTA_METRICS as usize]
.map(|attr| {
let mut tbx: [Option<&mnl::Attr>; rtnetlink::RTAX_MAX as usize + 1]
= [None; rtnetlink::RTAX_MAX as usize + 1];<|fim▁hole|> for i in 0..rtnetlink::RTAX_MAX as usize {
tbx[i].map(|attr| print!("metrics[{}]={} ", i, attr.u32()));
}
});
}
fn data_ipv4_attr_cb<'a>(attr: &'a mnl::Attr, tb: &mut [Option<&'a mnl::Attr>]) -> mnl::CbRet {
// skip unsupported attribute in user-space
if let Err(_) = attr.type_valid(rtnetlink::RTA_MAX) {
return mnl::CbRet::OK;
}
let atype = attr.atype();
match atype {
n if (n == rtnetlink::RTA_TABLE ||
n == rtnetlink::RTA_DST ||
n == rtnetlink::RTA_SRC ||
n == rtnetlink::RTA_DST ||
n == rtnetlink::RTA_SRC ||
n == rtnetlink::RTA_OIF ||
n == rtnetlink::RTA_FLOW ||
n == rtnetlink::RTA_PREFSRC ||
n == rtnetlink::RTA_GATEWAY ||
n == rtnetlink::RTA_PRIORITY) => {
if let Err(errno) = attr.validate(mnl::AttrDataType::U32) {
println_stderr!("mnl_attr_validate - {}: {}", atype, errno);
return mnl::CbRet::ERROR;
}
},
n if n == rtnetlink::RTA_METRICS => {
if let Err(errno) = attr.validate(mnl::AttrDataType::NESTED) {
println_stderr!("mnl_attr_validate - {}: {}", atype, errno);
return mnl::CbRet::ERROR;
}
},
_ => {},
}
tb[atype as usize] = Some(attr);
mnl::CbRet::OK
}
fn data_ipv6_attr_cb<'a>(attr: &'a mnl::Attr, tb: &mut [Option<&'a mnl::Attr>]) -> mnl::CbRet {
// skip unsupported attribute in user-space
if let Err(_) = attr.type_valid(rtnetlink::RTA_MAX) {
return mnl::CbRet::OK;
}
let atype = attr.atype();
match atype {
n if (n == rtnetlink::RTA_TABLE ||
n == rtnetlink::RTA_OIF ||
n == rtnetlink::RTA_FLOW ||
n == rtnetlink::RTA_PRIORITY) => {
if let Err(errno) = attr.validate(mnl::AttrDataType::U32) {
println_stderr!("mnl_attr_validate - {}: {}", atype, errno);
return mnl::CbRet::ERROR;
}
},
n if (n == rtnetlink::RTA_DST ||
n == rtnetlink::RTA_SRC ||
n == rtnetlink::RTA_PREFSRC ||
n == rtnetlink::RTA_GATEWAY) => {
if let Err(errno) = attr.validate2(mnl::AttrDataType::BINARY, size_of::<libc::in6_addr>()) {
println_stderr!("mnl_attr_validate - {}: {}", atype, errno);
return mnl::CbRet::ERROR;
}
},
n if n == rtnetlink::RTA_METRICS => {
if let Err(errno) = attr.validate(mnl::AttrDataType::NESTED) {
println_stderr!("mnl_attr_validate - {}: {}", atype, errno);
return mnl::CbRet::ERROR;
}
},
_ => {},
}
tb[atype as usize] = Some(attr);
mnl::CbRet::OK
}
fn data_cb(nlh: mnl::Nlmsg, _: &mut Option<u8>) -> mnl::CbRet {
let rm = nlh.payload::<rtnetlink::Rtmsg>();
match *nlh.nlmsg_type {
n if n == rtnetlink::RTM_NEWROUTE => print!("[NEW] "),
n if n == rtnetlink::RTM_DELROUTE => print!("[DEL] "),
_ => {},
}
// protocol family = AF_INET | AF_INET6 //
print!("family={} ", rm.rtm_family);
// destination CIDR, eg. 24 or 32 for IPv4
print!("dst_len={} ", rm.rtm_dst_len);
// source CIDR
print!("src_len={} ", rm.rtm_src_len);
// type of service (TOS), eg. 0
print!("tos={} ", rm.rtm_tos);
// table id:
// RT_TABLE_UNSPEC = 0
//
// ... user defined values ...
//
// RT_TABLE_COMPAT = 252
// RT_TABLE_DEFAULT = 253
// RT_TABLE_MAIN = 254
// RT_TABLE_LOCAL = 255
// RT_TABLE_MAX = 0xFFFFFFFF
//
// Synonimous attribute: RTA_TABLE.
print!("table={} ", rm.rtm_table);
// type:
// RTN_UNSPEC = 0
// RTN_UNICAST = 1
// RTN_LOCAL = 2
// RTN_BROADCAST = 3
// RTN_ANYCAST = 4
// RTN_MULTICAST = 5
// RTN_BLACKHOLE = 6
// RTN_UNREACHABLE = 7
// RTN_PROHIBIT = 8
// RTN_THROW = 9
// RTN_NAT = 10
// RTN_XRESOLVE = 11
// __RTN_MAX = 12
print!("type={} ", rm.rtm_type);
// scope:
// RT_SCOPE_UNIVERSE = 0 : everywhere in the universe
//
// ... user defined values ...
//
// RT_SCOPE_SITE = 200
// RT_SCOPE_LINK = 253 : destination attached to link
// RT_SCOPE_HOST = 254 : local address
// RT_SCOPE_NOWHERE = 255 : not existing destination
print!("scope={} ", rm.rtm_scope);
// protocol:
// RTPROT_UNSPEC = 0
// RTPROT_REDIRECT = 1
// RTPROT_KERNEL = 2 : route installed by kernel
// RTPROT_BOOT = 3 : route installed during boot
// RTPROT_STATIC = 4 : route installed by administrator
//
// Values >= RTPROT_STATIC are not interpreted by kernel, they are
// just user-defined.
print!("proto={} ", rm.rtm_protocol);
// flags:
// RTM_F_NOTIFY = 0x100: notify user of route change
// RTM_F_CLONED = 0x200: this route is cloned
// RTM_F_EQUALIZE = 0x400: Multipath equalizer: NI
// RTM_F_PREFIX = 0x800: Prefix addresses
print!("flags={:x} ", rm.rtm_flags);
let mut tb: [Option<&mnl::Attr>; rtnetlink::RTA_MAX as usize + 1]
= [None; rtnetlink::RTA_MAX as usize + 1];
match rm.rtm_family as c_int {
libc::AF_INET => {
let _ = nlh.parse(size_of::<rtnetlink::Rtmsg>(), data_ipv4_attr_cb, &mut tb);
attributes_show_ip::<libc::in_addr>(&tb);
},
libc::AF_INET6 => {
let _ = nlh.parse(size_of::<rtnetlink::Rtmsg>(), data_ipv6_attr_cb, &mut tb);
attributes_show_ip::<libc::in6_addr>(&tb);
},
_ => unreachable!()
}
println!("");
mnl::CbRet::OK
}
fn main() {
let nl = mnl::Socket::open(netlink::Family::ROUTE)
.unwrap_or_else(|errno| panic!("mnl_socket_open: {}", errno));
nl.bind(rtnetlink::RTMGRP_IPV4_ROUTE | rtnetlink::RTMGRP_IPV6_ROUTE,
mnl::SOCKET_AUTOPID)
.unwrap_or_else(|errno| panic!("mnl_socket_bind: {}", errno));
let mut buf = vec![0u8; mnl::SOCKET_BUFFER_SIZE()];
loop {
let nrecv = nl.recvfrom(&mut buf)
.unwrap_or_else(|errno| panic!("mnl_socket_sendto: {}", errno));
if mnl::cb_run(&buf[0..nrecv], 0, 0, Some(data_cb), &mut None)
.unwrap_or_else(|errno| panic!("mnl_cb_run: {}", errno))
== mnl::CbRet::STOP {
break;
}
}
let _ = nl.close();
}<|fim▁end|>
|
let _ = attr.parse_nested(data_attr_cb2, &mut tbx);
|
<|file_name|>FileProcessor.java<|end_file_name|><|fim▁begin|>package com.gaojice.diskviewer.processor;
import java.io.File;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import com.gaojice.diskviewer.dao.DiskFileDao;
import com.gaojice.diskviewer.entity.DiskFile;
public class FileProcessor implements Runnable {
private DiskFileDao diskFileDao;
private File root;<|fim▁hole|> super();
this.diskFileDao = diskFileDao;
this.root = root;
this.p = p;
this.taskExecutor = taskExecutor;
}
public void setTaskExecutor(ThreadPoolTaskExecutor taskExecutor) {
this.taskExecutor = taskExecutor;
}
public void setDiskFileDao(DiskFileDao diskFileDao) {
this.diskFileDao = diskFileDao;
}
public void setRoot(File root) {
this.root = root;
}
public void setP(DiskFile p) {
this.p = p;
}
public void run() {
DiskFile diskFile = new DiskFile();
diskFile.setParent(p);
diskFile.setName(root.getName());
if (root.isDirectory()) {
diskFile.setName(root.getAbsolutePath());
diskFile.setType("D");
diskFile.setSize(0L);
diskFileDao.insert(diskFile);
File[] children = root.listFiles();
if (children != null) {
for (File child : children) {
FileProcessor fileProcessor = new FileProcessor(diskFileDao, child, diskFile, taskExecutor);
taskExecutor.execute(fileProcessor);
}
}
} else {
diskFile.setType("F");
diskFile.setSize(root.length());
diskFileDao.insert(diskFile);
}
}
}<|fim▁end|>
|
private DiskFile p;
private org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor taskExecutor;
public FileProcessor(DiskFileDao diskFileDao, File root, DiskFile p, ThreadPoolTaskExecutor taskExecutor) {
|
<|file_name|>priorityrep.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2015 Rackspace
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package objectserver
import (
"bytes"
"encoding/json"
"flag"
"fmt"
"io/ioutil"
"math/rand"
"net/http"
"os"
"regexp"
"strconv"
"strings"
"sync"
"time"
"github.com/troubling/hummingbird/common"
"github.com/troubling/hummingbird/common/conf"
"github.com/troubling/hummingbird/common/ring"
"github.com/troubling/hummingbird/common/srv"
"golang.org/x/net/http2"
)
type devLimiter struct {
inUse map[int]int
m sync.Mutex
max int
somethingFinished chan struct{}
}
func (d *devLimiter) start(j *PriorityRepJob) bool {
d.m.Lock()
doable := d.inUse[j.FromDevice.Id] < d.max
doable = doable && d.inUse[j.ToDevice.Id] < d.max
if doable {
d.inUse[j.FromDevice.Id] += 1
d.inUse[j.ToDevice.Id] += 1
}
d.m.Unlock()
return doable
}
func (d *devLimiter) finished(j *PriorityRepJob) {
d.m.Lock()
d.inUse[j.FromDevice.Id] -= 1
d.inUse[j.ToDevice.Id] -= 1
d.m.Unlock()
select {
case d.somethingFinished <- struct{}{}:
default:
}
}
func (d *devLimiter) waitForSomethingToFinish() {
<-d.somethingFinished
}
func SendPriRepJob(job *PriorityRepJob, client common.HTTPClient, userAgent string) (string, bool) {
url := fmt.Sprintf("%s://%s:%d/priorityrep", job.FromDevice.Scheme, job.FromDevice.ReplicationIp, job.FromDevice.ReplicationPort)
jsonned, err := json.Marshal(job)
if err != nil {
return fmt.Sprintf("Failed to serialize job for some reason: %s", err), false
}
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonned))
if err != nil {
return fmt.Sprintf("Failed to create request for some reason: %s", err), false
}
req.Header.Set("User-Agent", userAgent)
req.ContentLength = int64(len(jsonned))
req.Header.Set("Content-Type", "application/json")
resp, err := client.Do(req)
if err != nil {
return fmt.Sprintf("Error moving partition %d: %v",
job.Partition, err), false
}
defer resp.Body.Close()
if resp.StatusCode/100 == 2 {
if data, err := ioutil.ReadAll(resp.Body); err == nil {
prp := PriorityReplicationResult{}
if err = json.Unmarshal(data, &prp); err == nil {
return fmt.Sprintf("Replicating partition %d from %s/%s to %s/%s replicated %d objects with %d errors",
job.Partition, job.FromDevice.Ip, job.FromDevice.Device, job.ToDevice.Ip, job.ToDevice.Device,
prp.ObjectsReplicated, prp.ObjectsErrored), prp.Success
} else {
return fmt.Sprintf("could not get valid response for partition %d: %v",
job.Partition, err), false
}
} else {
return fmt.Sprintf("could not read body forpartition %d: %v",
job.Partition, err), false
}
} else if resp.StatusCode == 404 {
return fmt.Sprintf("partition %d: not found on %s/%s",
job.Partition, job.FromDevice.Ip, job.FromDevice.Device), true
}
return fmt.Sprintf("Bad status code moving partition %d: %d",
job.Partition, resp.StatusCode), false
}
// doPriRepJobs executes a list of PriorityRepJobs, limiting concurrent jobs per device to deviceMax.
func doPriRepJobs(jobs []*PriorityRepJob, deviceMax int, client common.HTTPClient, userAgent string) []uint64 {
limiter := &devLimiter{inUse: make(map[int]int), max: deviceMax, somethingFinished: make(chan struct{}, 1)}
wg := sync.WaitGroup{}
badParts := []uint64{}
var badPartsLock sync.Mutex
for len(jobs) > 0 {
foundDoable := false
for i := range jobs {
if !limiter.start(jobs[i]) {
continue
}
foundDoable = true
wg.Add(1)
go func(job *PriorityRepJob) {
defer wg.Done()
defer limiter.finished(job)
res, ok := SendPriRepJob(job, client, userAgent)
fmt.Println(res)
if !ok {
badPartsLock.Lock()
badParts = append(badParts, job.Partition)
badPartsLock.Unlock()
}
}(jobs[i])
jobs = append(jobs[:i], jobs[i+1:]...)
break
}
if !foundDoable {
limiter.waitForSomethingToFinish()
}
}
wg.Wait()
return badParts
}
// getPartMoveJobs takes two rings and creates a list of jobs for any partition moves between them.
func getPartMoveJobs(oldRing, newRing ring.Ring, overrideParts []uint64, policy int) []*PriorityRepJob {
allNewDevices := map[string]bool{}
for _, dev := range newRing.AllDevices() {
if dev == nil {
continue
}
allNewDevices[fmt.Sprintf("%s:%d/%s", dev.Ip, dev.Port, dev.Device)] = true
}
jobs := make([]*PriorityRepJob, 0)
for i := uint64(0); true; i++ {
partition := i
if len(overrideParts) > 0 {
if int(partition) < len(overrideParts) {
partition = overrideParts[partition]
} else {
break
}
}
olddevs := oldRing.GetNodes(partition)
newdevs := newRing.GetNodes(partition)
if olddevs == nil || newdevs == nil {
break
}
for i := range olddevs {
if olddevs[i].Id != newdevs[i].Id {
// TODO: handle if a node just changes positions, which doesn't happen, but isn't against the contract.
fromDev := olddevs[i]
if _, ok := allNewDevices[fmt.Sprintf("%s:%d/%s", fromDev.Ip, fromDev.Port, fromDev.Device)]; !ok {
fromDev = olddevs[(i+1)%len(olddevs)]
}
jobs = append(jobs, &PriorityRepJob{
Partition: partition,
FromDevice: fromDev,
ToDevice: newdevs[i],
Policy: policy,
})
}
}
}
return jobs
}
func objectRingPolicyIndex(s string) (int, error) {
if !strings.Contains(s, "object") {
return 0, fmt.Errorf("object not in string: %v", s)
}
re := regexp.MustCompile(`object-(\d*)`)
match := re.FindStringSubmatch(s)
if match == nil {
return 0, nil
} else {
policyIdx, err := strconv.Atoi(match[1])
if err != nil {
return 0, fmt.Errorf("invalid policy index: %v\n", match[1])
}
return policyIdx, nil
}
}
<|fim▁hole|> flags := flag.NewFlagSet("moveparts", flag.ExitOnError)
policyName := flags.String("P", "", "policy to use")
certFile := flags.String("certfile", "", "Cert file to use for setting up https client")
keyFile := flags.String("keyfile", "", "Key file to use for setting up https client")
flags.Usage = func() {
fmt.Fprintf(os.Stderr, "USAGE: hummingbird moveparts [old ringfile]\n")
flags.PrintDefaults()
}
flags.Parse(args)
if len(flags.Args()) != 1 {
flags.Usage()
return 1
}
policyIndex := 0
if *policyName != "" {
policies, err := conf.GetPolicies()
if err != nil {
fmt.Fprintln(os.Stderr, "Unable to load policies:", err)
return 1
}
p := policies.NameLookup(*policyName)
if p == nil {
fmt.Fprintf(os.Stderr, "Unknown policy named %q\n", *policyName)
return 1
}
policyIndex = p.Index
}
oldPolicy, policyErr := objectRingPolicyIndex(flags.Arg(0))
if policyErr != nil {
fmt.Println("Invalid ring:", policyErr)
return 1
}
if oldPolicy != policyIndex {
fmt.Printf("Old policy: %v doesn't match specified policy: %v\n", oldPolicy, policyIndex)
return 1
}
hashPathPrefix, hashPathSuffix, err := cnf.GetHashPrefixAndSuffix()
if err != nil {
fmt.Println("Unable to load hash path prefix and suffix:", err)
return 1
}
oldRing, err := ring.LoadRing(flags.Arg(0), hashPathPrefix, hashPathSuffix)
if err != nil {
fmt.Println("Unable to load old ring:", err)
return 1
}
curRing, err := ring.GetRing("object", hashPathPrefix, hashPathSuffix, policyIndex)
if err != nil {
fmt.Println("Unable to load current ring:", err)
return 1
}
transport := &http.Transport{
MaxIdleConnsPerHost: 100,
MaxIdleConns: 0,
}
if *certFile != "" && *keyFile != "" {
tlsConf, err := common.NewClientTLSConfig(*certFile, *keyFile)
if err != nil {
fmt.Println("Error getting TLS config:", err)
return 1
}
transport.TLSClientConfig = tlsConf
if err = http2.ConfigureTransport(transport); err != nil {
fmt.Println("Error setting up http2:", err)
return 1
}
}
// TODO: Do we want to trace requests with this client?
client := &http.Client{Timeout: time.Hour,
Transport: transport,
}
badParts := []uint64{}
for {
jobs := getPartMoveJobs(oldRing, curRing, badParts, policyIndex)
lastRun := len(jobs)
for i := len(jobs) - 1; i > 0; i-- { // shuffle jobs list
j := rand.Intn(i + 1)
jobs[j], jobs[i] = jobs[i], jobs[j]
}
fmt.Println("Job count:", len(jobs))
badParts = doPriRepJobs(jobs, 2, client, "doMoveParts")
if len(badParts) == 0 {
break
} else {
fmt.Printf("Finished run of partitions. retrying %d.\n", len(badParts))
fmt.Println("NOTE: This will loop on any partitions not found on any primary")
if lastRun == len(badParts) {
time.Sleep(time.Minute * 5)
} else {
time.Sleep(time.Second * 5)
}
}
}
fmt.Println("Done sending jobs.")
return 0
}
// MoveParts takes two object .ring.gz files as []string{oldRing, newRing} and dispatches priority replication jobs to rebalance data in line with any ring changes.
func MoveParts(args []string, cnf srv.ConfigLoader) {
ret := doMoveParts(args, cnf)
os.Exit(ret)
}
// getRestoreDeviceJobs takes an ip address and device name, and creates a list of jobs to restore that device's data from peers.
func getRestoreDeviceJobs(theRing ring.Ring, ip string, devName string, srcRegionOnly int, allPeers bool, overrideParts []uint64, policy int) []*PriorityRepJob {
jobs := make([]*PriorityRepJob, 0)
for i := uint64(0); true; i++ {
partition := i
if len(overrideParts) > 0 {
if int(partition) < len(overrideParts) {
partition = overrideParts[partition]
} else {
break
}
}
devs := theRing.GetNodes(partition)
if devs == nil {
break
}
var toDev *ring.Device
for _, dev := range devs {
if dev.Device == devName && (dev.Ip == ip || dev.ReplicationIp == ip) {
toDev = dev
break
}
}
if toDev != nil {
foundJob := false
for len(devs) > 0 {
rd := rand.Intn(len(devs))
src := devs[rd]
devs = append(devs[:rd], devs[rd+1:]...)
if src.Device == toDev.Device && (src.Ip == toDev.Ip || src.ReplicationIp == toDev.ReplicationIp) {
continue
}
if srcRegionOnly >= 0 && src.Region != srcRegionOnly {
continue
}
jobs = append(jobs, &PriorityRepJob{
Partition: partition,
FromDevice: src,
ToDevice: toDev,
Policy: policy,
})
foundJob = true
if !allPeers {
break
}
}
if !foundJob {
fmt.Printf("Could not find job for partition: %d\n", partition)
}
}
}
return jobs
}
// RestoreDevice takes an IP address and device name such as []string{"172.24.0.1", "sda1"} and attempts to restores its data from peers.
func RestoreDevice(args []string, cnf srv.ConfigLoader) {
flags := flag.NewFlagSet("restoredevice", flag.ExitOnError)
policyName := flags.String("P", "", "policy to use")
region := flags.Int("region", -1, "restore device only from peers in specified region")
ringLoc := flags.String("r", "", "Specify which ring file to use")
conc := flags.Int("c", 2, "limit of per device concurrency priority repl calls")
full := flags.Bool("f", false, "send priority replicate calls to every qualifying peer primary (slow)")
certFile := flags.String("certfile", "", "Cert file to use for setting up https client")
keyFile := flags.String("keyfile", "", "Key file to use for setting up https client")
flags.Usage = func() {
fmt.Fprintf(os.Stderr, "USAGE: hummingbird restoredevice [ip] [device]\n")
flags.PrintDefaults()
}
flags.Parse(args)
if len(flags.Args()) != 2 {
flags.Usage()
return
}
policyIndex := 0
if *policyName != "" {
policies, err := conf.GetPolicies()
if err != nil {
fmt.Fprintln(os.Stderr, "Unable to load policies:", err)
return
}
p := policies.NameLookup(*policyName)
if p == nil {
fmt.Fprintf(os.Stderr, "Unknown policy named %q\n", *policyName)
return
}
policyIndex = p.Index
}
hashPathPrefix, hashPathSuffix, err := cnf.GetHashPrefixAndSuffix()
if err != nil {
fmt.Println("Unable to load hash path prefix and suffix:", err)
return
}
var objRing ring.Ring
if *ringLoc == "" {
objRing, err = ring.GetRing("object", hashPathPrefix, hashPathSuffix, policyIndex)
if err != nil {
fmt.Println("Unable to load ring:", err)
return
}
} else {
objRing, err = ring.LoadRing(*ringLoc, hashPathPrefix, hashPathSuffix)
if err != nil {
fmt.Println("Unable to load ring:", err)
return
}
}
transport := &http.Transport{
MaxIdleConnsPerHost: 100,
MaxIdleConns: 0,
}
if *certFile != "" && *keyFile != "" {
tlsConf, err := common.NewClientTLSConfig(*certFile, *keyFile)
if err != nil {
fmt.Println("Error getting TLS config:", err)
return
}
transport.TLSClientConfig = tlsConf
if err = http2.ConfigureTransport(transport); err != nil {
fmt.Println("Error setting up http2:", err)
return
}
}
// TODO: Do we want to trace requests with this client?
client := &http.Client{
Timeout: time.Hour * 4,
Transport: transport,
}
badParts := []uint64{}
for {
jobs := getRestoreDeviceJobs(objRing, flags.Arg(0), flags.Arg(1), *region, *full, badParts, policyIndex)
lastRun := len(jobs)
fmt.Println("Job count:", len(jobs))
for i := len(jobs) - 1; i > 0; i-- { // shuffle jobs list
j := rand.Intn(i + 1)
jobs[j], jobs[i] = jobs[i], jobs[j]
}
badParts = doPriRepJobs(jobs, *conc, client, "RestoreDevice")
if len(badParts) == 0 {
break
} else {
fmt.Printf("Finished run of partitions. retrying %d.\n", len(badParts))
fmt.Println("NOTE: This will loop on any partitions not found on any primary")
if lastRun == len(badParts) {
time.Sleep(time.Minute * 5)
} else {
time.Sleep(time.Second * 5)
}
}
}
fmt.Println("Done sending jobs.")
}<|fim▁end|>
|
func doMoveParts(args []string, cnf srv.ConfigLoader) int {
|
<|file_name|>root.rs<|end_file_name|><|fim▁begin|>use alloc::arc::Arc;
use alloc::boxed::Box;
use collections::BTreeMap;
use core::sync::atomic::{AtomicUsize, Ordering};
use spin::RwLock;
use context;
use syscall::error::*;
use syscall::scheme::Scheme;<|fim▁hole|> scheme_ns: SchemeNamespace,
scheme_id: SchemeId,
next_id: AtomicUsize,
handles: RwLock<BTreeMap<usize, Arc<UserInner>>>
}
impl RootScheme {
pub fn new(scheme_ns: SchemeNamespace, scheme_id: SchemeId) -> RootScheme {
RootScheme {
scheme_ns: scheme_ns,
scheme_id: scheme_id,
next_id: AtomicUsize::new(0),
handles: RwLock::new(BTreeMap::new())
}
}
}
impl Scheme for RootScheme {
fn open(&self, path: &[u8], flags: usize, uid: u32, _gid: u32) -> Result<usize> {
if uid == 0 {
let context = {
let contexts = context::contexts();
let context = contexts.current().ok_or(Error::new(ESRCH))?;
Arc::downgrade(&context)
};
let id = self.next_id.fetch_add(1, Ordering::SeqCst);
let inner = {
let mut schemes = scheme::schemes_mut();
let inner = Arc::new(UserInner::new(self.scheme_id, id, flags, context));
schemes.insert(self.scheme_ns, path.to_vec().into_boxed_slice(), |scheme_id| {
inner.scheme_id.store(scheme_id, Ordering::SeqCst);
Arc::new(Box::new(UserScheme::new(Arc::downgrade(&inner))))
})?;
inner
};
self.handles.write().insert(id, inner);
Ok(id)
} else {
Err(Error::new(EACCES))
}
}
fn dup(&self, file: usize, _buf: &[u8]) -> Result<usize> {
let mut handles = self.handles.write();
let inner = {
let inner = handles.get(&file).ok_or(Error::new(EBADF))?;
inner.clone()
};
let id = self.next_id.fetch_add(1, Ordering::SeqCst);
handles.insert(id, inner);
Ok(id)
}
fn read(&self, file: usize, buf: &mut [u8]) -> Result<usize> {
let inner = {
let handles = self.handles.read();
let inner = handles.get(&file).ok_or(Error::new(EBADF))?;
inner.clone()
};
inner.read(buf)
}
fn write(&self, file: usize, buf: &[u8]) -> Result<usize> {
let inner = {
let handles = self.handles.read();
let inner = handles.get(&file).ok_or(Error::new(EBADF))?;
inner.clone()
};
inner.write(buf)
}
fn fevent(&self, file: usize, flags: usize) -> Result<usize> {
let inner = {
let handles = self.handles.read();
let inner = handles.get(&file).ok_or(Error::new(EBADF))?;
inner.clone()
};
inner.fevent(flags)
}
fn fsync(&self, file: usize) -> Result<usize> {
let inner = {
let handles = self.handles.read();
let inner = handles.get(&file).ok_or(Error::new(EBADF))?;
inner.clone()
};
inner.fsync()
}
fn close(&self, file: usize) -> Result<usize> {
self.handles.write().remove(&file).ok_or(Error::new(EBADF)).and(Ok(0))
}
}<|fim▁end|>
|
use scheme::{self, SchemeNamespace, SchemeId};
use scheme::user::{UserInner, UserScheme};
pub struct RootScheme {
|
<|file_name|>websiteServing.js<|end_file_name|><|fim▁begin|>/**
* findRoutingRule - find applicable routing rule from bucket metadata
* @param {RoutingRule []} routingRules - array of routingRule objects
* @param {string} key - object key
* @param {number} [errCode] - error code to match if applicable
* @return {object | undefined} redirectInfo -- comprised of all of the
* keys/values from routingRule.getRedirect() plus
* a key of prefixFromRule and a value of routingRule.condition.keyPrefixEquals
*/
export function findRoutingRule(routingRules, key, errCode) {
if (!routingRules || routingRules.length === 0) {
return undefined;
}
// For AWS compat:
// 1) use first routing rules whose conditions are satisfied
// 2) for matching prefix no need to check closest match. first
// match wins
// 3) there can be a match for a key condition with and without
// error code condition but first one that matches will be the rule
// used. So, if prefix foo without error and first rule has error condition,
// will fall through to next foo rule. But if first foo rule has
// no error condition, will have match on first rule even if later
// there is more specific rule with error condition.
for (let i = 0; i < routingRules.length; i++) {
const prefixFromRule =
routingRules[i].getCondition().keyPrefixEquals;
const errorCodeFromRule =
routingRules[i].getCondition().httpErrorCodeReturnedEquals;
if (prefixFromRule !== undefined) {
if (!key.startsWith(prefixFromRule)) {<|fim▁hole|> }
// add the prefixFromRule to the redirect info
// so we can replaceKeyPrefixWith if that is part of redirect
// rule
const redirectInfo = Object.assign({ prefixFromRule },
routingRules[i].getRedirect());
// have key match so check error code match
if (errorCodeFromRule !== undefined) {
if (errCode === errorCodeFromRule) {
return redirectInfo;
}
// if don't match on both conditions, this is not the rule
// for us
continue;
}
// if no error code condition at all, we have found our match
return redirectInfo;
}
// we have an error code condition but no key condition
if (errorCodeFromRule !== undefined) {
if (errCode === errorCodeFromRule) {
const redirectInfo = Object.assign({},
routingRules[i].getRedirect());
return redirectInfo;
}
continue;
}
return undefined;
}
return undefined;
}
/**
* extractRedirectInfo - convert location saved from x-amz-website header to
* same format as redirectInfo saved from a put bucket website configuration
* @param {string} location - location to redirect to
* @return {object} redirectInfo - select key/values stored in
* WebsiteConfiguration for a redirect -- protocol, replaceKeyWith and hostName
*/
export function extractRedirectInfo(location) {
const redirectInfo = { redirectLocationHeader: true };
if (location.startsWith('/')) {
// redirect to another object in bucket
redirectInfo.replaceKeyWith = location.slice(1);
// when redirect info is set by x-amz-website-redirect-location header
// to another key in the same bucket
// AWS only returns the path in the location response header
redirectInfo.justPath = true;
} else if (location.startsWith('https')) {
// otherwise, redirect to another website
redirectInfo.protocol = 'https';
redirectInfo.hostName = location.slice(8);
} else {
redirectInfo.protocol = 'http';
redirectInfo.hostName = location.slice(7);
}
return redirectInfo;
}<|fim▁end|>
|
// no key match, move on
continue;
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
import subprocess
import datetime
import os
def get_version(version=None):
"""Derives a PEP386-compliant version number from VERSION."""
if version is None:
version = VERSION
assert len(version) == 5
assert version[3] in ('alpha', 'beta', 'rc', 'final')
parts = 2 if version[2] == 0 else 3
main = '.'.join(str(x) for x in version[:parts])
sub = ''
if version[3] == 'alpha' and version[4] == 0:
git_changeset = get_git_changeset()
if git_changeset:
sub = '.a%s' % git_changeset
elif version[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
sub = mapping[version[3]] + str(version[4])
return main + sub
def get_git_changeset():
"""Returns a numeric identifier of the latest git changeset.
The result is the UTC timestamp of the changeset in YYYYMMDDHHMMSS format.
This value isn't guaranteed to be unique, but collisions are very unlikely,
so it's sufficient for generating the development version numbers.
"""
repo_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
git_log = subprocess.Popen('git log --pretty=format:%ct --quiet -1 HEAD',
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
shell=True, cwd=repo_dir, universal_newlines=True)
timestamp = git_log.communicate()[0]
try:
timestamp = datetime.datetime.utcfromtimestamp(int(timestamp))
except ValueError:
return None
return timestamp.strftime('%Y%m%d%H%M%S')<|fim▁end|>
|
NAME = 'django-adminactions'
VERSION = __version__ = (0, 4, 0, 'final', 0)
__author__ = 'sax'
|
<|file_name|>WTSpec4MEditorAdvisor.java<|end_file_name|><|fim▁begin|>/**
*/
package WTSpec4M.presentation;
import org.eclipse.emf.common.ui.URIEditorInput;
import org.eclipse.emf.common.ui.action.WorkbenchWindowActionDelegate;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.edit.ui.action.LoadResourceAction;
import org.eclipse.emf.edit.ui.util.EditUIUtil;
import org.eclipse.equinox.app.IApplication;
import org.eclipse.equinox.app.IApplicationContext;
import org.eclipse.jface.action.GroupMarker;
import org.eclipse.jface.action.IAction;
import org.eclipse.jface.action.IMenuManager;
import org.eclipse.jface.action.MenuManager;
import org.eclipse.jface.action.Separator;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.jface.window.Window;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.ui.IEditorDescriptor;
import org.eclipse.ui.IFolderLayout;
import org.eclipse.ui.IPageLayout;
import org.eclipse.ui.IPerspectiveFactory;
import org.eclipse.ui.IViewPart;
import org.eclipse.ui.IWorkbench;
import org.eclipse.ui.IWorkbenchActionConstants;
import org.eclipse.ui.IWorkbenchPage;
import org.eclipse.ui.IWorkbenchWindow;
import org.eclipse.ui.PartInitException;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.actions.ActionFactory;
import org.eclipse.ui.application.ActionBarAdvisor;
import org.eclipse.ui.application.IActionBarConfigurer;
import org.eclipse.ui.application.IWorkbenchConfigurer;
import org.eclipse.ui.application.IWorkbenchWindowConfigurer;
import org.eclipse.ui.application.WorkbenchAdvisor;
import org.eclipse.ui.application.WorkbenchWindowAdvisor;
import org.mondo.collaboration.online.rap.widgets.CurrentUserView;
import org.mondo.collaboration.online.rap.widgets.DefaultPerspectiveAdvisor;
import org.mondo.collaboration.online.rap.widgets.ModelExplorer;
import org.mondo.collaboration.online.rap.widgets.ModelLogView;
import org.mondo.collaboration.online.rap.widgets.WhiteboardChatView;
/**
* Customized {@link WorkbenchAdvisor} for the RCP application.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public final class WTSpec4MEditorAdvisor extends WorkbenchAdvisor {
/**
* This looks up a string in the plugin's plugin.properties file.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private static String getString(String key) {
return WTSpec4MEditorPlugin.INSTANCE.getString(key);
}
/**
* This looks up a string in plugin.properties, making a substitution.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private static String getString(String key, Object s1) {
return WTSpec4M.presentation.WTSpec4MEditorPlugin.INSTANCE.getString(key, new Object [] { s1 });
}
/**
* RCP's application
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public static class Application implements IApplication {
/**
* @see org.eclipse.equinox.app.IApplication#start(org.eclipse.equinox.app.IApplicationContext)
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public Object start(IApplicationContext context) throws Exception {
WorkbenchAdvisor workbenchAdvisor = new WTSpec4MEditorAdvisor();
Display display = PlatformUI.createDisplay();
try {
int returnCode = PlatformUI.createAndRunWorkbench(display, workbenchAdvisor);
if (returnCode == PlatformUI.RETURN_RESTART) {
return IApplication.EXIT_RESTART;
}
else {
return IApplication.EXIT_OK;
}
}
finally {
display.dispose();
}
}
/**
* @see org.eclipse.equinox.app.IApplication#stop()
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void stop() {
// Do nothing.
}
}
/**
* RCP's perspective
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public static class Perspective implements IPerspectiveFactory {
/**
* Perspective ID
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public static final String ID_PERSPECTIVE = "WTSpec4M.presentation.WTSpec4MEditorAdvisorPerspective";
/**
* @see org.eclipse.ui.IPerspectiveFactory#createInitialLayout(org.eclipse.ui.IPageLayout)
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void createInitialLayout(IPageLayout layout) {
layout.setEditorAreaVisible(true);
layout.addPerspectiveShortcut(ID_PERSPECTIVE);
IFolderLayout left = layout.createFolder("left", IPageLayout.LEFT, (float)0.33, layout.getEditorArea());
left.addView(ModelExplorer.ID);
IFolderLayout bottomLeft = layout.createFolder("bottomLeft", IPageLayout.BOTTOM, (float)0.90, "left");
bottomLeft.addView(CurrentUserView.ID);
IFolderLayout topRight = layout.createFolder("topRight", IPageLayout.RIGHT, (float)0.55, layout.getEditorArea());
topRight.addView(WhiteboardChatView.ID);
IFolderLayout right = layout.createFolder("right", IPageLayout.BOTTOM, (float)0.33, "topRight");
right.addView(ModelLogView.ID);
IFolderLayout bottomRight = layout.createFolder("bottomRight", IPageLayout.BOTTOM, (float)0.60, "right");
bottomRight.addView(IPageLayout.ID_PROP_SHEET);
}
}
/**
* RCP's window advisor
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public static class WindowAdvisor extends WorkbenchWindowAdvisor {
private Shell shell;
/**
* @see WorkbenchWindowAdvisor#WorkbenchWindowAdvisor(org.eclipse.ui.application.IWorkbenchWindowConfigurer)
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public WindowAdvisor(IWorkbenchWindowConfigurer configurer) {
super(configurer);
}
@Override
public void createWindowContents(Shell shell) {
super.createWindowContents(shell);
this.shell = shell;
}
@Override
public void postWindowOpen() {
super.postWindowOpen();
shell.setMaximized(true);
DefaultPerspectiveAdvisor.hideDefaultViews();
}
/**
* @see org.eclipse.ui.application.WorkbenchWindowAdvisor#preWindowOpen()
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void preWindowOpen() {
IWorkbenchWindowConfigurer configurer = getWindowConfigurer();
// configurer.setInitialSize(new Point(600, 450));
configurer.setShowCoolBar(false);
configurer.setShowStatusLine(true);
configurer.setTitle(getString("_UI_Application_title"));
}
/**
* @see org.eclipse.ui.application.WorkbenchWindowAdvisor#createActionBarAdvisor(org.eclipse.ui.application.IActionBarConfigurer)
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public ActionBarAdvisor createActionBarAdvisor(IActionBarConfigurer configurer) {
return new WindowActionBarAdvisor(configurer);
}
}
/**
* RCP's action bar advisor
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public static class WindowActionBarAdvisor extends ActionBarAdvisor {
/**
* @see ActionBarAdvisor#ActionBarAdvisor(org.eclipse.ui.application.IActionBarConfigurer)
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public WindowActionBarAdvisor(IActionBarConfigurer configurer) {
super(configurer);
}
/**
* @see org.eclipse.ui.application.ActionBarAdvisor#fillMenuBar(org.eclipse.jface.action.IMenuManager)
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
protected void fillMenuBar(IMenuManager menuBar) {
IWorkbenchWindow window = getActionBarConfigurer().getWindowConfigurer().getWindow();
menuBar.add(createFileMenu(window));
menuBar.add(createEditMenu(window));
menuBar.add(new GroupMarker(IWorkbenchActionConstants.MB_ADDITIONS));
menuBar.add(createWindowMenu(window));
menuBar.add(createHelpMenu(window));
}
/**
* Creates the 'File' menu.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected IMenuManager createFileMenu(IWorkbenchWindow window) {
IMenuManager menu = new MenuManager(getString("_UI_Menu_File_label"),
IWorkbenchActionConstants.M_FILE);
menu.add(new GroupMarker(IWorkbenchActionConstants.FILE_START));
IMenuManager newMenu = new MenuManager(getString("_UI_Menu_New_label"), "new");
newMenu.add(new GroupMarker(IWorkbenchActionConstants.MB_ADDITIONS));
menu.add(newMenu);
menu.add(new Separator());
menu.add(new GroupMarker(IWorkbenchActionConstants.MB_ADDITIONS));
menu.add(new Separator());
addToMenuAndRegister(menu, ActionFactory.CLOSE.create(window));
addToMenuAndRegister(menu, ActionFactory.CLOSE_ALL.create(window));
menu.add(new Separator());
addToMenuAndRegister(menu, ActionFactory.SAVE.create(window));
addToMenuAndRegister(menu, ActionFactory.SAVE_AS.create(window));
addToMenuAndRegister(menu, ActionFactory.SAVE_ALL.create(window));
menu.add(new Separator());
addToMenuAndRegister(menu, ActionFactory.QUIT.create(window));
menu.add(new GroupMarker(IWorkbenchActionConstants.FILE_END));
return menu;
}
/**
* Creates the 'Edit' menu.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected IMenuManager createEditMenu(IWorkbenchWindow window) {
IMenuManager menu = new MenuManager(getString("_UI_Menu_Edit_label"),<|fim▁hole|> IWorkbenchActionConstants.M_EDIT);
menu.add(new GroupMarker(IWorkbenchActionConstants.EDIT_START));
addToMenuAndRegister(menu, ActionFactory.UNDO.create(window));
addToMenuAndRegister(menu, ActionFactory.REDO.create(window));
menu.add(new GroupMarker(IWorkbenchActionConstants.UNDO_EXT));
menu.add(new Separator());
addToMenuAndRegister(menu, ActionFactory.CUT.create(window));
addToMenuAndRegister(menu, ActionFactory.COPY.create(window));
addToMenuAndRegister(menu, ActionFactory.PASTE.create(window));
menu.add(new GroupMarker(IWorkbenchActionConstants.CUT_EXT));
menu.add(new Separator());
addToMenuAndRegister(menu, ActionFactory.DELETE.create(window));
addToMenuAndRegister(menu, ActionFactory.SELECT_ALL.create(window));
menu.add(new Separator());
menu.add(new GroupMarker(IWorkbenchActionConstants.ADD_EXT));
menu.add(new GroupMarker(IWorkbenchActionConstants.EDIT_END));
menu.add(new Separator(IWorkbenchActionConstants.MB_ADDITIONS));
return menu;
}
/**
* Creates the 'Window' menu.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected IMenuManager createWindowMenu(IWorkbenchWindow window) {
IMenuManager menu = new MenuManager(getString("_UI_Menu_Window_label"),
IWorkbenchActionConstants.M_WINDOW);
addToMenuAndRegister(menu, ActionFactory.OPEN_NEW_WINDOW.create(window));
menu.add(new GroupMarker(IWorkbenchActionConstants.MB_ADDITIONS));
return menu;
}
/**
* Creates the 'Help' menu.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected IMenuManager createHelpMenu(IWorkbenchWindow window) {
IMenuManager menu = new MenuManager(getString("_UI_Menu_Help_label"), IWorkbenchActionConstants.M_HELP);
// Welcome or intro page would go here
// Help contents would go here
// Tips and tricks page would go here
menu.add(new GroupMarker(IWorkbenchActionConstants.HELP_START));
menu.add(new GroupMarker(IWorkbenchActionConstants.HELP_END));
menu.add(new GroupMarker(IWorkbenchActionConstants.MB_ADDITIONS));
return menu;
}
/**
* Adds the specified action to the given menu and also registers the action with the
* action bar configurer, in order to activate its key binding.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addToMenuAndRegister(IMenuManager menuManager, IAction action) {
menuManager.add(action);
getActionBarConfigurer().registerGlobalAction(action);
}
}
/**
* About action for the RCP application.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public static class AboutAction extends WorkbenchWindowActionDelegate {
/**
* @see org.eclipse.ui.IActionDelegate#run(org.eclipse.jface.action.IAction)
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void run(IAction action) {
MessageDialog.openInformation(getWindow().getShell(), getString("_UI_About_title"),
getString("_UI_About_text"));
}
}
/**
* Open URI action for the objects from the WTSpec4M model.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public static class OpenURIAction extends WorkbenchWindowActionDelegate {
/**
* Opens the editors for the files selected using the LoadResourceDialog.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void run(IAction action) {
LoadResourceAction.LoadResourceDialog loadResourceDialog = new LoadResourceAction.LoadResourceDialog(getWindow().getShell());
if (Window.OK == loadResourceDialog.open()) {
for (URI uri : loadResourceDialog.getURIs()) {
openEditor(getWindow().getWorkbench(), uri);
}
}
}
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public static boolean openEditor(IWorkbench workbench, URI uri) {
IWorkbenchWindow workbenchWindow = workbench.getActiveWorkbenchWindow();
IWorkbenchPage page = workbenchWindow.getActivePage();
IEditorDescriptor editorDescriptor = EditUIUtil.getDefaultEditor(uri, null);
if (editorDescriptor == null) {
MessageDialog.openError(
workbenchWindow.getShell(),
getString("_UI_Error_title"),
getString("_WARN_No_Editor", uri.lastSegment()));
return false;
}
else {
try {
page.openEditor(new URIEditorInput(uri), editorDescriptor.getId());
}
catch (PartInitException exception) {
MessageDialog.openError(
workbenchWindow.getShell(),
getString("_UI_OpenEditorError_label"),
exception.getMessage());
return false;
}
}
return true;
}
/**
* @see org.eclipse.ui.application.WorkbenchAdvisor#getInitialWindowPerspectiveId()
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public String getInitialWindowPerspectiveId() {
return Perspective.ID_PERSPECTIVE;
}
/**
* @see org.eclipse.ui.application.WorkbenchAdvisor#initialize(org.eclipse.ui.application.IWorkbenchConfigurer)
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void initialize(IWorkbenchConfigurer configurer) {
super.initialize(configurer);
configurer.setSaveAndRestore(true);
}
/**
* @see org.eclipse.ui.application.WorkbenchAdvisor#createWorkbenchWindowAdvisor(org.eclipse.ui.application.IWorkbenchWindowConfigurer)
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public WorkbenchWindowAdvisor createWorkbenchWindowAdvisor(IWorkbenchWindowConfigurer configurer) {
return new WindowAdvisor(configurer);
}
}<|fim▁end|>
| |
<|file_name|>cursor.js<|end_file_name|><|fim▁begin|>/**
* Baobab Cursors
* ===============
*
* Cursors created by selecting some data within a Baobab tree.
*/
import Emitter from 'emmett';
import {Monkey} from './monkey';
import type from './type';
import {
Archive,
arrayFrom,
before,
coercePath,
deepClone,
getIn,
makeError,
shallowClone,
solveUpdate
} from './helpers';
/**
* Traversal helper function for dynamic cursors. Will throw a legible error
* if traversal is not possible.
*
* @param {string} method - The method name, to create a correct error msg.
* @param {array} solvedPath - The cursor's solved path.
*/
function checkPossibilityOfDynamicTraversal(method, solvedPath) {
if (!solvedPath)
throw makeError(
`Baobab.Cursor.${method}: ` +
`cannot use ${method} on an unresolved dynamic path.`,
{path: solvedPath}
);
}
/**
* Cursor class
*
* @constructor
* @param {Baobab} tree - The cursor's root.
* @param {array} path - The cursor's path in the tree.
* @param {string} hash - The path's hash computed ahead by the tree.
*/
export default class Cursor extends Emitter {
constructor(tree, path, hash) {
super();
// If no path were to be provided, we fallback to an empty path (root)
path = path || [];
// Privates
this._identity = '[object Cursor]';
this._archive = null;
// Properties
this.tree = tree;
this.path = path;
this.hash = hash;
// State
this.state = {
killed: false,
recording: false,
undoing: false
};
// Checking whether the given path is dynamic or not
this._dynamicPath = type.dynamicPath(this.path);
// Checking whether the given path will meet a monkey
this._monkeyPath = type.monkeyPath(this.tree._monkeys, this.path);
if (!this._dynamicPath)
this.solvedPath = this.path;
else
this.solvedPath = getIn(this.tree._data, this.path).solvedPath;
/**
* Listener bound to the tree's writes so that cursors with dynamic paths
* may update their solved path correctly.
*
* @param {object} event - The event fired by the tree.
*/
this._writeHandler = ({data}) => {
if (this.state.killed ||
!solveUpdate([data.path], this._getComparedPaths()))
return;
this.solvedPath = getIn(this.tree._data, this.path).solvedPath;
};
/**
* Function in charge of actually trigger the cursor's updates and
* deal with the archived records.
*
* @note: probably should wrap the current solvedPath in closure to avoid
* for tricky cases where it would fail.
*
* @param {mixed} previousData - the tree's previous data.
*/
const fireUpdate = (previousData) => {
const self = this;
const eventData = {
get previousData() {
return getIn(previousData, self.solvedPath).data;
},
get currentData() {
return self.get();
}
};
if (this.state.recording && !this.state.undoing)
this.archive.add(eventData.previousData);
this.state.undoing = false;
return this.emit('update', eventData);
};
/**
* Listener bound to the tree's updates and determining whether the
* cursor is affected and should react accordingly.
*
* Note that this listener is lazily bound to the tree to be sure
* one wouldn't leak listeners when only creating cursors for convenience
* and not to listen to updates specifically.
*
* @param {object} event - The event fired by the tree.
*/
this._updateHandler = (event) => {
if (this.state.killed)
return;
const {paths, previousData} = event.data,
update = fireUpdate.bind(this, previousData),
comparedPaths = this._getComparedPaths();
if (solveUpdate(paths, comparedPaths))
return update();
};
// Lazy binding
let bound = false;
this._lazyBind = () => {
if (bound)
return;
bound = true;
if (this._dynamicPath)
this.tree.on('write', this._writeHandler);
return this.tree.on('update', this._updateHandler);
};
// If the path is dynamic, we actually need to listen to the tree
if (this._dynamicPath) {
this._lazyBind();
}
else {
// Overriding the emitter `on` and `once` methods
this.on = before(this._lazyBind, this.on.bind(this));
this.once = before(this._lazyBind, this.once.bind(this));
}
}
/**
* Internal helpers
* -----------------
*/
/**
* Method returning the paths of the tree watched over by the cursor and that
* should be taken into account when solving a potential update.
*
* @return {array} - Array of paths to compare with a given update.
*/
_getComparedPaths() {
// Checking whether we should keep track of some dependencies
const additionalPaths = this._monkeyPath ?
getIn(this.tree._monkeys, this._monkeyPath)
.data
.relatedPaths() :
[];
return [this.solvedPath].concat(additionalPaths);
}
/**
* Predicates
* -----------
*/
/**
* Method returning whether the cursor is at root level.
*
* @return {boolean} - Is the cursor the root?
*/
isRoot() {
return !this.path.length;
}
/**
* Method returning whether the cursor is at leaf level.
*
* @return {boolean} - Is the cursor a leaf?
*/
isLeaf() {
return type.primitive(this._get().data);
}
/**
* Method returning whether the cursor is at branch level.
*
* @return {boolean} - Is the cursor a branch?
*/
isBranch() {
return !this.isRoot() && !this.isLeaf();
}
/**
* Traversal Methods
* ------------------
*/
/**
* Method returning the root cursor.
*
* @return {Baobab} - The root cursor.
*/
root() {
return this.tree.select();
}
/**
* Method selecting a subpath as a new cursor.
*
* Arity (1):
* @param {path} path - The path to select.
*
* Arity (*):
* @param {...step} path - The path to select.
*
* @return {Cursor} - The created cursor.
*/
select(path) {
if (arguments.length > 1)
path = arrayFrom(arguments);
return this.tree.select(this.path.concat(path));
}
/**
* Method returning the parent node of the cursor or else `null` if the
* cursor is already at root level.
*
* @return {Baobab} - The parent cursor.
*/
up() {
if (!this.isRoot())
return this.tree.select(this.path.slice(0, -1));
return null;
}
/**
* Method returning the child node of the cursor.
*
* @return {Baobab} - The child cursor.
*/
down() {
checkPossibilityOfDynamicTraversal('down', this.solvedPath);
if (!(this._get().data instanceof Array))
throw Error('Baobab.Cursor.down: cannot go down on a non-list type.');
return this.tree.select(this.solvedPath.concat(0));
}
/**
* Method returning the left sibling node of the cursor if this one is
* pointing at a list. Returns `null` if this cursor is already leftmost.
*
* @return {Baobab} - The left sibling cursor.
*/
left() {
checkPossibilityOfDynamicTraversal('left', this.solvedPath);
const last = +this.solvedPath[this.solvedPath.length - 1];
if (isNaN(last))
throw Error('Baobab.Cursor.left: cannot go left on a non-list type.');
return last ?
this.tree.select(this.solvedPath.slice(0, -1).concat(last - 1)) :
null;
}
/**
* Method returning the right sibling node of the cursor if this one is
* pointing at a list. Returns `null` if this cursor is already rightmost.
*
* @return {Baobab} - The right sibling cursor.
*/
right() {
checkPossibilityOfDynamicTraversal('right', this.solvedPath);
const last = +this.solvedPath[this.solvedPath.length - 1];
if (isNaN(last))
throw Error('Baobab.Cursor.right: cannot go right on a non-list type.');
if (last + 1 === this.up()._get().data.length)
return null;
return this.tree.select(this.solvedPath.slice(0, -1).concat(last + 1));
}
/**
* Method returning the leftmost sibling node of the cursor if this one is
* pointing at a list.
*
* @return {Baobab} - The leftmost sibling cursor.
*/
leftmost() {
checkPossibilityOfDynamicTraversal('leftmost', this.solvedPath);
const last = +this.solvedPath[this.solvedPath.length - 1];
if (isNaN(last))
throw Error('Baobab.Cursor.leftmost: cannot go left on a non-list type.');
return this.tree.select(this.solvedPath.slice(0, -1).concat(0));
}
/**
* Method returning the rightmost sibling node of the cursor if this one is
* pointing at a list.
*
* @return {Baobab} - The rightmost sibling cursor.
*/
rightmost() {
checkPossibilityOfDynamicTraversal('rightmost', this.solvedPath);
const last = +this.solvedPath[this.solvedPath.length - 1];
if (isNaN(last))
throw Error(
'Baobab.Cursor.rightmost: cannot go right on a non-list type.');
const list = this.up()._get().data;
return this.tree
.select(this.solvedPath.slice(0, -1).concat(list.length - 1));
}
/**
* Method mapping the children nodes of the cursor.
*
* @param {function} fn - The function to map.
* @param {object} [scope] - An optional scope.
* @return {array} - The resultant array.
*/
map(fn, scope) {
checkPossibilityOfDynamicTraversal('map', this.solvedPath);
const array = this._get().data,
l = arguments.length;
if (!type.array(array))
throw Error('baobab.Cursor.map: cannot map a non-list type.');
return array.map(function(item, i) {
return fn.call(
l > 1 ? scope : this,
this.select(i),
i,
array
);
}, this);
}
/**
* Getter Methods
* ---------------
*/
/**
* Internal get method. Basically contains the main body of the `get` method
* without the event emitting. This is sometimes needed not to fire useless
* events.
*
* @param {path} [path=[]] - Path to get in the tree.
* @return {object} info - The resultant information.
* @return {mixed} info.data - Data at path.
* @return {array} info.solvedPath - The path solved when getting.
*/
_get(path = []) {
if (!type.path(path))
throw makeError('Baobab.Cursor.getters: invalid path.', {path});
if (!this.solvedPath)
return {data: undefined, solvedPath: null, exists: false};
return getIn(this.tree._data, this.solvedPath.concat(path));
}
/**
* Method used to check whether a certain path exists in the tree starting
* from the current cursor.
*
* Arity (1):
* @param {path} path - Path to check in the tree.
*
* Arity (2):
* @param {..step} path - Path to check in the tree.
*
* @return {boolean} - Does the given path exists?
*/
exists(path) {
path = coercePath(path);
if (arguments.length > 1)
path = arrayFrom(arguments);
return this._get(path).exists;
}
/**
* Method used to get data from the tree. Will fire a `get` event from the
* tree so that the user may sometimes react upon it to fetch data, for
* instance.
*
* Arity (1):
* @param {path} path - Path to get in the tree.
*
* Arity (2):
* @param {..step} path - Path to get in the tree.
*
* @return {mixed} - Data at path.
*/
get(path) {
path = coercePath(path);
if (arguments.length > 1)
path = arrayFrom(arguments);
const {data, solvedPath} = this._get(path);
// Emitting the event
this.tree.emit('get', {data, solvedPath, path: this.path.concat(path)});
return data;
}
/**
* Method used to shallow clone data from the tree.
*
* Arity (1):
* @param {path} path - Path to get in the tree.<|fim▁hole|> * Arity (2):
* @param {..step} path - Path to get in the tree.
*
* @return {mixed} - Cloned data at path.
*/
clone(...args) {
const data = this.get(...args);
return shallowClone(data);
}
/**
* Method used to deep clone data from the tree.
*
* Arity (1):
* @param {path} path - Path to get in the tree.
*
* Arity (2):
* @param {..step} path - Path to get in the tree.
*
* @return {mixed} - Cloned data at path.
*/
deepClone(...args) {
const data = this.get(...args);
return deepClone(data);
}
/**
* Method used to return raw data from the tree, by carefully avoiding
* computed one.
*
* @todo: should be more performant as the cloning should happen as well as
* when dropping computed data.
*
* Arity (1):
* @param {path} path - Path to serialize in the tree.
*
* Arity (2):
* @param {..step} path - Path to serialize in the tree.
*
* @return {mixed} - The retrieved raw data.
*/
serialize(path) {
path = coercePath(path);
if (arguments.length > 1)
path = arrayFrom(arguments);
if (!type.path(path))
throw makeError('Baobab.Cursor.getters: invalid path.', {path});
if (!this.solvedPath)
return undefined;
const fullPath = this.solvedPath.concat(path);
const data = deepClone(getIn(this.tree._data, fullPath).data),
monkeys = getIn(this.tree._monkeys, fullPath).data;
const dropComputedData = (d, m) => {
if (!type.object(m) || !type.object(d))
return;
for (const k in m) {
if (m[k] instanceof Monkey)
delete d[k];
else
dropComputedData(d[k], m[k]);
}
};
dropComputedData(data, monkeys);
return data;
}
/**
* Method used to project some of the data at cursor onto a map or a list.
*
* @param {object|array} projection - The projection's formal definition.
* @return {object|array} - The resultant map/list.
*/
project(projection) {
if (type.object(projection)) {
const data = {};
for (const k in projection)
data[k] = this.get(projection[k]);
return data;
}
else if (type.array(projection)) {
const data = [];
for (let i = 0, l = projection.length; i < l; i++)
data.push(this.get(projection[i]));
return data;
}
throw makeError('Baobab.Cursor.project: wrong projection.', {projection});
}
/**
* History Methods
* ----------------
*/
/**
* Methods starting to record the cursor's successive states.
*
* @param {integer} [maxRecords] - Maximum records to keep in memory. Note
* that if no number is provided, the cursor
* will keep everything.
* @return {Cursor} - The cursor instance for chaining purposes.
*/
startRecording(maxRecords) {
maxRecords = maxRecords || Infinity;
if (maxRecords < 1)
throw makeError('Baobab.Cursor.startRecording: invalid max records.', {
value: maxRecords
});
this.state.recording = true;
if (this.archive)
return this;
// Lazy binding
this._lazyBind();
this.archive = new Archive(maxRecords);
return this;
}
/**
* Methods stopping to record the cursor's successive states.
*
* @return {Cursor} - The cursor instance for chaining purposes.
*/
stopRecording() {
this.state.recording = false;
return this;
}
/**
* Methods undoing n steps of the cursor's recorded states.
*
* @param {integer} [steps=1] - The number of steps to rollback.
* @return {Cursor} - The cursor instance for chaining purposes.
*/
undo(steps = 1) {
if (!this.state.recording)
throw new Error('Baobab.Cursor.undo: cursor is not recording.');
const record = this.archive.back(steps);
if (!record)
throw Error('Baobab.Cursor.undo: cannot find a relevant record.');
this.state.undoing = true;
this.set(record);
return this;
}
/**
* Methods returning whether the cursor has a recorded history.
*
* @return {boolean} - `true` if the cursor has a recorded history?
*/
hasHistory() {
return !!(this.archive && this.archive.get().length);
}
/**
* Methods returning the cursor's history.
*
* @return {array} - The cursor's history.
*/
getHistory() {
return this.archive ? this.archive.get() : [];
}
/**
* Methods clearing the cursor's history.
*
* @return {Cursor} - The cursor instance for chaining purposes.
*/
clearHistory() {
if (this.archive)
this.archive.clear();
return this;
}
/**
* Releasing
* ----------
*/
/**
* Methods releasing the cursor from memory.
*/
release() {
// Removing listeners on parent
if (this._dynamicPath)
this.tree.off('write', this._writeHandler);
this.tree.off('update', this._updateHandler);
// Unsubscribe from the parent
if (this.hash)
delete this.tree._cursors[this.hash];
// Dereferencing
delete this.tree;
delete this.path;
delete this.solvedPath;
delete this.archive;
// Killing emitter
this.kill();
this.state.killed = true;
}
/**
* Output
* -------
*/
/**
* Overriding the `toJSON` method for convenient use with JSON.stringify.
*
* @return {mixed} - Data at cursor.
*/
toJSON() {
return this.serialize();
}
/**
* Overriding the `toString` method for debugging purposes.
*
* @return {string} - The cursor's identity.
*/
toString() {
return this._identity;
}
}
/**
* Method used to allow iterating over cursors containing list-type data.
*
* e.g. for(let i of cursor) { ... }
*
* @returns {object} - Each item sequentially.
*/
if (typeof Symbol === 'function' && typeof Symbol.iterator !== 'undefined') {
Cursor.prototype[Symbol.iterator] = function() {
const array = this._get().data;
if (!type.array(array))
throw Error('baobab.Cursor.@@iterate: cannot iterate a non-list type.');
let i = 0;
const cursor = this,
length = array.length;
return {
next() {
if (i < length) {
return {
value: cursor.select(i++)
};
}
return {
done: true
};
}
};
};
}
/**
* Setter Methods
* ---------------
*
* Those methods are dynamically assigned to the class for DRY reasons.
*/
// Not using a Set so that ES5 consumers don't pay a bundle size price
const INTRANSITIVE_SETTERS = {
unset: true,
pop: true,
shift: true
};
/**
* Function creating a setter method for the Cursor class.
*
* @param {string} name - the method's name.
* @param {function} [typeChecker] - a function checking that the given value is
* valid for the given operation.
*/
function makeSetter(name, typeChecker) {
/**
* Binding a setter method to the Cursor class and having the following
* definition.
*
* Note: this is not really possible to make those setters variadic because
* it would create an impossible polymorphism with path.
*
* @todo: perform value validation elsewhere so that tree.update can
* beneficiate from it.
*
* Arity (1):
* @param {mixed} value - New value to set at cursor's path.
*
* Arity (2):
* @param {path} path - Subpath to update starting from cursor's.
* @param {mixed} value - New value to set.
*
* @return {mixed} - Data at path.
*/
Cursor.prototype[name] = function(path, value) {
// We should warn the user if he applies to many arguments to the function
if (arguments.length > 2)
throw makeError(`Baobab.Cursor.${name}: too many arguments.`);
// Handling arities
if (arguments.length === 1 && !INTRANSITIVE_SETTERS[name]) {
value = path;
path = [];
}
// Coerce path
path = coercePath(path);
// Checking the path's validity
if (!type.path(path))
throw makeError(`Baobab.Cursor.${name}: invalid path.`, {path});
// Checking the value's validity
if (typeChecker && !typeChecker(value))
throw makeError(`Baobab.Cursor.${name}: invalid value.`, {path, value});
// Checking the solvability of the cursor's dynamic path
if (!this.solvedPath)
throw makeError(
`Baobab.Cursor.${name}: the dynamic path of the cursor cannot be solved.`,
{path: this.path}
);
const fullPath = this.solvedPath.concat(path);
// Filing the update to the tree
return this.tree.update(
fullPath,
{
type: name,
value
}
);
};
}
/**
* Making the necessary setters.
*/
makeSetter('set');
makeSetter('unset');
makeSetter('apply', type.function);
makeSetter('push');
makeSetter('concat', type.array);
makeSetter('unshift');
makeSetter('pop');
makeSetter('shift');
makeSetter('splice', type.splicer);
makeSetter('merge', type.object);
makeSetter('deepMerge', type.object);<|fim▁end|>
|
*
|
<|file_name|>app.js<|end_file_name|><|fim▁begin|>//Create Tab Group
var tabGroup = Titanium.UI.createTabGroup();
// Variables
var Teas = ['#F5F5DC', '#FFE4B5', '#FFE4C4', '#D2B48C', '#C3b091',
'#c3b091', '#926F5B', '#804000', '#654321', '#3D2B1F'];
allRows = [];
var theColours = Ti.UI.createTableView({});
for (var i=0; i<Teas.length; i++){
theRow = Ti.UI.createTableViewRow({backgroundColor:Teas[i], height:50, TeaColour:Teas[i]});
allRows.push(theRow);
}
theColours.setData(allRows);
var options = Ti.UI.createView({layout: 'vertical'});
var showCamera = Ti.UI.createButton({title: 'Show Camera'});
// TeaSelection Function
function getVerdict(colour){
var indicator = colour.charAt(1);
var msg;
switch(indicator){
case 'F': msg = 'Milky'; break;
case 'D': msg = 'Nice'; break;
case 'C': msg = 'Perfect'; break;
case '9': msg = 'A bit strong'; break;
case '8': msg = 'Builders tea'; break;
case '6': msg = 'Send it back'; break;
case '3': msg = 'No milk here'; break;
}
return msg;
};
function showTeaVerdict(_args){
var teaVerdict = Ti.UI.createWindow({layout:'vertical'});
teaVerdict.backgroundColor = _args;
teaVerdict.msg = getVerdict(_args);
var judgement = Ti.UI.createLabel ({text:teaVerdict.msg, top:'50%'});
var close = Ti.UI.createButton ({title:'Choose Again', top:'25%'});
close.addEventListener('click', function(e){
teaVerdict.close();
teaVerdict = null;
});
teaVerdict.add(judgement);
teaVerdict.add(close);
teaVerdict.open();
}
//Camera Function
function showPhoto(_args) {
thePhoto.setImage(_args.media);
}
<|fim▁hole|> title:'Select Color',
backgroundColor:'#fff'
});
var tabTea = Titanium.UI.createTab({
title:'TeaSelection',
window:winTea
});
winTea.add(theColours);
// Tab 2
var winCamera = Titanium.UI.createWindow({
title:'Camera',
backgroundColor:'#fff'
});
var tabCamera = Titanium.UI.createTab({
title:'Camera',
window:winCamera
});
winCamera.add(showCamera);
// Add Listener
theColours.addEventListener('click', function(e){showTeaVerdict(e.source.TeaColour);});
showCamera.addEventListener('click', function (e) {
Ti.Media.showCamera({animated: true,
autoHide: true,
saveToPhotoGallery: true,
showControls: true,
mediaTypes: [Ti.Media.MEDIA_TYPE_PHOTO],
success: function(e) {showPhoto(e);}
});
});
// Add Tabs
tabGroup.addTab(tabTea);
tabGroup.addTab(tabCamera);
// Open tabGroup
tabGroup.open();<|fim▁end|>
|
// Tab 1
var winTea = Titanium.UI.createWindow({
|
<|file_name|>steps.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import threading
from huxley.consts import TestRunModes
from huxley.errors import TestError
from huxley.images import images_identical, image_diff
from selenium.webdriver import ActionChains
from selenium.webdriver.common.keys import Keys
# Since we want consistent focus screenshots we steal focus
# when taking screenshots. To avoid races we lock during this
# process.
SCREENSHOT_LOCK = threading.RLock()
class TestStep(object):
def __init__(self, offset_time):
self.offset_time = offset_time
def execute(self, run):
raise NotImplementedError
class ClickTestStep(TestStep):
CLICK_ID = '_huxleyClick'
def __init__(self, offset_time, pos):
super(ClickTestStep, self).__init__(offset_time)
self.pos = pos
def execute(self, run):
print ' Clicking', self.pos
if run.d.name == 'phantomjs':
# PhantomJS 1.x does not support 'click()' so use Selenium
body = run.d.find_element_by_tag_name('body')
ActionChains(run.d).move_to_element_with_offset(body, self.pos[0], self.pos[1]).click().perform()
elif run.d.name == 'Safari':
el = run.d.execute_script('return document.elementFromPoint(%d, %d);' % (self.pos[0], self.pos[1]))
if el:
el.click()
else:
print ' warning, no element found at (%d, %d);' % (self.pos[0], self.pos[1])
else:
# Work around multiple bugs in WebDriver's implementation of click()
run.d.execute_script(
'document.elementFromPoint(%d, %d).click();' % (self.pos[0], self.pos[1])
)
run.d.execute_script(
'document.elementFromPoint(%d, %d).focus();' % (self.pos[0], self.pos[1])
)
class ScrollTestStep(TestStep):
SCROLL_OFFSET_ID = '_huxleyScroll'
def __init__(self, offset_time, pos):
super(ScrollTestStep, self).__init__(offset_time)
self.pos = pos
def execute(self, run):
print ' Scrolling', self.pos
run.d.execute_script(
'window.scrollTo(%d, %d);' % (self.pos[0], self.pos[1])
)
class KeyTestStep(TestStep):
KEYS_BY_JS_KEYCODE = {<|fim▁hole|> 37: Keys.LEFT,
38: Keys.UP,
39: Keys.RIGHT,
40: Keys.DOWN,
46: Keys.DELETE,
186: ";",
187: "=",
188: ",",
190: ".",
191: "/",
192: "`",
219: "[",
220: "\\",
221: "]",
222: "'",
}
KEYS_BY_JS_KEYCODE_SHIFT = dict(KEYS_BY_JS_KEYCODE.items() + {
48: ")",
49: "!",
50: "@",
51: "#",
52: "$",
53: "%",
54: "^",
55: "&",
56: "*",
57: "(",
186: ":",
187: "+",
188: "<",
190: ">",
191: "?",
192: "~",
219: "{",
220: "|",
221: "}",
222: "\"",
}.items())
KEY_ID = '_huxleyKey'
# param is [keyCode, shiftKey]
def __init__(self, offset_time, param):
super(KeyTestStep, self).__init__(offset_time)
# backwards compat. for old records where a string was saved
if isinstance(param, basestring):
self.key = param
else:
codes = self.KEYS_BY_JS_KEYCODE_SHIFT if param[1] else self.KEYS_BY_JS_KEYCODE
char = chr(param[0])
if not param[1]:
char = char.lower()
self.key = codes.get(param[0], char)
def execute(self, run):
if self.key == Keys.HOME:
print ' Scrolling to top'
run.d.execute_script('window.scrollTo(0, 0)')
elif self.key == Keys.END:
print ' Scrolling to bottom'
run.d.execute_script('window.scrollTo(0, document.body.clientHeight)')
else:
print ' Typing', self.key
id = run.d.execute_script('return document.activeElement.id;')
if id is None or id == '':
run.d.execute_script(
'document.activeElement.id = %r;' % self.KEY_ID
)
id = self.KEY_ID
run.d.find_element_by_id(id).send_keys(self.key)
class ScreenshotTestStep(TestStep):
def __init__(self, offset_time, run, index):
super(ScreenshotTestStep, self).__init__(offset_time)
self.index = index
def get_path(self, run):
return os.path.join(run.path, 'screenshot' + str(self.index) + '.png')
def execute(self, run):
print ' Taking screenshot', self.index
original = self.get_path(run)
new = os.path.join(run.path, 'last.png')
with SCREENSHOT_LOCK:
# Steal focus for a consistent screenshot
run.d.switch_to_window(run.d.window_handles[0])
# iOS insertion points are visible in screenshots
if run.d.name == 'Safari':
active = run.d.execute_script('a = document.activeElement; a.blur(); return a;')
if run.mode == TestRunModes.RERECORD:
run.d.save_screenshot(original)
else:
run.d.save_screenshot(new)
try:
if not images_identical(original, new, run.test.mask):
if run.save_diff:
diffpath = os.path.join(run.path, 'diff.png')
diff = image_diff(original, new, diffpath, run.diffcolor, run.test.mask)
raise TestError(
('Screenshot %s was different; compare %s with %s. See %s ' +
'for the comparison. diff=%r') % (
self.index, original, new, diffpath, diff
)
)
else:
raise TestError('Screenshot %s was different.' % self.index)
finally:
if not run.save_diff:
os.unlink(new)<|fim▁end|>
|
33: Keys.PAGE_UP,
34: Keys.PAGE_DOWN,
35: Keys.END,
36: Keys.HOME,
|
<|file_name|>DXImageTransform.Microsoft.CrBlinds.1.js<|end_file_name|><|fim▁begin|>class dximagetransform_microsoft_crblinds_1 {
constructor() {
// short bands () {get} {set}
this.bands = undefined;
// int Capabilities () {get}
this.Capabilities = undefined;
// string Direction () {get} {set}
this.Direction = undefined;
// float Duration () {get} {set}
this.Duration = undefined;
// float Progress () {get} {set}
this.Progress = undefined;
// float StepResolution () {get}
this.StepResolution = undefined;<|fim▁hole|>
}
}
module.exports = dximagetransform_microsoft_crblinds_1;<|fim▁end|>
| |
<|file_name|>graphite_tcp.go<|end_file_name|><|fim▁begin|>package graphite
import (
"bufio"
"log"
"net"
"os"
"strings"
"sync"
"github.com/influxdb/influxdb"
)
// TCPServer processes Graphite data received over TCP connections.
type TCPServer struct {
writer SeriesWriter
parser *Parser
database string
listener *net.Listener
wg sync.WaitGroup
Logger *log.Logger
}
// NewTCPServer returns a new instance of a TCPServer.
func NewTCPServer(p *Parser, w SeriesWriter, db string) *TCPServer {
return &TCPServer{
parser: p,
writer: w,
database: db,
Logger: log.New(os.Stderr, "[graphite] ", log.LstdFlags),
}
}
// ListenAndServe instructs the TCPServer to start processing Graphite data
// on the given interface. iface must be in the form host:port
func (t *TCPServer) ListenAndServe(iface string) error {
if iface == "" { // Make sure we have an address
return ErrBindAddressRequired
}
ln, err := net.Listen("tcp", iface)
if err != nil {
return err
}
t.listener = &ln
t.Logger.Println("listening on TCP connection", ln.Addr().String())
t.wg.Add(1)
go func() {
defer t.wg.Done()
for {
conn, err := ln.Accept()
if opErr, ok := err.(*net.OpError); ok && !opErr.Temporary() {
t.Logger.Println("graphite TCP listener closed")
return
}
if err != nil {
t.Logger.Println("error accepting TCP connection", err.Error())
continue
}
t.wg.Add(1)
go t.handleConnection(conn)
}
}()
return nil
}
func (t *TCPServer) Host() string {
l := *t.listener
return l.Addr().String()
}
func (t *TCPServer) Close() error {
var err error
if t.listener != nil {
err = (*t.listener).Close()
}
t.wg.Wait()
t.listener = nil
return err
}
// handleConnection services an individual TCP connection.
func (t *TCPServer) handleConnection(conn net.Conn) {
defer conn.Close()
defer t.wg.Done()
reader := bufio.NewReader(conn)
for {
// Read up to the next newline.
buf, err := reader.ReadBytes('\n')<|fim▁hole|> // Trim the buffer, even though there should be no padding
line := strings.TrimSpace(string(buf))
// Parse it.
point, err := t.parser.Parse(line)
if err != nil {
t.Logger.Printf("unable to parse data: %s", err)
continue
}
// Send the data to the writer.
_, e := t.writer.WriteSeries(t.database, "", []influxdb.Point{point})
if e != nil {
t.Logger.Printf("failed to write data point to database %q: %s\n", t.database, e)
}
}
}<|fim▁end|>
|
if err != nil {
return
}
|
<|file_name|>task_2_8.py<|end_file_name|><|fim▁begin|># Задача 2. Вариант 8.
#Напишите программу, которая будет выводить на экран наиболее понравившееся вам высказывание, автором которого является Лао-Цзы. Не забудьте о том, что автор должен быть упомянут на отдельной строке.
# Ionova A. K.
#30.04.2016<|fim▁hole|>
print("Нельзя обожествлять бесов.\n\t\t\t\t\t\t\t\tЛао-цзы")
input("Нажмите ENTER для выхода.")<|fim▁end|>
| |
<|file_name|>dockableMirrorMapPlugin.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
/***************************************************************************
Name : Dockable MirrorMap
Description : Creates a dockable map canvas
Date : February 1, 2011
copyright : (C) 2011 by Giuseppe Sucameli (Faunalia)
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
from qgis.gui import *
import resources_rc
class DockableMirrorMapPlugin:
def __init__(self, iface):
# Save a reference to the QGIS iface
self.iface = iface
def initGui(self):
self.dockableMirrors = []
self.lastDockableMirror = 0
self.dockableAction = QAction(QIcon(":/plugins/DockableMirrorMap/icons/dockablemirrormap.png"), "Dockable MirrorMap", self.iface.mainWindow())
QObject.connect(self.dockableAction, SIGNAL("triggered()"), self.runDockableMirror)
self.aboutAction = QAction(QIcon(":/plugins/DockableMirrorMap/icons/about.png"), "About", self.iface.mainWindow())
QObject.connect(self.aboutAction, SIGNAL("triggered()"), self.about)
# Add to the plugin menu and toolbar
self.iface.addPluginToMenu("Dockable MirrorMap", self.dockableAction)
self.iface.addPluginToMenu("Dockable MirrorMap", self.aboutAction)
self.iface.addToolBarIcon(self.dockableAction)
QObject.connect(self.iface, SIGNAL("projectRead()"), self.onProjectLoaded)
QObject.connect(QgsProject.instance(), SIGNAL("writeProject(QDomDocument &)"), self.onWriteProject)
def unload(self):
QObject.disconnect(self.iface, SIGNAL("projectRead()"), self.onProjectLoaded)
QObject.disconnect(QgsProject.instance(), SIGNAL("writeProject(QDomDocument &)"), self.onWriteProject)
self.removeDockableMirrors()
# Remove the plugin
self.iface.removePluginMenu("Dockable MirrorMap",self.dockableAction)
self.iface.removePluginMenu("Dockable MirrorMap",self.aboutAction)
self.iface.removeToolBarIcon(self.dockableAction)
def about(self):
from DlgAbout import DlgAbout
DlgAbout(self.iface.mainWindow()).exec_()
def removeDockableMirrors(self):
for d in list(self.dockableMirrors):
d.close()
self.dockableMirrors = []
self.lastDockableMirror = 0
def runDockableMirror(self):
from dockableMirrorMap import DockableMirrorMap
wdg = DockableMirrorMap(self.iface.mainWindow(), self.iface)
minsize = wdg.minimumSize()
maxsize = wdg.maximumSize()
self.setupDockWidget(wdg)
self.addDockWidget(wdg)
wdg.setMinimumSize(minsize)
wdg.setMaximumSize(maxsize)
if wdg.isFloating():
wdg.move(50, 50) # move the widget to the center
def setupDockWidget(self, wdg):
othersize = QGridLayout().verticalSpacing()
if len(self.dockableMirrors) <= 0:
width = self.iface.mapCanvas().size().width()/2 - othersize
wdg.setLocation( Qt.RightDockWidgetArea )
wdg.setMinimumWidth( width )
wdg.setMaximumWidth( width )
elif len(self.dockableMirrors) == 1:
height = self.dockableMirrors[0].size().height()/2 - othersize/2
wdg.setLocation( Qt.RightDockWidgetArea )
wdg.setMinimumHeight( height )
wdg.setMaximumHeight( height )
elif len(self.dockableMirrors) == 2:
height = self.iface.mapCanvas().size().height()/2 - othersize/2
wdg.setLocation( Qt.BottomDockWidgetArea )
wdg.setMinimumHeight( height )
wdg.setMaximumHeight( height )
else:
wdg.setLocation( Qt.BottomDockWidgetArea )
wdg.setFloating( True )
def addDockWidget(self, wdg, position=None):
if position == None:
position = wdg.getLocation()
else:
wdg.setLocation( position )
mapCanvas = self.iface.mapCanvas()
oldSize = mapCanvas.size()
prevFlag = mapCanvas.renderFlag()
mapCanvas.setRenderFlag(False)
self.iface.addDockWidget(position, wdg)
wdg.setNumber( self.lastDockableMirror )
self.lastDockableMirror = self.lastDockableMirror+1
self.dockableMirrors.append( wdg )
QObject.connect(wdg, SIGNAL( "closed(PyQt_PyObject)" ), self.onCloseDockableMirror)
newSize = mapCanvas.size()
if newSize != oldSize:
# trick: update the canvas size
mapCanvas.resize(newSize.width() - 1, newSize.height())
mapCanvas.setRenderFlag(prevFlag)
mapCanvas.resize(newSize)
else:
mapCanvas.setRenderFlag(prevFlag)
def onCloseDockableMirror(self, wdg):
if self.dockableMirrors.count( wdg ) > 0:
self.dockableMirrors.remove( wdg )
if len(self.dockableMirrors) <= 0:
self.lastDockableMirror = 0
def onWriteProject(self, domproject):
if len(self.dockableMirrors) <= 0:
return
QgsProject.instance().writeEntry( "DockableMirrorMap", "/numMirrors", len(self.dockableMirrors) )
for i, dockwidget in enumerate(self.dockableMirrors):
# save position and geometry
floating = dockwidget.isFloating()
QgsProject.instance().writeEntry( "DockableMirrorMap", "/mirror%s/floating" % i, floating )
if floating:
position = "%s %s" % (dockwidget.pos().x(), dockwidget.pos().y())
else:
position = u"%s" % dockwidget.getLocation()
QgsProject.instance().writeEntry( "DockableMirrorMap", "/mirror%s/position" % i, str(position) )
size = "%s %s" % (dockwidget.size().width(), dockwidget.size().height())
QgsProject.instance().writeEntry( "DockableMirrorMap", "/mirror%s/size" % i, str(size) )
# save the layer list
layerIds = dockwidget.getMirror().getLayerSet()
QgsProject.instance().writeEntry( "DockableMirrorMap", "/mirror%s/layers" % i, layerIds )
scaleFactor = dockwidget.getMirror().scaleFactor.value()
QgsProject.instance().writeEntryDouble("DockableMirrorMap", "/mirror%s/scaleFactor" % i, scaleFactor)
def onProjectLoaded(self):
# restore mirrors?
num, ok = QgsProject.instance().readNumEntry("DockableMirrorMap", "/numMirrors")
if not ok or num <= 0:
return
# remove all mirrors
self.removeDockableMirrors()
mirror2lids = {}
# load mirrors
for i in range(num):
if num >= 2:
if i == 0:
prevFlag = self.iface.mapCanvas().renderFlag()
self.iface.mapCanvas().setRenderFlag(False)
elif i == num-1:
self.iface.mapCanvas().setRenderFlag(True)
from dockableMirrorMap import DockableMirrorMap
dockwidget = DockableMirrorMap(self.iface.mainWindow(), self.iface)
minsize = dockwidget.minimumSize()
maxsize = dockwidget.maximumSize()
# restore position
floating, ok = QgsProject.instance().readBoolEntry("DockableMirrorMap", "/mirror%s/floating" % i)
if ok:
dockwidget.setFloating( floating )
position, ok = QgsProject.instance().readEntry("DockableMirrorMap", "/mirror%s/position" % i)
if ok:
try:
if floating:
parts = position.split(" ")<|fim▁hole|> else:
dockwidget.setLocation( int(position) )
except ValueError:
pass
# restore geometry
dockwidget.setFixedSize( dockwidget.geometry().width(), dockwidget.geometry().height() )
size, ok = QgsProject.instance().readEntry("DockableMirrorMap", "/mirror%s/size" % i)
if ok:
try:
parts = size.split(" ")
dockwidget.setFixedSize( int(parts[0]), int(parts[1]) )
except ValueError:
pass
scaleFactor, ok = QgsProject.instance().readDoubleEntry("DockableMirrorMap", "/mirror%s/scaleFactor" % i, 1.0)
if ok: dockwidget.getMirror().scaleFactor.setValue( scaleFactor )
# get layer list
layerIds, ok = QgsProject.instance().readListEntry("DockableMirrorMap", "/mirror%s/layers" % i)
if ok: dockwidget.getMirror().setLayerSet( layerIds )
self.addDockWidget( dockwidget )
dockwidget.setMinimumSize(minsize)
dockwidget.setMaximumSize(maxsize)<|fim▁end|>
|
if len(parts) >= 2:
dockwidget.move( int(parts[0]), int(parts[1]) )
|
<|file_name|>0014_auto_20150413_1639.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0013_merge'),
]
operations = [
migrations.AlterField(
model_name='user',
name='public',<|fim▁hole|> ]<|fim▁end|>
|
field=models.BooleanField(default=True, help_text=b'Determines whether or not your profile is open to the public'),
preserve_default=True,
),
|
<|file_name|>Arbitrary.hpp<|end_file_name|><|fim▁begin|>#pragma once
namespace rc {
namespace gen {
namespace detail {
template <typename T>
struct DefaultArbitrary;
} // namespace detail
template <typename T>
decltype(Arbitrary<T>::arbitrary()) arbitrary() {
static const auto instance = rc::Arbitrary<T>::arbitrary();
return instance;
}
} // namespace gen
template <typename T, typename>
struct Arbitrary {
static decltype(gen::detail::DefaultArbitrary<T>::arbitrary()) arbitrary() {
return gen::detail::DefaultArbitrary<T>::arbitrary();
}<|fim▁hole|>
#include "Arbitrary.hpp"<|fim▁end|>
|
};
} // namespace rc
|
<|file_name|>movie_making.py<|end_file_name|><|fim▁begin|>"""
Python module defining a class for creating movies of matplotlib figures.
This code and information is provided 'as is' without warranty of any kind,
either express or implied, including, but not limited to, the implied
warranties of non-infringement, merchantability or fitness for a particular
purpose.
"""
from functools import partial
import shutil
import subprocess
import tempfile
import matplotlib as mpl
import matplotlib.pyplot as plt
def invert_color(color):
""" Returns the inverted value of a matplotlib color """
# get the color value
c = invert_color.cc.to_rgba(color)
# keep alpha value intact!
return (1-c[0], 1-c[1], 1-c[2], c[3])
# initialize the color converted and keep it as a static variable
invert_color.cc = mpl.colors.ColorConverter()
def invert_colors(fig):
""" Changes the colors of a figure to their inverted values """
# keep track of the object that have been changed
visited = set()
def get_filter(name):
""" construct a specific filter for `findobj` """
return lambda x: hasattr(x, 'set_%s'%name) and hasattr(x, 'get_%s'%name)
for o in fig.findobj(get_filter('facecolor')):
if o not in visited:
o.set_facecolor(invert_color(o.get_facecolor()))
if hasattr(o, 'set_edgecolor') and hasattr(o, 'get_edgecolor'):
o.set_edgecolor(invert_color(o.get_edgecolor()))
visited.add(o)
for o in fig.findobj(get_filter('color')):
if o not in visited:
o.set_color(invert_color(o.get_color()))
visited.add(o)
class Movie(object):
""" Class for creating movies from matplotlib figures using ffmpeg """
def __init__(self,
width=None, filename=None, inverted=False, verbose=True,
framerate=None
):
self.width = width #< pixel width of the movie
self.filename = filename #< filename used to save the movie
self.inverted = inverted #< colors inverted?
self.verbose = verbose #< verbose encoding information?
self.framerate = framerate #< framerate of the movie
# internal variables
self.recording = False
self.tempdir = None
self.frame = 0
self._start()
def __del__(self):
self._end()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, exc_tb):
if self.filename is not None:
self.save(self.filename)
self._end()
return False
def _start(self):
""" initializes the video recording """
# create temporary directory for the image files of the movie
self.tempdir = tempfile.mkdtemp(prefix='movie_')
self.frame = 0
self.recording = True
def _end(self):
""" clear up temporary things if necessary """
if self.recording:
shutil.rmtree(self.tempdir)
self.recording = False
def clear(self):
""" delete current status and start from scratch """
self._end()
self._start()
def _add_file(self, save_function):
"""
Adds a file to the current movie
"""
if not self.recording:
raise ValueError('Movie is not initialized.')
save_function("%s/frame_%09d.png" % (self.tempdir, self.frame))
self.frame += 1
def add_image(self, image):
"""
Adds the data of a PIL image as a frame to the current movie.
"""<|fim▁hole|> from PIL import ImageOps
image_inv = ImageOps.invert(image)
self._add_file(image_inv.save)
else:
self._add_file(image.save)
def add_array(self, data, colormap=None):
"""
Adds the data from the array as a frame to the current movie.
The array is assumed to be scaled to [0, 1].
(0, 0) lies in the upper left corner of the image.
The first axis extends toward the right, the second toward the bottom
"""
# get colormap
if colormap is None:
import matplotlib.cm as cm
colormap = cm.gray
# produce image
try:
import Image
except ImportError:
from PIL import Image
import numpy as np
grey_data = colormap(np.clip(data.T, 0, 1))
im = Image.fromarray(np.uint8(grey_data*255))
# save image
self.add_image(im)
def add_figure(self, fig=None):
""" adds the figure `fig` as a frame to the current movie """
if fig is None:
fig = plt.gcf()
if self.width is None:
dpi = None
else:
dpi = self.width/fig.get_figwidth()
# save image
if self.inverted:
invert_colors(fig)
save_function = partial(
fig.savefig,
dpi=dpi, edgecolor='none',
facecolor=invert_color(fig.get_facecolor())
)
self._add_file(save_function)
invert_colors(fig)
else:
save_function = partial(fig.savefig, dpi=dpi)
self._add_file(save_function)
def save_frames(self, filename_pattern='./frame_%09d.png', frames='all'):
""" saves the given `frames` as images using the `filename_pattern` """
if not self.recording:
raise ValueError('Movie is not initialized.')
if 'all' == frames:
frames = range(self.frame)
for f in frames:
shutil.copy(
"%s/frame_%09d.png" % (self.tempdir, f),
filename_pattern % f
)
def save(self, filename, extra_args=None):
""" convert the recorded images to a movie using ffmpeg """
if not self.recording:
raise ValueError('Movie is not initialized.')
# set parameters
if extra_args is None:
extra_args = []
if self.framerate is not None:
extra_args += ["-r", self.framerate]
if filename is None:
filename = self.filename
# construct the call to ffmpeg
# add the `-pix_fmt yuv420p` switch for compatibility reasons
# -> http://ffmpeg.org/trac/ffmpeg/wiki/x264EncodingGuide
args = ["ffmpeg"]
if extra_args:
args += extra_args
args += [
"-y", # don't ask questions
"-f", "image2", # input format
"-i", "%s/frame_%%09d.png" % self.tempdir, # input data
"-pix_fmt", "yuv420p", # pixel format for compatibility
"-b:v", "1024k", # high bit rate for good quality
filename # output file
]
# spawn the subprocess and capture its output
p = subprocess.Popen(args, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out = p.stdout.read()
err = p.stderr.read()
# check if error occurred
if p.wait():
print(out)
print(err)
raise RuntimeError('An error occurred while producing the movie.')
# do output anyway, when verbosity is requested
if self.verbose:
print(out)
print(err)
def test_movie_making():
""" Simple test code for movie making """
try:
# try python2 version
filename = raw_input('Choose a file name: ')
except NameError:
# python3 fallback
filename = input('Choose a file name: ')
import numpy as np
# prepare data
x = np.linspace(0, 10, 100)
lines, = plt.plot(x, np.sin(x))
plt.ylim(-1, 1)
with Movie(filename=filename) as movie:
for k in range(30):
lines.set_ydata(np.sin(x + 0.1*k))
movie.add_frame()
if __name__ == "__main__":
print('This file is intended to be used as a module.')
print('This code serves as a test for the defined methods.')
test_movie_making()<|fim▁end|>
|
if self.inverted:
try:
import ImageOps
except ImportError:
|
<|file_name|>IntegerHelper.java<|end_file_name|><|fim▁begin|>/*
* The MIT License (MIT)
*
* Copyright (c) 2015 Lachlan Dowding
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package permafrost.tundra.math;
import java.text.MessageFormat;
/**
* A collection of convenience methods for working with integers.
*/
public final class IntegerHelper {
/**
* The default value used when parsing a null string.
*/<|fim▁hole|> private static int DEFAULT_INT_VALUE = 0;
/**
* Disallow instantiation of this class.
*/
private IntegerHelper() {}
/**
* Converts the given object to a Integer.
*
* @param object The object to be converted.
* @return The converted object.
*/
public static Integer normalize(Object object) {
Integer value = null;
if (object instanceof Number) {
value = ((Number)object).intValue();
} else if (object instanceof String) {
value = parse((String)object);
}
return value;
}
/**
* Parses the given string as an integer.
*
* @param input A string to be parsed as integer.
* @return Integer representing the given string, or 0 if the given string was null.
*/
public static int parse(String input) {
return parse(input, DEFAULT_INT_VALUE);
}
/**
* Parses the given string as an integer.
*
* @param input A string to be parsed as integer.
* @param defaultValue The value returned if the given string is null.
* @return Integer representing the given string, or defaultValue if the given string is null.
*/
public static int parse(String input, int defaultValue) {
if (input == null) return defaultValue;
return Integer.parseInt(input);
}
/**
* Parses the given strings as integers.
*
* @param input A list of strings to be parsed as integers.
* @return A list of integers representing the given strings.
*/
public static int[] parse(String[] input) {
return parse(input, DEFAULT_INT_VALUE);
}
/**
* Parses the given strings as integers.
*
* @param input A list of strings to be parsed as integers.
* @param defaultValue The value returned if a string in the list is null.
* @return A list of integers representing the given strings.
*/
public static int[] parse(String[] input, int defaultValue) {
if (input == null) return null;
int[] output = new int[input.length];
for (int i = 0; i < input.length; i++) {
output[i] = parse(input[i], defaultValue);
}
return output;
}
/**
* Serializes the given integer as a string.
*
* @param input The integer to be serialized.
* @return A string representation of the given integer.
*/
public static String emit(int input) {
return Integer.toString(input);
}
/**
* Serializes the given integers as strings.
*
* @param input A list of integers to be serialized.
* @return A list of string representations of the given integers.
*/
public static String[] emit(int[] input) {
if (input == null) return null;
String[] output = new String[input.length];
for (int i = 0; i < input.length; i++) {
output[i] = emit(input[i]);
}
return output;
}
}<|fim▁end|>
| |
<|file_name|>verifytree.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python -tt
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# copyright (c) 2008 Red Hat, Inc - written by Seth Vidal and Will Woods
import yum
import sys
import os
from yum.misc import getCacheDir, checksum
import urlparse
from yum import Errors
from optparse import OptionParser
import ConfigParser
# Subclass ConfigParser so that the options don't get lowercased. This is
# important given that they are path names.
class LocalConfigParser(ConfigParser.ConfigParser):
"""A subclass of ConfigParser which does not lowercase options"""
def optionxform(self, optionstr):
return optionstr
####
# take a file path to a repo as an option, verify all the metadata vs repomd.xml
# optionally go through packages and verify them vs the checksum in the primary
# Error values
BAD_REPOMD = 1
BAD_METADATA = 2
BAD_COMPS = 4
BAD_PACKAGES = 8
BAD_IMAGES = 16
# Testopia case/plan numbers
plan_number = 13
case_numbers = {'REPODATA': 56, 'CORE_PACKAGES': 57, 'COMPS': 58,
'BOOT_IMAGES': 59}
def get_schema_path():
"""Return the local path to the RELAX NG comps schema."""
# Depending on whether our distro uses versioned or unversioned docdirs
# (the former is true for Fedora < 20, see bug 998579), the schema file
# should be provided by yum at either of the following locations:
paths = ['/usr/share/doc/yum%s/comps.rng' % s
for s in ('', '-' + yum.__version__)]
for path in paths:
# Better than os.path.exists() as this also ensures we can actually
# read the file
try:
with open(path):
return path
except IOError:
continue
raise IOError(paths)
def testopia_create_run(plan):
'''Create a run of the given test plan. Returns the run ID.'''
run_id = 49 # STUB actually create the run
print "Testopia: created run %i of plan %i" % (run_id,plan)
return run_id
def testopia_report(run,case,result):
print " testopia: reporting %s for case %s in run %i" % (result,
str(case),run)
if type(case) == str:
case = case_numbers[case]
# STUB actually do the reporting
def checkfileurl(pkg):
pkg_path = pkg.remote_url
pkg_path = pkg_path.replace('file://', '')
(csum_type, csum) = pkg.returnIdSum()
try:
filesum = checksum(csum_type, pkg_path)
except Errors.MiscError:
return False
if filesum != csum:
return False
return True
def treeinfo_checksum(treeinfo):
# read treeinfo file into cp
# take checksums section
result = 0
cp = LocalConfigParser()
try:
cp.read(treeinfo)
except ConfigParser.MissingSectionHeaderError:
# Generally this means we failed to access the file
print " could not find sections in treeinfo file %s" % treeinfo
return BAD_IMAGES
except ConfigParser.Error:
print " could not parse treeinfo file %s" % treeinfo
return BAD_IMAGES
if not cp.has_section('checksums'):
print " no checksums section in treeinfo file %s" % treeinfo
return BAD_IMAGES
dir_path = os.path.dirname(treeinfo)
for opt in cp.options('checksums'):
fnpath = dir_path + '/%s' % opt
fnpath = os.path.normpath(fnpath)
csuminfo = cp.get('checksums', opt).split(':')
if len(csuminfo) < 2:
print " checksum information doesn't make any sense for %s." % opt
result = BAD_IMAGES
continue
if not os.path.exists(fnpath):
print " cannot find file %s listed in treeinfo" % fnpath
result = BAD_IMAGES
continue
csum = checksum(csuminfo[0], fnpath)
if csum != csuminfo[1]:
print " file %s %s does not match:\n ondisk %s vs treeinfo: %s" % (opt, csuminfo[0], csum, csuminfo[1])
result = BAD_IMAGES
continue
return result
def main():
parser = OptionParser()
parser.usage = """
verifytree - verify that a local yum repository is consistent
verifytree /path/to/repo"""
parser.add_option("-a","--checkall",action="store_true",default=False,
help="Check all packages in the repo")
parser.add_option("--nocomps", "--nogroups",action="store_true",
default=False,
help="Do not read and check comps")
parser.add_option("--noplugins",action="store_true",default=False,
help="Do not load any plugins")
parser.add_option("-t","--testopia",action="store",type="int",
help="Report results to the given testopia run number")
parser.add_option("-r","--treeinfo", action="store_true", default=False,
help="check the checksums of listed files in a .treeinfo file, if available")
opts, args = parser.parse_args()
if not args:
print "Must provide a file url to the repo"
sys.exit(1)
# FIXME: check that "args" is a valid dir before proceeding
# (exists, isdir, contains .treeinfo, etc)
url = args[0]
if url[0] == '/':
url = 'file://' + url
s = urlparse.urlsplit(url)[0]
h,d = urlparse.urlsplit(url)[1:3]
if s != 'file':
print "Must be a file:// url or you will not like this"
sys.exit(1)
repoid = '%s/%s' % (h, d)
repoid = repoid.replace('/', '_')
# Bad things happen if we're missing a trailing slash here
if url[-1] != '/':
url += '/'
basedir = url.replace('file://', '') # for a normal path thing
my = yum.YumBase()
if opts.noplugins:
my.preconf.init_plugins = False
my.conf.cachedir = getCacheDir()
my.repos.disableRepo('*')
newrepo = yum.yumRepo.YumRepository(repoid)
newrepo.name = repoid
newrepo.baseurl = [url]
newrepo.basecachedir = my.conf.cachedir
newrepo.metadata_expire = 0
newrepo.timestamp_check = False
newrepo.enablegroups = 1
# we want *all* metadata
newrepo.mdpolicy = 'group:all'
# add our new repo
my.repos.add(newrepo)
# enable that repo
my.repos.enableRepo(repoid)
# setup the repo dirs/etc
my.doRepoSetup(thisrepo=repoid)
# Initialize results and reporting
retval = 0
if opts.testopia:
run_id = testopia_create_run(opts.testopia)
report = lambda case,result: testopia_report(run_id,case,result)
else:
report = lambda case,result: None
# Check the metadata
print "Checking repodata:"
try:
md_types = newrepo.repoXML.fileTypes()
print " verifying repomd.xml with yum"
except yum.Errors.RepoError:
print " failed to load repomd.xml."
report('REPODATA','FAILED')
report('CORE_PACKAGES','BLOCKED')
report('COMPS','BLOCKED')
return retval | BAD_REPOMD
for md_type in md_types:
try:
print " verifying %s checksum" % md_type
newrepo.retrieveMD(md_type)
except Errors.RepoError, e:
print " %s metadata missing or does not match checksum" % md_type
retval = retval | BAD_METADATA
if retval & BAD_METADATA:
report('REPODATA','FAILED')
else:
report('REPODATA','PASSED')
if not opts.nocomps:
print "Checking groups (comps.xml):"
try:
print " verifying comps.xml with yum"
b = my.comps.compscount
comps = newrepo.getGroups()
except (Errors.GroupsError, Errors.RepoMDError):
print ' comps file missing or unparseable'
report('COMPS','FAILED')
retval = retval | BAD_COMPS
if not (retval & BAD_COMPS):
print " verifying comps.xml grammar with xmllint"
try:
schema = get_schema_path()
except IOError as e:
print ' could not read schema file, paths tried:'
for path in e.args[0]:
print ' ' + path
print (' make sure you have the latest version of yum '
'properly installed')
r = 1
else:
r = os.system("xmllint --noout --nowarning --relaxng %s %s" %
(schema, comps))
if r != 0:
retval = retval | BAD_COMPS
report('COMPS','FAILED')
else:
report('COMPS','PASSED')
# if we've got a .treeinfo file and we are told to check it, then do so
tr_path = basedir + '/.treeinfo'
if opts.treeinfo and os.path.exists(tr_path):
print "Checking checksums of files in .treeinfo"
tr_val = treeinfo_checksum(tr_path)
retval = tr_val | retval
sack = []
packages_ok = True
if opts.checkall:
print "Checking all packages"
sack = my.pkgSack
elif not (retval & BAD_COMPS or opts.nocomps):
print "Checking mandatory @core packages"
group = my.comps.return_group('core')
if group is not None:
pkgs = group.mandatory_packages
else:
print " @core group not found"
retval = retval | BAD_COMPS
report('COMPS','FAILED')
pkgs = []
for pname in pkgs:
# FIXME: this pulls from pkgSack, which (I guess) is populated
# based on the arch etc. of the current host.. so you can't check
# the x86_64 repo from an i386 machine, f'rinstance.
try:
sack.extend(my.pkgSack.searchNevra(name=pname))
except yum.Errors.RepoError:
print " something went wrong with the repodata."
sack = []
break
for pkg in sack:
if checkfileurl(pkg):
print " verifying %s checksum" % pkg
else:
print " verifying %s checksum FAILED" % pkg
packages_ok = False
if sack:
if packages_ok is True:
report('CORE_PACKAGES','PASSED')
else:
report('CORE_PACKAGES','FAILED')
retval = retval | BAD_PACKAGES
else:
# we couldn't test anything
report('CORE_PACKAGES','BLOCKED')
# All done!
if retval == 0:
print "Tree verified."<|fim▁hole|> rc = main()
sys.exit(rc)<|fim▁end|>
|
return retval
if __name__ == "__main__":
|
<|file_name|>mm.client.duck.js<|end_file_name|><|fim▁begin|>/*globals document, setTimeout, clearTimeout, Audio, navigator */
var MallardMayhem = MallardMayhem || {};
(function () {
"use strict";
MallardMayhem.Duck = function (options) {
var self = this;
this.currentTimeout = null;
this.domElement = document.createElement('span');
this.currentLocation = 0;
this.sounds = {};
this.maxAge = (20 * 1000);
this.lifeSpan = null;
this.id = options.id || null;
this.colour = options.colour || null;
this.name = options.name || 'Quacky';
this.direction = 'right';
this.flyTo = function (coords, callback) {
var baseClass = 'duck-' + self.colour,
randomLocation;
switch (typeof coords) {
case 'string':
coords = coords.split(',');
break;
case 'function':
if (coords.x && coords.y) {
coords = [coords.x, coords.y];
}
break;
}
if (!self.currentLocation) {
self.domElement.style.top = '0px';
self.domElement.style.left = '0px';
self.currentLocation = [(MallardMayhem.stage.offsetWidth / 2), MallardMayhem.stage.offsetHeight - (self.domElement.style.height * 2)];
}
if (self.currentLocation[0] === coords[0] && self.currentLocation[1] === coords[1]) {
if (callback) {
callback();
} else {
randomLocation = MallardMayhem.randomCoord();
self.flyTo(randomLocation);
}
} else {
if (self.currentLocation[1] !== coords[1]) {
if (coords[1] > self.currentLocation[1]) {
if ((coords[1] - self.currentLocation[1]) < MallardMayhem.animationStep) {
self.currentLocation[1] = self.currentLocation[1] + (coords[1] - self.currentLocation[1]);
} else {
self.currentLocation[1] = self.currentLocation[1] + MallardMayhem.animationStep;
}
baseClass = baseClass + '-bottom';
}
if (coords[1] < self.currentLocation[1]) {
if ((self.currentLocation[1] - coords[1]) < MallardMayhem.animationStep) {
self.currentLocation[1] = self.currentLocation[1] - (self.currentLocation[1] - coords[1]);
} else {
self.currentLocation[1] = self.currentLocation[1] - MallardMayhem.animationStep;
}
baseClass = baseClass + '-top';
}
}
if (self.currentLocation[0] !== coords[0]) {
if (coords[0] > self.currentLocation[0]) {
if ((coords[0] - self.currentLocation[0]) < MallardMayhem.animationStep) {
self.currentLocation[0] = self.currentLocation[0] + (coords[0] - self.currentLocation[0]);
} else {
self.currentLocation[0] = self.currentLocation[0] + MallardMayhem.animationStep;
}
baseClass = baseClass + '-right';
}
if (coords[0] < self.currentLocation[0]) {
if ((self.currentLocation[0] - coords[0]) < MallardMayhem.animationStep) {<|fim▁hole|> }
baseClass = baseClass + '-left';
}
}
self.domElement.style.left = self.currentLocation[0] + 'px';
self.domElement.style.top = self.currentLocation[1] + 'px';
if (self.domElement.className !== baseClass) {
self.domElement.className = baseClass;
}
self.currentTimeout = setTimeout(function () {
self.flyTo(coords, callback);
}, MallardMayhem.animationSpeed);
}
};
this.drop = function () {
self.currentLocation[1] = self.currentLocation[1] + (MallardMayhem.animationStep * 2);
self.domElement.style.top = self.currentLocation[1] + 'px';
if (self.currentLocation[1] < MallardMayhem.stage.offsetHeight - 72) {
setTimeout(self.drop, MallardMayhem.animationSpeed);
} else {
self.sounds.fall.currentLocation = self.sounds.fall.pause();
self.sounds.ground.play();
MallardMayhem.removeDuck(self.id);
}
};
this.kill = function () {
clearTimeout(self.currentTimeout);
clearTimeout(self.lifeSpan);
self.domElement.className = 'duck-' + self.colour + '-dead';
self.sounds.fall.play();
setTimeout(self.drop, ((1000 / 4) * 3));
};
this.gotShot = function () {
self.domElement.removeEventListener('mousedown', self.gotShot);
self.domElement.removeEventListener('touchstart', self.gotShot);
MallardMayhem.killDuck(self.id);
};
this.flapWing = function () {
self.sounds.flap.play();
};
this.initialize = function () {
self.domElement.id = self.id;
self.domElement.setAttribute('class', 'duck-' + self.colour + '-right');
self.domElement.addEventListener('mousedown', self.gotShot);
self.domElement.addEventListener('touchstart', self.gotShot);
MallardMayhem.stage.appendChild(self.domElement);
var randomLocation = MallardMayhem.randomCoord(),
format = (navigator.userAgent.indexOf('Firefox') > 1) ? 'ogg' : 'mp3';
self.flyTo(randomLocation);
self.lifeSpan = setTimeout(self.flyAway, self.maxAge);
self.sounds = {
fall : new Audio('./audio/fall.' + format),
ground: new Audio('./audio/ground.' + format)
};
self.sounds.fall.volume = 0.1;
};
this.flyAway = function () {
clearTimeout(self.currentTimeout);
self.domElement.removeEventListener('mousedown', self.gotShot);
self.domElement.removeEventListener('touchstart', self.gotShot);
self.domElement.className = 'duck-' + self.colour + '-top';
self.currentLocation[1] = self.currentLocation[1] - (MallardMayhem.animationStep * 3);
self.domElement.style.top = self.currentLocation[1] + 'px';
if (self.currentLocation[1] > (-60)) {
setTimeout(self.flyAway, MallardMayhem.animationSpeed);
} else {
MallardMayhem.removeDuck(self.id);
}
};
this.initialize();
};
}());<|fim▁end|>
|
self.currentLocation[0] = self.currentLocation[0] - (self.currentLocation[0] - coords[0]);
} else {
self.currentLocation[0] = self.currentLocation[0] - MallardMayhem.animationStep;
|
<|file_name|>protocollayer.js<|end_file_name|><|fim▁begin|>/* Copyright 2012 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function (root, factory) {
if (typeof exports === 'object') {
define = function(deps, factory) {
deps = deps.map.forEach(function(id) {
return require(id);
});
module.exports = factory(deps);
};
define.amd = {};
}
if (typeof define === 'function' && define.amd) {
define('activesync/codepages',[
'wbxml',
'./codepages/Common',
'./codepages/AirSync',
'./codepages/Contacts',
'./codepages/Email',
'./codepages/Calendar',
'./codepages/Move',
'./codepages/ItemEstimate',
'./codepages/FolderHierarchy',
'./codepages/MeetingResponse',
'./codepages/Tasks',
'./codepages/ResolveRecipients',
'./codepages/ValidateCert',
'./codepages/Contacts2',
'./codepages/Ping',
'./codepages/Provision',
'./codepages/Search',
'./codepages/GAL',
'./codepages/AirSyncBase',
'./codepages/Settings',
'./codepages/DocumentLibrary',
'./codepages/ItemOperations',
'./codepages/ComposeMail',
'./codepages/Email2',
'./codepages/Notes',
'./codepages/RightsManagement'
], factory);
} else {
root.ActiveSyncCodepages = factory(WBXML,
ASCPCommon,
ASCPAirSync,
ASCPContacts,
ASCPEmail,
ASCPCalendar,
ASCPMove,
ASCPItemEstimate,
ASCPHierarchy,
ASCPMeetingResponse,
ASCPTasks,
ASCPResolveRecipients,
ASCPValidateCert,
ASCPContacts2,
ASCPPing,
ASCPProvision,
ASCPSearch,
ASCPGAL,
ASCPAirSyncBase,
ASCPSettings,
ASCPDocumentLibrary,
ASCPItemOperations,
ASCPComposeMail,
ASCPEmail2,
ASCPNotes,
ASCPRightsManagement);
}
}(this, function(WBXML, Common, AirSync, Contacts, Email, Calendar, Move,
ItemEstimate, FolderHierarchy, MeetingResponse, Tasks,
ResolveRecipients, ValidateCert, Contacts2, Ping, Provision,
Search, GAL, AirSyncBase, Settings, DocumentLibrary,
ItemOperations, ComposeMail, Email2, Notes, RightsManagement) {
'use strict';
var codepages = {
Common: Common,
AirSync: AirSync,
Contacts: Contacts,
Email: Email,
Calendar: Calendar,
Move: Move,
ItemEstimate: ItemEstimate,
FolderHierarchy: FolderHierarchy,
MeetingResponse: MeetingResponse,
Tasks: Tasks,
ResolveRecipients: ResolveRecipients,
ValidateCert: ValidateCert,
Contacts2: Contacts2,
Ping: Ping,
Provision: Provision,
Search: Search,
GAL: GAL,
AirSyncBase: AirSyncBase,
Settings: Settings,
DocumentLibrary: DocumentLibrary,
ItemOperations: ItemOperations,
ComposeMail: ComposeMail,
Email2: Email2,
Notes: Notes,
RightsManagement: RightsManagement
};
WBXML.CompileCodepages(codepages);
return codepages;
}));
/* Copyright 2012 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function (root, factory) {
if (typeof exports === 'object')
module.exports = factory();
else if (typeof define === 'function' && define.amd)
define('wbxml',factory);
else
root.WBXML = factory();
}(this, function() {
'use strict';
var exports = {};
var Tokens = {
SWITCH_PAGE: 0x00,
END: 0x01,
ENTITY: 0x02,
STR_I: 0x03,
LITERAL: 0x04,
EXT_I_0: 0x40,
EXT_I_1: 0x41,
EXT_I_2: 0x42,
PI: 0x43,
LITERAL_C: 0x44,
EXT_T_0: 0x80,
EXT_T_1: 0x81,
EXT_T_2: 0x82,
STR_T: 0x83,
LITERAL_A: 0x84,
EXT_0: 0xC0,
EXT_1: 0xC1,
EXT_2: 0xC2,
OPAQUE: 0xC3,
LITERAL_AC: 0xC4,
};
/**
* Create a constructor for a custom error type that works like a built-in
* Error.
*
* @param name the string name of the error
* @param parent (optional) a parent class for the error, defaults to Error
* @param extraArgs an array of extra arguments that can be passed to the
* constructor of this error type
* @return the constructor for this error
*/
function makeError(name, parent, extraArgs) {
function CustomError() {
// Try to let users call this as CustomError(...) without the "new". This
// is imperfect, and if you call this function directly and give it a
// |this| that's a CustomError, things will break. Don't do it!
var self = this instanceof CustomError ?
this : Object.create(CustomError.prototype);
var tmp = Error();
var offset = 1;
self.stack = tmp.stack.substring(tmp.stack.indexOf('\n') + 1);
self.message = arguments[0] || tmp.message;
if (extraArgs) {
offset += extraArgs.length;
for (var i = 0; i < extraArgs.length; i++)
self[extraArgs[i]] = arguments[i+1];
}
var m = /@(.+):(.+)/.exec(self.stack);
self.fileName = arguments[offset] || (m && m[1]) || "";
self.lineNumber = arguments[offset + 1] || (m && m[2]) || 0;
return self;
}
CustomError.prototype = Object.create((parent || Error).prototype);
CustomError.prototype.name = name;
CustomError.prototype.constructor = CustomError;
return CustomError;
}
var ParseError = makeError('WBXML.ParseError');
exports.ParseError = ParseError;
function StringTable(data, decoder) {
this.strings = [];
this.offsets = {};
var start = 0;
for (var i = 0; i < data.length; i++) {
if (data[i] === 0) {
this.offsets[start] = this.strings.length;
this.strings.push(decoder.decode( data.subarray(start, i) ));
start = i + 1;
}
}
}
StringTable.prototype = {
get: function(offset) {
if (offset in this.offsets)
return this.strings[this.offsets[offset]];
else {
if (offset < 0)
throw new ParseError('offset must be >= 0');
var curr = 0;
for (var i = 0; i < this.strings.length; i++) {
// Add 1 to the current string's length here because we stripped a
// null-terminator earlier.
if (offset < curr + this.strings[i].length + 1)
return this.strings[i].slice(offset - curr);
curr += this.strings[i].length + 1;
}
}
throw new ParseError('invalid offset');
},
};
function CompileCodepages(codepages) {
codepages.__nsnames__ = {};
codepages.__tagnames__ = {};
codepages.__attrdata__ = {};
for (var iter in Iterator(codepages)) {
var name = iter[0], page = iter[1];
if (name.match(/^__/))
continue;
if (page.Tags) {
var v = Iterator(page.Tags).next();
codepages.__nsnames__[v[1] >> 8] = name;
for (var iter2 in Iterator(page.Tags)) {
var tag = iter2[0], value = iter2[1];
codepages.__tagnames__[value] = tag;
}
}
if (page.Attrs) {
for (var iter3 in Iterator(page.Attrs)) {
var attr = iter3[0], data = iter3[1];
if (!('name' in data))
data.name = attr;
codepages.__attrdata__[data.value] = data;
page.Attrs[attr] = data.value;
}
}
}
}
exports.CompileCodepages = CompileCodepages;
var mib2str = {
3: 'US-ASCII',
4: 'ISO-8859-1',
5: 'ISO-8859-2',
6: 'ISO-8859-3',
7: 'ISO-8859-4',
8: 'ISO-8859-5',
9: 'ISO-8859-6',
10: 'ISO-8859-7',
11: 'ISO-8859-8',
12: 'ISO-8859-9',
13: 'ISO-8859-10',
106: 'UTF-8',
};
// TODO: Really, we should build our own map here with synonyms for the
// various encodings, but this is a step in the right direction.
var str2mib = {};
for (var iter in Iterator(mib2str)) {
str2mib[iter[1]] = iter[0];
}
function Element(ownerDocument, type, tag) {
this.ownerDocument = ownerDocument;
this.type = type;
this._attrs = {};
if (typeof tag === 'string') {
var pieces = tag.split(':');
if (pieces.length === 1) {
this.localTagName = pieces[0];
} else {
this.namespaceName = pieces[0];
this.localTagName = pieces[1];
}
}
else {
this.tag = tag;
Object.defineProperties(this, {
'namespace': { get: function() { return this.tag >> 8; } },
'localTag': { get: function() { return this.tag & 0xff; } },
'namespaceName': { get: function() {
return this.ownerDocument._codepages.__nsnames__[this.namespace];
} },
'localTagName': { get: function() {
return this.ownerDocument._codepages.__tagnames__[this.tag];
} },
});
}
}
exports.Element = Element;
Element.prototype = {
get tagName() {
var ns = this.namespaceName;
ns = ns ? ns + ':' : '';
return ns + this.localTagName;
},
getAttributes: function() {
var attributes = [];
for (var iter in Iterator(this._attrs)) {
var name = iter[0], pieces = iter[1];
var data = name.split(':');
attributes.push({ name: name, namespace: data[0], localName: data[1],
value: this._getAttribute(pieces) });
}
return attributes;
},
getAttribute: function(attr) {
if (typeof attr === 'number')
attr = this.ownerDocument._codepages.__attrdata__[attr].name;
else if (!(attr in this._attrs) && this.namespace !== null &&
attr.indexOf(':') === -1)
attr = this.namespaceName + ':' + attr;
return this._getAttribute(this._attrs[attr]);
},
_getAttribute: function(pieces) {
var strValue = '';
var array = [];
for (var iter in Iterator(pieces)) {
var hunk = iter[1];
if (hunk instanceof Extension) {
if (strValue) {
array.push(strValue);
strValue = '';
}
array.push(hunk);
}
else if (typeof hunk === 'number') {
strValue += this.ownerDocument._codepages.__attrdata__[hunk].data ||
'';
}
else {
strValue += hunk;
}
}
if (strValue)
array.push(strValue);
return array.length === 1 ? array[0] : array;
},
_addAttribute: function(attr) {
if (typeof attr === 'string') {
if (attr in this._attrs)
throw new ParseError('attribute '+attr+' is repeated');
return this._attrs[attr] = [];
}
else {
var namespace = attr >> 8;
var localAttr = attr & 0xff;
var localName = this.ownerDocument._codepages.__attrdata__[localAttr]
.name;
var nsName = this.ownerDocument._codepages.__nsnames__[namespace];
var name = nsName + ':' + localName;
if (name in this._attrs)
throw new ParseError('attribute '+name+' is repeated');
return this._attrs[name] = [attr];
}
},
};
function EndTag(ownerDocument) {
this.ownerDocument = ownerDocument;
}
exports.EndTag = EndTag;
EndTag.prototype = {
get type() { return 'ETAG'; },
};
function Text(ownerDocument, textContent) {
this.ownerDocument = ownerDocument;
this.textContent = textContent;
}
exports.Text = Text;
Text.prototype = {
get type() { return 'TEXT'; },
};
function Extension(ownerDocument, subtype, index, value) {
this.ownerDocument = ownerDocument;
this.subtype = subtype;
this.index = index;
this.value = value;
}
exports.Extension = Extension;
Extension.prototype = {
get type() { return 'EXT'; },
};
function ProcessingInstruction(ownerDocument) {
this.ownerDocument = ownerDocument;
}
exports.ProcessingInstruction = ProcessingInstruction;
ProcessingInstruction.prototype = {
get type() { return 'PI'; },
get target() {
if (typeof this.targetID === 'string')
return this.targetID;
else
return this.ownerDocument._codepages.__attrdata__[this.targetID].name;
},
_setTarget: function(target) {
this.targetID = target;
if (typeof target === 'string')
return this._data = [];
else
return this._data = [target];
},
// XXX: this seems impolite...
_getAttribute: Element.prototype._getAttribute,
get data() { return this._getAttribute(this._data); },
};
function Opaque(ownerDocument, data) {
this.ownerDocument = ownerDocument;
this.data = data;
}
exports.Opaque = Opaque;
Opaque.prototype = {
get type() { return 'OPAQUE'; },
};
function Reader(data, codepages) {
this._data = data instanceof Writer ? data.bytes : data;
this._codepages = codepages;
this.rewind();
}
exports.Reader = Reader;
Reader.prototype = {
_get_uint8: function() {
if (this._index === this._data.length)
throw StopIteration;
return this._data[this._index++];
},
_get_mb_uint32: function() {
var b;
var result = 0;
do {
b = this._get_uint8();
result = result*128 + (b & 0x7f);
} while(b & 0x80);
return result;
},
_get_slice: function(length) {
var start = this._index;
this._index += length;
return this._data.subarray(start, this._index);
},
_get_c_string: function() {
var start = this._index;
while (this._get_uint8());
return this._data.subarray(start, this._index - 1);
},
rewind: function() {
// Although in theory we could cache this.document since we no longer use
// iterators, there is clearly some kind of rep exposure that goes awry
// for us, so I'm having us re-do our work. This does not matter in the
// normal use-case, just for debugging and just for our test server, which
// both rely on rewind().
this._index = 0;
var v = this._get_uint8();
this.version = ((v & 0xf0) + 1).toString() + '.' + (v & 0x0f).toString();
this.pid = this._get_mb_uint32();
this.charset = mib2str[this._get_mb_uint32()] || 'unknown';
this._decoder = TextDecoder(this.charset);
var tbl_len = this._get_mb_uint32();
this.strings = new StringTable(this._get_slice(tbl_len), this._decoder);
this.document = this._getDocument();
},
// start = version publicid charset strtbl body
// strtbl = length *byte
// body = *pi element *pi
// element = stag [ 1*attribute END ] [ *content END ]
//
// content = element | string | extension | entity | pi | opaque
//
// stag = TAG | ( LITERAL index )
// attribute = attrStart *attrValue
// attrStart = ATTRSTART | ( LITERAL index )
// attrValue = ATTRVALUE | string | extension | entity
//
// extension = ( EXT_I termstr ) | ( EXT_T index ) | EXT
//
// string = inline | tableref
// inline = STR_I termstr
// tableref = STR_T index
//
// entity = ENTITY entcode
// entcode = mb_u_int32 // UCS-4 character code
//
// pi = PI attrStart *attrValue END
//
// opaque = OPAQUE length *byte
//
// version = u_int8 containing WBXML version number
// publicid = mb_u_int32 | ( zero index )
// charset = mb_u_int32
// termstr = charset-dependent string with termination
// index = mb_u_int32 // integer index into string table.
// length = mb_u_int32 // integer length.
// zero = u_int8 // containing the value zero (0).
_getDocument: function() {
// Parser states
var States = {
BODY: 0,
ATTRIBUTES: 1,
ATTRIBUTE_PI: 2,
};
var state = States.BODY;
var currentNode;
var currentAttr;
var codepage = 0;
var depth = 0;
var foundRoot = false;
var doc = [];
var appendString = (function(s) {
if (state === States.BODY) {
if (!currentNode)
currentNode = new Text(this, s);
else
currentNode.textContent += s;
}
else { // if (state === States.ATTRIBUTES || state === States.ATTRIBUTE_PI)
currentAttr.push(s);
}
// We can assume that we're in a valid state, so don't bother checking
// here.
}).bind(this);
try { while (true) {
var tok = this._get_uint8();
if (tok === Tokens.SWITCH_PAGE) {
codepage = this._get_uint8();
if (!(codepage in this._codepages.__nsnames__))
throw new ParseError('unknown codepage '+codepage);
}
else if (tok === Tokens.END) {
if (state === States.BODY && depth-- > 0) {
if (currentNode) {
doc.push(currentNode);
currentNode = null;
}
doc.push(new EndTag(this));
}
else if (state === States.ATTRIBUTES || state === States.ATTRIBUTE_PI) {
state = States.BODY;
doc.push(currentNode);
currentNode = null;
currentAttr = null;
}
else {
throw new ParseError('unexpected END token');
}
}
else if (tok === Tokens.ENTITY) {
if (state === States.BODY && depth === 0)
throw new ParseError('unexpected ENTITY token');
var e = this._get_mb_uint32();
appendString('&#'+e+';');
}
else if (tok === Tokens.STR_I) {
if (state === States.BODY && depth === 0)
throw new ParseError('unexpected STR_I token');
appendString(this._decoder.decode(this._get_c_string()));
}
else if (tok === Tokens.PI) {
if (state !== States.BODY)
throw new ParseError('unexpected PI token');
state = States.ATTRIBUTE_PI;
if (currentNode)
doc.push(currentNode);
currentNode = new ProcessingInstruction(this);
}
else if (tok === Tokens.STR_T) {
if (state === States.BODY && depth === 0)
throw new ParseError('unexpected STR_T token');
var r = this._get_mb_uint32();
appendString(this.strings.get(r));
}
else if (tok === Tokens.OPAQUE) {
if (state !== States.BODY)
throw new ParseError('unexpected OPAQUE token');
var len = this._get_mb_uint32();
var data = this._get_slice(len);
if (currentNode) {
doc.push(currentNode);
currentNode = null;
}
doc.push(new Opaque(this, data));
}
else if (((tok & 0x40) || (tok & 0x80)) && (tok & 0x3f) < 3) {
var hi = tok & 0xc0;
var lo = tok & 0x3f;
var subtype;
var value;
if (hi === Tokens.EXT_I_0) {
subtype = 'string';
value = this._decoder.decode(this._get_c_string());
}
else if (hi === Tokens.EXT_T_0) {
subtype = 'integer';
value = this._get_mb_uint32();
}
else { // if (hi === Tokens.EXT_0)
subtype = 'byte';
value = null;
}
var ext = new Extension(this, subtype, lo, value);
if (state === States.BODY) {
if (currentNode) {
doc.push(currentNode);
currentNode = null;
}
doc.push(ext);
}
else { // if (state === States.ATTRIBUTES || state === States.ATTRIBUTE_PI)
currentAttr.push(ext);
}
}
else if (state === States.BODY) {
if (depth === 0) {
if (foundRoot)
throw new ParseError('multiple root nodes found');
foundRoot = true;
}
var tag = (codepage << 8) + (tok & 0x3f);
if ((tok & 0x3f) === Tokens.LITERAL) {
var r = this._get_mb_uint32();
tag = this.strings.get(r);
}
if (currentNode)
doc.push(currentNode);
currentNode = new Element(this, (tok & 0x40) ? 'STAG' : 'TAG', tag);
if (tok & 0x40)
depth++;
if (tok & 0x80) {
state = States.ATTRIBUTES;
}
else {
state = States.BODY;
doc.push(currentNode);
currentNode = null;
}
}
else { // if (state === States.ATTRIBUTES || state === States.ATTRIBUTE_PI)
var attr = (codepage << 8) + tok;
if (!(tok & 0x80)) {
if (tok === Tokens.LITERAL) {
var r = this._get_mb_uint32();
attr = this.strings.get(r);
}
if (state === States.ATTRIBUTE_PI) {
if (currentAttr)
throw new ParseError('unexpected attribute in PI');
currentAttr = currentNode._setTarget(attr);
}
else {
currentAttr = currentNode._addAttribute(attr);
}
}
else {
currentAttr.push(attr);
}
}
} } catch (e) {
if (!(e instanceof StopIteration))
throw e;
}
return doc;
},
dump: function(indentation, header) {
var result = '';
if (indentation === undefined)
indentation = 2;
var indent = function(level) {
return new Array(level*indentation + 1).join(' ');
};
var tagstack = [];
if (header) {
result += 'Version: ' + this.version + '\n';
result += 'Public ID: ' + this.pid + '\n';
result += 'Charset: ' + this.charset + '\n';
result += 'String table:\n "' +
this.strings.strings.join('"\n "') + '"\n\n';
}
var newline = false;
var doc = this.document;
var doclen = doc.length;
for (var iNode = 0; iNode < doclen; iNode++) {
var node = doc[iNode];
if (node.type === 'TAG' || node.type === 'STAG') {
result += indent(tagstack.length) + '<' + node.tagName;
var attributes = node.getAttributes();
for (var i = 0; i < attributes.length; i++) {
var attr = attributes[i];
result += ' ' + attr.name + '="' + attr.value + '"';
}
if (node.type === 'STAG') {
tagstack.push(node.tagName);
result += '>\n';
}
else
result += '/>\n';
}
else if (node.type === 'ETAG') {
var tag = tagstack.pop();
result += indent(tagstack.length) + '</' + tag + '>\n';
}
else if (node.type === 'TEXT') {
result += indent(tagstack.length) + node.textContent + '\n';
}
else if (node.type === 'PI') {
result += indent(tagstack.length) + '<?' + node.target;
if (node.data)
result += ' ' + node.data;
result += '?>\n';
}
else if (node.type === 'OPAQUE') {
result += indent(tagstack.length) + '<![CDATA[' + node.data + ']]>\n';
}
else {
throw new Error('Unknown node type "' + node.type + '"');
}
}
return result;
},
};
function Writer(version, pid, charset, strings) {
this._rawbuf = new ArrayBuffer(1024);
this._buffer = new Uint8Array(this._rawbuf);
this._pos = 0;
this._codepage = 0;
this._tagStack = [];
var infos = version.split('.').map(function(x) {
return parseInt(x);
});
var major = infos[0], minor = infos[1];
var v = ((major - 1) << 4) + minor;
var charsetNum = charset;
if (typeof charset === 'string') {
charsetNum = str2mib[charset];
if (charsetNum === undefined)
throw new Error('unknown charset '+charset);
}
var encoder = this._encoder = TextEncoder(charset);
this._write(v);
this._write(pid);
this._write(charsetNum);
if (strings) {
var bytes = strings.map(function(s) { return encoder.encode(s); });
var len = bytes.reduce(function(x, y) { return x + y.length + 1; }, 0);
this._write_mb_uint32(len);
for (var iter in Iterator(bytes)) {
var b = iter[1];
this._write_bytes(b);
this._write(0x00);
}
}
else {
this._write(0x00);
}
}
exports.Writer = Writer;
Writer.Attribute = function(name, value) {
this.isValue = typeof name === 'number' && (name & 0x80);
if (this.isValue && value !== undefined)
throw new Error("Can't specify a value for attribute value constants");
this.name = name;
this.value = value;
};
Writer.StringTableRef = function(index) {
this.index = index;
};
Writer.Entity = function(code) {
this.code = code;
};
Writer.Extension = function(subtype, index, data) {
var validTypes = {
'string': { value: Tokens.EXT_I_0,
validator: function(data) {
return typeof data === 'string';
} },
'integer': { value: Tokens.EXT_T_0,
validator: function(data) {
return typeof data === 'number';
} },
'byte': { value: Tokens.EXT_0,
validator: function(data) {
return data === null || data === undefined;
} },
};
var info = validTypes[subtype];
if (!info)
throw new Error('Invalid WBXML Extension type');
if (!info.validator(data))
throw new Error('Data for WBXML Extension does not match type');
if (index !== 0 && index !== 1 && index !== 2)
throw new Error('Invalid WBXML Extension index');
this.subtype = info.value;
this.index = index;
this.data = data;
};
Writer.a = function(name, val) { return new Writer.Attribute(name, val); };
Writer.str_t = function(index) { return new Writer.StringTableRef(index); };
Writer.ent = function(code) { return new Writer.Entity(code) };
Writer.ext = function(subtype, index, data) { return new Writer.Extension(
subtype, index, data); };
Writer.prototype = {
_write: function(tok) {
// Expand the buffer by a factor of two if we ran out of space.
if (this._pos === this._buffer.length - 1) {
this._rawbuf = new ArrayBuffer(this._rawbuf.byteLength * 2);
var buffer = new Uint8Array(this._rawbuf);
for (var i = 0; i < this._buffer.length; i++)
buffer[i] = this._buffer[i];
this._buffer = buffer;
}
this._buffer[this._pos++] = tok;
},
_write_mb_uint32: function(value) {
var bytes = [];
bytes.push(value % 0x80);
while (value >= 0x80) {
value >>= 7;
bytes.push(0x80 + (value % 0x80));
}
for (var i = bytes.length - 1; i >= 0; i--)
this._write(bytes[i]);
},
_write_bytes: function(bytes) {
for (var i = 0; i < bytes.length; i++)
this._write(bytes[i]);
},
_write_str: function(str) {
this._write_bytes(this._encoder.encode(str));
},
_setCodepage: function(codepage) {
if (this._codepage !== codepage) {
this._write(Tokens.SWITCH_PAGE);
this._write(codepage);
this._codepage = codepage;
}
},
_writeTag: function(tag, stag, attrs) {
if (tag === undefined)
throw new Error('unknown tag');
var flags = 0x00;
if (stag)
flags += 0x40;
if (attrs.length)
flags += 0x80;
if (tag instanceof Writer.StringTableRef) {
this._write(Tokens.LITERAL + flags);
this._write_mb_uint32(tag.index);
}
else {
this._setCodepage(tag >> 8);
this._write((tag & 0xff) + flags);
}
if (attrs.length) {
for (var iter in Iterator(attrs)) {
var attr = iter[1];
this._writeAttr(attr);
}
this._write(Tokens.END);
}
},
_writeAttr: function(attr) {
if (!(attr instanceof Writer.Attribute))
throw new Error('Expected an Attribute object');
if (attr.isValue)
throw new Error("Can't use attribute value constants here");
if (attr.name instanceof Writer.StringTableRef) {
this._write(Tokens.LITERAL);
this._write(attr.name.index);
}
else {
this._setCodepage(attr.name >> 8);
this._write(attr.name & 0xff);
}
this._writeText(attr.value, true);
},
_writeText: function(value, inAttr) {
if (Array.isArray(value)) {
for (var iter in Iterator(value)) {
var piece = iter[1];
this._writeText(piece, inAttr);
}
}
else if (value instanceof Writer.StringTableRef) {
this._write(Tokens.STR_T);
this._write_mb_uint32(value.index);
}
else if (value instanceof Writer.Entity) {
this._write(Tokens.ENTITY);
this._write_mb_uint32(value.code);
}
else if (value instanceof Writer.Extension) {
this._write(value.subtype + value.index);
if (value.subtype === Tokens.EXT_I_0) {
this._write_str(value.data);
this._write(0x00);
}
else if (value.subtype === Tokens.EXT_T_0) {
this._write_mb_uint32(value.data);
}
}
else if (value instanceof Writer.Attribute) {
if (!value.isValue)
throw new Error('Unexpected Attribute object');
if (!inAttr)
throw new Error("Can't use attribute value constants outside of " +
"attributes");
this._setCodepage(value.name >> 8);
this._write(value.name & 0xff);
}
else if (value !== null && value !== undefined) {
this._write(Tokens.STR_I);
this._write_str(value.toString());
this._write(0x00);
}
},
tag: function(tag) {
var tail = arguments.length > 1 ? arguments[arguments.length - 1] : null;
if (tail === null || tail instanceof Writer.Attribute) {
var rest = Array.prototype.slice.call(arguments, 1);
this._writeTag(tag, false, rest);
return this;
}
else {
var head = Array.prototype.slice.call(arguments, 0, -1);
return this.stag.apply(this, head)
.text(tail)
.etag();
}
},
stag: function(tag) {
var rest = Array.prototype.slice.call(arguments, 1);
this._writeTag(tag, true, rest);
this._tagStack.push(tag);
return this;
},
etag: function(tag) {
if (this._tagStack.length === 0)
throw new Error('Spurious etag() call!');
var expectedTag = this._tagStack.pop();
if (tag !== undefined && tag !== expectedTag)
throw new Error('Closed the wrong tag');
this._write(Tokens.END);
return this;
},
text: function(value) {
this._writeText(value);
return this;
},
pi: function(target, data) {
this._write(Tokens.PI);
this._writeAttr(Writer.a(target, data));
this._write(Tokens.END);
return this;
},
ext: function(subtype, index, data) {
return this.text(Writer.ext(subtype, index, data));
},
opaque: function(data) {
this._write(Tokens.OPAQUE);
this._write_mb_uint32(data.length);
if (typeof data === 'string') {
this._write_str(data);
}
else {
for (var i = 0; i < data.length; i++)
this._write(data[i]);
}
return this;
},
get buffer() { return this._rawbuf.slice(0, this._pos); },
get bytes() { return new Uint8Array(this._rawbuf, 0, this._pos); },
};
function EventParser() {
this.listeners = [];
this.onerror = function(e) { throw e; };
}
exports.EventParser = EventParser;
EventParser.prototype = {
addEventListener: function(path, callback) {
this.listeners.push({path: path, callback: callback});
},
_pathMatches: function(a, b) {
return a.length === b.length && a.every(function(val, i) {
if (b[i] === '*')
return true;
else if (Array.isArray(b[i])) {
return b[i].indexOf(val) !== -1;
}
else
return val === b[i];
});
},
run: function(reader) {
var fullPath = [];
var recPath = [];
var recording = 0;
var doc = reader.document;
var doclen = doc.length;
for (var iNode = 0; iNode < doclen; iNode++) {
var node = doc[iNode];
if (node.type === 'TAG') {
fullPath.push(node.tag);
for (var iter in Iterator(this.listeners)) {
var listener = iter[1];
if (this._pathMatches(fullPath, listener.path)) {
node.children = [];
try {
listener.callback(node);
}
catch (e) {
if (this.onerror)
this.onerror(e);
}
}
}
fullPath.pop();
}
else if (node.type === 'STAG') {
fullPath.push(node.tag);
for (var iter in Iterator(this.listeners)) {
var listener = iter[1];
if (this._pathMatches(fullPath, listener.path)) {
recording++;
}
}
}
else if (node.type === 'ETAG') {
for (var iter in Iterator(this.listeners)) {
var listener = iter[1];
if (this._pathMatches(fullPath, listener.path)) {
recording--;
try {
listener.callback(recPath[recPath.length-1]);
}
catch (e) {
if (this.onerror)
this.onerror(e);
}
}
}
fullPath.pop();
}
if (recording) {
if (node.type === 'STAG') {
node.type = 'TAG';
node.children = [];
if (recPath.length)
recPath[recPath.length-1].children.push(node);
recPath.push(node);
}
else if (node.type === 'ETAG') {
recPath.pop();
}
else {
node.children = [];
recPath[recPath.length-1].children.push(node);
}
}
}
},
};
return exports;
}));
/* Copyright 2012 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function (root, factory) {
if (typeof exports === 'object')
module.exports = factory();
else if (typeof define === 'function' && define.amd)
define('activesync/codepages/Common',[], factory);
else
root.ASCPCommon = factory();
}(this, function() {
'use strict';
return {
Enums: {
Status: {
InvalidContent: '101',
InvalidWBXML: '102',
InvalidXML: '103',
InvalidDateTime: '104',
InvalidCombinationOfIDs: '105',
InvalidIDs: '106',
InvalidMIME: '107',
DeviceIdMissingOrInvalid: '108',
DeviceTypeMissingOrInvalid: '109',
ServerError: '110',
ServerErrorRetryLater: '111',
ActiveDirectoryAccessDenied: '112',
MailboxQuotaExceeded: '113',
MailboxServerOffline: '114',
SendQuotaExceeded: '115',
MessageRecipientUnresolved: '116',
MessageReplyNotAllowed: '117',
MessagePreviouslySent: '118',
MessageHasNoRecipient: '119',
MailSubmissionFailed: '120',
MessageReplyFailed: '121',
AttachmentIsTooLarge: '122',
UserHasNoMailbox: '123',
UserCannotBeAnonymous: '124',
UserPrincipalCouldNotBeFound: '125',
UserDisabledForSync: '126',
UserOnNewMailboxCannotSync: '127',
UserOnLegacyMailboxCannotSync: '128',
DeviceIsBlockedForThisUser: '129',
AccessDenied: '130',
AccountDisabled: '131',
SyncStateNotFound: '132',
SyncStateLocked: '133',
SyncStateCorrupt: '134',
SyncStateAlreadyExists: '135',
SyncStateVersionInvalid: '136',
CommandNotSupported: '137',
VersionNotSupported: '138',
DeviceNotFullyProvisionable: '139',
RemoteWipeRequested: '140',
LegacyDeviceOnStrictPolicy: '141',
DeviceNotProvisioned: '142',
PolicyRefresh: '143',
InvalidPolicyKey: '144',
ExternallyManagedDevicesNotAllowed: '145',
NoRecurrenceInCalendar: '146',
UnexpectedItemClass: '147',
RemoteServerHasNoSSL: '148',
InvalidStoredRequest: '149',
ItemNotFound: '150',
TooManyFolders: '151',
NoFoldersFounds: '152',
ItemsLostAfterMove: '153',
FailureInMoveOperation: '154',
MoveCommandDisallowedForNonPersistentMoveAction: '155',
MoveCommandInvalidDestinationFolder: '156',
AvailabilityTooManyRecipients: '160',
AvailabilityDLLimitReached: '161',
AvailabilityTransientFailure: '162',
AvailabilityFailure: '163',
BodyPartPreferenceTypeNotSupported: '164',
DeviceInformationRequired: '165',
InvalidAccountId: '166',
AccountSendDisabled: '167',
IRM_FeatureDisabled: '168',
IRM_TransientError: '169',
IRM_PermanentError: '170',
IRM_InvalidTemplateID: '171',
IRM_OperationNotPermitted: '172',
NoPicture: '173',
PictureTooLarge: '174',
PictureLimitReached: '175',
BodyPart_ConversationTooLarge: '176',
MaximumDevicesReached: '177',
},
},
};
}));
/* Copyright 2012 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function (root, factory) {
if (typeof exports === 'object')
module.exports = factory();
else if (typeof define === 'function' && define.amd)
define('activesync/codepages/Contacts',[], factory);
else
root.ASCPContacts = factory();
}(this, function() {
'use strict';
return {
Tags: {
Anniversary: 0x0105,
AssistantName: 0x0106,
AssistantPhoneNumber: 0x0107,
Birthday: 0x0108,
Body: 0x0109,
BodySize: 0x010A,
BodyTruncated: 0x010B,
Business2PhoneNumber: 0x010C,
BusinessAddressCity: 0x010D,
BusinessAddressCountry: 0x010E,
BusinessAddressPostalCode: 0x010F,
BusinessAddressState: 0x0110,
BusinessAddressStreet: 0x0111,
BusinessFaxNumber: 0x0112,
BusinessPhoneNumber: 0x0113,
CarPhoneNumber: 0x0114,
Categories: 0x0115,
Category: 0x0116,
Children: 0x0117,
Child: 0x0118,
CompanyName: 0x0119,
Department: 0x011A,
Email1Address: 0x011B,
Email2Address: 0x011C,
Email3Address: 0x011D,
FileAs: 0x011E,
FirstName: 0x011F,
Home2PhoneNumber: 0x0120,
HomeAddressCity: 0x0121,
HomeAddressCountry: 0x0122,
HomeAddressPostalCode: 0x0123,
HomeAddressState: 0x0124,
HomeAddressStreet: 0x0125,
HomeFaxNumber: 0x0126,
HomePhoneNumber: 0x0127,
JobTitle: 0x0128,
LastName: 0x0129,
MiddleName: 0x012A,
MobilePhoneNumber: 0x012B,
OfficeLocation: 0x012C,
OtherAddressCity: 0x012D,
OtherAddressCountry: 0x012E,
OtherAddressPostalCode: 0x012F,
OtherAddressState: 0x0130,
OtherAddressStreet: 0x0131,
PagerNumber: 0x0132,
RadioPhoneNumber: 0x0133,
Spouse: 0x0134,
Suffix: 0x0135,
Title: 0x0136,
WebPage: 0x0137,
YomiCompanyName: 0x0138,
YomiFirstName: 0x0139,
YomiLastName: 0x013A,
CompressedRTF: 0x013B,
Picture: 0x013C,
Alias: 0x013D,
WeightedRank: 0x013E,
},
};
}));
/* Copyright 2012 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function (root, factory) {
if (typeof exports === 'object')
module.exports = factory();
else if (typeof define === 'function' && define.amd)
define('activesync/codepages/Calendar',[], factory);
else
root.ASCPCalendar = factory();
}(this, function() {
'use strict';
return {
Tags: {
TimeZone: 0x0405,
AllDayEvent: 0x0406,
Attendees: 0x0407,
Attendee: 0x0408,
Email: 0x0409,
Name: 0x040A,
Body: 0x040B,
BodyTruncated: 0x040C,
BusyStatus: 0x040D,
Categories: 0x040E,
Category: 0x040F,
CompressedRTF: 0x0410,
DtStamp: 0x0411,
EndTime: 0x0412,
Exception: 0x0413,
Exceptions: 0x0414,
Deleted: 0x0415,
ExceptionStartTime: 0x0416,
Location: 0x0417,
MeetingStatus: 0x0418,
OrganizerEmail: 0x0419,
OrganizerName: 0x041A,
Recurrence: 0x041B,
Type: 0x041C,
Until: 0x041D,
Occurrences: 0x041E,
Interval: 0x041F,
DayOfWeek: 0x0420,
DayOfMonth: 0x0421,
WeekOfMonth: 0x0422,
MonthOfYear: 0x0423,
Reminder: 0x0424,
Sensitivity: 0x0425,
Subject: 0x0426,
StartTime: 0x0427,
UID: 0x0428,
AttendeeStatus: 0x0429,
AttendeeType: 0x042A,
Attachment: 0x042B,
Attachments: 0x042C,
AttName: 0x042D,
AttSize: 0x042E,
AttOid: 0x042F,
AttMethod: 0x0430,
AttRemoved: 0x0431,
DisplayName: 0x0432,
DisallowNewTimeProposal: 0x0433,
ResponseRequested: 0x0434,
AppointmentReplyTime: 0x0435,
ResponseType: 0x0436,
CalendarType: 0x0437,
IsLeapMonth: 0x0438,
FirstDayOfWeek: 0x0439,
OnlineMeetingConfLink: 0x043A,
OnlineMeetingExternalLink: 0x043B,
},
};
}));
/* Copyright 2012 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function (root, factory) {
if (typeof exports === 'object')
module.exports = factory();
else if (typeof define === 'function' && define.amd)
define('activesync/codepages/MeetingResponse',[], factory);
else
root.ASCPMeetingResponse = factory();
}(this, function() {
'use strict';
return {
Tags: {
CalendarId: 0x0805,
CollectionId: 0x0806,
MeetingResponse: 0x0807,
RequestId: 0x0808,
Request: 0x0809,
Result: 0x080A,
Status: 0x080B,
UserResponse: 0x080C,
InstanceId: 0x080E,
},
Enums: {
Status: {
Success: '1',
InvalidRequest: '2',
MailboxError: '3',
ServerError: '4',
},
UserResponse: {
Accepted: '1',
Tentative: '2',
Declined: '3',
},
},
};
}));
/* Copyright 2012 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function (root, factory) {
if (typeof exports === 'object')
module.exports = factory();
else if (typeof define === 'function' && define.amd)
define('activesync/codepages/Tasks',[], factory);
else
root.ASCPTasks = factory();
}(this, function() {
'use strict';
return {
Tags: {
Body: 0x0905,
BodySize: 0x0906,
BodyTruncated: 0x0907,
Categories: 0x0908,
Category: 0x0909,
Complete: 0x090A,
DateCompleted: 0x090B,
DueDate: 0x090C,
UtcDueDate: 0x090D,
Importance: 0x090E,
Recurrence: 0x090F,
Recurrence_Type: 0x0910,
Recurrence_Start: 0x0911,
Recurrence_Until: 0x0912,
Recurrence_Occurrences: 0x0913,
Recurrence_Interval: 0x0914,
Recurrence_DayOfMonth: 0x0915,
Recurrence_DayOfWeek: 0x0916,
Recurrence_WeekOfMonth: 0x0917,
Recurrence_MonthOfYear: 0x0918,
Recurrence_Regenerate: 0x0919,
Recurrence_DeadOccur: 0x091A,
ReminderSet: 0x091B,
ReminderTime: 0x091C,
Sensitivity: 0x091D,
StartDate: 0x091E,
UtcStartDate: 0x091F,
Subject: 0x0920,
CompressedRTF: 0x0921,
OrdinalDate: 0x0922,
SubOrdinalDate: 0x0923,
CalendarType: 0x0924,
IsLeapMonth: 0x0925,
FirstDayOfWeek: 0x0926,
}
};
}));
/* Copyright 2012 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function (root, factory) {
if (typeof exports === 'object')
module.exports = factory();
else if (typeof define === 'function' && define.amd)
define('activesync/codepages/ResolveRecipients',[], factory);
else
root.ASCPResolveRecipients = factory();
}(this, function() {
'use strict';
return {
Tags: {
ResolveRecipients: 0x0A05,
Response: 0x0A06,
Status: 0x0A07,
Type: 0x0A08,
Recipient: 0x0A09,
DisplayName: 0x0A0A,
EmailAddress: 0x0A0B,
Certificates: 0x0A0C,
Certificate: 0x0A0D,
MiniCertificate: 0x0A0E,
Options: 0x0A0F,
To: 0x0A10,
CertificateRetrieval: 0x0A11,
RecipientCount: 0x0A12,
MaxCertificates: 0x0A13,
MaxAmbiguousRecipients: 0x0A14,
CertificateCount: 0x0A15,
Availability: 0x0A16,
StartTime: 0x0A17,
EndTime: 0x0A18,
MergedFreeBusy: 0x0A19,
Picture: 0x0A1A,
MaxSize: 0x0A1B,
Data: 0x0A1C,
MaxPictures: 0x0A1D,
},
Enums: {
Status: {
Success: '1',
AmbiguousRecipientFull: '2',
AmbiguousRecipientPartial: '3',
RecipientNotFound: '4',
ProtocolError: '5',
ServerError: '6',
InvalidSMIMECert: '7',
CertLimitReached: '8',
},
CertificateRetrieval: {
None: '1',
Full: '2',
Mini: '3',
},
MergedFreeBusy: {
Free: '0',
Tentative: '1',
Busy: '2',
Oof: '3',
NoData: '4',
},
},
};
}));
/* Copyright 2012 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function (root, factory) {
if (typeof exports === 'object')
module.exports = factory();
else if (typeof define === 'function' && define.amd)
define('activesync/codepages/ValidateCert',[], factory);
else
root.ASCPValidateCert = factory();
}(this, function() {
'use strict';
return {
Tags: {
ValidateCert: 0x0B05,
Certificates: 0x0B06,
Certificate: 0x0B07,
CertificateChain: 0x0B08,
CheckCRL: 0x0B09,
Status: 0x0B0A,
},
Enums: {
Status: {
Success: '1',
ProtocolError: '2',
InvalidSignature: '3',
UntrustedSource: '4',
InvalidChain: '5',
NotForEmail: '6',
Expired: '7',
InconsistentTimes: '8',
IdMisused: '9',
MissingInformation: '10',
CAEndMismatch: '11',
EmailAddressMismatch: '12',
Revoked: '13',
ServerOffline: '14',
ChainRevoked: '15',
RevocationUnknown: '16',
UnknownError: '17',
},
},
};
}));
/* Copyright 2012 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function (root, factory) {
if (typeof exports === 'object')
module.exports = factory();
else if (typeof define === 'function' && define.amd)
define('activesync/codepages/Contacts2',[], factory);
else
root.ASCPContacts2 = factory();
}(this, function() {
'use strict';
return {
Tags: {
CustomerId: 0x0C05,
GovernmentId: 0x0C06,
IMAddress: 0x0C07,
IMAddress2: 0x0C08,
IMAddress3: 0x0C09,
ManagerName: 0x0C0A,
CompanyMainPhone: 0x0C0B,
AccountName: 0x0C0C,
NickName: 0x0C0D,
MMS: 0x0C0E,
}
};
}));
/* Copyright 2012 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function (root, factory) {
if (typeof exports === 'object')
module.exports = factory();
else if (typeof define === 'function' && define.amd)
define('activesync/codepages/Ping',[], factory);
else
root.ASCPPing = factory();
}(this, function() {
'use strict';
return {
Tags: {
Ping: 0x0D05,
AutdState: 0x0D06,
Status: 0x0D07,
HeartbeatInterval: 0x0D08,
Folders: 0x0D09,
Folder: 0x0D0A,
Id: 0x0D0B,
Class: 0x0D0C,
MaxFolders: 0x0D0D,
},
Enums: {
Status: {
Expired: '1',
Changed: '2',
MissingParameters: '3',
SyntaxError: '4',
InvalidInterval: '5',
TooManyFolders: '6',
SyncFolders: '7',
ServerError: '8',
},
},
};
}));
/* Copyright 2012 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function (root, factory) {
if (typeof exports === 'object')
module.exports = factory();
else if (typeof define === 'function' && define.amd)
define('activesync/codepages/Provision',[], factory);
else
root.ASCPProvision = factory();
}(this, function() {
'use strict';
return {
Tags: {
Provision: 0x0E05,
Policies: 0x0E06,
Policy: 0x0E07,
PolicyType: 0x0E08,
PolicyKey: 0x0E09,
Data: 0x0E0A,
Status: 0x0E0B,
RemoteWipe: 0x0E0C,
EASProvisionDoc: 0x0E0D,
DevicePasswordEnabled: 0x0E0E,
AlphanumericDevicePasswordRequired: 0x0E0F,
DeviceEncryptionEnabled: 0x0E10,
RequireStorageCardEncryption: 0x0E10,
PasswordRecoveryEnabled: 0x0E11,
AttachmentsEnabled: 0x0E13,
MinDevicePasswordLength: 0x0E14,
MaxInactivityTimeDeviceLock: 0x0E15,
MaxDevicePasswordFailedAttempts: 0x0E16,
MaxAttachmentSize: 0x0E17,
AllowSimpleDevicePassword: 0x0E18,
DevicePasswordExpiration: 0x0E19,
DevicePasswordHistory: 0x0E1A,
AllowStorageCard: 0x0E1B,
AllowCamera: 0x0E1C,
RequireDeviceEncryption: 0x0E1D,
AllowUnsignedApplications: 0x0E1E,
AllowUnsignedInstallationPackages: 0x0E1F,
MinDevicePasswordComplexCharacters: 0x0E20,
AllowWiFi: 0x0E21,
AllowTextMessaging: 0x0E22,
AllowPOPIMAPEmail: 0x0E23,
AllowBluetooth: 0x0E24,
AllowIrDA: 0x0E25,
RequireManualSyncWhenRoaming: 0x0E26,
AllowDesktopSync: 0x0E27,
MaxCalendarAgeFilter: 0x0E28,
AllowHTMLEmail: 0x0E29,
MaxEmailAgeFilter: 0x0E2A,
MaxEmailBodyTruncationSize: 0x0E2B,
MaxEmailHTMLBodyTruncationSize: 0x0E2C,
RequireSignedSMIMEMessages: 0x0E2D,
RequireEncryptedSMIMEMessages: 0x0E2E,
RequireSignedSMIMEAlgorithm: 0x0E2F,
RequireEncryptionSMIMEAlgorithm: 0x0E30,
AllowSMIMEEncryptionAlgorithmNegotiation: 0x0E31,<|fim▁hole|> AllowBrowser: 0x0E33,
AllowConsumerEmail: 0x0E34,
AllowRemoteDesktop: 0x0E35,
AllowInternetSharing: 0x0E36,
UnapprovedInROMApplicationList: 0x0E37,
ApplicationName: 0x0E38,
ApprovedApplicationList: 0x0E39,
Hash: 0x0E3A,
}
};
}));
/* Copyright 2012 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function (root, factory) {
if (typeof exports === 'object')
module.exports = factory();
else if (typeof define === 'function' && define.amd)
define('activesync/codepages/Search',[], factory);
else
root.ASCPSearch = factory();
}(this, function() {
'use strict';
return {
Tags: {
Search: 0x0F05,
Stores: 0x0F06,
Store: 0x0F07,
Name: 0x0F08,
Query: 0x0F09,
Options: 0x0F0A,
Range: 0x0F0B,
Status: 0x0F0C,
Response: 0x0F0D,
Result: 0x0F0E,
Properties: 0x0F0F,
Total: 0x0F10,
EqualTo: 0x0F11,
Value: 0x0F12,
And: 0x0F13,
Or: 0x0F14,
FreeText: 0x0F15,
DeepTraversal: 0x0F17,
LongId: 0x0F18,
RebuildResults: 0x0F19,
LessThan: 0x0F1A,
GreaterThan: 0x0F1B,
Schema: 0x0F1C,
Supported: 0x0F1D,
UserName: 0x0F1E,
Password: 0x0F1F,
ConversationId: 0x0F20,
Picture: 0x0F21,
MaxSize: 0x0F22,
MaxPictures: 0x0F23,
},
Enums: {
Status: {
Success: '1',
InvalidRequest: '2',
ServerError: '3',
BadLink: '4',
AccessDenied: '5',
NotFound: '6',
ConnectionFailure: '7',
TooComplex: '8',
Timeout: '10',
SyncFolders: '11',
EndOfRange: '12',
AccessBlocked: '13',
CredentialsRequired: '14',
}
}
};
}));
/* Copyright 2012 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function (root, factory) {
if (typeof exports === 'object')
module.exports = factory();
else if (typeof define === 'function' && define.amd)
define('activesync/codepages/GAL',[], factory);
else
root.ASCPGAL = factory();
}(this, function() {
'use strict';
return {
Tags: {
DisplayName: 0x1005,
Phone: 0x1006,
Office: 0x1007,
Title: 0x1008,
Company: 0x1009,
Alias: 0x100A,
FirstName: 0x100B,
LastName: 0x100C,
HomePhone: 0x100D,
MobilePhone: 0x100E,
EmailAddress: 0x100F,
Picture: 0x1010,
Status: 0x1011,
Data: 0x1012,
}
};
}));
/* Copyright 2012 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function (root, factory) {
if (typeof exports === 'object')
module.exports = factory();
else if (typeof define === 'function' && define.amd)
define('activesync/codepages/Settings',[], factory);
else
root.ASCPSettings = factory();
}(this, function() {
'use strict';
return {
Tags: {
Settings: 0x1205,
Status: 0x1206,
Get: 0x1207,
Set: 0x1208,
Oof: 0x1209,
OofState: 0x120A,
StartTime: 0x120B,
EndTime: 0x120C,
OofMessage: 0x120D,
AppliesToInternal: 0x120E,
AppliesToExternalKnown: 0x120F,
AppliesToExternalUnknown: 0x1210,
Enabled: 0x1211,
ReplyMessage: 0x1212,
BodyType: 0x1213,
DevicePassword: 0x1214,
Password: 0x1215,
DeviceInformation: 0x1216,
Model: 0x1217,
IMEI: 0x1218,
FriendlyName: 0x1219,
OS: 0x121A,
OSLanguage: 0x121B,
PhoneNumber: 0x121C,
UserInformation: 0x121D,
EmailAddresses: 0x121E,
SmtpAddress: 0x121F,
UserAgent: 0x1220,
EnableOutboundSMS: 0x1221,
MobileOperator: 0x1222,
PrimarySmtpAddress: 0x1223,
Accounts: 0x1224,
Account: 0x1225,
AccountId: 0x1226,
AccountName: 0x1227,
UserDisplayName: 0x1228,
SendDisabled: 0x1229,
/* Missing tag value 0x122A */
RightsManagementInformation: 0x122B,
},
Enums: {
Status: {
Success: '1',
ProtocolError: '2',
AccessDenied: '3',
ServerError: '4',
InvalidArguments: '5',
ConflictingArguments: '6',
DeniedByPolicy: '7',
},
OofState: {
Disabled: '0',
Global: '1',
TimeBased: '2',
}
}
};
}));
/* Copyright 2012 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function (root, factory) {
if (typeof exports === 'object')
module.exports = factory();
else if (typeof define === 'function' && define.amd)
define('activesync/codepages/DocumentLibrary',[], factory);
else
root.ASCPDocumentLibrary = factory();
}(this, function() {
'use strict';
return {
Tags: {
LinkId: 0x1305,
DisplayName: 0x1306,
IsFolder: 0x1307,
CreationDate: 0x1308,
LastModifiedDate: 0x1309,
IsHidden: 0x130A,
ContentLength: 0x130B,
ContentType: 0x130C,
},
};
}));
/* Copyright 2012 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function (root, factory) {
if (typeof exports === 'object')
module.exports = factory();
else if (typeof define === 'function' && define.amd)
define('activesync/codepages/Email2',[], factory);
else
root.ASCPEmail2 = factory();
}(this, function() {
'use strict';
return {
Tags: {
UmCallerID: 0x1605,
UmUserNotes: 0x1606,
UmAttDuration: 0x1607,
UmAttOrder: 0x1608,
ConversationId: 0x1609,
ConversationIndex: 0x160A,
LastVerbExecuted: 0x160B,
LastVerbExecutionTime: 0x160C,
ReceivedAsBcc: 0x160D,
Sender: 0x160E,
CalendarType: 0x160F,
IsLeapMonth: 0x1610,
AccountId: 0x1611,
FirstDayOfWeek: 0x1612,
MeetingMessageType: 0x1613,
},
Enums: {
LastVerbExecuted: {
Unknown: '0',
ReplyToSender: '1',
ReplyToAll: '2',
Forward: '3',
},
CalendarType: {
Default: '0',
Gregorian: '1',
GregorianUS: '2',
Japan: '3',
Taiwan: '4',
Korea: '5',
Hijri: '6',
Thai: '7',
Hebrew: '8',
GregorianMeFrench: '9',
GregorianArabic: '10',
GregorianTranslatedEnglish: '11',
GregorianTranslatedFrench: '12',
JapaneseLunar: '14',
ChineseLunar: '15',
KoreanLunar: '20',
},
FirstDayOfWeek: {
Sunday: '0',
Monday: '1',
Tuesday: '2',
Wednesday: '3',
Thursday: '4',
Friday: '5',
Saturday: '6',
},
MeetingMessageType: {
Unspecified: '0',
InitialRequest: '1',
FullUpdate: '2',
InformationalUpdate: '3',
Outdated: '4',
DelegatorsCopy: '5',
Delegated: '6',
}
}
};
}));
/* Copyright 2012 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function (root, factory) {
if (typeof exports === 'object')
module.exports = factory();
else if (typeof define === 'function' && define.amd)
define('activesync/codepages/Notes',[], factory);
else
root.ASCPNotes = factory();
}(this, function() {
'use strict';
return {
Tags: {
Subject: 0x1705,
MessageClass: 0x1706,
LastModifiedDate: 0x1707,
Categories: 0x1708,
Category: 0x1709,
}
};
}));
/* Copyright 2012 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function (root, factory) {
if (typeof exports === 'object')
module.exports = factory();
else if (typeof define === 'function' && define.amd)
define('activesync/codepages/RightsManagement',[], factory);
else
root.ASCPRightsManagement = factory();
}(this, function() {
'use strict';
return {
Tags: {
RightsManagementSupport: 0x1805,
RightsManagementTemplates: 0x1806,
RightsManagementTemplate: 0x1807,
RightsManagementLicense: 0x1808,
EditAllowed: 0x1809,
ReplyAllowed: 0x180A,
ReplyAllAllowed: 0x180B,
ForwardAllowed: 0x180C,
ModifyRecipientsAllowed: 0x180D,
ExtractAllowed: 0x180E,
PrintAllowed: 0x180F,
ExportAllowed: 0x1810,
ProgrammaticAccessAllowed: 0x1811,
Owner: 0x1812,
ContentExpiryDate: 0x1813,
TemplateID: 0x1814,
TemplateName: 0x1815,
TemplateDescription: 0x1816,
ContentOwner: 0x1817,
RemoveRightsManagementDistribution: 0x1818,
}
};
}));
/* Copyright 2012 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function (root, factory) {
if (typeof exports === 'object')
module.exports = factory(require('wbxml'), require('activesync/codepages'));
else if (typeof define === 'function' && define.amd)
define('activesync/protocol',['wbxml', 'activesync/codepages'], factory);
else
root.ActiveSyncProtocol = factory(WBXML, ActiveSyncCodepages);
}(this, function(WBXML, ASCP) {
'use strict';
var exports = {};
function nullCallback() {}
/**
* Create a constructor for a custom error type that works like a built-in
* Error.
*
* @param name the string name of the error
* @param parent (optional) a parent class for the error, defaults to Error
* @param extraArgs an array of extra arguments that can be passed to the
* constructor of this error type
* @return the constructor for this error
*/
function makeError(name, parent, extraArgs) {
function CustomError() {
// Try to let users call this as CustomError(...) without the "new". This
// is imperfect, and if you call this function directly and give it a
// |this| that's a CustomError, things will break. Don't do it!
var self = this instanceof CustomError ?
this : Object.create(CustomError.prototype);
var tmp = Error();
var offset = 1;
self.stack = tmp.stack.substring(tmp.stack.indexOf('\n') + 1);
self.message = arguments[0] || tmp.message;
if (extraArgs) {
offset += extraArgs.length;
for (var i = 0; i < extraArgs.length; i++)
self[extraArgs[i]] = arguments[i+1];
}
var m = /@(.+):(.+)/.exec(self.stack);
self.fileName = arguments[offset] || (m && m[1]) || "";
self.lineNumber = arguments[offset + 1] || (m && m[2]) || 0;
return self;
}
CustomError.prototype = Object.create((parent || Error).prototype);
CustomError.prototype.name = name;
CustomError.prototype.constructor = CustomError;
return CustomError;
}
var AutodiscoverError = makeError('ActiveSync.AutodiscoverError');
exports.AutodiscoverError = AutodiscoverError;
var AutodiscoverDomainError = makeError('ActiveSync.AutodiscoverDomainError',
AutodiscoverError);
exports.AutodiscoverDomainError = AutodiscoverDomainError;
var HttpError = makeError('ActiveSync.HttpError', null, ['status']);
exports.HttpError = HttpError;
function nsResolver(prefix) {
var baseUrl = 'http://schemas.microsoft.com/exchange/autodiscover/';
var ns = {
rq: baseUrl + 'mobilesync/requestschema/2006',
ad: baseUrl + 'responseschema/2006',
ms: baseUrl + 'mobilesync/responseschema/2006',
};
return ns[prefix] || null;
}
function Version(str) {
var details = str.split('.').map(function(x) {
return parseInt(x);
});
this.major = details[0], this.minor = details[1];
}
exports.Version = Version;
Version.prototype = {
eq: function(other) {
if (!(other instanceof Version))
other = new Version(other);
return this.major === other.major && this.minor === other.minor;
},
ne: function(other) {
return !this.eq(other);
},
gt: function(other) {
if (!(other instanceof Version))
other = new Version(other);
return this.major > other.major ||
(this.major === other.major && this.minor > other.minor);
},
gte: function(other) {
if (!(other instanceof Version))
other = new Version(other);
return this.major >= other.major ||
(this.major === other.major && this.minor >= other.minor);
},
lt: function(other) {
return !this.gte(other);
},
lte: function(other) {
return !this.gt(other);
},
toString: function() {
return this.major + '.' + this.minor;
},
};
/**
* Set the Authorization header on an XMLHttpRequest.
*
* @param xhr the XMLHttpRequest
* @param username the username
* @param password the user's password
*/
function setAuthHeader(xhr, username, password) {
var authorization = 'Basic ' + btoa(username + ':' + password);
xhr.setRequestHeader('Authorization', authorization);
}
/**
* Perform autodiscovery for the server associated with this account.
*
* @param aEmailAddress the user's email address
* @param aPassword the user's password
* @param aTimeout a timeout (in milliseconds) for the request
* @param aCallback a callback taking an error status (if any) and the
* server's configuration
* @param aNoRedirect true if autodiscovery should *not* follow any
* specified redirects (typically used when autodiscover has already
* told us about a redirect)
*/
function autodiscover(aEmailAddress, aPassword, aTimeout, aCallback,
aNoRedirect) {
if (!aCallback) aCallback = nullCallback;
var domain = aEmailAddress.substring(aEmailAddress.indexOf('@') + 1);
// The first time we try autodiscovery, we should try to recover from
// AutodiscoverDomainErrors and HttpErrors. The second time, *all* errors
// should be reported to the callback.
do_autodiscover(domain, aEmailAddress, aPassword, aTimeout, aNoRedirect,
function(aError, aConfig) {
if (aError instanceof AutodiscoverDomainError ||
aError instanceof HttpError)
do_autodiscover('autodiscover.' + domain, aEmailAddress, aPassword,
aTimeout, aNoRedirect, aCallback);
else
aCallback(aError, aConfig);
});
}
exports.autodiscover = autodiscover;
/**
* Perform the actual autodiscovery process for a given URL.
*
* @param aHost the host name to attempt autodiscovery for
* @param aEmailAddress the user's email address
* @param aPassword the user's password
* @param aTimeout a timeout (in milliseconds) for the request
* @param aNoRedirect true if autodiscovery should *not* follow any
* specified redirects (typically used when autodiscover has already
* told us about a redirect)
* @param aCallback a callback taking an error status (if any) and the
* server's configuration
*/
function do_autodiscover(aHost, aEmailAddress, aPassword, aTimeout,
aNoRedirect, aCallback) {
var xhr = new XMLHttpRequest({mozSystem: true, mozAnon: true});
xhr.open('POST', 'https://' + aHost + '/autodiscover/autodiscover.xml',
true);
setAuthHeader(xhr, aEmailAddress, aPassword);
xhr.setRequestHeader('Content-Type', 'text/xml');
xhr.timeout = aTimeout;
xhr.upload.onprogress = xhr.upload.onload = function() {
xhr.timeout = 0;
};
xhr.onload = function() {
if (xhr.status < 200 || xhr.status >= 300)
return aCallback(new HttpError(xhr.statusText, xhr.status));
var uid = Math.random();
self.postMessage({
uid: uid,
type: 'configparser',
cmd: 'accountactivesync',
args: [xhr.responseText]
});
self.addEventListener('message', function onworkerresponse(evt) {
var data = evt.data;
if (data.type != 'configparser' || data.cmd != 'accountactivesync' ||
data.uid != uid) {
return;
}
self.removeEventListener(evt.type, onworkerresponse);
var args = data.args;
var config = args[0], error = args[1], redirectedEmail = args[2];
if (error) {
aCallback(new AutodiscoverDomainError(error), config);
} else if (redirectedEmail) {
autodiscover(redirectedEmail, aPassword, aTimeout, aCallback, true);
} else {
aCallback(null, config);
}
});
};
xhr.ontimeout = xhr.onerror = function() {
aCallback(new Error('Error getting Autodiscover URL'));
};
// TODO: use something like
// http://ejohn.org/blog/javascript-micro-templating/ here?
var postdata =
'<?xml version="1.0" encoding="utf-8"?>\n' +
'<Autodiscover xmlns="' + nsResolver('rq') + '">\n' +
' <Request>\n' +
' <EMailAddress>' + aEmailAddress + '</EMailAddress>\n' +
' <AcceptableResponseSchema>' + nsResolver('ms') +
'</AcceptableResponseSchema>\n' +
' </Request>\n' +
'</Autodiscover>';
xhr.send(postdata);
}
/**
* Create a new ActiveSync connection.
*
* ActiveSync connections use XMLHttpRequests to communicate with the
* server. These XHRs are created with mozSystem: true and mozAnon: true to,
* respectively, help with CORS, and to ignore the authentication cache. The
* latter is important because 1) it prevents the HTTP auth dialog from
* appearing if the user's credentials are wrong and 2) it allows us to
* connect to the same server as multiple users.
*
* @param aDeviceId (optional) a string identifying this device
* @param aDeviceType (optional) a string identifying the type of this device
*/
function Connection(aDeviceId, aDeviceType) {
this._deviceId = aDeviceId || 'v140Device';
this._deviceType = aDeviceType || 'SmartPhone';
this.timeout = 0;
this._connected = false;
this._waitingForConnection = false;
this._connectionError = null;
this._connectionCallbacks = [];
this.baseUrl = null;
this._username = null;
this._password = null;
this.versions = [];
this.supportedCommands = [];
this.currentVersion = null;
}
exports.Connection = Connection;
Connection.prototype = {
/**
* Perform any callbacks added during the connection process.
*
* @param aError the error status (if any)
*/
_notifyConnected: function(aError) {
if (aError)
this.disconnect();
for (var iter in Iterator(this._connectionCallbacks)) {
var callback = iter[1];
callback.apply(callback, arguments);
}
this._connectionCallbacks = [];
},
/**
* Get the connection status.
*
* @return true iff we are fully connected to the server
*/
get connected() {
return this._connected;
},
/*
* Initialize the connection with a server and account credentials.
*
* @param aServer the ActiveSync server to connect to
* @param aUsername the account's username
* @param aPassword the account's password
*/
open: function(aServer, aUsername, aPassword) {
this.baseUrl = aServer + '/Microsoft-Server-ActiveSync';
this._username = aUsername;
this._password = aPassword;
},
/**
* Connect to the server with this account by getting the OPTIONS from
* the server (and verifying the account's credentials).
*
* @param aCallback a callback taking an error status (if any) and the
* server's options.
*/
connect: function(aCallback) {
// If we're already connected, just run the callback and return.
if (this.connected) {
if (aCallback)
aCallback(null);
return;
}
// Otherwise, queue this callback up to fire when we do connect.
if (aCallback)
this._connectionCallbacks.push(aCallback);
// Don't do anything else if we're already trying to connect.
if (this._waitingForConnection)
return;
this._waitingForConnection = true;
this._connectionError = null;
this.getOptions((function(aError, aOptions) {
this._waitingForConnection = false;
this._connectionError = aError;
if (aError) {
console.error('Error connecting to ActiveSync:', aError);
return this._notifyConnected(aError, aOptions);
}
this._connected = true;
this.versions = aOptions.versions;
this.supportedCommands = aOptions.commands;
this.currentVersion = new Version(aOptions.versions.slice(-1)[0]);
return this._notifyConnected(null, aOptions);
}).bind(this));
},
/**
* Disconnect from the ActiveSync server, and reset the connection state.
* The server and credentials remain set however, so you can safely call
* connect() again immediately after.
*/
disconnect: function() {
if (this._waitingForConnection)
throw new Error("Can't disconnect while waiting for server response");
this._connected = false;
this.versions = [];
this.supportedCommands = [];
this.currentVersion = null;
},
/**
* Attempt to provision this account. XXX: Currently, this doesn't actually
* do anything, but it's useful as a test command for Gmail to ensure that
* the user entered their password correctly.
*
* @param aCallback a callback taking an error status (if any) and the
* WBXML response
*/
provision: function(aCallback) {
var pv = ASCP.Provision.Tags;
var w = new WBXML.Writer('1.3', 1, 'UTF-8');
w.stag(pv.Provision)
.etag();
this.postCommand(w, aCallback);
},
/**
* Get the options for the server associated with this account.
*
* @param aCallback a callback taking an error status (if any), and the
* resulting options.
*/
getOptions: function(aCallback) {
if (!aCallback) aCallback = nullCallback;
var conn = this;
var xhr = new XMLHttpRequest({mozSystem: true, mozAnon: true});
xhr.open('OPTIONS', this.baseUrl, true);
setAuthHeader(xhr, this._username, this._password);
xhr.timeout = this.timeout;
xhr.upload.onprogress = xhr.upload.onload = function() {
xhr.timeout = 0;
};
xhr.onload = function() {
if (xhr.status < 200 || xhr.status >= 300) {
console.error('ActiveSync options request failed with response ' +
xhr.status);
aCallback(new HttpError(xhr.statusText, xhr.status));
return;
}
var result = {
versions: xhr.getResponseHeader('MS-ASProtocolVersions').split(','),
commands: xhr.getResponseHeader('MS-ASProtocolCommands').split(','),
};
aCallback(null, result);
};
xhr.ontimeout = xhr.onerror = function() {
var error = new Error('Error getting OPTIONS URL');
console.error(error);
aCallback(error);
};
// Set the response type to "text" so that we don't try to parse an empty
// body as XML.
xhr.responseType = 'text';
xhr.send();
},
/**
* Check if the server supports a particular command. Requires that we be
* connected to the server already.
*
* @param aCommand a string/tag representing the command type
* @return true iff the command is supported
*/
supportsCommand: function(aCommand) {
if (!this.connected)
throw new Error('Connection required to get command');
if (typeof aCommand === 'number')
aCommand = ASCP.__tagnames__[aCommand];
return this.supportedCommands.indexOf(aCommand) !== -1;
},
/**
* DEPRECATED. See postCommand() below.
*/
doCommand: function() {
console.warn('doCommand is deprecated. Use postCommand instead.');
this.postCommand.apply(this, arguments);
},
/**
* Send a WBXML command to the ActiveSync server and listen for the
* response.
*
* @param aCommand the WBXML representing the command or a string/tag
* representing the command type for empty commands
* @param aCallback a callback to call when the server has responded; takes
* two arguments: an error status (if any) and the response as a
* WBXML reader. If the server returned an empty response, the
* response argument is null.
* @param aExtraParams (optional) an object containing any extra URL
* parameters that should be added to the end of the request URL
* @param aExtraHeaders (optional) an object containing any extra HTTP
* headers to send in the request
* @param aProgressCallback (optional) a callback to invoke with progress
* information, when available. Two arguments are provided: the
* number of bytes received so far, and the total number of bytes
* expected (when known, 0 if unknown).
*/
postCommand: function(aCommand, aCallback, aExtraParams, aExtraHeaders,
aProgressCallback) {
var contentType = 'application/vnd.ms-sync.wbxml';
if (typeof aCommand === 'string' || typeof aCommand === 'number') {
this.postData(aCommand, contentType, null, aCallback, aExtraParams,
aExtraHeaders);
}
else {
var r = new WBXML.Reader(aCommand, ASCP);
var commandName = r.document[0].localTagName;
this.postData(commandName, contentType, aCommand.buffer, aCallback,
aExtraParams, aExtraHeaders, aProgressCallback);
}
},
/**
* Send arbitrary data to the ActiveSync server and listen for the response.
*
* @param aCommand a string (or WBXML tag) representing the command type
* @param aContentType the content type of the post data
* @param aData the data to be posted
* @param aCallback a callback to call when the server has responded; takes
* two arguments: an error status (if any) and the response as a
* WBXML reader. If the server returned an empty response, the
* response argument is null.
* @param aExtraParams (optional) an object containing any extra URL
* parameters that should be added to the end of the request URL
* @param aExtraHeaders (optional) an object containing any extra HTTP
* headers to send in the request
* @param aProgressCallback (optional) a callback to invoke with progress
* information, when available. Two arguments are provided: the
* number of bytes received so far, and the total number of bytes
* expected (when known, 0 if unknown).
*/
postData: function(aCommand, aContentType, aData, aCallback, aExtraParams,
aExtraHeaders, aProgressCallback) {
// Make sure our command name is a string.
if (typeof aCommand === 'number')
aCommand = ASCP.__tagnames__[aCommand];
if (!this.supportsCommand(aCommand)) {
var error = new Error("This server doesn't support the command " +
aCommand);
console.error(error);
aCallback(error);
return;
}
// Build the URL parameters.
var params = [
['Cmd', aCommand],
['User', this._username],
['DeviceId', this._deviceId],
['DeviceType', this._deviceType]
];
if (aExtraParams) {
for (var iter in Iterator(params)) {
var param = iter[1];
if (param[0] in aExtraParams)
throw new TypeError('reserved URL parameter found');
}
for (var kv in Iterator(aExtraParams))
params.push(kv);
}
var paramsStr = params.map(function(i) {
return encodeURIComponent(i[0]) + '=' + encodeURIComponent(i[1]);
}).join('&');
// Now it's time to make our request!
var xhr = new XMLHttpRequest({mozSystem: true, mozAnon: true});
xhr.open('POST', this.baseUrl + '?' + paramsStr, true);
setAuthHeader(xhr, this._username, this._password);
xhr.setRequestHeader('MS-ASProtocolVersion', this.currentVersion);
xhr.setRequestHeader('Content-Type', aContentType);
// Add extra headers if we have any.
if (aExtraHeaders) {
for (var iter in Iterator(aExtraHeaders)) {
var key = iter[0], key = iter[1];
xhr.setRequestHeader(key, value);
}
}
xhr.timeout = this.timeout;
xhr.upload.onprogress = xhr.upload.onload = function() {
xhr.timeout = 0;
};
xhr.onprogress = function(event) {
if (aProgressCallback)
aProgressCallback(event.loaded, event.total);
};
var conn = this;
var parentArgs = arguments;
xhr.onload = function() {
// This status code is a proprietary Microsoft extension used to
// indicate a redirect, not to be confused with the draft-standard
// "Unavailable For Legal Reasons" status. More info available here:
// <http://msdn.microsoft.com/en-us/library/gg651019.aspx>
if (xhr.status === 451) {
conn.baseUrl = xhr.getResponseHeader('X-MS-Location');
conn.postData.apply(conn, parentArgs);
return;
}
if (xhr.status < 200 || xhr.status >= 300) {
console.error('ActiveSync command ' + aCommand + ' failed with ' +
'response ' + xhr.status);
aCallback(new HttpError(xhr.statusText, xhr.status));
return;
}
var response = null;
if (xhr.response.byteLength > 0)
response = new WBXML.Reader(new Uint8Array(xhr.response), ASCP);
aCallback(null, response);
};
xhr.ontimeout = xhr.onerror = function() {
var error = new Error('Error getting command URL');
console.error(error);
aCallback(error);
};
xhr.responseType = 'arraybuffer';
xhr.send(aData);
},
};
return exports;
}));<|fim▁end|>
|
AllowSMIMESoftCerts: 0x0E32,
|
<|file_name|>XMIResultFormatter.java<|end_file_name|><|fim▁begin|>/*
* XMIResultFormatter.java
*
* Copyright (c) 2011, Database Research Group, Institute of Computer Science, University of Heidelberg.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU General Public License.
*
* authors: Andreas Fay, Jannik Strötgen
* email: [email protected], [email protected]
*
* HeidelTime is a multilingual, cross-domain temporal tagger.
* For details, see http://dbs.ifi.uni-heidelberg.de/heideltime
*/
package de.unihd.dbs.heideltime.standalone.components.impl;
<|fim▁hole|>
import java.io.ByteArrayOutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.MatchResult;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.uima.cas.impl.XmiCasSerializer;
import org.apache.uima.jcas.JCas;
import org.apache.uima.util.XMLSerializer;
import de.unihd.dbs.heideltime.standalone.components.ResultFormatter;
/**
* Result formatter based on XMI.
*
* @see {@link org.apache.uima.examples.xmi.XmiWriterCasConsumer}
*
* @author Andreas Fay, University of Heidelberg
* @version 1.0
*/
public class XMIResultFormatter implements ResultFormatter {
@Override
public String format(JCas jcas) throws Exception {
ByteArrayOutputStream outStream = null;
try {
// Write XMI
outStream = new ByteArrayOutputStream();
XmiCasSerializer ser = new XmiCasSerializer(jcas.getTypeSystem());
XMLSerializer xmlSer = new XMLSerializer(outStream, false);
ser.serialize(jcas.getCas(), xmlSer.getContentHandler());
// Convert output stream to string
// String newOut = outStream.toString("UTF-8");
String newOut = outStream.toString();
// System.err.println("NEWOUT:"+newOut);
//
// if (newOut.matches("^<\\?xml version=\"1.0\" encoding=\"UTF-8\"\\?>.*$")){
// newOut = newOut.replaceFirst("<\\?xml version=\"1.0\" encoding=\"UTF-8\"\\?>",
// "<\\?xml version=\"1.0\" encoding=\""+Charset.defaultCharset().name()+"\"\\?>");
// }
// if (newOut.matches("^.*?sofaString=\"(.*?)\".*$")){
// for (MatchResult r : findMatches(Pattern.compile("^(.*?sofaString=\")(.*?)(\".*)$"), newOut)){
// String stringBegin = r.group(1);
// String sofaString = r.group(2);
// System.err.println("SOFASTRING:"+sofaString);
// String stringEnd = r.group(3);
// // The sofaString is encoded as UTF-8.
// // However, at this point it has to be translated back into the defaultCharset.
// byte[] defaultDocText = new String(sofaString.getBytes(), "UTF-8").getBytes(Charset.defaultCharset().name());
// String docText = new String(defaultDocText);
// System.err.println("DOCTEXT:"+docText);
// newOut = stringBegin + docText + stringEnd;
//// newOut = newOut.replaceFirst("sofaString=\".*?\"", "sofaString=\"" + docText + "\"");
// }
// }
// System.err.println("NEWOUT:"+newOut);
return newOut;
} finally {
if (outStream != null) {
outStream.close();
}
}
}
/**
* Find all the matches of a pattern in a charSequence and return the
* results as list.
*
* @param pattern
* @param s
* @return
*/
public static Iterable<MatchResult> findMatches(Pattern pattern,
CharSequence s) {
List<MatchResult> results = new ArrayList<MatchResult>();
for (Matcher m = pattern.matcher(s); m.find();)
results.add(m.toMatchResult());
return results;
}
}<|fim▁end|>
| |
<|file_name|>stage_results_unittest.py<|end_file_name|><|fim▁begin|># Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unittests for the stage results."""
from __future__ import print_function
import mock
import os
import signal
import StringIO
import time
from chromite.cbuildbot import cbuildbot_config
from chromite.cbuildbot import failures_lib
from chromite.cbuildbot import results_lib
from chromite.cbuildbot import cbuildbot_run
from chromite.cbuildbot.builders import simple_builders
from chromite.cbuildbot.stages import generic_stages
from chromite.cbuildbot.stages import sync_stages
from chromite.lib import cros_build_lib
from chromite.lib import cros_test_lib
from chromite.lib import parallel
class PassStage(generic_stages.BuilderStage):
"""PassStage always works"""
class Pass2Stage(generic_stages.BuilderStage):
"""Pass2Stage always works"""
class FailStage(generic_stages.BuilderStage):
"""FailStage always throws an exception"""
FAIL_EXCEPTION = failures_lib.StepFailure("Fail stage needs to fail.")
def PerformStage(self):
"""Throw the exception to make us fail."""
raise self.FAIL_EXCEPTION
class SkipStage(generic_stages.BuilderStage):
"""SkipStage is skipped."""
config_name = 'signer_tests'
class SneakyFailStage(generic_stages.BuilderStage):
"""SneakyFailStage exits with an error."""
def PerformStage(self):
"""Exit without reporting back."""
# pylint: disable=protected-access
os._exit(1)
class SuicideStage(generic_stages.BuilderStage):
"""SuicideStage kills itself with kill -9."""
def PerformStage(self):
"""Exit without reporting back."""
os.kill(os.getpid(), signal.SIGKILL)
class SetAttrStage(generic_stages.BuilderStage):
"""Stage that sets requested run attribute to a value."""
DEFAULT_ATTR = 'unittest_value'
VALUE = 'HereTakeThis'
def __init__(self, builder_run, delay=2, attr=DEFAULT_ATTR, *args, **kwargs):
super(SetAttrStage, self).__init__(builder_run, *args, **kwargs)
self.delay = delay
self.attr = attr
def PerformStage(self):
"""Wait self.delay seconds then set requested run attribute."""
time.sleep(self.delay)
self._run.attrs.SetParallel(self.attr, self.VALUE)
def QueueableException(self):
return cbuildbot_run.ParallelAttributeError(self.attr)
class GetAttrStage(generic_stages.BuilderStage):
"""Stage that accesses requested run attribute and confirms value."""
DEFAULT_ATTR = 'unittest_value'
def __init__(self, builder_run, tester=None, timeout=5, attr=DEFAULT_ATTR,
*args, **kwargs):
super(GetAttrStage, self).__init__(builder_run, *args, **kwargs)
self.tester = tester
self.timeout = timeout
self.attr = attr
def PerformStage(self):
"""Wait for attrs.test value to show up."""
assert not self._run.attrs.HasParallel(self.attr)
value = self._run.attrs.GetParallel(self.attr, self.timeout)
if self.tester:
self.tester(value)
def QueueableException(self):
return cbuildbot_run.ParallelAttributeError(self.attr)
def TimeoutException(self):
return cbuildbot_run.AttrTimeoutError(self.attr)
class BuildStagesResultsTest(cros_test_lib.TestCase):
"""Tests for stage results and reporting."""
def setUp(self):
# Always stub RunCommmand out as we use it in every method.
self._bot_id = 'x86-generic-paladin'
build_config = cbuildbot_config.GetConfig()[self._bot_id]
self.build_root = '/fake_root'
# Create a class to hold
class Options(object):
"""Dummy class to hold option values."""
options = Options()
options.archive_base = 'gs://dontcare'
options.buildroot = self.build_root
options.debug = False
options.prebuilts = False
options.clobber = False
options.nosdk = False
options.remote_trybot = False
options.latest_toolchain = False
options.buildnumber = 1234
options.chrome_rev = None
options.branch = 'dontcare'
options.chrome_root = False
self._manager = parallel.Manager()
self._manager.__enter__()
self._run = cbuildbot_run.BuilderRun(options, build_config, self._manager)
results_lib.Results.Clear()
def tearDown(self):
# Mimic exiting with statement for self._manager.
self._manager.__exit__(None, None, None)
def _runStages(self):
"""Run a couple of stages so we can capture the results"""
# Run two pass stages, and one fail stage.
PassStage(self._run).Run()
Pass2Stage(self._run).Run()
self.assertRaises(
failures_lib.StepFailure,
FailStage(self._run).Run)
def _verifyRunResults(self, expectedResults, max_time=2.0):
actualResults = results_lib.Results.Get()
# Break out the asserts to be per item to make debugging easier
self.assertEqual(len(expectedResults), len(actualResults))
for i in xrange(len(expectedResults)):
entry = actualResults[i]
xname, xresult = expectedResults[i]
if entry.result not in results_lib.Results.NON_FAILURE_TYPES:
self.assertTrue(isinstance(entry.result, BaseException))
if isinstance(entry.result, failures_lib.StepFailure):
self.assertEqual(str(entry.result), entry.description)
self.assertTrue(entry.time >= 0 and entry.time < max_time)
self.assertEqual(xname, entry.name)
self.assertEqual(type(xresult), type(entry.result))
self.assertEqual(repr(xresult), repr(entry.result))
def _PassString(self):
record = results_lib.Result('Pass', results_lib.Results.SUCCESS, 'None',
'Pass', '', '0')
return results_lib.Results.SPLIT_TOKEN.join(record) + '\n'
def testRunStages(self):
"""Run some stages and verify the captured results"""
self.assertEqual(results_lib.Results.Get(), [])
self._runStages()
# Verify that the results are what we expect.
expectedResults = [
('Pass', results_lib.Results.SUCCESS),
('Pass2', results_lib.Results.SUCCESS),
('Fail', FailStage.FAIL_EXCEPTION),
]
self._verifyRunResults(expectedResults)
def testSuccessTest(self):
"""Run some stages and verify the captured results"""
results_lib.Results.Record('Pass', results_lib.Results.SUCCESS)
self.assertTrue(results_lib.Results.BuildSucceededSoFar())
results_lib.Results.Record('Fail', FailStage.FAIL_EXCEPTION, time=1)
self.assertFalse(results_lib.Results.BuildSucceededSoFar())
results_lib.Results.Record('Pass2', results_lib.Results.SUCCESS)
self.assertFalse(results_lib.Results.BuildSucceededSoFar())
def _TestParallelStages(self, stage_objs):
builder = simple_builders.SimpleBuilder(self._run)
error = None
# pylint: disable=protected-access
with mock.patch.multiple(parallel._BackgroundTask, PRINT_INTERVAL=0.01):
try:
builder._RunParallelStages(stage_objs)
except parallel.BackgroundFailure as ex:
error = ex
<|fim▁hole|> stage_objs = [stage(self._run) for stage in
(PassStage, SneakyFailStage, FailStage, SuicideStage,
Pass2Stage)]
error = self._TestParallelStages(stage_objs)
self.assertTrue(error)
expectedResults = [
('Pass', results_lib.Results.SUCCESS),
('Fail', FailStage.FAIL_EXCEPTION),
('Pass2', results_lib.Results.SUCCESS),
('SneakyFail', error),
('Suicide', error),
]
self._verifyRunResults(expectedResults)
def testParallelStageCommunicationOK(self):
"""Test run attr communication betweeen parallel stages."""
def assert_test(value):
self.assertEqual(value, SetAttrStage.VALUE,
'Expected value %r to be passed between stages, but'
' got %r.' % (SetAttrStage.VALUE, value))
stage_objs = [
SetAttrStage(self._run),
GetAttrStage(self._run, assert_test, timeout=30),
GetAttrStage(self._run, assert_test, timeout=30),
]
error = self._TestParallelStages(stage_objs)
self.assertFalse(error)
expectedResults = [
('SetAttr', results_lib.Results.SUCCESS),
('GetAttr', results_lib.Results.SUCCESS),
('GetAttr', results_lib.Results.SUCCESS),
]
self._verifyRunResults(expectedResults, max_time=30.0)
# Make sure run attribute propagated up to the top, too.
value = self._run.attrs.GetParallel('unittest_value')
self.assertEqual(SetAttrStage.VALUE, value)
def testParallelStageCommunicationTimeout(self):
"""Test run attr communication between parallel stages that times out."""
def assert_test(value):
self.assertEqual(value, SetAttrStage.VALUE,
'Expected value %r to be passed between stages, but'
' got %r.' % (SetAttrStage.VALUE, value))
stage_objs = [SetAttrStage(self._run, delay=11),
GetAttrStage(self._run, assert_test, timeout=1),
]
error = self._TestParallelStages(stage_objs)
self.assertTrue(error)
expectedResults = [
('SetAttr', results_lib.Results.SUCCESS),
('GetAttr', stage_objs[1].TimeoutException()),
]
self._verifyRunResults(expectedResults, max_time=12.0)
def testParallelStageCommunicationNotQueueable(self):
"""Test setting non-queueable run attr in parallel stage."""
stage_objs = [SetAttrStage(self._run, attr='release_tag'),
GetAttrStage(self._run, timeout=2),
]
error = self._TestParallelStages(stage_objs)
self.assertTrue(error)
expectedResults = [
('SetAttr', stage_objs[0].QueueableException()),
('GetAttr', stage_objs[1].TimeoutException()),
]
self._verifyRunResults(expectedResults, max_time=12.0)
def testStagesReportSuccess(self):
"""Tests Stage reporting."""
sync_stages.ManifestVersionedSyncStage.manifest_manager = None
# Store off a known set of results and generate a report
results_lib.Results.Record('Sync', results_lib.Results.SUCCESS, time=1)
results_lib.Results.Record('Build', results_lib.Results.SUCCESS, time=2)
results_lib.Results.Record('Test', FailStage.FAIL_EXCEPTION, time=3)
results_lib.Results.Record('SignerTests', results_lib.Results.SKIPPED)
result = cros_build_lib.CommandResult(cmd=['/bin/false', '/nosuchdir'],
returncode=2)
results_lib.Results.Record(
'Archive',
cros_build_lib.RunCommandError(
'Command "/bin/false /nosuchdir" failed.\n',
result), time=4)
results = StringIO.StringIO()
results_lib.Results.Report(results)
expectedResults = (
"************************************************************\n"
"** Stage Results\n"
"************************************************************\n"
"** PASS Sync (0:00:01)\n"
"************************************************************\n"
"** PASS Build (0:00:02)\n"
"************************************************************\n"
"** FAIL Test (0:00:03) with StepFailure\n"
"************************************************************\n"
"** FAIL Archive (0:00:04) in /bin/false\n"
"************************************************************\n"
)
expectedLines = expectedResults.split('\n')
actualLines = results.getvalue().split('\n')
# Break out the asserts to be per item to make debugging easier
for i in xrange(min(len(actualLines), len(expectedLines))):
self.assertEqual(expectedLines[i], actualLines[i])
self.assertEqual(len(expectedLines), len(actualLines))
def testStagesReportError(self):
"""Tests Stage reporting with exceptions."""
sync_stages.ManifestVersionedSyncStage.manifest_manager = None
# Store off a known set of results and generate a report
results_lib.Results.Record('Sync', results_lib.Results.SUCCESS, time=1)
results_lib.Results.Record('Build', results_lib.Results.SUCCESS, time=2)
results_lib.Results.Record('Test', FailStage.FAIL_EXCEPTION,
'failException Msg\nLine 2', time=3)
result = cros_build_lib.CommandResult(cmd=['/bin/false', '/nosuchdir'],
returncode=2)
results_lib.Results.Record(
'Archive',
cros_build_lib.RunCommandError(
'Command "/bin/false /nosuchdir" failed.\n',
result),
'FailRunCommand msg', time=4)
results = StringIO.StringIO()
results_lib.Results.Report(results)
expectedResults = (
"************************************************************\n"
"** Stage Results\n"
"************************************************************\n"
"** PASS Sync (0:00:01)\n"
"************************************************************\n"
"** PASS Build (0:00:02)\n"
"************************************************************\n"
"** FAIL Test (0:00:03) with StepFailure\n"
"************************************************************\n"
"** FAIL Archive (0:00:04) in /bin/false\n"
"************************************************************\n"
"\n"
"Failed in stage Test:\n"
"\n"
"failException Msg\n"
"Line 2\n"
"\n"
"Failed in stage Archive:\n"
"\n"
"FailRunCommand msg\n"
)
expectedLines = expectedResults.split('\n')
actualLines = results.getvalue().split('\n')
# Break out the asserts to be per item to make debugging easier
for i in xrange(min(len(actualLines), len(expectedLines))):
self.assertEqual(expectedLines[i], actualLines[i])
self.assertEqual(len(expectedLines), len(actualLines))
def testStagesReportReleaseTag(self):
"""Tests Release Tag entry in stages report."""
current_version = "release_tag_string"
archive_urls = {
'board1': 'http://foo.com/bucket/bot-id1/version/index.html',
'board2': 'http://foo.com/bucket/bot-id2/version/index.html',}
# Store off a known set of results and generate a report
results_lib.Results.Record('Pass', results_lib.Results.SUCCESS, time=1)
results = StringIO.StringIO()
results_lib.Results.Report(results, archive_urls, current_version)
expectedResults = (
"************************************************************\n"
"** RELEASE VERSION: release_tag_string\n"
"************************************************************\n"
"** Stage Results\n"
"************************************************************\n"
"** PASS Pass (0:00:01)\n"
"************************************************************\n"
"** BUILD ARTIFACTS FOR THIS BUILD CAN BE FOUND AT:\n"
"** board1: %s\n"
"@@@STEP_LINK@Artifacts[board1]: bot-id1/version@%s@@@\n"
"** board2: %s\n"
"@@@STEP_LINK@Artifacts[board2]: bot-id2/version@%s@@@\n"
"************************************************************\n"
% (archive_urls['board1'], archive_urls['board1'],
archive_urls['board2'], archive_urls['board2']))
expectedLines = expectedResults.split('\n')
actualLines = results.getvalue().split('\n')
# Break out the asserts to be per item to make debugging easier
for i in xrange(len(expectedLines)):
self.assertEqual(expectedLines[i], actualLines[i])
self.assertEqual(len(expectedLines), len(actualLines))
def testSaveCompletedStages(self):
"""Tests that we can save out completed stages."""
# Run this again to make sure we have the expected results stored
results_lib.Results.Record('Pass', results_lib.Results.SUCCESS)
results_lib.Results.Record('Fail', FailStage.FAIL_EXCEPTION)
results_lib.Results.Record('Pass2', results_lib.Results.SUCCESS)
saveFile = StringIO.StringIO()
results_lib.Results.SaveCompletedStages(saveFile)
self.assertEqual(saveFile.getvalue(), self._PassString())
def testRestoreCompletedStages(self):
"""Tests that we can read in completed stages."""
results_lib.Results.RestoreCompletedStages(
StringIO.StringIO(self._PassString()))
previous = results_lib.Results.GetPrevious()
self.assertEqual(previous.keys(), ['Pass'])
def testRunAfterRestore(self):
"""Tests that we skip previously completed stages."""
# Fake results_lib.Results.RestoreCompletedStages
results_lib.Results.RestoreCompletedStages(
StringIO.StringIO(self._PassString()))
self._runStages()
# Verify that the results are what we expect.
expectedResults = [
('Pass', results_lib.Results.SUCCESS),
('Pass2', results_lib.Results.SUCCESS),
('Fail', FailStage.FAIL_EXCEPTION),
]
self._verifyRunResults(expectedResults)
def testFailedButForgiven(self):
"""Tests that warnings are flagged as such."""
results_lib.Results.Record('Warn', results_lib.Results.FORGIVEN, time=1)
results = StringIO.StringIO()
results_lib.Results.Report(results)
self.assertTrue('@@@STEP_WARNINGS@@@' in results.getvalue())<|fim▁end|>
|
return error
def testParallelStages(self):
|
<|file_name|>ServletContainerTestCase.java<|end_file_name|><|fim▁begin|>package org.jboss.hal.testsuite.test.configuration.undertow;
import org.apache.commons.lang.RandomStringUtils;
import org.jboss.arquillian.graphene.page.Page;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.hal.testsuite.category.Shared;
import org.jboss.hal.testsuite.page.config.UndertowServletPage;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.wildfly.extras.creaper.core.online.operations.Address;
import org.wildfly.extras.creaper.core.online.operations.OperationException;
import java.io.IOException;
import java.util.concurrent.TimeoutException;
@RunWith(Arquillian.class)
@Category(Shared.class)
public class ServletContainerTestCase extends UndertowTestCaseAbstract {
@Page
private UndertowServletPage page;
//identifiers
private static final String ALLOW_NON_STANDARD_WRAPPERS = "allow-non-standard-wrappers";
private static final String DEFAULT_BUFFER_CACHE = "default-buffer-cache";
private static final String DEFAULT_ENCODING = "default-encoding";
private static final String DEFAULT_SESSION_TIMEOUT = "default-session-timeout";
private static final String DIRECTORY_LISTING = "directory-listing";
private static final String DISABLE_CACHING_FOR_SECURED_PAGES = "disable-caching-for-secured-pages";
private static final String EAGER_FILTER_INITIALIZATION = "eager-filter-initialization";
private static final String IGNORE_FLUSH = "ignore-flush";
private static final String STACK_TRACE_ON_ERROR = "stack-trace-on-error";
private static final String USE_LISTENER_ENCODING = "use-listener-encoding";
//values
private static final String STACK_TRACE_ON_ERROR_VALUE = "all";
<|fim▁hole|> private static final Address SERVLET_CONTAINER_ADDRESS = UNDERTOW_ADDRESS.and("servlet-container", SERVLET_CONTAINER);
@BeforeClass
public static void setUp() throws InterruptedException, IOException, TimeoutException {
operations.add(SERVLET_CONTAINER_ADDRESS);
}
@Before
public void before() {
page.navigate();
page.selectServletContainer(SERVLET_CONTAINER);
}
@AfterClass
public static void tearDown() throws InterruptedException, IOException, TimeoutException, OperationException {
operations.remove(SERVLET_CONTAINER_ADDRESS);
}
@Test
public void setAllowNonStandardWrappersToTrue() throws Exception {
editCheckboxAndVerify(SERVLET_CONTAINER_ADDRESS, ALLOW_NON_STANDARD_WRAPPERS, true);
}
@Test
public void setAllowNonStandardWrappersToFalse() throws Exception {
editCheckboxAndVerify(SERVLET_CONTAINER_ADDRESS, ALLOW_NON_STANDARD_WRAPPERS, false);
}
@Test
public void editDefaultBufferCache() throws Exception {
editTextAndVerify(SERVLET_CONTAINER_ADDRESS, DEFAULT_BUFFER_CACHE, undertowOps.createBufferCache());
}
@Test
public void editDefaultEncoding() throws Exception {
editTextAndVerify(SERVLET_CONTAINER_ADDRESS, DEFAULT_ENCODING);
}
@Test
public void editDefaultSessionTimeout() throws Exception {
editTextAndVerify(SERVLET_CONTAINER_ADDRESS, DEFAULT_SESSION_TIMEOUT, 42);
}
@Test
public void editDefaultSessionTimeoutInvalid() throws Exception {
verifyIfErrorAppears(DEFAULT_SESSION_TIMEOUT, "54sdfg");
}
@Test
public void setDirectoryListingToTrue() throws Exception {
editCheckboxAndVerify(SERVLET_CONTAINER_ADDRESS, DIRECTORY_LISTING, true);
}
@Test
public void setDirectoryListingToFalse() throws Exception {
editCheckboxAndVerify(SERVLET_CONTAINER_ADDRESS, DIRECTORY_LISTING, false);
}
@Test
public void setDisableCachingForSecuredPagesToTrue() throws Exception {
editCheckboxAndVerify(SERVLET_CONTAINER_ADDRESS, DISABLE_CACHING_FOR_SECURED_PAGES, true);
}
@Test
public void setDisableCachingForSecuredPagesToFalse() throws Exception {
editCheckboxAndVerify(SERVLET_CONTAINER_ADDRESS, DISABLE_CACHING_FOR_SECURED_PAGES, false);
}
@Test
public void setIgnoreFlushToTrue() throws Exception {
editCheckboxAndVerify(SERVLET_CONTAINER_ADDRESS, IGNORE_FLUSH, true);
}
@Test
public void setIgnoreFlushToFalse() throws Exception {
editCheckboxAndVerify(SERVLET_CONTAINER_ADDRESS, IGNORE_FLUSH, false);
}
@Test
public void setEagerFilterInitializationToTrue() throws Exception {
editCheckboxAndVerify(SERVLET_CONTAINER_ADDRESS, EAGER_FILTER_INITIALIZATION, true);
}
@Test
public void setEagerFilterInitializationToFalse() throws Exception {
editCheckboxAndVerify(SERVLET_CONTAINER_ADDRESS, EAGER_FILTER_INITIALIZATION, false);
}
@Test
public void selectStackTraceOnError() throws Exception {
selectOptionAndVerify(SERVLET_CONTAINER_ADDRESS, STACK_TRACE_ON_ERROR, STACK_TRACE_ON_ERROR_VALUE);
}
@Test
public void setUseListenerEncodingToTrue() throws Exception {
editCheckboxAndVerify(SERVLET_CONTAINER_ADDRESS, USE_LISTENER_ENCODING, true);
}
@Test
public void setUseListenerEncodingToFalse() throws Exception {
editCheckboxAndVerify(SERVLET_CONTAINER_ADDRESS, USE_LISTENER_ENCODING, false);
}
}<|fim▁end|>
|
private static final String SERVLET_CONTAINER = "servlet-container_" + RandomStringUtils.randomAlphanumeric(5);
|
<|file_name|>not-found.component.ts<|end_file_name|><|fim▁begin|>import { Component } from '@angular/core';
@Component({<|fim▁hole|> selector: 'app-not-found',
templateUrl: 'not-found.component.html',
})
export class NotFoundComponent { }<|fim▁end|>
| |
<|file_name|>serve.js<|end_file_name|><|fim▁begin|>var path = require('path');
var url = require('url');
var closure = require('closure-util');
var nomnom = require('nomnom');
var log = closure.log;
var options = nomnom.options({
port: {
abbr: 'p',
'default': 4000,
help: 'Port for incoming connections',
metavar: 'PORT'
},
loglevel: {
abbr: 'l',
choices: ['silly', 'verbose', 'info', 'warn', 'error'],
'default': 'info',
help: 'Log level',
metavar: 'LEVEL'<|fim▁hole|>
/** @type {string} */
log.level = options.loglevel;
log.info('ol3-cesium', 'Parsing dependencies ...');
var manager = new closure.Manager({
closure: true, // use the bundled Closure Library
lib: [
'src/**/*.js'
],
ignoreRequires: '^ol\\.'
});
manager.on('error', function(e) {
log.error('ol3-cesium', e.message);
});
manager.on('ready', function() {
var server = new closure.Server({
manager: manager,
loader: '/@loader'
});
server.listen(options.port, function() {
log.info('ol3-cesium', 'Listening on http://localhost:' +
options.port + '/ (Ctrl+C to stop)');
});
server.on('error', function(err) {
log.error('ol3-cesium', 'Server failed to start: ' + err.message);
process.exit(1);
});
});<|fim▁end|>
|
}
}).parse();
|
<|file_name|>builtin_test.go<|end_file_name|><|fim▁begin|>// Copyright 2015 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package expression
import (
"reflect"
"sync"
. "github.com/pingcap/check"
"github.com/pingcap/tidb/parser/ast"
"github.com/pingcap/tidb/parser/charset"
"github.com/pingcap/tidb/parser/model"
"github.com/pingcap/tidb/parser/mysql"
"github.com/pingcap/tidb/sessionctx"
"github.com/pingcap/tidb/types"
"github.com/pingcap/tidb/util/chunk"
)
func evalBuiltinFuncConcurrent(f builtinFunc, row chunk.Row) (d types.Datum, err error) {
wg := sync.WaitGroup{}
concurrency := 10
wg.Add(concurrency)
var lock sync.Mutex
err = nil
for i := 0; i < concurrency; i++ {
go func() {
defer wg.Done()
di, erri := evalBuiltinFunc(f, chunk.Row{})
lock.Lock()
if err == nil {
d, err = di, erri
}
lock.Unlock()
}()
}
wg.Wait()
return
}
func evalBuiltinFunc(f builtinFunc, row chunk.Row) (d types.Datum, err error) {
var (
res interface{}
isNull bool
)
switch f.getRetTp().EvalType() {
case types.ETInt:
var intRes int64
intRes, isNull, err = f.evalInt(row)<|fim▁hole|> if mysql.HasUnsignedFlag(f.getRetTp().Flag) {
res = uint64(intRes)
} else {
res = intRes
}
case types.ETReal:
res, isNull, err = f.evalReal(row)
case types.ETDecimal:
res, isNull, err = f.evalDecimal(row)
case types.ETDatetime, types.ETTimestamp:
res, isNull, err = f.evalTime(row)
case types.ETDuration:
res, isNull, err = f.evalDuration(row)
case types.ETJson:
res, isNull, err = f.evalJSON(row)
case types.ETString:
res, isNull, err = f.evalString(row)
}
if isNull || err != nil {
d.SetNull()
return d, err
}
d.SetValue(res, f.getRetTp())
return
}
// tblToDtbl is a util function for test.
func tblToDtbl(i interface{}) []map[string][]types.Datum {
l := reflect.ValueOf(i).Len()
tbl := make([]map[string][]types.Datum, l)
for j := 0; j < l; j++ {
v := reflect.ValueOf(i).Index(j).Interface()
val := reflect.ValueOf(v)
t := reflect.TypeOf(v)
item := make(map[string][]types.Datum, val.NumField())
for k := 0; k < val.NumField(); k++ {
tmp := val.Field(k).Interface()
item[t.Field(k).Name] = makeDatums(tmp)
}
tbl[j] = item
}
return tbl
}
func makeDatums(i interface{}) []types.Datum {
if i != nil {
t := reflect.TypeOf(i)
val := reflect.ValueOf(i)
switch t.Kind() {
case reflect.Slice:
l := val.Len()
res := make([]types.Datum, l)
for j := 0; j < l; j++ {
res[j] = types.NewDatum(val.Index(j).Interface())
}
return res
}
}
return types.MakeDatums(i)
}
func (s *testEvaluatorSuite) TestIsNullFunc(c *C) {
fc := funcs[ast.IsNull]
f, err := fc.getFunction(s.ctx, s.datumsToConstants(types.MakeDatums(1)))
c.Assert(err, IsNil)
v, err := evalBuiltinFunc(f, chunk.Row{})
c.Assert(err, IsNil)
c.Assert(v.GetInt64(), Equals, int64(0))
f, err = fc.getFunction(s.ctx, s.datumsToConstants(types.MakeDatums(nil)))
c.Assert(err, IsNil)
v, err = evalBuiltinFunc(f, chunk.Row{})
c.Assert(err, IsNil)
c.Assert(v.GetInt64(), Equals, int64(1))
}
func (s *testEvaluatorSuite) TestLock(c *C) {
lock := funcs[ast.GetLock]
f, err := lock.getFunction(s.ctx, s.datumsToConstants(types.MakeDatums(nil, 1)))
c.Assert(err, IsNil)
v, err := evalBuiltinFunc(f, chunk.Row{})
c.Assert(err, IsNil)
c.Assert(v.GetInt64(), Equals, int64(1))
releaseLock := funcs[ast.ReleaseLock]
f, err = releaseLock.getFunction(s.ctx, s.datumsToConstants(types.MakeDatums(1)))
c.Assert(err, IsNil)
v, err = evalBuiltinFunc(f, chunk.Row{})
c.Assert(err, IsNil)
c.Assert(v.GetInt64(), Equals, int64(1))
}
func (s *testEvaluatorSuite) TestDisplayName(c *C) {
c.Assert(GetDisplayName(ast.EQ), Equals, "=")
c.Assert(GetDisplayName(ast.NullEQ), Equals, "<=>")
c.Assert(GetDisplayName(ast.IsTruthWithoutNull), Equals, "IS TRUE")
c.Assert(GetDisplayName("abs"), Equals, "abs")
c.Assert(GetDisplayName("other_unknown_func"), Equals, "other_unknown_func")
}
// newFunctionForTest creates a new ScalarFunction using funcName and arguments,
// it is different from expression.NewFunction which needs an additional retType argument.
func newFunctionForTest(ctx sessionctx.Context, funcName string, args ...Expression) (Expression, error) {
fc, ok := funcs[funcName]
if !ok {
return nil, errFunctionNotExists.GenWithStackByArgs("FUNCTION", funcName)
}
funcArgs := make([]Expression, len(args))
copy(funcArgs, args)
f, err := fc.getFunction(ctx, funcArgs)
if err != nil {
return nil, err
}
return &ScalarFunction{
FuncName: model.NewCIStr(funcName),
RetType: f.getRetTp(),
Function: f,
}, nil
}
var (
// MySQL int8.
int8Con = &Constant{RetType: &types.FieldType{Tp: mysql.TypeLonglong, Charset: charset.CharsetBin, Collate: charset.CollationBin}}
// MySQL varchar.
varcharCon = &Constant{RetType: &types.FieldType{Tp: mysql.TypeVarchar, Charset: charset.CharsetUTF8, Collate: charset.CollationUTF8}}
)<|fim▁end|>
| |
<|file_name|>ComputeVersion.go<|end_file_name|><|fim▁begin|>package models
<|fim▁hole|>type ComputeVersion struct {
Versions []ComputeVersionValue `json:"versions"`
}
type ComputeVersionValue struct {
Id string `json:"id"`
Status string `json:"status"`
Updated time.Time `json:"updated"`
}<|fim▁end|>
|
import (
"time"
)
|
<|file_name|>Plugin.ts<|end_file_name|><|fim▁begin|>import PluginManager from 'tinymce/core/api/PluginManager';
import * as Commands from './api/Commands';
import * as Options from './api/Options';
import * as FilterContent from './core/FilterContent';
import * as ResolveName from './core/ResolveName';
import * as Buttons from './ui/Buttons';
export default (): void => {
PluginManager.add('pagebreak', (editor) => {
Options.register(editor);
Commands.register(editor);
Buttons.register(editor);
FilterContent.setup(editor);<|fim▁hole|> });
};<|fim▁end|>
|
ResolveName.setup(editor);
|
<|file_name|>testold.py<|end_file_name|><|fim▁begin|>import matplotlib.pyplot as plt
import numpy as np
import sys
import time
import scipy.interpolate as ip
infile = sys.argv[1]
indata = np.load(infile)
spec = indata[0]
samp_rate = indata[1]
fftsize = indata[2]
center_freq = 1419.4 # MHz
halffft = int(0.5*fftsize)
freqs = 0.5*samp_rate*np.array(range(-halffft,halffft))/(halffft)
#plt.plot(spec)
delta_nu = samp_rate/fftsize
plt.plot(freqs,spec)
plt.xlabel('relative to center [Mhz]')
RFI = [[1419.4-0.210, 0.02],
#[1419.4-1.937, 0.015],
#[1419.4-4.4, 0.015],
#[1419.4+3.0, 0.01],
#[center_freq, 8*delta_nu] # remove dip in the center of band, always about 4 fft points wide. Use 8, else errors
]
#plt.figure()
#plt.plot(spec)
# DEFINE FLAGS in HZ
for item in RFI:
print item
RFI_freq = item[0]
RFI_width = item[1]
ch0_freq = center_freq - 0.5*samp_rate
ind_low = int(np.floor((RFI_freq-0.5*RFI_width - ch0_freq)/delta_nu))
ind_high = int(np.ceil((RFI_freq+0.5*RFI_width - ch0_freq)/delta_nu))
margin = min((ind_high-ind_low), ind_low, len(spec)-ind_high)
RFI_org = np.array([spec[ind_low-margin:ind_low], spec[ind_high:ind_high+margin]])
RFI_part = RFI_org.flatten()
xdata = range(ind_low-margin, ind_low) + range(ind_high, ind_high+margin)
print np.size(xdata), np.size(RFI_part)
spl = ip.UnivariateSpline(xdata,RFI_part, k=1, s=0)
interpdata = spl(range(ind_low, ind_high))
print interpdata
spec[ind_low:ind_high] = interpdata[:]
plt.figure()
plt.plot(RFI_part)
plt.plot(interpdata)
#plt.figure()
#plt.plot(freqs, spec)
#for flag in flags:
#
# Calculate flag indices
# For each flag, interpolate flagged values (splines)
# when all flaggs are applied and interpolated, proceed with convolve!
#plt.figure()
#convspec = np.convolve(spec, [1,1,1,1], mode='same')
#w = sig.boxcar(4)
#convspec=np.convolve(w/w.sum(),spec,mode='valid')
##convspec = sig.decimate(spec, 2)
#fftsize = fftsize/2
#halffft = int(0.5*fftsize)
#convfreqs = 0.5*samp_rate*np.array(range(-halffft,halffft))/(halffft)
#print np.shape(convspec)
#print np.shape(convfreqs)<|fim▁hole|><|fim▁end|>
|
#plt.plot(convfreqs,convspec)
#plt.xlabel('relative to center [Mhz]')
plt.show()
|
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# This code is part of Qiskit.
#<|fim▁hole|>#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
# pylint: disable=invalid-name
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
"""
Sphinx documentation builder
"""
# -- Project information -----------------------------------------------------
project = 'Qiskit'
copyright = '2019, Qiskit Development Team' # pylint: disable=redefined-builtin
author = 'Qiskit Development Team'
# The short X.Y version
version = ''
# The full version, including alpha/beta/rc tags
release = '0.12.0'
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.mathjax',
'sphinx.ext.napoleon',
'sphinx.ext.viewcode',
'sphinx.ext.extlinks',
'sphinx_tabs.tabs',
'sphinx_automodapi.automodapi',
'IPython.sphinxext.ipython_console_highlighting',
'IPython.sphinxext.ipython_directive',
'reno.sphinxext',
]
# If true, figures, tables and code-blocks are automatically numbered if they
# have a caption.
numfig = True
# A dictionary mapping 'figure', 'table', 'code-block' and 'section' to
# strings that are used for format of figure numbers. As a special character,
# %s will be replaced to figure number.
numfig_format = {
'table': 'Table %s'
}
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = None
# A boolean that decides whether module names are prepended to all object names
# (for object types where a “module” of some kind is defined), e.g. for
# py:function directives.
add_module_names = False
# A list of prefixes that are ignored for sorting the Python module index
# (e.g., if this is set to ['foo.'], then foo.bar is shown under B, not F).
# This can be handy if you document a project that consists of a single
# package. Works only for the HTML builder currently.
modindex_common_prefix = ['qiskit.']
# -- Configuration for extlinks extension ------------------------------------
# Refer to https://www.sphinx-doc.org/en/master/usage/extensions/extlinks.html
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme' # use the theme in subdir 'theme'
html_sidebars = {'**': ['globaltoc.html']}
html_last_updated_fmt = '%Y/%m/%d'<|fim▁end|>
|
# (C) Copyright IBM 2018.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.