<|file_name|>xsky_path23.py<|end_file_name|><|fim▁begin|>import zstackwoodpecker.test_state as ts_header
import os
TestAction = ts_header.TestAction
def path():
return dict(initial_formation="template5", checking_point=8, path_list=[<|fim▁hole|> [TestAction.create_volume, 'volume1', 'flag=scsi'],
[TestAction.attach_volume, 'vm1', 'volume1'],
[TestAction.create_volume, 'volume2', 'flag=scsi'],
[TestAction.attach_volume, 'vm1', 'volume2'],
[TestAction.create_volume, 'volume3', 'flag=scsi'],
[TestAction.attach_volume, 'vm1', 'volume3'],
[TestAction.create_vm_snapshot, 'vm1', 'vm1-snapshot1'],
[TestAction.resize_volume, 'vm1', 5*1024*1024],
[TestAction.detach_volume, 'volume1'],
[TestAction.create_vm_snapshot, 'vm1', 'vm1-snapshot5'],
[TestAction.delete_vm_snapshot, 'vm1-snapshot1'],
[TestAction.create_vm_snapshot, 'vm1', 'vm1-snapshot8'],
[TestAction.clone_vm, 'vm1', 'vm2'],
[TestAction.create_volume_backup, 'volume2', 'volume2-backup1'],
[TestAction.migrate_vm, 'vm1'],
[TestAction.delete_vm_snapshot, 'vm1-snapshot5'],
])
'''
The final status:
Running:['vm1', 'vm2']
Stopped:[]
Enabled:['vm1-snapshot5', 'volume2-snapshot5', 'volume3-snapshot5', 'volume2-backup1']
Attached:['volume2', 'volume3']
Detached:['volume1']
Deleted:['vm1-snapshot1', 'volume1-snapshot1', 'volume2-snapshot1', 'volume3-snapshot1', 'vm1-snapshot8', 'volume2-snapshot8', 'volume3-snapshot8']
Expunged:[]
Ha:[]
Group:
'''<|fim▁end|> | [TestAction.create_vm, 'vm1', ], |
<|file_name|>document_renderer.py<|end_file_name|><|fim▁begin|># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
from document_parser import ParseDocument
from third_party.json_schema_compiler.model import UnixName<|fim▁hole|>
class DocumentRenderer(object):
'''Performs document-level rendering such as the title, references,
and table of contents: pulling that data out of the document, then
replacing the $(title), $(ref:...) and $(table_of_contents) tokens with them.
This can be thought of as a parallel to TemplateRenderer; while
TemplateRenderer is responsible for interpreting templates and rendering files
within the template engine, DocumentRenderer is responsible for interpreting
higher-level document concepts like the title and TOC, then performing string
replacement for them. The syntax for this replacement is $(...) where ... is
the concept. Currently title and table_of_contents are supported.
'''
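  # Illustrative sketch (not from the original file): a document such as
  #   "<h1>$(title)</h1>\n$(table_of_contents)\n..."
  # would have both tokens substituted by Render() below.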
def __init__(self, table_of_contents_renderer, ref_resolver):
self._table_of_contents_renderer = table_of_contents_renderer
self._ref_resolver = ref_resolver
def _RenderLinks(self, document, path):
''' Replaces all $(ref:...) references in |document| with html links.
References have two forms:
$(ref:api.node) - Replaces the reference with a link to node on the
API page. The title is set to the name of the node.
$(ref:api.node Title) - Same as the previous form, but title is set
to "Title".
'''
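    # Illustrative examples (assuming the page being rendered is
    # app_window.html, so the guessed api name is app.window):
    #   $(ref:app.window.create)        -> link titled "app.window.create"
    #   $(ref:app.window.create Create) -> link titled "Create"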
START_REF = '$(ref:'
END_REF = ')'
MAX_REF_LENGTH = 256
new_document = []
# Keeps track of position within |document|
cursor_index = 0
start_ref_index = document.find(START_REF)
while start_ref_index != -1:
end_ref_index = document.find(END_REF, start_ref_index)
if (end_ref_index == -1 or
end_ref_index - start_ref_index > MAX_REF_LENGTH):
end_ref_index = document.find(' ', start_ref_index)
logging.error('%s:%s has no terminating ) at line %s' % (
path,
document[start_ref_index:end_ref_index],
document.count('\n', 0, end_ref_index)))
new_document.append(document[cursor_index:end_ref_index + 1])
else:
ref = document[start_ref_index:end_ref_index]
ref_parts = ref[len(START_REF):].split(None, 1)
# Guess the api name from the html name, replacing '_' with '.' (e.g.
# if the page is app_window.html, guess the api name is app.window)
api_name = os.path.splitext(os.path.basename(path))[0].replace('_', '.')
title = ref_parts[0] if len(ref_parts) == 1 else ref_parts[1]
ref_dict = self._ref_resolver.SafeGetLink(ref_parts[0],
namespace=api_name,
title=title)
new_document.append(document[cursor_index:start_ref_index])
new_document.append('<a href=%s>%s</a>' % (ref_dict['href'],
ref_dict['text']))
cursor_index = end_ref_index + 1
start_ref_index = document.find(START_REF, cursor_index)
new_document.append(document[cursor_index:])
return ''.join(new_document)
def Render(self, document, path, render_title=False):
# Render links first so that parsing and later replacements aren't
# affected by $(ref...) substitutions
document = self._RenderLinks(document, path)
parsed_document = ParseDocument(document, expect_title=render_title)
toc_text, toc_warnings = self._table_of_contents_renderer.Render(
parsed_document.sections)
    # Only 1 title and 1 table of contents substitution allowed; in the
    # common case this saves unnecessarily running over the entire file.
if parsed_document.title:
document = document.replace('$(title)', parsed_document.title, 1)
return (document.replace('$(table_of_contents)', toc_text, 1),
parsed_document.warnings + toc_warnings)<|fim▁end|> | |
<|file_name|>fig08_10.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# fig08_10.py
#
# Author: Billy Wilson Arante
# Created: 2016/10/10
from rational import Rational
def main():
"""Main"""
# Objects of class Rational
rational1 = Rational() # 1/1
rational2 = Rational(10, 30) # 10/30 reduces to 1/3
rational3 = Rational(-7, 14) # -7/14 reduces to -1/2
# Printing objects of class Rational
print "Rational 1:", rational1
print "Rational 2:", rational2
print "Rational 3:", rational3
print<|fim▁hole|>
# Testing mathematical operators
print rational1, "/", rational2, "=", rational1 / rational2
print rational3, "-", rational2, "=", rational3 - rational2
print rational2, "*", rational3, "-", rational1, "=", \
rational2 * rational3 - rational1
# Overloading + implicitly overloads +=
rational1 += rational2 * rational3
print "\nrational1 after adding rational2 * rational3:", rational1
print
# Test comparison operators
print rational1, "<=", rational2, ":", rational1 <= rational2
print rational1, ">", rational3, ":", rational1 > rational3
print
# Test built-in function abs
print "The absolute value of", rational3, "is:", abs(rational3)
print
# Test coercion
print rational2, "as an integer is:", int(rational2)
print rational2, "as a float is:", float(rational2)
print rational2, "+ 1 =", rational2 + 1
if __name__ == "__main__":
main()<|fim▁end|> | |
<|file_name|>OscReceiver.cpp<|end_file_name|><|fim▁begin|>/*
TUIO C++ Library
Copyright (c) 2005-2014 Martin Kaltenbrunner <[email protected]>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 3.0 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library.
*/
#include "OscReceiver.h"
using namespace TUIO;
using namespace osc;
void OscReceiver::ProcessMessage( const ReceivedMessage& msg, const IpEndpointName& remoteEndpoint) {
	for (std::list<TuioClient*>::iterator client = clientList.begin(); client != clientList.end(); client++)
(*client)->processOSC(msg);
}
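// Recursively unpacks nested OSC bundles; leaf elements are forwarded to
// ProcessMessage() (descriptive note).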
void OscReceiver::ProcessBundle( const ReceivedBundle& b, const IpEndpointName& remoteEndpoint) {
try {
for( ReceivedBundle::const_iterator i = b.ElementsBegin(); i != b.ElementsEnd(); ++i ){
if( i->IsBundle() )
ProcessBundle( ReceivedBundle(*i), remoteEndpoint);<|fim▁hole|> }
} catch (MalformedBundleException& e) {
std::cerr << "malformed OSC bundle: " << e.what() << std::endl;
}
}
void OscReceiver::ProcessPacket( const char *data, int size, const IpEndpointName& remoteEndpoint ) {
try {
ReceivedPacket p( data, size );
if(p.IsBundle()) ProcessBundle( ReceivedBundle(p), remoteEndpoint);
else ProcessMessage( ReceivedMessage(p), remoteEndpoint);
} catch (MalformedBundleException& e) {
std::cerr << "malformed OSC bundle: " << e.what() << std::endl;
}
}
bool OscReceiver::isConnected() {
return connected;
}
void OscReceiver::addTuioClient(TuioClient *client) {
clientList.push_back(client);
}<|fim▁end|> | else
ProcessMessage( ReceivedMessage(*i), remoteEndpoint); |
<|file_name|>reference.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, division, print_function, unicode_literals
from amaascore.core.amaas_model import AMaaSModel
class Reference(AMaaSModel):
def __init__(self, reference_value, reference_primary=False, *args, **kwargs):
self.reference_value = reference_value
self.reference_primary = reference_primary
super(Reference, self).__init__(*args, **kwargs)<|fim▁hole|>
@property
def reference_primary(self):
if hasattr(self, '_reference_primary'):
return self._reference_primary
@reference_primary.setter
def reference_primary(self, value):
"""
Always convert to bool if the service/database returns 0 or 1
"""
if value is not None:
self._reference_primary = True if value else False<|fim▁end|> | |
<|file_name|>expression.cpp<|end_file_name|><|fim▁begin|>#include "expression.h"
#include "opt.h"<|fim▁hole|>
std::vector<vv::vm::command> vv::ast::expression::code() const
{
auto vec = generate();
optimize(vec);
return vec;
}<|fim▁end|> | #include "vm/instruction.h" |
<|file_name|>array_init.js<|end_file_name|><|fim▁begin|>//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
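// Prints the contents of |a|, collapsing each run of undefined entries into
// a single "start-end = undefined" line (descriptive note).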
<|fim▁hole|>function DumpArray(a)
{
var undef_start = -1;
for (var i = 0; i < a.length; i++)
{
if (a[i] == undefined)
{
if (undef_start == -1)
{
undef_start = i;
}
}
else
{
if (undef_start != -1)
{
WScript.Echo(undef_start + "-" + (i-1) + " = undefined");
undef_start = -1;
}
WScript.Echo(i + " = " + a[i]);
}
}
}
DumpArray([]);
DumpArray([ 0 ]);
DumpArray([ 0, 1, 2, 3, 4, 5, 6 ,7 ,8, 9]);
DumpArray([,,,0,,,1,,,2,,,3,,,4,,,5,,,6,,,7,,,8,,,9,,,]);
var s0 = "";
for (var i = 0; i < 100; i++)
{
s0 += ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,";
}
DumpArray(eval("[" + s0 + "1]"));
var s1 = "";
for (var i = 0; i < 30; i++)
{
s1 += s0;
}
DumpArray(eval("[" + s1 + "1]"));
var s2 = "";
for (var i = 0; i < 10; i++)
{
s2 += s1;
}
DumpArray(eval("[" + s2 + "1]"));<|fim▁end|> | |
<|file_name|>macro-pat.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
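// Exercises macro invocations in pattern position, both directly and via
// nested/indirect expansion (descriptive note).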
macro_rules! mypat {
() => (
Some('y')
)
}
macro_rules! char_x {
() => (
'x'
)
}
macro_rules! some {
($x:pat) => (
Some($x)
)
}
macro_rules! indirect {
() => (
some!(char_x!())
)
}
macro_rules! ident_pat {
($x:ident) => (
$x
)
}
fn f(c: Option<char>) -> uint {
match c {
Some('x') => 1,
mypat!() => 2,
_ => 3,
}<|fim▁hole|>
pub fn main() {
assert_eq!(1u, f(Some('x')));
assert_eq!(2u, f(Some('y')));
assert_eq!(3u, f(None));
assert_eq!(1, match Some('x') {
Some(char_x!()) => 1,
_ => 2,
});
assert_eq!(1, match Some('x') {
some!(char_x!()) => 1,
_ => 2,
});
assert_eq!(1, match Some('x') {
indirect!() => 1,
_ => 2,
});
assert_eq!(3, {
let ident_pat!(x) = 2;
x+1
});
}<|fim▁end|> | } |
<|file_name|>mvn.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (C) 2013 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import argparse
from os import path, environ
from subprocess import check_output, CalledProcessError
from sys import stderr
parser = argparse.ArgumentParser()
parser.add_argument('--repository', help='maven repository id')
parser.add_argument('--url', help='maven repository url')
parser.add_argument('-o')
parser.add_argument('-a', help='action (valid actions are: install,deploy)')
parser.add_argument('-v', help='gerrit version')
parser.add_argument('-s', action='append', help='triplet of artifactId:type:path')
args = parser.parse_args()
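# Illustrative invocation (hypothetical values; -s takes artifactId:type:path):
#   mvn.py -a install -v 2.15 -s gerrit-war:war:bazel-bin/gerrit.war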
if not args.v:
print('version is empty', file=stderr)
exit(1)
root = path.abspath(__file__)
while not path.exists(path.join(root, 'WORKSPACE')):
root = path.dirname(root)
if 'install' == args.a:
cmd = [
'mvn',
'install:install-file',
'-Dversion=%s' % args.v,
]
elif 'deploy' == args.a:
cmd = [
'mvn',
'gpg:sign-and-deploy-file',
'-Dversion=%s' % args.v,
'-DrepositoryId=%s' % args.repository,
'-Durl=%s' % args.url,
]
else:
print("unknown action -a %s" % args.a, file=stderr)
exit(1)
for spec in args.s:
artifact, packaging_type, src = spec.split(':')
exe = cmd + [
'-DpomFile=%s' % path.join(root, 'tools', 'maven',
'%s_pom.xml' % artifact),
'-Dpackaging=%s' % packaging_type,
'-Dfile=%s' % src,
]
try:
if environ.get('VERBOSE'):<|fim▁hole|> except Exception as e:
print('%s command failed: %s\n%s' % (args.a, ' '.join(exe), e),
file=stderr)
if environ.get('VERBOSE') and isinstance(e, CalledProcessError):
print('Command output\n%s' % e.output, file=stderr)
exit(1)
out = stderr
if args.o:
out = open(args.o, 'w')
with out as fd:
if args.repository:
print('Repository: %s' % args.repository, file=fd)
if args.url:
print('URL: %s' % args.url, file=fd)
print('Version: %s' % args.v, file=fd)<|fim▁end|> | print(' '.join(exe), file=stderr)
check_output(exe) |
<|file_name|>owdistancemap.py<|end_file_name|><|fim▁begin|>import sys
import itertools
from functools import reduce
from operator import iadd
import numpy<|fim▁hole|> QFormLayout, QGraphicsRectItem, QGraphicsGridLayout,
QFontMetrics, QPen, QIcon, QPixmap, QLinearGradient, QPainter, QColor,
QBrush, QTransform, QGraphicsWidget, QApplication
)
from PyQt4.QtCore import Qt, QRect, QRectF, QSize, QPointF
from PyQt4.QtCore import pyqtSignal as Signal
import pyqtgraph as pg
import Orange.data
import Orange.misc
from Orange.clustering import hierarchical
from Orange.widgets import widget, gui, settings
from Orange.widgets.utils import itemmodels, colorbrewer
from .owhierarchicalclustering import DendrogramWidget, GraphicsSimpleTextList
from Orange.widgets.io import FileFormat
def _remove_item(item):
item.setParentItem(None)
scene = item.scene()
if scene is not None:
scene.removeItem(item)
class DistanceMapItem(pg.ImageItem):
"""A distance matrix image with user selectable regions.
"""
class SelectionRect(QGraphicsRectItem):
def boundingRect(self):
return super().boundingRect().adjusted(-1, -1, 1, 1)
def paint(self, painter, option, widget=None):
t = painter.transform()
rect = t.mapRect(self.rect())
painter.save()
painter.setTransform(QTransform())
pwidth = self.pen().widthF()
painter.setPen(self.pen())
painter.drawRect(rect.adjusted(pwidth, -pwidth, -pwidth, pwidth))
painter.restore()
selectionChanged = Signal()
Clear, Select, Commit = 1, 2, 4
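    # Command bit flags; callers may OR them together, e.g.
    # self.Select | self.Clear (see mousePressEvent below).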
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.setAcceptedMouseButtons(Qt.LeftButton | Qt.RightButton)
self.setAcceptHoverEvents(True)
self.__selections = []
#: (QGraphicsRectItem, QRectF) | None
self.__dragging = None
def __select(self, area, command):
if command & self.Clear:
self.__clearSelections()
if command & self.Select:
area = area.normalized()
intersects = [rect.intersects(area)
for item, rect in self.__selections]
def partition(predicate, iterable):
t1, t2 = itertools.tee(iterable)
return (itertools.filterfalse(predicate, t1),
filter(predicate, t2))
def intersects(selection):
_, selarea = selection
return selarea.intersects(area)
disjoint, intersection = partition(intersects, self.__selections)
disjoint = list(disjoint)
intersection = list(intersection)
# merge intersecting selections into a single area
area = reduce(QRect.united, (area for _, area in intersection),
area)
visualarea = self.__visualRectForSelection(area)
item = DistanceMapItem.SelectionRect(visualarea, self)
item.setPen(QPen(Qt.red, 0))
selection = disjoint + [(item, area)]
for item, _ in intersection:
_remove_item(item)
self.__selections = selection
self.selectionChanged.emit()
def __elastic_band_select(self, area, command):
if command & self.Clear and self.__dragging:
item, area = self.__dragging
_remove_item(item)
self.__dragging = None
if command & self.Select:
if self.__dragging:
item, _ = self.__dragging
else:
item = DistanceMapItem.SelectionRect(self)
item.setPen(QPen(Qt.red, 0))
# intersection with existing regions
intersection = [(item, selarea)
for item, selarea in self.__selections
if area.intersects(selarea)]
fullarea = reduce(
QRect.united, (selarea for _, selarea in intersection),
area
)
visualarea = self.__visualRectForSelection(fullarea)
item.setRect(visualarea)
self.__dragging = item, area
if command & self.Commit and self.__dragging:
item, area = self.__dragging
self.__select(area, self.Select)
def mousePressEvent(self, event):
if event.button() == Qt.LeftButton:
r, c = self._cellAt(event.pos())
if r != -1 and c != -1:
# Clear existing selection
# TODO: Fix extended selection.
self.__select(QRect(), self.Clear)
selrange = QRect(c, r, 1, 1)
self.__elastic_band_select(selrange, self.Select | self.Clear)
elif event.button() == Qt.RightButton:
self.__select(QRect(), self.Clear)
super().mousePressEvent(event)
event.accept()
def mouseMoveEvent(self, event):
if event.buttons() & Qt.LeftButton and self.__dragging:
r1, c1 = self._cellAt(event.buttonDownPos(Qt.LeftButton))
r2, c2 = self._cellCloseTo(event.pos())
selrange = QRect(c1, r1, 1, 1).united(QRect(c2, r2, 1, 1))
self.__elastic_band_select(selrange, self.Select)
super().mouseMoveEvent(event)
event.accept()
def mouseReleaseEvent(self, event):
if event.button() == Qt.LeftButton and self.__dragging:
r1, c1 = self._cellAt(event.buttonDownPos(Qt.LeftButton))
r2, c2 = self._cellCloseTo(event.pos())
selrange = QRect(c1, r1, 1, 1).united(QRect(c2, r2, 1, 1))
self.__elastic_band_select(selrange, self.Select | self.Commit)
self.__elastic_band_select(QRect(), self.Clear)
super().mouseReleaseEvent(event)
event.accept()
def _cellAt(self, pos):
"""Return the i, j cell index at `pos` in local coordinates."""
if self.image is None:
return -1, -1
else:
h, w = self.image.shape
i, j = numpy.floor([pos.y(), pos.x()])
if 0 <= i < h and 0 <= j < w:
return int(i), int(j)
else:
return -1, -1
def _cellCloseTo(self, pos):
"""Return the i, j cell index closest to `pos` in local coordinates."""
if self.image is None:
return -1, -1
else:
h, w = self.image.shape
i, j = numpy.floor([pos.y(), pos.x()])
i = numpy.clip(i, 0, h - 1)
j = numpy.clip(j, 0, w - 1)
return int(i), int(j)
def __clearSelections(self):
for item, _ in self.__selections:
_remove_item(item)
self.__selections = []
def __visualRectForSelection(self, rect):
h, w = self.image.shape
rect = rect.normalized()
rect = rect.intersected(QRect(0, 0, w, h))
r1, r2 = rect.top(), rect.bottom() + 1
c1, c2 = rect.left(), rect.right() + 1
return QRectF(QPointF(c1, r1), QPointF(c2, r2))
def __selectionForArea(self, area):
r1, c1 = self._cellAt(area.topLeft())
r2, c2 = self._cellAt(area.bottomRight())
selarea = QRect(c1, r1, c2 - c1 + 1, r2 - r1 + 1)
return selarea.normalized()
def selections(self):
selections = [self.__selectionForArea(area)
for _, area in self.__selections]
return [(range(r.top(), r.bottom() + 1),
range(r.left(), r.right() + 1))
for r in selections]
def hoverMoveEvent(self, event):
super().hoverMoveEvent(event)
i, j = self._cellAt(event.pos())
if i != -1 and j != -1:
d = self.image[i, j]
self.setToolTip("{}, {}: {:.3f}".format(i, j, d))
else:
self.setToolTip("")
_color_palettes = sorted(colorbrewer.colorSchemes["sequential"].items()) + \
[("Blue-Yellow", {2: [(0, 0, 255), (255, 255, 0)]})]
_default_colormap_index = len(_color_palettes) - 1
class OWDistanceMap(widget.OWWidget):
name = "Distance Map"
description = "Visualize a distance matrix."
icon = "icons/DistanceMap.svg"
priority = 1200
inputs = [("Distances", Orange.misc.DistMatrix, "set_distances")]
outputs = [("Data", Orange.data.Table), ("Features", widget.AttributeList)]
settingsHandler = settings.PerfectDomainContextHandler()
#: type of ordering to apply to matrix rows/columns
NoOrdering, Clustering, OrderedClustering = 0, 1, 2
sorting = settings.Setting(NoOrdering)
colormap = settings.Setting(_default_colormap_index)
color_gamma = settings.Setting(0.0)
color_low = settings.Setting(0.0)
color_high = settings.Setting(1.0)
annotation_idx = settings.ContextSetting(0, exclude_metas=False)
autocommit = settings.Setting(True)
graph_name = "grid_widget"
# Disable clustering for inputs bigger than this
_MaxClustering = 3000
# Disable cluster leaf ordering for inputs bigger than this
_MaxOrderedClustering = 1000
def __init__(self):
super().__init__()
self.matrix = None
self._tree = None
self._ordered_tree = None
self._sorted_matrix = None
self._sort_indices = None
self._selection = None
box = gui.widgetBox(self.controlArea, "Element sorting", margin=0)
self.sorting_cb = gui.comboBox(
box, self, "sorting",
items=["None", "Clustering", "Clustering with ordered leaves"],
callback=self._invalidate_ordering)
box = gui.widgetBox(self.controlArea, "Colors")
self.colormap_cb = gui.comboBox(
box, self, "colormap", callback=self._update_color
)
self.colormap_cb.setIconSize(QSize(64, 16))
self.palettes = list(_color_palettes)
init_color_combo(self.colormap_cb, self.palettes, QSize(64, 16))
self.colormap_cb.setCurrentIndex(self.colormap)
form = QFormLayout(
formAlignment=Qt.AlignLeft,
labelAlignment=Qt.AlignLeft,
fieldGrowthPolicy=QFormLayout.AllNonFixedFieldsGrow
)
# form.addRow(
# "Gamma",
# gui.hSlider(box, self, "color_gamma", minValue=0.0, maxValue=1.0,
# step=0.05, ticks=True, intOnly=False,
# createLabel=False, callback=self._update_color)
# )
form.addRow(
"Low",
gui.hSlider(box, self, "color_low", minValue=0.0, maxValue=1.0,
step=0.05, ticks=True, intOnly=False,
createLabel=False, callback=self._update_color)
)
form.addRow(
"High",
gui.hSlider(box, self, "color_high", minValue=0.0, maxValue=1.0,
step=0.05, ticks=True, intOnly=False,
createLabel=False, callback=self._update_color)
)
box.layout().addLayout(form)
box = gui.widgetBox(self.controlArea, "Annotations")
self.annot_combo = gui.comboBox(box, self, "annotation_idx",
callback=self._invalidate_annotations,
contentsLength=12)
self.annot_combo.setModel(itemmodels.VariableListModel())
self.annot_combo.model()[:] = ["None", "Enumeration"]
self.controlArea.layout().addStretch()
gui.auto_commit(self.controlArea, self, "autocommit",
"Send data", "Auto send is on")
self.inline_graph_report()
self.view = pg.GraphicsView(background="w")
self.mainArea.layout().addWidget(self.view)
self.grid_widget = pg.GraphicsWidget()
self.grid = QGraphicsGridLayout()
self.grid_widget.setLayout(self.grid)
self.viewbox = pg.ViewBox(enableMouse=False, enableMenu=False)
self.viewbox.setAcceptedMouseButtons(Qt.NoButton)
self.viewbox.setAcceptHoverEvents(False)
self.grid.addItem(self.viewbox, 1, 1)
self.left_dendrogram = DendrogramWidget(
self.grid_widget, orientation=DendrogramWidget.Left,
selectionMode=DendrogramWidget.NoSelection,
hoverHighlightEnabled=False
)
self.left_dendrogram.setAcceptedMouseButtons(Qt.NoButton)
self.left_dendrogram.setAcceptHoverEvents(False)
self.top_dendrogram = DendrogramWidget(
self.grid_widget, orientation=DendrogramWidget.Top,
selectionMode=DendrogramWidget.NoSelection,
hoverHighlightEnabled=False
)
self.top_dendrogram.setAcceptedMouseButtons(Qt.NoButton)
self.top_dendrogram.setAcceptHoverEvents(False)
self.grid.addItem(self.left_dendrogram, 1, 0)
self.grid.addItem(self.top_dendrogram, 0, 1)
self.right_labels = TextList(
alignment=Qt.AlignLeft)
self.bottom_labels = TextList(
orientation=Qt.Horizontal, alignment=Qt.AlignRight)
self.grid.addItem(self.right_labels, 1, 2)
self.grid.addItem(self.bottom_labels, 2, 1)
self.view.setCentralItem(self.grid_widget)
self.left_dendrogram.hide()
self.top_dendrogram.hide()
self.right_labels.hide()
self.bottom_labels.hide()
self.matrix_item = None
self.dendrogram = None
self.grid_widget.scene().installEventFilter(self)
def set_distances(self, matrix):
self.closeContext()
self.clear()
self.error(0)
if matrix is not None:
N, _ = matrix.shape
if N < 2:
self.error(0, "Empty distance matrix.")
matrix = None
self.matrix = matrix
if matrix is not None:
self.set_items(matrix.row_items, matrix.axis)
else:
self.set_items(None)
if matrix is not None:
N, _ = matrix.shape
else:
N = 0
model = self.sorting_cb.model()
item = model.item(2)
msg = None
if N > OWDistanceMap._MaxOrderedClustering:
item.setFlags(item.flags() & ~Qt.ItemIsEnabled)
if self.sorting == OWDistanceMap.OrderedClustering:
self.sorting = OWDistanceMap.Clustering
msg = "Cluster ordering was disabled due to the input " \
"matrix being to big"
else:
item.setFlags(item.flags() | Qt.ItemIsEnabled)
item = model.item(1)
if N > OWDistanceMap._MaxClustering:
item.setFlags(item.flags() & ~Qt.ItemIsEnabled)
if self.sorting == OWDistanceMap.Clustering:
self.sorting = OWDistanceMap.NoOrdering
msg = "Clustering was disabled due to the input " \
"matrix being to big"
else:
item.setFlags(item.flags() | Qt.ItemIsEnabled)
self.information(1, msg)
def set_items(self, items, axis=1):
self.items = items
model = self.annot_combo.model()
if items is None:
model[:] = ["None", "Enumeration"]
elif not axis:
model[:] = ["None", "Enumeration", "Attribute names"]
elif isinstance(items, Orange.data.Table):
annot_vars = list(items.domain) + list(items.domain.metas)
model[:] = ["None", "Enumeration"] + annot_vars
self.annotation_idx = 0
self.openContext(items.domain)
elif isinstance(items, list) and \
all(isinstance(item, Orange.data.Variable) for item in items):
model[:] = ["None", "Enumeration", "Name"]
else:
model[:] = ["None", "Enumeration"]
self.annotation_idx = min(self.annotation_idx, len(model) - 1)
def clear(self):
self.matrix = None
self.cluster = None
self._tree = None
self._ordered_tree = None
self._sorted_matrix = None
self._selection = []
self._clear_plot()
def handleNewSignals(self):
if self.matrix is not None:
self._update_ordering()
self._setup_scene()
self._update_labels()
self.unconditional_commit()
def _clear_plot(self):
def remove(item):
item.setParentItem(None)
item.scene().removeItem(item)
if self.matrix_item is not None:
self.matrix_item.selectionChanged.disconnect(
self._invalidate_selection)
remove(self.matrix_item)
self.matrix_item = None
self._set_displayed_dendrogram(None)
self._set_labels(None)
def _cluster_tree(self):
if self._tree is None:
self._tree = hierarchical.dist_matrix_clustering(self.matrix)
return self._tree
def _ordered_cluster_tree(self):
if self._ordered_tree is None:
tree = self._cluster_tree()
self._ordered_tree = \
hierarchical.optimal_leaf_ordering(tree, self.matrix)
return self._ordered_tree
def _setup_scene(self):
self._clear_plot()
self.matrix_item = DistanceMapItem(self._sorted_matrix)
# Scale the y axis to compensate for pg.ViewBox's y axis invert
self.matrix_item.scale(1, -1)
self.viewbox.addItem(self.matrix_item)
# Set fixed view box range.
h, w = self._sorted_matrix.shape
self.viewbox.setRange(QRectF(0, -h, w, h), padding=0)
self.matrix_item.selectionChanged.connect(self._invalidate_selection)
if self.sorting == OWDistanceMap.NoOrdering:
tree = None
elif self.sorting == OWDistanceMap.Clustering:
tree = self._cluster_tree()
elif self.sorting == OWDistanceMap.OrderedClustering:
tree = self._ordered_cluster_tree()
self._set_displayed_dendrogram(tree)
self._update_color()
def _set_displayed_dendrogram(self, root):
self.left_dendrogram.set_root(root)
self.top_dendrogram.set_root(root)
self.left_dendrogram.setVisible(root is not None)
self.top_dendrogram.setVisible(root is not None)
constraint = 0 if root is None else -1 # 150
self.left_dendrogram.setMaximumWidth(constraint)
self.top_dendrogram.setMaximumHeight(constraint)
def _invalidate_ordering(self):
self._sorted_matrix = None
if self.matrix is not None:
self._update_ordering()
self._setup_scene()
def _update_ordering(self):
if self.sorting == OWDistanceMap.NoOrdering:
self._sorted_matrix = self.matrix
self._sort_indices = None
else:
if self.sorting == OWDistanceMap.Clustering:
tree = self._cluster_tree()
elif self.sorting == OWDistanceMap.OrderedClustering:
tree = self._ordered_cluster_tree()
leaves = hierarchical.leaves(tree)
indices = numpy.array([leaf.value.index for leaf in leaves])
X = self.matrix
self._sorted_matrix = X[indices[:, numpy.newaxis],
indices[numpy.newaxis, :]]
self._sort_indices = indices
def _invalidate_annotations(self):
if self.matrix is not None:
self._update_labels()
    def _update_labels(self):
if self.annotation_idx == 0:
labels = None
elif self.annotation_idx == 1:
labels = [str(i + 1) for i in range(self.matrix.shape[0])]
elif self.annot_combo.model()[self.annotation_idx] == "Attribute names":
attr = self.matrix.row_items.domain.attributes
labels = [str(attr[i]) for i in range(self.matrix.shape[0])]
elif self.annotation_idx == 2 and \
isinstance(self.items, widget.AttributeList):
labels = [v.name for v in self.items]
elif isinstance(self.items, Orange.data.Table):
var = self.annot_combo.model()[self.annotation_idx]
column, _ = self.items.get_column_view(var)
labels = [var.repr_val(value) for value in column]
self._set_labels(labels)
def _set_labels(self, labels):
self._labels = labels
if labels and self.sorting != OWDistanceMap.NoOrdering:
sortind = self._sort_indices
labels = [labels[i] for i in sortind]
for textlist in [self.right_labels, self.bottom_labels]:
textlist.set_labels(labels or [])
textlist.setVisible(bool(labels))
constraint = -1 if labels else 0
self.right_labels.setMaximumWidth(constraint)
self.bottom_labels.setMaximumHeight(constraint)
def _update_color(self):
if self.matrix_item:
name, colors = self.palettes[self.colormap]
n, colors = max(colors.items())
colors = numpy.array(colors, dtype=numpy.ubyte)
low, high = self.color_low * 255, self.color_high * 255
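            # Build a 255-entry RGB lookup table by interpolating the palette
            # between the low/high cutoffs; values below the range clamp to
            # white (left=255) and above it to black (right=0).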
points = numpy.linspace(low, high, n)
space = numpy.linspace(0, 255, 255)
r = numpy.interp(space, points, colors[:, 0], left=255, right=0)
g = numpy.interp(space, points, colors[:, 1], left=255, right=0)
b = numpy.interp(space, points, colors[:, 2], left=255, right=0)
colortable = numpy.c_[r, g, b]
self.matrix_item.setLookupTable(colortable)
def _invalidate_selection(self):
ranges = self.matrix_item.selections()
ranges = reduce(iadd, ranges, [])
indices = reduce(iadd, ranges, [])
if self.sorting != OWDistanceMap.NoOrdering:
sortind = self._sort_indices
indices = [sortind[i] for i in indices]
self._selection = list(sorted(set(indices)))
self.commit()
def commit(self):
datasubset = None
featuresubset = None
if not self._selection:
pass
elif isinstance(self.items, Orange.data.Table):
indices = self._selection
if self.matrix.axis == 1:
datasubset = self.items.from_table_rows(self.items, indices)
elif self.matrix.axis == 0:
domain = Orange.data.Domain(
[self.items.domain[i] for i in indices],
self.items.domain.class_vars,
self.items.domain.metas)
datasubset = Orange.data.Table.from_table(domain, self.items)
elif isinstance(self.items, widget.AttributeList):
subset = [self.items[i] for i in self._selection]
featuresubset = widget.AttributeList(subset)
self.send("Data", datasubset)
self.send("Features", featuresubset)
def onDeleteWidget(self):
super().onDeleteWidget()
self.clear()
def send_report(self):
annot = self.annot_combo.currentText()
if self.annotation_idx <= 1:
annot = annot.lower()
self.report_items((
("Sorting", self.sorting_cb.currentText().lower()),
("Annotations", annot)
))
if self.matrix is not None:
self.report_plot()
class TextList(GraphicsSimpleTextList):
def resizeEvent(self, event):
super().resizeEvent(event)
self._updateFontSize()
def _updateFontSize(self):
crect = self.contentsRect()
if self.orientation == Qt.Vertical:
h = crect.height()
else:
h = crect.width()
n = len(getattr(self, "label_items", []))
if n == 0:
return
if self.scene() is not None:
maxfontsize = self.scene().font().pointSize()
else:
maxfontsize = QApplication.instance().font().pointSize()
lineheight = max(1, h / n)
fontsize = min(self._point_size(lineheight), maxfontsize)
font = self.font()
font.setPointSize(fontsize)
self.setFont(font)
self.layout().invalidate()
self.layout().activate()
def _point_size(self, height):
font = self.font()
font.setPointSize(height)
fix = 0
while QFontMetrics(font).lineSpacing() > height and height - fix > 1:
fix += 1
font.setPointSize(height - fix)
return height - fix
##########################
# Color palette management
##########################
def palette_gradient(colors, discrete=False):
n = len(colors)
stops = numpy.linspace(0.0, 1.0, n, endpoint=True)
gradstops = [(float(stop), color) for stop, color in zip(stops, colors)]
grad = QLinearGradient(QPointF(0, 0), QPointF(1, 0))
grad.setStops(gradstops)
return grad
def palette_pixmap(colors, size):
img = QPixmap(size)
img.fill(Qt.transparent)
painter = QPainter(img)
grad = palette_gradient(colors)
grad.setCoordinateMode(QLinearGradient.ObjectBoundingMode)
painter.setPen(Qt.NoPen)
painter.setBrush(QBrush(grad))
painter.drawRect(0, 0, size.width(), size.height())
painter.end()
return img
def init_color_combo(cb, palettes, iconsize):
cb.clear()
iconsize = cb.iconSize()
for name, palette in palettes:
n, colors = max(palette.items())
colors = [QColor(*c) for c in colors]
cb.addItem(QIcon(palette_pixmap(colors, iconsize)), name,
palette)
def test(argv=sys.argv):
app = QApplication(list(argv))
argv = app.arguments()
if len(argv) > 1:
filename = argv[1]
else:
filename = "iris"
import sip
import Orange.distance
w = OWDistanceMap()
w.show()
w.raise_()
data = Orange.data.Table(filename)
dist = Orange.distance.Euclidean(data)
w.set_distances(dist)
w.handleNewSignals()
rval = app.exec_()
w.set_distances(None)
w.saveSettings()
w.onDeleteWidget()
sip.delete(w)
del w
return rval
if __name__ == "__main__":
sys.exit(test())<|fim▁end|> |
from PyQt4.QtGui import ( |
<|file_name|>tools.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
import six
from heat.common import template_format
from heat.engine.clients.os import glance
from heat.engine.clients.os import keystone
from heat.engine.clients.os.keystone import fake_keystoneclient as fake_ks
from heat.engine.clients.os import nova
from heat.engine import environment
from heat.engine.resources.aws.ec2 import instance as instances
from heat.engine import stack as parser
from heat.engine import template as templatem
from heat.tests.openstack.nova import fakes as fakes_nova
from heat.tests import utils
wp_template = u'''
heat_template_version: 2014-10-16
description: WordPress
parameters:
KeyName:
description: KeyName
type: string
default: test\u2042
resources:
WebServer:
type: AWS::EC2::Instance
properties:
ImageId: F17-x86_64-gold
InstanceType: m1.large
KeyName: test
UserData: wordpress
'''
string_template_five = '''
heat_template_version: 2013-05-23
description: Random String templates
parameters:
salt:
type: string
default: "quickbrownfox"
resources:
A:
type: OS::Heat::RandomString
properties:
salt: {get_param: salt}
B:
type: OS::Heat::RandomString
properties:
salt: {get_param: salt}
C:
type: OS::Heat::RandomString
depends_on: [A, B]
properties:
salt: {get_attr: [A, value]}
D:
type: OS::Heat::RandomString
depends_on: C
properties:
salt: {get_param: salt}
E:
type: OS::Heat::RandomString
depends_on: C
properties:
salt: {get_param: salt}
'''
string_template_five_update = '''
heat_template_version: 2013-05-23
description: Random String templates
parameters:
salt:
type: string
default: "quickbrownfox123"
resources:
A:
type: OS::Heat::RandomString
properties:
salt: {get_param: salt}
B:
type: OS::Heat::RandomString
properties:
salt: {get_param: salt}
F:
type: OS::Heat::RandomString
depends_on: [A, B]
properties:
salt: {get_param: salt}
G:
type: OS::Heat::RandomString
depends_on: F
properties:
salt: {get_param: salt}
H:
type: OS::Heat::RandomString
depends_on: F
properties:
salt: {get_param: salt}
'''
attr_cache_template = '''
heat_template_version: 2016-04-08
resources:
A:
type: ResourceWithComplexAttributesType
B:
type: OS::Heat::RandomString
properties:
salt: {get_attr: [A, flat_dict, key2]}
C:
type: OS::Heat::RandomString
depends_on: [A, B]
properties:
salt: {get_attr: [A, nested_dict, dict, a]}
D:
type: OS::Heat::RandomString
depends_on: C
properties:
salt: {get_attr: [A, nested_dict, dict, b]}
E:
type: OS::Heat::RandomString
depends_on: C
properties:
salt: {get_attr: [A, flat_dict, key3]}
'''
def get_stack(stack_name, ctx, template=None, with_params=True,
convergence=False, **kwargs):
if template is None:
t = template_format.parse(wp_template)
if with_params:
env = environment.Environment({'KeyName': 'test'})
tmpl = templatem.Template(t, env=env)
else:
tmpl = templatem.Template(t)
else:
t = template_format.parse(template)
tmpl = templatem.Template(t)
stack = parser.Stack(ctx, stack_name, tmpl, convergence=convergence,
**kwargs)
stack.thread_group_mgr = DummyThreadGroupManager()
return stack
def setup_keystone_mocks_with_mock(test_case, stack):
fkc = fake_ks.FakeKeystoneClient()
test_case.patchobject(keystone.KeystoneClientPlugin, '_create')
keystone.KeystoneClientPlugin._create.return_value = fkc
def setup_mock_for_image_constraint_with_mock(test_case, imageId_input,
imageId_output=744):
test_case.patchobject(glance.GlanceClientPlugin,
'find_image_by_name_or_id',
return_value=imageId_output)
def validate_setup_mocks_with_mock(stack, fc, mock_image_constraint=True,
validate_create=True):
instance = stack['WebServer']
metadata = instance.metadata_get()
if mock_image_constraint:
m_image = glance.GlanceClientPlugin.find_image_by_name_or_id
m_image.assert_called_with(
instance.properties['ImageId'])
user_data = instance.properties['UserData']
server_userdata = instance.client_plugin().build_userdata(
metadata, user_data, 'ec2-user')
nova.NovaClientPlugin.build_userdata.assert_called_with(
metadata, user_data, 'ec2-user')
if not validate_create:
return
fc.servers.create.assert_called_once_with(
image=744,
flavor=3,
key_name='test',
name=utils.PhysName(stack.name, 'WebServer'),
security_groups=None,
userdata=server_userdata,
scheduler_hints=None,
meta=None,
nics=None,
availability_zone=None,
block_device_mapping=None)
def setup_mocks_with_mock(testcase, stack, mock_image_constraint=True,
mock_keystone=True):
fc = fakes_nova.FakeClient()
testcase.patchobject(instances.Instance, 'client', return_value=fc)
testcase.patchobject(nova.NovaClientPlugin, 'client', return_value=fc)
instance = stack['WebServer']
metadata = instance.metadata_get()
if mock_image_constraint:
setup_mock_for_image_constraint_with_mock(
testcase, instance.properties['ImageId'])
if mock_keystone:
setup_keystone_mocks_with_mock(testcase, stack)
user_data = instance.properties['UserData']
server_userdata = instance.client_plugin().build_userdata(
metadata, user_data, 'ec2-user')
testcase.patchobject(nova.NovaClientPlugin, 'build_userdata',
return_value=server_userdata)
testcase.patchobject(fc.servers, 'create')
fc.servers.create.return_value = fc.servers.list()[4]
return fc
def setup_stack_with_mock(test_case, stack_name, ctx, create_res=True,
convergence=False):
stack = get_stack(stack_name, ctx, convergence=convergence)
stack.store()
if create_res:
fc = setup_mocks_with_mock(test_case, stack)
stack.create()
stack._persist_state()
validate_setup_mocks_with_mock(stack, fc)
return stack
def clean_up_stack(test_case, stack, delete_res=True):
if delete_res:
fc = fakes_nova.FakeClient()
test_case.patchobject(instances.Instance, 'client', return_value=fc)
test_case.patchobject(fc.servers, 'delete',
side_effect=fakes_nova.fake_exception())
stack.delete()
def stack_context(stack_name, create_res=True, convergence=False):
"""Decorator for creating and deleting stack.
Decorator which creates a stack by using the test case's context and
deletes it afterwards to ensure tests clean up their stacks regardless
of test success/failure.
"""
def stack_delete(test_fn):
@six.wraps(test_fn)
def wrapped_test(test_case, *args, **kwargs):
def create_stack():
ctx = getattr(test_case, 'ctx', None)
if ctx is not None:
stack = setup_stack_with_mock(test_case, stack_name, ctx,
create_res, convergence)
setattr(test_case, 'stack', stack)
def delete_stack():
stack = getattr(test_case, 'stack', None)
if stack is not None and stack.id is not None:
clean_up_stack(test_case, stack, delete_res=create_res)
create_stack()
try:
test_fn(test_case, *args, **kwargs)
except Exception:
exc_class, exc_val, exc_tb = sys.exc_info()
try:
delete_stack()
finally:
six.reraise(exc_class, exc_val, exc_tb)
else:
delete_stack()
return wrapped_test
return stack_delete
class DummyThread(object):
def link(self, callback, *args):
pass
class DummyThreadGroup(object):
def __init__(self):
self.threads = []
def add_timer(self, interval, callback, initial_delay=None,
*args, **kwargs):
self.threads.append(callback)
<|fim▁hole|> def add_thread(self, callback, cnxt, trace, func, *args, **kwargs):
# callback here is _start_with_trace(); func is the 'real' callback
self.threads.append(func)
return DummyThread()
def stop(self, graceful=False):
pass
def wait(self):
pass
class DummyThreadGroupManager(object):
def __init__(self):
self.msg_queues = []
self.messages = []
def start(self, stack, func, *args, **kwargs):
# Just run the function, so we know it's completed in the test
func(*args, **kwargs)
return DummyThread()
def start_with_lock(self, cnxt, stack, engine_id, func, *args, **kwargs):
# Just run the function, so we know it's completed in the test
func(*args, **kwargs)
return DummyThread()
def start_with_acquired_lock(self, stack, lock, func, *args, **kwargs):
# Just run the function, so we know it's completed in the test
func(*args, **kwargs)
return DummyThread()
def send(self, stack_id, message):
self.messages.append(message)
def add_msg_queue(self, stack_id, msg_queue):
self.msg_queues.append(msg_queue)
def remove_msg_queue(self, gt, stack_id, msg_queue):
for q in self.msg_queues.pop(stack_id, []):
if q is not msg_queue:
self.add_event(stack_id, q)
class DummyThreadGroupMgrLogStart(DummyThreadGroupManager):
def __init__(self):
super(DummyThreadGroupMgrLogStart, self).__init__()
self.started = []
def start_with_lock(self, cnxt, stack, engine_id, func, *args, **kwargs):
self.started.append((stack.id, func))
return DummyThread()
def start_with_acquired_lock(self, stack, lock, func, *args, **kwargs):
self.started.append((stack.id, func))
return DummyThread()
def start(self, stack_id, func, *args, **kwargs):
# Here we only store the started task so it can be checked
self.started.append((stack_id, func))<|fim▁end|> | def stop_timers(self):
pass
|
<|file_name|>options.cc<|end_file_name|><|fim▁begin|>#include <mimosa/options/options.hh>
#include "options.hh"
namespace hefur
{
const uint32_t & MAX_PEERS = *mo::addOption<uint32_t>(
"", "max-peers", "the maximum number of peers per torrent",
30000);
const uint32_t & MAX_TORRENT_SIZE = *mo::addOption<uint32_t>(
"", "max-torrent-size", "the maximum torrent size, in MiB",
10);
const uint32_t & MAX_TORRENT_NAME = *mo::addOption<uint32_t>(
"", "max-torrent-name", "the maximum torrent name length to truncate to",
64);
const uint32_t & MAX_SCAN_DEPTH = *mo::addOption<uint32_t>(
"", "max-scan-depth", "the maximum depth while scanning torrent-dir",
64);
const uint32_t & MAX_SCAN_INODES = *mo::addOption<uint32_t>(
"", "max-scan-inodes", "the maximum number of inode to scan while scanning torrent-dir",
128 * 1024);
const uint32_t & ANNOUNCE_INTERVAL = *mo::addOption<uint32_t>(
"", "announce-interval", "the duration in minutes between two announces",
15);
const uint32_t & SCRAPE_INTERVAL = *mo::addOption<uint32_t>(
"", "scrape-interval", "the duration in minutes between two scrapes",
15);
const uint32_t & HTTP_TIMEOUT = *mo::addOption<uint32_t>(
"", "http-timeout", "the number of milliseconds to wait until timeout", 2000);
const uint16_t & HTTP_PORT = *mo::addOption<uint16_t>(
"", "http-port", "the port to use, 0 to disable", 6969);
const uint16_t & HTTPS_PORT = *mo::addOption<uint16_t>(
"", "https-port", "the port to use, 0 to disable", 6970);
const uint16_t & UDP_PORT = *mo::addOption<uint16_t>(
"", "udp-port", "the port to use, 0 to disable", 6969);
const bool & IPV6 = *mo::addSwitch(
"", "ipv6", "bind on ipv6 instead of ipv4");
const bool & ALLOW_PROXY = *mo::addSwitch(
"", "allow-proxy", "allow the peer to specify its address (so usage of proxies)");
const bool & DISABLE_PEERS_PAGE = *mo::addSwitch(
"", "disable-peers-page", "disable the HTTP page /peers, which list torrent's peers");
const bool & DISABLE_STAT_PAGE = *mo::addSwitch(
"", "disable-stat-page", "disable the HTTP page /stat, which list served torrents");
const bool & DISABLE_FILE_PAGE = *mo::addSwitch(
"", "disable-file-page", "disable the HTTP page /file, which serve .torrent files");
const std::string & CERT = *mo::addOption<std::string>(
"", "http-cert", "the path to the certificate", "");
const std::string & KEY = *mo::addOption<std::string>(
"", "http-key", "the path to the key", "");
const std::string & TORRENT_DIR = *mo::addOption<std::string>(
"", "torrent-dir",
"the directory containing the allowed torrents,"
" if empty every torrents are allowed", "");
const uint32_t & SCAN_INTERVAL = *mo::addOption<uint32_t>(
"", "scan-interval", "the duration in seconds between two torrent-dir scans",
60);
const char * VERSION_MSG = mo::addMessage(
"", "version", "display the software's version",
"hefurd " HEFUR_VERSION "\n"<|fim▁hole|> "Copyright (C) 2012 Alexandre Bique\n"
"License: MIT\n"
"This is free software: you are free to change and redistribute it.\n"
"There is NO WARRANTY, to the extent permitted by law.");
const std::string & WWW_DIR = *mo::addOption<std::string>(
"", "www-dir",
"the directory containing the web data files (html, css, img)",
"/usr/share/hefur/www");
const std::string & CONTROL_SOCKET = *mo::addOption<std::string>(
"", "control-socket",
"the path to the control socket",
"/var/run/hefur/control");
}<|fim▁end|> | |
<|file_name|>KerberosTypeConverter.java<|end_file_name|><|fim▁begin|>package com.sequenceiq.freeipa.entity.util;<|fim▁hole|>public class KerberosTypeConverter extends DefaultEnumConverter<KerberosType> {
@Override
public KerberosType getDefault() {
return KerberosType.FREEIPA;
}
}<|fim▁end|> |
import com.sequenceiq.cloudbreak.converter.DefaultEnumConverter;
import com.sequenceiq.freeipa.api.v1.kerberos.model.KerberosType;
|
<|file_name|>manage-journey.guard.ts<|end_file_name|><|fim▁begin|>import { Injectable } from '@angular/core';
import { Router, CanActivate, CanActivateChild, ActivatedRouteSnapshot } from '@angular/router';
import { JourneyService } from '../journey.service';
@Injectable()
export class ManageJourneyGuard implements CanActivate {<|fim▁hole|> private journeyService: JourneyService,
private router: Router
) {}
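  // Allows activation only when the user has no journeys yet; otherwise
  // redirects to the first existing journey and blocks activation.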
canActivate(route: ActivatedRouteSnapshot) {
return this.journeyService.getJourneys()
.map(journeys => {
if (journeys.length) {
this.router.navigate(['/journeys', journeys[0].id]);
return false;
}
return true;
});
}
canActivateChild(route: ActivatedRouteSnapshot) {
return this.canActivate(route);
}
}<|fim▁end|> |
constructor( |
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>"""
Scrapy settings for RecipesScraper project.
"""
# Names
BOT_NAME = 'RecipesScraper'
SPIDER_MODULES = ['RecipesScraper.spiders']
NEWSPIDER_MODULE = 'RecipesScraper.spiders'
# Obey robots.txt rules
ROBOTSTXT_OBEY = True
# Disable cookies (enabled by default)
COOKIES_ENABLED = False
# Configure item pipelines
ITEM_PIPELINES = {
'RecipesScraper.pipelines.JsonPipeline': 300,
}
# Enable and configure the AutoThrottle extension (disabled by default)
AUTOTHROTTLE_ENABLED = True
# The initial download delay
AUTOTHROTTLE_START_DELAY = 3
# The maximum download delay to be set in case of high latencies
AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
AUTOTHROTTLE_TARGET_CONCURRENCY = 2.0<|fim▁hole|><|fim▁end|> | # Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG = False |
<|file_name|>CWE762_Mismatched_Memory_Management_Routines__new_free_struct_54a.cpp<|end_file_name|><|fim▁begin|>/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE762_Mismatched_Memory_Management_Routines__new_free_struct_54a.cpp
Label Definition File: CWE762_Mismatched_Memory_Management_Routines__new_free.label.xml
Template File: sources-sinks-54a.tmpl.cpp
*/
/*
* @description
* CWE: 762 Mismatched Memory Management Routines
* BadSource: Allocate data using new
* GoodSource: Allocate data using malloc()
* Sinks:
* GoodSink: Deallocate data using delete
* BadSink : Deallocate data using free()
* Flow Variant: 54 Data flow: data passed as an argument from one function through three others to a fifth; all five functions are in different source files
*
* */
#include "std_testcase.h"
namespace CWE762_Mismatched_Memory_Management_Routines__new_free_struct_54
{
#ifndef OMITBAD
/* bad function declaration */
void badSink_b(twoIntsStruct * data);
void bad()
{
twoIntsStruct * data;
/* Initialize data*/
data = NULL;
/* POTENTIAL FLAW: Allocate memory with a function that requires delete to free the memory */
data = new twoIntsStruct;
badSink_b(data);
}
#endif /* OMITBAD */
#ifndef OMITGOOD
/* goodG2B uses the GoodSource with the BadSink */
void goodG2BSink_b(twoIntsStruct * data);
static void goodG2B()
{
twoIntsStruct * data;
/* Initialize data*/
data = NULL;
/* FIX: Allocate memory from the heap using malloc() */
data = (twoIntsStruct *)malloc(100*sizeof(twoIntsStruct));
if (data == NULL) {exit(-1);}
goodG2BSink_b(data);
}
/* goodB2G uses the BadSource with the GoodSink */
void goodB2GSink_b(twoIntsStruct * data);
static void goodB2G()
{
twoIntsStruct * data;
/* Initialize data*/
data = NULL;
/* POTENTIAL FLAW: Allocate memory with a function that requires delete to free the memory */
data = new twoIntsStruct;
goodB2GSink_b(data);
}
void good()
<|fim▁hole|> goodG2B();
goodB2G();
}
#endif /* OMITGOOD */
} /* close namespace */
/* Below is the main(). It is only used when building this testcase on
its own for testing or for building a binary to use in testing binary
analysis tools. It is not used when compiling all the testcases as one
application, which is how source code analysis tools are tested. */
#ifdef INCLUDEMAIN
using namespace CWE762_Mismatched_Memory_Management_Routines__new_free_struct_54; /* so that we can use good and bad easily */
int main(int argc, char * argv[])
{
/* seed randomness */
srand( (unsigned)time(NULL) );
#ifndef OMITGOOD
printLine("Calling good()...");
good();
printLine("Finished good()");
#endif /* OMITGOOD */
#ifndef OMITBAD
printLine("Calling bad()...");
bad();
printLine("Finished bad()");
#endif /* OMITBAD */
return 0;
}
#endif<|fim▁end|> | {
|
<|file_name|>generate_provider_logos_collage_image.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#
# Script which generates a collage of provider logos from multiple provider
# logo files.
#
# It works in two steps:
#
# 1. Resize all the provider logo files (reduce the dimensions)
# 2. Assemble a final image from the resized images
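#
# Example invocation (illustrative paths; the flags are defined below):
#
#   ./generate_provider_logos_collage_image.py \
#       --input-path provider_logos/ --output-path output/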
import os
import sys
import argparse
import subprocess
import random
from os.path import join as pjoin
DIMENSIONS = '150x150' # Dimensions of the resized image (<width>x<height>)
GEOMETRY = '+4+4' # How to arrange images (+<rows>+<columns>)
TO_CREATE_DIRS = ['resized/', 'final/']
def setup(output_path):
"""
Create missing directories.
"""
for directory in TO_CREATE_DIRS:
final_path = pjoin(output_path, directory)
if not os.path.exists(final_path):
os.makedirs(final_path)
def get_logo_files(input_path):
logo_files = os.listdir(input_path)
logo_files = [name for name in logo_files if
'resized' not in name and name.endswith('png')]
logo_files = [pjoin(input_path, name) for name in logo_files]
return logo_files
def resize_images(logo_files, output_path):
resized_images = []
for logo_file in logo_files:
name, ext = os.path.splitext(os.path.basename(logo_file))
new_name = '%s%s' % (name, ext)
out_name = pjoin(output_path, 'resized/', new_name)
print('Resizing image: %(name)s' % {'name': logo_file})
values = {'name': logo_file, 'out_name': out_name,
'dimensions': DIMENSIONS}
cmd = 'convert %(name)s -resize %(dimensions)s %(out_name)s'
cmd = cmd % values
subprocess.call(cmd, shell=True)
resized_images.append(out_name)
return resized_images
def assemble_final_image(resized_images, output_path):
final_name = pjoin(output_path, 'final/logos.png')
random.shuffle(resized_images)
values = {'images': ' '.join(resized_images), 'geometry': GEOMETRY,
'out_name': final_name}
cmd = 'montage %(images)s -geometry %(geometry)s %(out_name)s'
cmd = cmd % values
print('Generating final image: %(name)s' % {'name': final_name})
subprocess.call(cmd, shell=True)
def main(input_path, output_path):
if not os.path.exists(input_path):
print('Path doesn\'t exist: %s' % (input_path))
sys.exit(2)
if not os.path.exists(output_path):
print('Path doesn\'t exist: %s' % (output_path))
sys.exit(2)
logo_files = get_logo_files(input_path=input_path)
setup(output_path=output_path)<|fim▁hole|>
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Assemble provider logos '
' in a single image')
parser.add_argument('--input-path', action='store',
help='Path to directory which contains provider '
'logo files')
parser.add_argument('--output-path', action='store',
help='Path where the new files will be written')
args = parser.parse_args()
input_path = os.path.abspath(args.input_path)
output_path = os.path.abspath(args.output_path)
main(input_path=input_path, output_path=output_path)<|fim▁end|> | resized_images = resize_images(logo_files=logo_files,
output_path=output_path)
assemble_final_image(resized_images=resized_images,
output_path=output_path) |
<|file_name|>catalog.service.ts<|end_file_name|><|fim▁begin|>import { Injectable } from '@angular/core';
import { Headers, Http, Response } from '@angular/http';
import { Floss } from '../models/floss';
import { DB } from '../models/db';
@Injectable()
export class CatalogService {
private flossUrl = 'app/floss'; // URL to web api
constructor(private http: Http) { }
getCatalog(): Promise<Floss[]> {
return Promise.resolve(DB.db);
}
getFloss(id: number): Promise<Floss> {
return this.getCatalog()
.then(catalog => catalog.find(floss => floss.dmc === id));
}
  deleteFloss(floss: Floss, group: string): void {
    let my: Floss[] = this.load(group);
    if (!my) {
      my = [];
    }
    const newArr: Floss[] = my.filter(value => value.dmc !== floss.dmc);
    console.log(newArr);
    this.store(group, newArr);
  }
  addFlossTo(floss: Floss, group: string): void {
    let collection: Floss[] = this.load(group);
    if (!collection) {
      collection = [];
    }
    const found: Floss = collection.find(value => value.dmc === floss.dmc);
    if (found) {
      return;
    }
    console.log(collection);
    collection.push(floss);
    this.store(group, collection);
  }
  isFlossIn(floss: Floss, group: string): boolean {
    let collection: Floss[] = this.load(group);
    if (!collection) {
      collection = [];
    }
    const found: Floss = collection.find(value => value.dmc === floss.dmc);
    return found ? true : false;
  }
store(name: string, data: any): void {
let localData: any = localStorage.getItem('sara');
if (localData) {
localData = JSON.parse(localData);
} else {
localData = {};
}<|fim▁hole|> }
load(name: string): any {
let data: any = JSON.parse(localStorage.getItem('sara'));
if (!data) {
return undefined;
}
if (name) {
if (data[name]) {
return data[name];
} else {
return undefined;
}
}
return data;
}
}<|fim▁end|> |
localData[name] = data;
    localStorage.setItem('sara', JSON.stringify(localData));
<|file_name|>TradeboardFragment.java<|end_file_name|><|fim▁begin|>package com.hacks.collegebarter.fragments;
import android.app.Activity;
import android.app.Fragment;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.hacks.collegebarter.R;
import com.hacks.collegebarter.navdrawer.MainAppActivity;
public class TradeboardFragment extends Fragment{
public static final String ARG_SECTION_NUMBER = "section_number";
  // Fragments need a public no-arg constructor; this one also stores the section number argument.
public TradeboardFragment() {
Bundle bundle = new Bundle();<|fim▁hole|> this.setArguments(bundle);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.tradeboard_fragment, container,
false);
return rootView;
}
@Override
public void onAttach(Activity activity) {
super.onAttach(activity);
((MainAppActivity) activity).onSectionAttached(getArguments().getInt(
ARG_SECTION_NUMBER));
}
}<|fim▁end|> | bundle.putInt(ARG_SECTION_NUMBER, 0); |
<|file_name|>filter_xroar_trace.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# encoding:utf-8
"""
Filter Xroar trace files.
see README for more information.
:created: 2014 by Jens Diemer - www.jensdiemer.de
:license: GNU GPL v3 or above, see LICENSE for more details.
"""
import os
import time
import sys
import argparse
class XroarTraceFilter(object):
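    """
    Analyzes a Xroar trace file and writes a reduced trace: collects
    per-address call statistics and filters or windows the op calls.
    """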
def __init__(self, infile, outfile):
self.infile = infile
self.outfile = outfile
def load_tracefile(self, f):
sys.stderr.write(
"\nRead %s...\n\n" % f.name
)
addr_stat = {} # TODO: Use collections.Counter
next_update = time.time() + 0.5
        line_no = 0  # remains 0 if the file is empty
for line_no, line in enumerate(f):
if time.time() > next_update:
sys.stderr.write(
"\rAnalyzed %i op calls..." % line_no
)
sys.stderr.flush()
next_update = time.time() + 0.5
addr = line[:4]
addr_stat.setdefault(addr, 0)
addr_stat[addr] += 1
f.seek(0) # if also used in self.filter()
sys.stderr.write(
"\rAnalyzed %i op calls, complete.\n" % line_no
)
sys.stderr.write(
"\nThe tracefile contains %i unique addresses.\n" % len(addr_stat)
)
return addr_stat
def unique(self):
sys.stderr.write(
"\nunique %s in %s...\n\n" % (self.infile.name, self.outfile.name)
)
unique_addr = set()
total_skiped_lines = 0
skip_count = 0
last_line_no = 0
next_update = time.time() + 1
stat_out = False
for line_no, line in enumerate(self.infile):
if time.time() > next_update:
self.outfile.flush()
if stat_out:
sys.stderr.write("\r")
else:
sys.stderr.write("\n")
sys.stderr.write(
"In %i lines (%i/sec.) are %i unique address calls..." % (
line_no, (line_no - last_line_no), len(unique_addr)
)
)
stat_out = True
sys.stderr.flush()
last_line_no = line_no
next_update = time.time() + 1
addr = line[:4]
if addr in unique_addr:
total_skiped_lines += 1
skip_count += 1
continue
unique_addr.add(addr)
if skip_count != 0:
if stat_out:
# Skip info should not in the same line after stat info
sys.stderr.write("\n")
self.outfile.write(
"... [Skip %i lines] ...\n" % skip_count
)
skip_count = 0
self.outfile.write(line)
stat_out = False
self.outfile.close()
sys.stderr.write(
"%i lines was filtered.\n" % total_skiped_lines
)
def display_addr_stat(self, addr_stat, display_max=None):
if display_max is None:
sys.stdout.write(
"\nList of all called addresses:\n"
)
else:
sys.stdout.write(
"List of the %i most called addresses:\n" % display_max
)
        for no, data in enumerate(sorted(addr_stat.items(), key=lambda x: x[1], reverse=True)):
if display_max is not None and no >= display_max:
break
sys.stdout.write(
"\tAddress %s called %s times.\n" % data
)
def get_max_count_filter(self, addr_stat, max_count=10):
sys.stderr.write(
"Filter addresses with more than %i calls:\n" % max_count
)
addr_filter = {}
        for addr, count in addr_stat.items():
if count >= max_count:
addr_filter[addr] = count
return addr_filter
def filter(self, addr_filter):
sys.stderr.write(
"Filter %i addresses.\n" % len(addr_filter)
)
total_skiped_lines = 0
skip_count = 0
last_line_no = 0
next_update = time.time() + 1
for line_no, line in enumerate(self.infile):
if time.time() > next_update:
sys.stderr.write(
"\rFilter %i lines (%i/sec.)..." % (
line_no, (line_no - last_line_no)
)
)
sys.stderr.flush()
last_line_no = line_no
next_update = time.time() + 1
addr = line[:4]
if addr in addr_filter:
total_skiped_lines += 1
skip_count += 1
continue
if skip_count != 0:
self.outfile.write(
"... [Skip %i lines] ...\n" % skip_count
)
skip_count = 0
self.outfile.write(line)
self.outfile.close()
sys.stderr.write(
"%i lines was filtered.\n" % total_skiped_lines
)
def start_stop(self, start_addr, stop_addr):
sys.stderr.write(
"\nFilter starts with $%x and ends with $%x from %s in %s...\n\n" % (
start_addr, stop_addr,
self.infile.name, self.outfile.name
)
)
all_addresses = set()
passed_addresses = set()
start_seperator = "\n ---- [ START $%x ] ---- \n" % start_addr
end_seperator = "\n ---- [ END $%x ] ---- \n" % stop_addr
last_line_no = 0
next_update = time.time() + 1
stat_out = False
in_area = False
for line_no, line in enumerate(self.infile):
try:
addr = int(line[:4], 16)
except ValueError:
continue
passed_addresses.add(addr)
if in_area:
self.outfile.write(line)
stat_out = False
if addr == stop_addr:
sys.stderr.flush()
self.outfile.flush()
sys.stderr.write(end_seperator)
self.outfile.write(end_seperator)
sys.stderr.flush()
self.outfile.flush()
in_area = False
continue
else:
if addr == start_addr:
sys.stderr.flush()
self.outfile.flush()
sys.stderr.write(start_seperator)
self.outfile.write(start_seperator)
in_area = True
self.outfile.write(line)
sys.stderr.flush()
self.outfile.flush()
stat_out = False
continue
if time.time() > next_update:
self.outfile.flush()
if stat_out:
sys.stderr.write("\r")
else:
sys.stderr.write("\n")
sys.stderr.write(
"process %i lines (%i/sec.), wait for $%x..." % (
line_no, (line_no - last_line_no), start_addr,
)
)
passed_addresses -= all_addresses
if passed_addresses:
all_addresses.update(passed_addresses)
passed_addresses = ",".join(["$%x" % i for i in passed_addresses])
sys.stderr.write(
"\nPassed unique addresses: %s\n" % passed_addresses
)
passed_addresses = set()
else:
stat_out = True
sys.stderr.flush()
last_line_no = line_no
next_update = time.time() + 1
self.outfile.close()
def main(args):
xt = XroarTraceFilter(args.infile, args.outfile)
if args.unique:
xt.unique()<|fim▁hole|> xt.start_stop(*args.start_stop)
return
if args.loop_filter:
addr_stat = xt.load_tracefile(args.loop_filter)
xt.filter(addr_filter=addr_stat)
if "display" in args:
addr_stat = xt.load_tracefile(args.infile)
xt.display_addr_stat(addr_stat,
display_max=args.display
)
if args.filter:
addr_stat = xt.load_tracefile(args.infile)
addr_filter = xt.get_max_count_filter(addr_stat,
max_count=args.filter
)
xt.filter(addr_filter)
def start_stop_value(arg):
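    """Parse a "$4000-$5000" style argument into a (start, stop) int tuple."""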
start_raw, stop_raw = arg.split("-")
start = int(start_raw.strip("$ "), 16)
stop = int(stop_raw.strip("$ "), 16)
sys.stderr.write("Use: $%x-$%x" % (start, stop))
return (start, stop)
def get_cli_args():
parser = argparse.ArgumentParser(description="Filter Xroar traces")
parser.add_argument("infile", nargs="?",
type=argparse.FileType("r"),
default=sys.stdin,
help="Xroar trace file or stdin"
)
parser.add_argument("outfile", nargs="?",
type=argparse.FileType("w"),
default=sys.stdout,
help="If given: write output in a new file else: Display it."
)
parser.add_argument("--display", metavar="MAX",
type=int, default=argparse.SUPPRESS,
nargs="?",
help="Display statistics how often a address is called.",
)
parser.add_argument("--filter", metavar="MAX",
type=int,
nargs="?",
help="Filter the trace: skip addresses that called more than given count.",
)
parser.add_argument("--unique",
action="store_true",
help="Read infile and store in outfile only unique addresses.",
)
parser.add_argument("--loop-filter", metavar="FILENAME",
type=argparse.FileType("r"),
nargs="?",
help="Live Filter with given address file.",
)
parser.add_argument("--start-stop", metavar="START-STOP",
type=start_stop_value,
nargs="?",
help="Enable trace only from $START to $STOP e.g.: --area=$4000-$5000",
)
args = parser.parse_args()
return args
if __name__ == '__main__':
# sys.argv += ["--area=broken"]
# sys.argv += ["--area=1234-5678"]
args = get_cli_args()
main(args)<|fim▁end|> | return
if args.start_stop: |
<|file_name|>driver_test.go<|end_file_name|><|fim▁begin|>// Copyright 2015 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package opentsdb
import (
"fmt"
"net/url"
"testing"
"time"
opentsdb "github.com/bluebreezecf/opentsdb-goclient/client"
opentsdbcfg "github.com/bluebreezecf/opentsdb-goclient/config"
"github.com/stretchr/testify/assert"
"k8s.io/heapster/extpoints"
sink_api "k8s.io/heapster/sinks/api"
sink_util "k8s.io/heapster/sinks/util"
kube_api "k8s.io/kubernetes/pkg/api"
kube_api_unv "k8s.io/kubernetes/pkg/api/unversioned"
"k8s.io/kubernetes/pkg/types"
)
var (
fakeOpenTSDBHost = "192.168.1.8:823"
fakeNodeIp = "192.168.1.23"
fakePodName = "redis-test"
fakePodUid = "redis-test-uid"
fakeLabel = map[string]string{
"name": "redis",
"io.kubernetes.pod.name": "default/redis-test",
"pod_id": fakePodUid,
"pod_namespace": "default",
"pod_name": fakePodName,
"container_name": "redis",
"container_base_image": "kubernetes/redis:v1",
"namespace_id": "namespace-test-uid",
"host_id": fakeNodeIp,
}
	errorPingFailed = fmt.Errorf("Failed to connect to the target opentsdb.")
	errorPutFailed = fmt.Errorf("The target opentsdb returned an error and failed to store the datapoints.")
)
type fakeOpenTSDBClient struct {
successfulPing bool
successfulPut bool
receivedDataPoints []opentsdb.DataPoint
}
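// Ping reports success or the canned ping error, depending on how the fake
// client was configured.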
func (client *fakeOpenTSDBClient) Ping() error {
if client.successfulPing {
return nil
}
return errorPingFailed
}
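// Put records the datapoints it receives and fakes a successful OpenTSDB
// response, or fails with the canned put error.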
func (client *fakeOpenTSDBClient) Put(datapoints []opentsdb.DataPoint, queryParam string) (*opentsdb.PutResponse, error) {
if !client.successfulPut {
return nil, errorPutFailed
}
client.receivedDataPoints = append(client.receivedDataPoints, datapoints...)
putRes := opentsdb.PutResponse{
StatusCode: 200,
Failed: 0,
Success: int64(len(datapoints)),
}
return &putRes, nil
}
type fakeOpenTSDBSink struct {
sink_api.ExternalSink
fakeClient *fakeOpenTSDBClient
}
func NewFakeOpenTSDBSink(successfulPing, successfulPut bool) fakeOpenTSDBSink {
client := &fakeOpenTSDBClient{
successfulPing: successfulPing,
successfulPut: successfulPut,
}
cfg := opentsdbcfg.OpenTSDBConfig{OpentsdbHost: fakeOpenTSDBHost}
return fakeOpenTSDBSink{
&openTSDBSink{
client: client,
config: cfg,
ci: sink_util.NewClientInitializer("test", func() error { return nil }, func() error { return nil }, time.Millisecond),
},
client,
}
}
func TestStoreTimeseriesNilInput(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(true, true)
err := fakeSink.StoreTimeseries(nil)
assert.NoError(t, err)
assert.Equal(t, 0, len(fakeSink.fakeClient.receivedDataPoints))
}
func TestStoreTimeseriesEmptyInput(t *testing.T) {<|fim▁hole|>}
func TestStoreTimeseriesWithPingFailed(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(false, true)
seriesList := generateFakeTimeseriesList()
err := fakeSink.StoreTimeseries(seriesList)
assert.Equal(t, err, errorPingFailed)
assert.Equal(t, 0, len(fakeSink.fakeClient.receivedDataPoints))
}
func TestStoreTimeseriesWithPutFailed(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(true, false)
seriesList := generateFakeTimeseriesList()
err := fakeSink.StoreTimeseries(seriesList)
assert.Equal(t, err, errorPutFailed)
assert.Equal(t, 0, len(fakeSink.fakeClient.receivedDataPoints))
}
func TestStoreTimeseriesSingleTimeserieInput(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(true, true)
seriesName := "cpu/limit"
series := generateFakeTimeseries(seriesName, sink_api.MetricGauge, sink_api.UnitsCount, 1000)
//Without any labels
series.Point.Labels = map[string]string{}
seriesList := []sink_api.Timeseries{series}
err := fakeSink.StoreTimeseries(seriesList)
assert.NoError(t, err)
assert.Equal(t, 1, len(fakeSink.fakeClient.receivedDataPoints))
assert.Equal(t, "cpu_limit_gauge", fakeSink.fakeClient.receivedDataPoints[0].Metric)
//tsdbSink.secureTags() add a default tag key and value pair
assert.Equal(t, 1, len(fakeSink.fakeClient.receivedDataPoints[0].Tags))
assert.Equal(t, defaultTagValue, fakeSink.fakeClient.receivedDataPoints[0].Tags[defaultTagName])
}
func TestStoreTimeseriesMultipleTimeseriesInput(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(true, true)
seriesList := generateFakeTimeseriesList()
err := fakeSink.StoreTimeseries(seriesList)
assert.NoError(t, err)
assert.Equal(t, len(seriesList), len(fakeSink.fakeClient.receivedDataPoints))
}
func TestStoreEventsNilInput(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(true, true)
err := fakeSink.StoreEvents(nil)
assert.NoError(t, err)
assert.Equal(t, 0, len(fakeSink.fakeClient.receivedDataPoints))
}
func TestStoreEventsEmptyInput(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(true, true)
err := fakeSink.StoreEvents([]kube_api.Event{})
assert.NoError(t, err)
assert.Equal(t, 0, len(fakeSink.fakeClient.receivedDataPoints))
}
func TestStoreEventsWithPingFailed(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(false, true)
err := fakeSink.StoreEvents(generateFakeEvents())
assert.Equal(t, err, errorPingFailed)
assert.Equal(t, 0, len(fakeSink.fakeClient.receivedDataPoints))
}
func TestStoreEventsWithPutFailed(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(true, false)
err := fakeSink.StoreEvents(generateFakeEvents())
assert.Equal(t, err, errorPutFailed)
assert.Equal(t, 0, len(fakeSink.fakeClient.receivedDataPoints))
}
func TestStoreEventsSingleEventInput(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(true, true)
eventTime := kube_api_unv.Unix(12345, 0)
eventSourceHostname := fakeNodeIp
eventReason := "created"
involvedObject := kube_api.ObjectReference{
Kind: "Pod",
Name: fakePodName,
UID: types.UID(fakePodUid),
Namespace: "default",
}
events := []kube_api.Event{
{
Reason: eventReason,
LastTimestamp: eventTime,
Source: kube_api.EventSource{
Host: fakeNodeIp,
},
InvolvedObject: involvedObject,
},
}
err := fakeSink.StoreEvents(events)
assert.NoError(t, err)
assert.Equal(t, 1, len(fakeSink.fakeClient.receivedDataPoints))
assert.Equal(t, eventMetricName, fakeSink.fakeClient.receivedDataPoints[0].Metric)
assert.Equal(t, 4, len(fakeSink.fakeClient.receivedDataPoints[0].Tags))
assert.Equal(t, eventTime.Time.Unix(), fakeSink.fakeClient.receivedDataPoints[0].Timestamp)
assert.Equal(t, fakePodUid, fakeSink.fakeClient.receivedDataPoints[0].Tags["pod_id"])
assert.Equal(t, eventSourceHostname, fakeSink.fakeClient.receivedDataPoints[0].Tags[sink_api.LabelHostname.Key])
assert.Contains(t, fakeSink.fakeClient.receivedDataPoints[0].Value, eventReason)
}
func TestStoreEventsMultipleEventsInput(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(true, true)
event1Time := kube_api_unv.Unix(12345, 0)
event2Time := kube_api_unv.Unix(12366, 0)
event1SourceHostname := "event1HostName"
event2SourceHostname := "event2HostName"
event1Reason := "event1"
event2Reason := "event2"
events := []kube_api.Event{
{
Reason: event1Reason,
LastTimestamp: event1Time,
Source: kube_api.EventSource{
Host: event1SourceHostname,
},
},
{
Reason: event2Reason,
LastTimestamp: event2Time,
Source: kube_api.EventSource{
Host: event2SourceHostname,
},
},
}
err := fakeSink.StoreEvents(events)
assert.NoError(t, err)
assert.Equal(t, 2, len(fakeSink.fakeClient.receivedDataPoints))
assert.Equal(t, eventMetricName, fakeSink.fakeClient.receivedDataPoints[0].Metric)
assert.Equal(t, 2, len(fakeSink.fakeClient.receivedDataPoints[0].Tags))
assert.Equal(t, event1Time.Time.Unix(), fakeSink.fakeClient.receivedDataPoints[0].Timestamp)
assert.Equal(t, "", fakeSink.fakeClient.receivedDataPoints[0].Tags["pod_id"])
assert.Equal(t, event1SourceHostname, fakeSink.fakeClient.receivedDataPoints[0].Tags[sink_api.LabelHostname.Key])
assert.Contains(t, fakeSink.fakeClient.receivedDataPoints[0].Value, event1Reason)
assert.Equal(t, eventMetricName, fakeSink.fakeClient.receivedDataPoints[1].Metric)
assert.Equal(t, 2, len(fakeSink.fakeClient.receivedDataPoints[1].Tags))
assert.Equal(t, event2Time.Time.Unix(), fakeSink.fakeClient.receivedDataPoints[1].Timestamp)
assert.Equal(t, "", fakeSink.fakeClient.receivedDataPoints[1].Tags["pod_id"])
assert.Equal(t, event2SourceHostname, fakeSink.fakeClient.receivedDataPoints[1].Tags[sink_api.LabelHostname.Key])
assert.Contains(t, fakeSink.fakeClient.receivedDataPoints[1].Value, event2Reason)
}
func TestRegister(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(true, true)
err := fakeSink.Register([]sink_api.MetricDescriptor{})
assert.NoError(t, err)
assert.Nil(t, err)
}
func TestUnregister(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(true, true)
err := fakeSink.Unregister([]sink_api.MetricDescriptor{})
assert.NoError(t, err)
assert.Nil(t, err)
}
func TestName(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(true, true)
name := fakeSink.Name()
assert.Equal(t, name, opentsdbSinkName)
}
func TestDebugInfo(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(true, true)
debugInfo := fakeSink.DebugInfo()
assert.Contains(t, debugInfo, "Sink Type: OpenTSDB")
assert.Contains(t, debugInfo, "client: Host "+fakeOpenTSDBHost)
assert.Contains(t, debugInfo, "Number of write failures:")
}
func TestCreateOpenTSDBSinkWithEmptyInputs(t *testing.T) {
extSinks, err := CreateOpenTSDBSink(&url.URL{}, extpoints.HeapsterConf{})
assert.NoError(t, err)
assert.NotNil(t, extSinks)
assert.Equal(t, 1, len(extSinks))
tsdbSink, ok := extSinks[0].(*openTSDBSink)
assert.Equal(t, true, ok)
assert.Equal(t, defaultOpentsdbHost, tsdbSink.config.OpentsdbHost)
}
func TestCreateOpenTSDBSinkWithNoEmptyInputs(t *testing.T) {
fakeOpentsdbHost := "192.168.8.23:4242"
extSinks, err := CreateOpenTSDBSink(&url.URL{Host: fakeOpentsdbHost}, extpoints.HeapsterConf{})
assert.NoError(t, err)
assert.NotNil(t, extSinks)
assert.Equal(t, 1, len(extSinks))
tsdbSink, ok := extSinks[0].(*openTSDBSink)
assert.Equal(t, true, ok)
assert.Equal(t, fakeOpentsdbHost, tsdbSink.config.OpentsdbHost)
}
func generateFakeEvents() []kube_api.Event {
event1Time := kube_api_unv.Unix(12345, 0)
event2Time := kube_api_unv.Unix(12366, 0)
event1SourceHostname := "event1HostName"
event2SourceHostname := "event2HostName"
event1Reason := "event1"
event2Reason := "event2"
events := []kube_api.Event{
{
Reason: event1Reason,
LastTimestamp: event1Time,
Source: kube_api.EventSource{
Host: event1SourceHostname,
},
},
{
Reason: event2Reason,
LastTimestamp: event2Time,
Source: kube_api.EventSource{
Host: event2SourceHostname,
},
},
}
return events
}
func generateFakeTimeseriesList() []sink_api.Timeseries {
timeseriesList := make([]sink_api.Timeseries, 0)
series := generateFakeTimeseries("cpu/limit", sink_api.MetricGauge, sink_api.UnitsCount, 1000)
timeseriesList = append(timeseriesList, series)
series = generateFakeTimeseries("cpu/usage", sink_api.MetricCumulative, sink_api.UnitsNanoseconds, 43363664)
timeseriesList = append(timeseriesList, series)
series = generateFakeTimeseries("filesystem/limit", sink_api.MetricGauge, sink_api.UnitsBytes, 42241163264)
timeseriesList = append(timeseriesList, series)
series = generateFakeTimeseries("filesystem/usage", sink_api.MetricGauge, sink_api.UnitsBytes, 32768)
timeseriesList = append(timeseriesList, series)
series = generateFakeTimeseries("memory/limit", sink_api.MetricGauge, sink_api.UnitsBytes, -1)
timeseriesList = append(timeseriesList, series)
series = generateFakeTimeseries("memory/usage", sink_api.MetricGauge, sink_api.UnitsBytes, 487424)
timeseriesList = append(timeseriesList, series)
series = generateFakeTimeseries("memory/working_set", sink_api.MetricGauge, sink_api.UnitsBytes, 491520)
timeseriesList = append(timeseriesList, series)
series = generateFakeTimeseries("uptime", sink_api.MetricCumulative, sink_api.UnitsMilliseconds, 910823)
timeseriesList = append(timeseriesList, series)
return timeseriesList
}
func generateFakeTimeseries(name string, metricType sink_api.MetricType, metricUnits sink_api.MetricUnitsType, value interface{}) sink_api.Timeseries {
end := time.Now()
	start := end.Add(-10) // 10ns before end; the tests only need start < end
point := sink_api.Point{
Name: name,
Labels: fakeLabel,
Value: value,
Start: start,
End: end,
}
metricDesc := sink_api.MetricDescriptor{
Type: metricType,
Units: metricUnits,
}
series := sink_api.Timeseries{
Point: &point,
MetricDescriptor: &metricDesc,
}
return series
}<|fim▁end|> | fakeSink := NewFakeOpenTSDBSink(true, true)
err := fakeSink.StoreTimeseries([]sink_api.Timeseries{})
assert.NoError(t, err)
assert.Equal(t, 0, len(fakeSink.fakeClient.receivedDataPoints)) |
<|file_name|>test_featurecursor.py<|end_file_name|><|fim▁begin|>import pytest
from roam_tests import objects
from roam.infodock import FeatureCursor, NoFeature
layer = objects.newmemorylayer()
layer = objects.addfeaturestolayer(layer, 2)
features = layer.getFeatures()
featureone = next(features)
featuretwo = next(features)
@pytest.fixture
def cursor():
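    """A FeatureCursor over the two features added to the memory layer."""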
return FeatureCursor(layer=layer, features=[featureone, featuretwo])
def test_should_start_at_index_0(cursor):
assert cursor.index == 0
def test_next_should_move_index(cursor):
cursor.next()
assert cursor.index == 1
<|fim▁hole|> cursor.next()
assert cursor.index == 0
def test_back_should_wrap_to_end_when_on_first(cursor):
last = len(cursor.features) - 1
assert cursor.index == 0
cursor.back()
assert cursor.index == last
def test_should_return_feature_at_index(cursor):
assert cursor.feature.id() == featureone.id()
cursor.next()
assert cursor.feature.id() == featuretwo.id()
def test_should_raise_no_feature_on_invalid_index(cursor):
cursor.index = 99
with pytest.raises(NoFeature):
cursor.feature<|fim▁end|> | def test_next_should_wrap_to_start_when_on_last(cursor):
last = len(cursor.features) - 1
cursor.index = last
assert cursor.index == last |
<|file_name|>styles.js<|end_file_name|><|fim▁begin|>import styled from 'styled-components';
export const Container = styled.div`
width: 100%;
height: 100%;
`;
export const EditorContainer = styled.div`
${''/* padding: 30px 30px; */}
width: 100%;
box-sizing: border-box;
position: relative;
font-family: 'Proxima-Nova', 'helvetica', 'arial';
font-size: 21px;
color: #131517;
font-weight: 300;
line-height: 1.54;
& h1 {
font-size: 48px;
font-weight: bold;
letter-spacing: -.024em;
line-height: 1.18;
margin-bottom: 20px;
color: #131517;
}
& h2 {
font-size: 28px;
font-weight: normal;
letter-spacing: -.008em;
line-height: 1.24;
margin-bottom: 20px;
color: #797C80;
}<|fim▁hole|> ${''/* list-style: none; */}
}
& ol {
padding-left: 24px;
${''/* list-style: none; */}
}
& li {
font-size: 21px;
    line-height: 1.78;
  }
  ::selection {
background-color: #B1DFCB;
}
`;<|fim▁end|> | & ul {
padding-left: 24px; |
<|file_name|>pyunit_h2oH2OFrame_ascharacter.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
import sys
sys.path.insert(1,"../../../")
import h2o
from tests import pyunit_utils
from h2o.utils.typechecks import assert_is_type
from h2o.frame import H2OFrame
def h2o_H2OFrame_ascharacter():
"""
Python API test: h2o.frame.H2OFrame.ascharacter()
Copied from pyunit_ascharacter.py
"""
h2oframe = h2o.import_file(path=pyunit_utils.locate("smalldata/junit/cars.csv"))<|fim▁hole|> assert_is_type(newFrame, H2OFrame)
assert newFrame.isstring()[0], "h2o.H2OFrame.ascharacter() command is not working."
if __name__ == "__main__":
    pyunit_utils.standalone_test(h2o_H2OFrame_ascharacter)
else:
h2o_H2OFrame_ascharacter()<|fim▁end|> | newFrame = h2oframe['cylinders'].ascharacter()
|
<|file_name|>create_bidder_mutation.ts<|end_file_name|><|fim▁begin|>import { GraphQLNonNull, GraphQLString } from "graphql"
import { mutationWithClientMutationId } from "graphql-relay"
import Bidder from "schema/v2/bidder"
import { ResolverContext } from "types/graphql"
export default mutationWithClientMutationId<any, any, ResolverContext>({
name: "CreateBidder",<|fim▁hole|> description: "Create a bidder",
inputFields: {
saleID: {
type: new GraphQLNonNull(GraphQLString),
},
} /*
FIXME: Generated by the snake_case to camelCase codemod.
Either use this to fix inputs and/or remove this comment.
{
const {
saleID
} = newFields;
const oldFields = {
saleID: sale_id
};
}
*/,
outputFields: {
bidder: {
type: Bidder.type,
resolve: (bidder) => bidder,
},
},
mutateAndGetPayload: ({ saleID: sale_id }, { createBidderLoader }) => {
if (!createBidderLoader) {
return new Error("You need to be signed in to perform this action")
}
return createBidderLoader({ sale_id })
},
})<|fim▁end|> | |
<|file_name|>20.d.ts<|end_file_name|><|fim▁begin|>import * as React from "react";
import { CarbonIconProps } from "../../../";
declare const WatsonHealthNominate20: React.ForwardRefExoticComponent<
CarbonIconProps & React.RefAttributes<SVGSVGElement><|fim▁hole|><|fim▁end|> | >;
export default WatsonHealthNominate20; |
<|file_name|>secondView.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>import {Content} from "./content";
export class SecondView extends Content {
constructor() {
super("View #2", `
<div>
<p>This is view #2</p>
</div>
`);
}
}<|fim▁end|> | |
<|file_name|>venn-diagram.component.ts<|end_file_name|><|fim▁begin|>import { Component, Input, OnInit, NgZone } from '@angular/core';
// import model classes
import {Diagram} from '../models/diagram';
import {Topic} from '../models/topic';
@Component({
selector: 'venn-diagram',
templateUrl: './venn-diagram.component.html',
styleUrls: ['./venn-diagram.component.css']
})
export class VennDiagramComponent implements OnInit {
// Venn diagram data model instance
@Input()
diagram: Diagram = new Diagram('Data Science');
// Venn diagram svg
svg = null;
// diagram layouts: circles, ellipses, random
layout = 'circles'; // default
  // svg view settings
width = 540;
height = 540;
minSize = 300;
margin = {top: 30, right: 10, bottom: 10, left: 10};
radius = 100; // main circle radius
/**
* Creates new instance of Venn Diagram component.
* @param ngZone NgZone for scaling diagram svg on app window resize.
*/
constructor(private ngZone: NgZone) {
window.onresize = (windowResizeEvent) => {
ngZone.run( () => {
console.log(`VennDiagram::onWindowResize: w=${window.innerWidth} h=${window.innerHeight}`);
// TODO: scale svg to new window size
});
};
}
/**
* Initializes svg element reference, etc. on angular component init.
*/
ngOnInit() {
// get diagram svg reference
this.svg = document.querySelector('#svg');
}
/**
* Changes diagram segments shape and layout.
*
* @param layoutType Selected layout type: circles, ellipses, or random.
*/
changeLayout(layoutType: string) {
this.layout = layoutType;
this.updateSegments(this.diagram);
}
/**
* Updates actual Venn diagram segments SVG on diagram model changes.
*
* @param diagram Diagram model instance to use for SVG view update.
*/
updateSegments(diagram: Diagram) {
console.log(`VennDiagram::updateSegments: updating ${diagram.name} svg view`);
// loop through diagram topics and create svg view segments for display
for (let i = 0; i < diagram.topics.length; i++) {
// calculate segment element placement angle
// note: for semi-circle use (i/diagram.topics.length)
const angle: number = (i / (diagram.topics.length / 2)) * Math.PI - Math.PI / 2; // start at 90
// calculate x and y position of the segment element
const topic: Topic = diagram.topics[i];
topic.cx = (this.radius * Math.cos(angle)) + (this.width / 2);
topic.cy = (this.radius * Math.sin(angle)) + (this.width / 2);
// calculate segment text label position offset
topic.textX = ((this.radius + 70) * Math.cos(angle)) + (this.width / 2);
topic.textY = ((this.radius + 70) * Math.sin(angle)) + (this.width / 2);
// adjust segment radius for diagram intersection overlap
topic.rx = this.radius + 40;
switch (this.layout) {
case 'ellipses':
// adjust segment Y radius
topic.ry = this.radius - 40;
// rotate label/ellipse angle
topic.rotateAngle = (360 / diagram.topics.length * i - 90) % 180;
break;
case 'random':
// random placement, sizing, and rotation just for fun
topic.cx = this.radius/2 + (Math.random() * (this.width - this.radius - 40));
topic.cy = this.radius/2 + (Math.random() * (this.width - this.radius - 40));
topic.rx = this.radius - (Math.random() * 40);
topic.ry = this.radius - (Math.random() * (this.radius / 4 * 3));
topic.rotateAngle = (i % 4) * 45 - 90; // 0 to 180 in 45 degrees increments
topic.textX = topic.cx;
topic.textY = topic.cy;
break;
default: // circles
topic.ry = this.radius + 40;
topic.rotateAngle = 0;
break;
}
} // end of for loop
} // end of updateSegments(diagram)
/**
* Exports current diagram view SVG via blob data download link trick.
*/
exportSvg() {
console.log(`VennDiagram::exportSvg: ${this.diagram.name}-VennDiagram.svg`);
// get svg content
const svgData = this.svg.outerHTML;
// create svg blob
const svgBlob = new Blob([svgData], {type: 'image/svg+xml;charset=utf-8'});
<|fim▁hole|> // create svg blobl url
const svgBlobUrl = URL.createObjectURL(svgBlob);
// download svg
this.downloadBlob(svgBlobUrl, `${this.diagram.name}-VennDiagram.svg`);
}
/**
* Saves current diagram view image in png format
* via svg blob to canvas image export trick.
*/
saveAsPng() {
const fileName = `${this.diagram.name}-VennDiagram.png`;
console.log(`VennDiagram::saveAsPng: ${fileName}`);
console.log(this.svg.viewBox.baseVal); // .getBBox());
// create canvas for generating image data
const canvas = document.createElement('canvas');
const bbox = this.svg.viewBox.baseVal; // .getBBox();
canvas.width = bbox.width;
canvas.height = bbox.height;
const canvasContext = canvas.getContext('2d');
canvasContext.clearRect(0, 0, bbox.width, bbox.height);
// get svg content
const svgData = this.svg.outerHTML;
// create svg blob
const svgBlob = new Blob([svgData], {type: 'image/svg+xml;charset=utf-8'});
    // create svg blob url
const svgBlobUrl = URL.createObjectURL(svgBlob);
// create diagram image
const image = new Image();
// load and save diagram image
const download = this.downloadBlob;
image.onload = function () {
// draw loaded image on canvas
canvasContext.drawImage(image, 0, 0);
URL.revokeObjectURL(svgBlobUrl);
if (typeof navigator !== 'undefined' && navigator.msSaveOrOpenBlob) {
const imageBlob = canvas.msToBlob();
navigator.msSaveOrOpenBlob(imageBlob, fileName);
} else {
const imageDataUrl = canvas.toDataURL('image/png')
.replace('image/png', 'image/octet-stream');
download(imageDataUrl, fileName);
}
// document.removeChild(canvas);
};
// trigger svg image load
image.src = svgBlobUrl;
} // end of saveAsPng()
/*--------------- Private Methods -----------------------*/
/**
* Downloads a blob for svg export and save as png.
* @param blobUrl Blob data url.
* @param fileName File name for saving blob data.
*/
private downloadBlob(blobUrl, fileName) {
// create download link
const downloadLink = document.createElement('a');
downloadLink.href = blobUrl;
downloadLink.download = fileName;
document.body.appendChild(downloadLink);
// download blob data
downloadLink.click();
// remove download link
document.body.removeChild(downloadLink);
}
}<|fim▁end|> | |
<|file_name|>test_ovs_neutron_agent.py<|end_file_name|><|fim▁begin|># Copyright (c) 2012 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import sys
import mock
import netaddr
from oslo.config import cfg
import testtools
from neutron.agent.linux import async_process
from neutron.agent.linux import ip_lib
from neutron.agent.linux import ovs_lib
from neutron.agent.linux import utils
from neutron.common import constants as n_const
from neutron.openstack.common import log
from neutron.plugins.common import constants as p_const
from neutron.plugins.openvswitch.agent import ovs_neutron_agent
from neutron.plugins.openvswitch.common import constants
from neutron.tests import base
NOTIFIER = ('neutron.plugins.openvswitch.'
'ovs_neutron_plugin.AgentNotifierApi')
OVS_LINUX_KERN_VERS_WITHOUT_VXLAN = "3.12.0"
FAKE_MAC = '00:11:22:33:44:55'
FAKE_IP1 = '10.0.0.1'
FAKE_IP2 = '10.0.0.2'
class CreateAgentConfigMap(base.BaseTestCase):
def test_create_agent_config_map_succeeds(self):
self.assertTrue(ovs_neutron_agent.create_agent_config_map(cfg.CONF))
def test_create_agent_config_map_fails_for_invalid_tunnel_config(self):
# An ip address is required for tunneling but there is no default,
# verify this for both gre and vxlan tunnels.
cfg.CONF.set_override('tunnel_types', [p_const.TYPE_GRE],
group='AGENT')
with testtools.ExpectedException(ValueError):
ovs_neutron_agent.create_agent_config_map(cfg.CONF)
cfg.CONF.set_override('tunnel_types', [p_const.TYPE_VXLAN],
group='AGENT')
with testtools.ExpectedException(ValueError):
ovs_neutron_agent.create_agent_config_map(cfg.CONF)
def test_create_agent_config_map_enable_tunneling(self):
# Verify setting only enable_tunneling will default tunnel_type to GRE
cfg.CONF.set_override('tunnel_types', None, group='AGENT')
cfg.CONF.set_override('enable_tunneling', True, group='OVS')
cfg.CONF.set_override('local_ip', '10.10.10.10', group='OVS')
cfgmap = ovs_neutron_agent.create_agent_config_map(cfg.CONF)
self.assertEqual(cfgmap['tunnel_types'], [p_const.TYPE_GRE])
def test_create_agent_config_map_fails_no_local_ip(self):
# An ip address is required for tunneling but there is no default
cfg.CONF.set_override('enable_tunneling', True, group='OVS')
with testtools.ExpectedException(ValueError):
ovs_neutron_agent.create_agent_config_map(cfg.CONF)
def test_create_agent_config_map_fails_for_invalid_tunnel_type(self):
cfg.CONF.set_override('tunnel_types', ['foobar'], group='AGENT')
with testtools.ExpectedException(ValueError):
ovs_neutron_agent.create_agent_config_map(cfg.CONF)
def test_create_agent_config_map_multiple_tunnel_types(self):
cfg.CONF.set_override('local_ip', '10.10.10.10', group='OVS')
cfg.CONF.set_override('tunnel_types', [p_const.TYPE_GRE,
p_const.TYPE_VXLAN], group='AGENT')
cfgmap = ovs_neutron_agent.create_agent_config_map(cfg.CONF)
self.assertEqual(cfgmap['tunnel_types'],
[p_const.TYPE_GRE, p_const.TYPE_VXLAN])
def test_create_agent_config_map_enable_distributed_routing(self):
self.addCleanup(cfg.CONF.reset)
# Verify setting only enable_tunneling will default tunnel_type to GRE
cfg.CONF.set_override('enable_distributed_routing', True,
group='AGENT')
cfgmap = ovs_neutron_agent.create_agent_config_map(cfg.CONF)
self.assertEqual(cfgmap['enable_distributed_routing'], True)
class TestOvsNeutronAgent(base.BaseTestCase):
def setUp(self):
super(TestOvsNeutronAgent, self).setUp()
notifier_p = mock.patch(NOTIFIER)
notifier_cls = notifier_p.start()
self.notifier = mock.Mock()<|fim▁hole|> 'neutron.agent.firewall.NoopFirewallDriver',
group='SECURITYGROUP')
kwargs = ovs_neutron_agent.create_agent_config_map(cfg.CONF)
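        # Stand-in for FixedIntervalLoopingCall that invokes the callback
        # synchronously, so setUp never spawns a real looping timer.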
class MockFixedIntervalLoopingCall(object):
def __init__(self, f):
self.f = f
def start(self, interval=0):
self.f()
with contextlib.nested(
mock.patch('neutron.plugins.openvswitch.agent.ovs_neutron_agent.'
'OVSNeutronAgent.setup_integration_br',
return_value=mock.Mock()),
mock.patch('neutron.plugins.openvswitch.agent.ovs_neutron_agent.'
'OVSNeutronAgent.setup_ancillary_bridges',
return_value=[]),
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'create'),
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'set_secure_mode'),
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'get_local_port_mac',
return_value='00:00:00:00:00:01'),
mock.patch('neutron.agent.linux.utils.get_interface_mac',
return_value='00:00:00:00:00:01'),
mock.patch('neutron.agent.linux.ovs_lib.'
'get_bridges'),
mock.patch('neutron.openstack.common.loopingcall.'
'FixedIntervalLoopingCall',
new=MockFixedIntervalLoopingCall)):
self.agent = ovs_neutron_agent.OVSNeutronAgent(**kwargs)
self.agent.tun_br = mock.Mock()
self.agent.sg_agent = mock.Mock()
def _mock_port_bound(self, ofport=None, new_local_vlan=None,
old_local_vlan=None):
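        """Bind a fake port and verify the tag update and flow deletion."""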
port = mock.Mock()
port.ofport = ofport
net_uuid = 'my-net-uuid'
fixed_ips = [{'subnet_id': 'my-subnet-uuid',
'ip_address': '1.1.1.1'}]
if old_local_vlan is not None:
self.agent.local_vlan_map[net_uuid] = (
ovs_neutron_agent.LocalVLANMapping(
old_local_vlan, None, None, None))
with contextlib.nested(
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'set_db_attribute', return_value=True),
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'db_get_val', return_value=str(old_local_vlan)),
mock.patch.object(self.agent.int_br, 'delete_flows')
) as (set_ovs_db_func, get_ovs_db_func, delete_flows_func):
self.agent.port_bound(port, net_uuid, 'local', None, None,
fixed_ips, "compute:None", False)
get_ovs_db_func.assert_called_once_with("Port", mock.ANY, "tag")
if new_local_vlan != old_local_vlan:
set_ovs_db_func.assert_called_once_with(
"Port", mock.ANY, "tag", str(new_local_vlan))
if ofport != -1:
delete_flows_func.assert_called_once_with(in_port=port.ofport)
else:
self.assertFalse(delete_flows_func.called)
else:
self.assertFalse(set_ovs_db_func.called)
self.assertFalse(delete_flows_func.called)
def _setup_for_dvr_test(self, ofport=10):
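        """Prime the agent and its DVR helper with fake ports and state."""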
self._port = mock.Mock()
self._port.ofport = ofport
self._port.vif_id = "1234-5678-90"
self.agent.enable_distributed_routing = True
self.agent.enable_tunneling = True
self.agent.patch_tun_ofport = 1
self.agent.patch_int_ofport = 2
self.agent.dvr_agent.local_ports = {}
self.agent.local_vlan_map = {}
self.agent.dvr_agent.enable_distributed_routing = True
self.agent.dvr_agent.enable_tunneling = True
self.agent.dvr_agent.patch_tun_ofport = 1
self.agent.dvr_agent.patch_int_ofport = 2
self.agent.dvr_agent.tun_br = mock.Mock()
self.agent.dvr_agent.local_dvr_map = {}
self.agent.dvr_agent.registered_dvr_macs = set()
self.agent.dvr_agent.dvr_mac_address = 'aa:22:33:44:55:66'
self._net_uuid = 'my-net-uuid'
self._fixed_ips = [{'subnet_id': 'my-subnet-uuid',
'ip_address': '1.1.1.1'}]
self._compute_port = mock.Mock()
self._compute_port.ofport = 20
self._compute_port.vif_id = "1234-5678-91"
self._old_local_vlan = None
self._compute_fixed_ips = [{'subnet_id': 'my-subnet-uuid',
'ip_address': '1.1.1.3'}]
def test_port_bound_deletes_flows_for_valid_ofport(self):
self._mock_port_bound(ofport=1, new_local_vlan=1)
def test_port_bound_ignores_flows_for_invalid_ofport(self):
self._mock_port_bound(ofport=-1, new_local_vlan=1)
def test_port_bound_does_not_rewire_if_already_bound(self):
self._mock_port_bound(ofport=-1, new_local_vlan=1, old_local_vlan=1)
def test_port_bound_for_dvr_interface(self, ofport=10):
self._setup_for_dvr_test()
with mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'set_db_attribute',
return_value=True):
with contextlib.nested(
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'db_get_val',
return_value=str(self._old_local_vlan)),
mock.patch.object(
self.agent.dvr_agent.plugin_rpc, 'get_subnet_for_dvr',
return_value={'gateway_ip': '1.1.1.1',
'cidr': '1.1.1.0/24',
'gateway_mac': 'aa:bb:cc:11:22:33'}),
mock.patch.object(self.agent.dvr_agent.plugin_rpc,
'get_ports_on_host_by_subnet',
return_value=[]),
mock.patch.object(self.agent.dvr_agent.int_br,
'get_vif_port_by_id',
return_value=self._port),
mock.patch.object(self.agent.dvr_agent.int_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.int_br, 'delete_flows'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'delete_flows')
) as (get_ovs_db_func, get_subnet_fn, get_cphost_fn,
get_vif_fn, add_flow_int_fn, delete_flows_int_fn,
add_flow_tun_fn, delete_flows_tun_fn):
self.agent.port_bound(
self._port, self._net_uuid, 'vxlan',
None, None, self._fixed_ips,
n_const.DEVICE_OWNER_DVR_INTERFACE,
False)
self.assertTrue(add_flow_tun_fn.called)
self.assertTrue(delete_flows_int_fn.called)
def _test_port_bound_for_dvr(self, device_owner):
self._setup_for_dvr_test()
with mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'set_db_attribute',
return_value=True):
with contextlib.nested(
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'db_get_val',
return_value=str(self._old_local_vlan)),
mock.patch.object(self.agent.dvr_agent.plugin_rpc,
'get_subnet_for_dvr',
return_value={
'gateway_ip': '1.1.1.1',
'cidr': '1.1.1.0/24',
'gateway_mac': 'aa:bb:cc:11:22:33'}),
mock.patch.object(self.agent.dvr_agent.plugin_rpc,
'get_ports_on_host_by_subnet',
return_value=[]),
mock.patch.object(self.agent.dvr_agent.int_br,
'get_vif_port_by_id',
return_value=self._port),
mock.patch.object(self.agent.dvr_agent.int_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.int_br, 'delete_flows'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'delete_flows')
) as (get_ovs_db_func, get_subnet_fn, get_cphost_fn,
get_vif_fn, add_flow_int_fn, delete_flows_int_fn,
add_flow_tun_fn, delete_flows_tun_fn):
self.agent.port_bound(
self._port, self._net_uuid, 'vxlan',
None, None, self._fixed_ips,
n_const.DEVICE_OWNER_DVR_INTERFACE,
False)
self.agent.port_bound(self._compute_port, self._net_uuid,
'vxlan', None, None,
self._compute_fixed_ips,
device_owner, False)
self.assertTrue(add_flow_tun_fn.called)
self.assertTrue(add_flow_int_fn.called)
self.assertTrue(delete_flows_int_fn.called)
def test_port_bound_for_dvr_with_compute_ports(self):
self._test_port_bound_for_dvr(device_owner="compute:None")
def test_port_bound_for_dvr_with_lbaas_vip_ports(self):
self._test_port_bound_for_dvr(
device_owner=n_const.DEVICE_OWNER_LOADBALANCER)
def test_port_bound_for_dvr_with_csnat_ports(self, ofport=10):
self._setup_for_dvr_test()
with mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'set_db_attribute',
return_value=True):
with contextlib.nested(
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'db_get_val',
return_value=str(self._old_local_vlan)),
mock.patch.object(
self.agent.dvr_agent.plugin_rpc, 'get_subnet_for_dvr',
return_value={'gateway_ip': '1.1.1.1',
'cidr': '1.1.1.0/24',
'gateway_mac': 'aa:bb:cc:11:22:33'}),
mock.patch.object(self.agent.dvr_agent.plugin_rpc,
'get_ports_on_host_by_subnet',
return_value=[]),
mock.patch.object(self.agent.dvr_agent.int_br,
'get_vif_port_by_id',
return_value=self._port),
mock.patch.object(self.agent.dvr_agent.int_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.int_br, 'delete_flows'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'delete_flows')
) as (get_ovs_db_func, get_subnet_fn, get_cphost_fn,
get_vif_fn, add_flow_int_fn, delete_flows_int_fn,
add_flow_tun_fn, delete_flows_tun_fn):
self.agent.port_bound(
self._port, self._net_uuid, 'vxlan',
None, None, self._fixed_ips,
n_const.DEVICE_OWNER_ROUTER_SNAT,
False)
self.assertTrue(add_flow_int_fn.called)
self.assertTrue(delete_flows_int_fn.called)
def test_treat_devices_removed_for_dvr_interface(self, ofport=10):
self._setup_for_dvr_test()
with mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'set_db_attribute',
return_value=True):
with contextlib.nested(
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'db_get_val',
return_value=str(self._old_local_vlan)),
mock.patch.object(
self.agent.dvr_agent.plugin_rpc, 'get_subnet_for_dvr',
return_value={'gateway_ip': '1.1.1.1',
'cidr': '1.1.1.0/24',
'gateway_mac': 'aa:bb:cc:11:22:33'}),
mock.patch.object(self.agent.dvr_agent.plugin_rpc,
'get_ports_on_host_by_subnet',
return_value=[]),
mock.patch.object(self.agent.dvr_agent.int_br,
'get_vif_port_by_id',
return_value=self._port),
mock.patch.object(self.agent.dvr_agent.int_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.int_br, 'delete_flows'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'delete_flows')
) as (get_ovs_db_func, get_subnet_fn, get_cphost_fn,
get_vif_fn, add_flow_int_fn, delete_flows_int_fn,
add_flow_tun_fn, delete_flows_tun_fn):
self.agent.port_bound(
self._port, self._net_uuid, 'vxlan',
None, None, self._fixed_ips,
n_const.DEVICE_OWNER_DVR_INTERFACE,
False)
self.assertTrue(add_flow_tun_fn.called)
self.assertTrue(delete_flows_int_fn.called)
with contextlib.nested(
mock.patch.object(self.agent, 'reclaim_local_vlan'),
mock.patch.object(self.agent.plugin_rpc, 'update_device_down',
return_value=None),
mock.patch.object(self.agent.dvr_agent.int_br, 'delete_flows'),
mock.patch.object(self.agent.dvr_agent.tun_br,
'delete_flows')) as (reclaim_vlan_fn,
update_dev_down_fn,
delete_flows_int_fn,
delete_flows_tun_fn):
self.agent.treat_devices_removed([self._port.vif_id])
self.assertTrue(delete_flows_int_fn.called)
self.assertTrue(delete_flows_tun_fn.called)
def _test_treat_devices_removed_for_dvr(self, device_owner):
self._setup_for_dvr_test()
with mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'set_db_attribute',
return_value=True):
with contextlib.nested(
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'db_get_val',
return_value=str(self._old_local_vlan)),
mock.patch.object(
self.agent.dvr_agent.plugin_rpc, 'get_subnet_for_dvr',
return_value={'gateway_ip': '1.1.1.1',
'cidr': '1.1.1.0/24',
'gateway_mac': 'aa:bb:cc:11:22:33'}),
mock.patch.object(self.agent.dvr_agent.plugin_rpc,
'get_ports_on_host_by_subnet',
return_value=[]),
mock.patch.object(self.agent.dvr_agent.int_br,
'get_vif_port_by_id',
return_value=self._port),
mock.patch.object(self.agent.dvr_agent.int_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.int_br, 'delete_flows'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'delete_flows')
) as (get_ovs_db_func, get_subnet_fn, get_cphost_fn,
get_vif_fn, add_flow_int_fn, delete_flows_int_fn,
add_flow_tun_fn, delete_flows_tun_fn):
self.agent.port_bound(
self._port, self._net_uuid, 'vxlan',
None, None, self._fixed_ips,
n_const.DEVICE_OWNER_DVR_INTERFACE,
False)
self.agent.port_bound(self._compute_port,
self._net_uuid, 'vxlan',
None, None,
self._compute_fixed_ips,
device_owner, False)
self.assertTrue(add_flow_tun_fn.called)
self.assertTrue(add_flow_int_fn.called)
self.assertTrue(delete_flows_int_fn.called)
with contextlib.nested(
mock.patch.object(self.agent, 'reclaim_local_vlan'),
mock.patch.object(self.agent.plugin_rpc, 'update_device_down',
return_value=None),
mock.patch.object(self.agent.dvr_agent.int_br,
'delete_flows')) as (reclaim_vlan_fn,
update_dev_down_fn,
delete_flows_int_fn):
self.agent.treat_devices_removed([self._compute_port.vif_id])
self.assertTrue(delete_flows_int_fn.called)
def test_treat_devices_removed_for_dvr_with_compute_ports(self):
self._test_treat_devices_removed_for_dvr(device_owner="compute:None")
def test_treat_devices_removed_for_dvr_with_lbaas_vip_ports(self):
self._test_treat_devices_removed_for_dvr(
device_owner=n_const.DEVICE_OWNER_LOADBALANCER)
def test_treat_devices_removed_for_dvr_csnat_port(self, ofport=10):
self._setup_for_dvr_test()
with mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'set_db_attribute',
return_value=True):
with contextlib.nested(
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'db_get_val',
return_value=str(self._old_local_vlan)),
mock.patch.object(
self.agent.dvr_agent.plugin_rpc, 'get_subnet_for_dvr',
return_value={'gateway_ip': '1.1.1.1',
'cidr': '1.1.1.0/24',
'gateway_mac': 'aa:bb:cc:11:22:33'}),
mock.patch.object(self.agent.dvr_agent.plugin_rpc,
'get_ports_on_host_by_subnet',
return_value=[]),
mock.patch.object(self.agent.dvr_agent.int_br,
'get_vif_port_by_id',
return_value=self._port),
mock.patch.object(self.agent.dvr_agent.int_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.int_br, 'delete_flows'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'delete_flows')
) as (get_ovs_db_func, get_subnet_fn, get_cphost_fn,
get_vif_fn, add_flow_int_fn, delete_flows_int_fn,
add_flow_tun_fn, delete_flows_tun_fn):
self.agent.port_bound(
self._port, self._net_uuid, 'vxlan',
None, None, self._fixed_ips,
n_const.DEVICE_OWNER_ROUTER_SNAT,
False)
self.assertTrue(add_flow_int_fn.called)
self.assertTrue(delete_flows_int_fn.called)
with contextlib.nested(
mock.patch.object(self.agent, 'reclaim_local_vlan'),
mock.patch.object(self.agent.plugin_rpc, 'update_device_down',
return_value=None),
mock.patch.object(self.agent.dvr_agent.int_br,
'delete_flows')) as (reclaim_vlan_fn,
update_dev_down_fn,
delete_flows_int_fn):
self.agent.treat_devices_removed([self._port.vif_id])
self.assertTrue(delete_flows_int_fn.called)
def test_setup_dvr_flows_on_int_br(self):
self._setup_for_dvr_test()
with contextlib.nested(
mock.patch.object(
self.agent.dvr_agent.plugin_rpc,
'get_dvr_mac_address_by_host',
return_value={'host': 'cn1',
'mac_address': 'aa:bb:cc:dd:ee:ff'}),
mock.patch.object(self.agent.dvr_agent.int_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.int_br,
'remove_all_flows'),
mock.patch.object(
self.agent.dvr_agent.plugin_rpc,
'get_dvr_mac_address_list',
return_value=[{'host': 'cn1',
'mac_address': 'aa:bb:cc:dd:ee:ff'},
{'host': 'cn2',
'mac_address': '11:22:33:44:55:66'}])) as \
(get_subnet_fn, get_cphost_fn, get_vif_fn,
add_flow_fn, delete_flows_fn):
self.agent.dvr_agent.setup_dvr_flows_on_integ_tun_br()
def _test_port_dead(self, cur_tag=None):
port = mock.Mock()
port.ofport = 1
with contextlib.nested(
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'set_db_attribute', return_value=True),
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'db_get_val', return_value=cur_tag),
mock.patch.object(self.agent.int_br, 'add_flow')
) as (set_ovs_db_func, get_ovs_db_func, add_flow_func):
self.agent.port_dead(port)
get_ovs_db_func.assert_called_once_with("Port", mock.ANY, "tag")
if cur_tag == ovs_neutron_agent.DEAD_VLAN_TAG:
self.assertFalse(set_ovs_db_func.called)
self.assertFalse(add_flow_func.called)
else:
set_ovs_db_func.assert_called_once_with(
"Port", mock.ANY, "tag", str(ovs_neutron_agent.DEAD_VLAN_TAG))
add_flow_func.assert_called_once_with(
priority=2, in_port=port.ofport, actions="drop")
def test_port_dead(self):
self._test_port_dead()
def test_port_dead_with_port_already_dead(self):
self._test_port_dead(ovs_neutron_agent.DEAD_VLAN_TAG)
def mock_scan_ports(self, vif_port_set=None, registered_ports=None,
updated_ports=None, port_tags_dict=None):
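        """Run scan_ports with the int_br port getters stubbed out."""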
if port_tags_dict is None: # Because empty dicts evaluate as False.
port_tags_dict = {}
with contextlib.nested(
mock.patch.object(self.agent.int_br, 'get_vif_port_set',
return_value=vif_port_set),
mock.patch.object(self.agent.int_br, 'get_port_tag_dict',
return_value=port_tags_dict)
):
return self.agent.scan_ports(registered_ports, updated_ports)
def test_scan_ports_returns_current_only_for_unchanged_ports(self):
vif_port_set = set([1, 3])
registered_ports = set([1, 3])
expected = {'current': vif_port_set}
actual = self.mock_scan_ports(vif_port_set, registered_ports)
self.assertEqual(expected, actual)
def test_scan_ports_returns_port_changes(self):
vif_port_set = set([1, 3])
registered_ports = set([1, 2])
expected = dict(current=vif_port_set, added=set([3]), removed=set([2]))
actual = self.mock_scan_ports(vif_port_set, registered_ports)
self.assertEqual(expected, actual)
def _test_scan_ports_with_updated_ports(self, updated_ports):
vif_port_set = set([1, 3, 4])
registered_ports = set([1, 2, 4])
expected = dict(current=vif_port_set, added=set([3]),
removed=set([2]), updated=set([4]))
actual = self.mock_scan_ports(vif_port_set, registered_ports,
updated_ports)
self.assertEqual(expected, actual)
def test_scan_ports_finds_known_updated_ports(self):
self._test_scan_ports_with_updated_ports(set([4]))
def test_scan_ports_ignores_unknown_updated_ports(self):
# the port '5' was not seen on current ports. Hence it has either
# never been wired or already removed and should be ignored
self._test_scan_ports_with_updated_ports(set([4, 5]))
def test_scan_ports_ignores_updated_port_if_removed(self):
vif_port_set = set([1, 3])
registered_ports = set([1, 2])
updated_ports = set([1, 2])
expected = dict(current=vif_port_set, added=set([3]),
removed=set([2]), updated=set([1]))
actual = self.mock_scan_ports(vif_port_set, registered_ports,
updated_ports)
self.assertEqual(expected, actual)
def test_scan_ports_no_vif_changes_returns_updated_port_only(self):
vif_port_set = set([1, 2, 3])
registered_ports = set([1, 2, 3])
updated_ports = set([2])
expected = dict(current=vif_port_set, updated=set([2]))
actual = self.mock_scan_ports(vif_port_set, registered_ports,
updated_ports)
self.assertEqual(expected, actual)
def test_update_ports_returns_changed_vlan(self):
br = ovs_lib.OVSBridge('br-int', 'sudo')
mac = "ca:fe:de:ad:be:ef"
port = ovs_lib.VifPort(1, 1, 1, mac, br)
lvm = ovs_neutron_agent.LocalVLANMapping(
1, '1', None, 1, {port.vif_id: port})
local_vlan_map = {'1': lvm}
vif_port_set = set([1, 3])
registered_ports = set([1, 2])
port_tags_dict = {1: []}
expected = dict(
added=set([3]), current=vif_port_set,
removed=set([2]), updated=set([1])
)
with mock.patch.dict(self.agent.local_vlan_map, local_vlan_map):
actual = self.mock_scan_ports(
vif_port_set, registered_ports, port_tags_dict=port_tags_dict)
self.assertEqual(expected, actual)
def test_treat_devices_added_returns_raises_for_missing_device(self):
with contextlib.nested(
mock.patch.object(self.agent.plugin_rpc,
'get_devices_details_list',
side_effect=Exception()),
mock.patch.object(self.agent.int_br, 'get_vif_port_by_id',
return_value=mock.Mock())):
self.assertRaises(
ovs_neutron_agent.DeviceListRetrievalError,
self.agent.treat_devices_added_or_updated, [{}], False)
def _mock_treat_devices_added_updated(self, details, port, func_name):
"""Mock treat devices added or updated.
:param details: the details to return for the device
:param port: the port that get_vif_port_by_id should return
:param func_name: the function that should be called
:returns: whether the named function was called
"""
with contextlib.nested(
mock.patch.object(self.agent.plugin_rpc,
'get_devices_details_list',
return_value=[details]),
mock.patch.object(self.agent.int_br, 'get_vif_port_by_id',
return_value=port),
mock.patch.object(self.agent.plugin_rpc, 'update_device_up'),
mock.patch.object(self.agent.plugin_rpc, 'update_device_down'),
mock.patch.object(self.agent, func_name)
) as (get_dev_fn, get_vif_func, upd_dev_up, upd_dev_down, func):
skip_devs = self.agent.treat_devices_added_or_updated([{}], False)
# The function should not raise
self.assertFalse(skip_devs)
return func.called
def test_treat_devices_added_updated_ignores_invalid_ofport(self):
port = mock.Mock()
port.ofport = -1
self.assertFalse(self._mock_treat_devices_added_updated(
mock.MagicMock(), port, 'port_dead'))
def test_treat_devices_added_updated_marks_unknown_port_as_dead(self):
port = mock.Mock()
port.ofport = 1
self.assertTrue(self._mock_treat_devices_added_updated(
mock.MagicMock(), port, 'port_dead'))
def test_treat_devices_added_does_not_process_missing_port(self):
with contextlib.nested(
mock.patch.object(self.agent.plugin_rpc, 'get_device_details'),
mock.patch.object(self.agent.int_br, 'get_vif_port_by_id',
return_value=None)
) as (get_dev_fn, get_vif_func):
self.assertFalse(get_dev_fn.called)
def test_treat_devices_added_updated_updates_known_port(self):
details = mock.MagicMock()
details.__contains__.side_effect = lambda x: True
self.assertTrue(self._mock_treat_devices_added_updated(
details, mock.Mock(), 'treat_vif_port'))
def test_treat_devices_added_updated_skips_if_port_not_found(self):
dev_mock = mock.MagicMock()
dev_mock.__getitem__.return_value = 'the_skipped_one'
with contextlib.nested(
mock.patch.object(self.agent.plugin_rpc,
'get_devices_details_list',
return_value=[dev_mock]),
mock.patch.object(self.agent.int_br, 'get_vif_port_by_id',
return_value=None),
mock.patch.object(self.agent.plugin_rpc, 'update_device_up'),
mock.patch.object(self.agent.plugin_rpc, 'update_device_down'),
mock.patch.object(self.agent, 'treat_vif_port')
) as (get_dev_fn, get_vif_func, upd_dev_up,
upd_dev_down, treat_vif_port):
skip_devs = self.agent.treat_devices_added_or_updated([{}], False)
            # The missing port should be reported as skipped and no device
            # should be processed
self.assertEqual(['the_skipped_one'], skip_devs)
self.assertFalse(treat_vif_port.called)
self.assertFalse(upd_dev_down.called)
self.assertFalse(upd_dev_up.called)
def test_treat_devices_added_updated_put_port_down(self):
fake_details_dict = {'admin_state_up': False,
'port_id': 'xxx',
'device': 'xxx',
'network_id': 'yyy',
'physical_network': 'foo',
'segmentation_id': 'bar',
'network_type': 'baz',
'fixed_ips': [{'subnet_id': 'my-subnet-uuid',
'ip_address': '1.1.1.1'}],
'device_owner': 'compute:None'
}
with contextlib.nested(
mock.patch.object(self.agent.plugin_rpc,
'get_devices_details_list',
return_value=[fake_details_dict]),
mock.patch.object(self.agent.int_br, 'get_vif_port_by_id',
return_value=mock.MagicMock()),
mock.patch.object(self.agent.plugin_rpc, 'update_device_up'),
mock.patch.object(self.agent.plugin_rpc, 'update_device_down'),
mock.patch.object(self.agent, 'treat_vif_port')
) as (get_dev_fn, get_vif_func, upd_dev_up,
upd_dev_down, treat_vif_port):
skip_devs = self.agent.treat_devices_added_or_updated([{}], False)
# The function should return False for resync
self.assertFalse(skip_devs)
self.assertTrue(treat_vif_port.called)
self.assertTrue(upd_dev_down.called)
def test_treat_devices_removed_returns_true_for_missing_device(self):
with mock.patch.object(self.agent.plugin_rpc, 'update_device_down',
side_effect=Exception()):
self.assertTrue(self.agent.treat_devices_removed([{}]))
def _mock_treat_devices_removed(self, port_exists):
details = dict(exists=port_exists)
with mock.patch.object(self.agent.plugin_rpc, 'update_device_down',
return_value=details):
with mock.patch.object(self.agent, 'port_unbound') as port_unbound:
self.assertFalse(self.agent.treat_devices_removed([{}]))
self.assertTrue(port_unbound.called)
def test_treat_devices_removed_unbinds_port(self):
self._mock_treat_devices_removed(True)
def test_treat_devices_removed_ignores_missing_port(self):
self._mock_treat_devices_removed(False)
def _test_process_network_ports(self, port_info):
with contextlib.nested(
mock.patch.object(self.agent.sg_agent, "setup_port_filters"),
mock.patch.object(self.agent, "treat_devices_added_or_updated",
return_value=[]),
mock.patch.object(self.agent, "treat_devices_removed",
return_value=False)
) as (setup_port_filters, device_added_updated, device_removed):
self.assertFalse(self.agent.process_network_ports(port_info,
False))
setup_port_filters.assert_called_once_with(
port_info['added'], port_info.get('updated', set()))
device_added_updated.assert_called_once_with(
port_info['added'] | port_info.get('updated', set()), False)
device_removed.assert_called_once_with(port_info['removed'])
def test_process_network_ports(self):
self._test_process_network_ports(
{'current': set(['tap0']),
'removed': set(['eth0']),
'added': set(['eth1'])})
def test_process_network_port_with_updated_ports(self):
self._test_process_network_ports(
{'current': set(['tap0', 'tap1']),
'updated': set(['tap1', 'eth1']),
'removed': set(['eth0']),
'added': set(['eth1'])})
def test_report_state(self):
with mock.patch.object(self.agent.state_rpc,
"report_state") as report_st:
self.agent.int_br_device_count = 5
self.agent._report_state()
report_st.assert_called_with(self.agent.context,
self.agent.agent_state)
self.assertNotIn("start_flag", self.agent.agent_state)
self.assertEqual(
self.agent.agent_state["configurations"]["devices"],
self.agent.int_br_device_count
)
def test_network_delete(self):
with contextlib.nested(
mock.patch.object(self.agent, "reclaim_local_vlan"),
mock.patch.object(self.agent.tun_br, "cleanup_tunnel_port")
) as (recl_fn, clean_tun_fn):
self.agent.network_delete("unused_context",
network_id="123")
self.assertFalse(recl_fn.called)
self.agent.local_vlan_map["123"] = "LVM object"
self.agent.network_delete("unused_context",
network_id="123")
self.assertFalse(clean_tun_fn.called)
recl_fn.assert_called_with("123")
def test_port_update(self):
port = {"id": "123",
"network_id": "124",
"admin_state_up": False}
self.agent.port_update("unused_context",
port=port,
network_type="vlan",
segmentation_id="1",
physical_network="physnet")
self.assertEqual(set(['123']), self.agent.updated_ports)
def test_setup_physical_bridges(self):
with contextlib.nested(
mock.patch.object(ip_lib, "device_exists"),
mock.patch.object(sys, "exit"),
mock.patch.object(utils, "execute"),
mock.patch.object(ovs_lib.OVSBridge, "remove_all_flows"),
mock.patch.object(ovs_lib.OVSBridge, "add_flow"),
mock.patch.object(ovs_lib.OVSBridge, "add_patch_port"),
mock.patch.object(ovs_lib.OVSBridge, "delete_port"),
mock.patch.object(ovs_lib.OVSBridge, "set_db_attribute"),
mock.patch.object(self.agent.int_br, "add_flow"),
mock.patch.object(self.agent.int_br, "add_patch_port"),
mock.patch.object(self.agent.int_br, "delete_port"),
mock.patch.object(self.agent.int_br, "set_db_attribute"),
) as (devex_fn, sysexit_fn, utilsexec_fn, remflows_fn, ovs_add_flow_fn,
ovs_addpatch_port_fn, ovs_delport_fn, ovs_set_attr_fn,
br_add_flow_fn, br_addpatch_port_fn, br_delport_fn,
br_set_attr_fn):
devex_fn.return_value = True
parent = mock.MagicMock()
parent.attach_mock(ovs_addpatch_port_fn, 'phy_add_patch_port')
parent.attach_mock(ovs_add_flow_fn, 'phy_add_flow')
parent.attach_mock(ovs_set_attr_fn, 'phy_set_attr')
parent.attach_mock(br_addpatch_port_fn, 'int_add_patch_port')
parent.attach_mock(br_add_flow_fn, 'int_add_flow')
parent.attach_mock(br_set_attr_fn, 'int_set_attr')
ovs_addpatch_port_fn.return_value = "phy_ofport"
br_addpatch_port_fn.return_value = "int_ofport"
self.agent.setup_physical_bridges({"physnet1": "br-eth"})
expected_calls = [
mock.call.phy_add_flow(priority=1, actions='normal'),
mock.call.int_add_patch_port('int-br-eth',
constants.NONEXISTENT_PEER),
mock.call.phy_add_patch_port('phy-br-eth',
constants.NONEXISTENT_PEER),
mock.call.int_add_flow(priority=2, in_port='int_ofport',
actions='drop'),
mock.call.phy_add_flow(priority=2, in_port='phy_ofport',
actions='drop'),
mock.call.int_set_attr('Interface', 'int-br-eth',
'options:peer', 'phy-br-eth'),
mock.call.phy_set_attr('Interface', 'phy-br-eth',
'options:peer', 'int-br-eth'),
]
parent.assert_has_calls(expected_calls)
self.assertEqual(self.agent.int_ofports["physnet1"],
"int_ofport")
self.assertEqual(self.agent.phys_ofports["physnet1"],
"phy_ofport")
def test_setup_physical_bridges_using_veth_interconnection(self):
self.agent.use_veth_interconnection = True
with contextlib.nested(
mock.patch.object(ip_lib, "device_exists"),
mock.patch.object(sys, "exit"),
mock.patch.object(utils, "execute"),
mock.patch.object(ovs_lib.OVSBridge, "remove_all_flows"),
mock.patch.object(ovs_lib.OVSBridge, "add_flow"),
mock.patch.object(ovs_lib.OVSBridge, "add_port"),
mock.patch.object(ovs_lib.OVSBridge, "delete_port"),
mock.patch.object(self.agent.int_br, "add_port"),
mock.patch.object(self.agent.int_br, "delete_port"),
mock.patch.object(ip_lib.IPWrapper, "add_veth"),
mock.patch.object(ip_lib.IpLinkCommand, "delete"),
mock.patch.object(ip_lib.IpLinkCommand, "set_up"),
mock.patch.object(ip_lib.IpLinkCommand, "set_mtu"),
mock.patch.object(ovs_lib, "get_bridges")
) as (devex_fn, sysexit_fn, utilsexec_fn, remflows_fn, ovs_addfl_fn,
ovs_addport_fn, ovs_delport_fn, br_addport_fn, br_delport_fn,
addveth_fn, linkdel_fn, linkset_fn, linkmtu_fn, get_br_fn):
devex_fn.return_value = True
parent = mock.MagicMock()
parent.attach_mock(utilsexec_fn, 'utils_execute')
parent.attach_mock(linkdel_fn, 'link_delete')
parent.attach_mock(addveth_fn, 'add_veth')
addveth_fn.return_value = (ip_lib.IPDevice("int-br-eth1"),
ip_lib.IPDevice("phy-br-eth1"))
ovs_addport_fn.return_value = "int_ofport"
br_addport_fn.return_value = "phys_veth"
get_br_fn.return_value = ["br-eth"]
self.agent.setup_physical_bridges({"physnet1": "br-eth"})
expected_calls = [mock.call.link_delete(),
mock.call.utils_execute(['/sbin/udevadm',
'settle',
'--timeout=10']),
mock.call.add_veth('int-br-eth',
'phy-br-eth')]
parent.assert_has_calls(expected_calls, any_order=False)
self.assertEqual(self.agent.int_ofports["physnet1"],
"phys_veth")
self.assertEqual(self.agent.phys_ofports["physnet1"],
"int_ofport")
def test_get_peer_name(self):
bridge1 = "A_REALLY_LONG_BRIDGE_NAME1"
bridge2 = "A_REALLY_LONG_BRIDGE_NAME2"
self.agent.use_veth_interconnection = True
self.assertEqual(len(self.agent.get_peer_name('int-', bridge1)),
n_const.DEVICE_NAME_MAX_LEN)
self.assertEqual(len(self.agent.get_peer_name('int-', bridge2)),
n_const.DEVICE_NAME_MAX_LEN)
self.assertNotEqual(self.agent.get_peer_name('int-', bridge1),
self.agent.get_peer_name('int-', bridge2))
def test_setup_tunnel_br(self):
self.tun_br = mock.Mock()
with contextlib.nested(
mock.patch.object(self.agent.int_br, "add_patch_port",
return_value=1),
mock.patch.object(self.agent.tun_br, "add_patch_port",
return_value=2),
mock.patch.object(self.agent.tun_br, "remove_all_flows"),
mock.patch.object(self.agent.tun_br, "add_flow"),
mock.patch.object(ovs_lib, "OVSBridge"),
mock.patch.object(self.agent.tun_br, "reset_bridge"),
mock.patch.object(sys, "exit")
) as (intbr_patch_fn, tunbr_patch_fn, remove_all_fn,
add_flow_fn, ovs_br_fn, reset_br_fn, exit_fn):
self.agent.setup_tunnel_br(None)
self.assertTrue(intbr_patch_fn.called)
def test_setup_tunnel_port(self):
self.agent.tun_br = mock.Mock()
self.agent.l2_pop = False
        self.agent.vxlan_udp_port = 8472
self.agent.tun_br_ofports['vxlan'] = {}
with contextlib.nested(
mock.patch.object(self.agent.tun_br, "add_tunnel_port",
return_value='6'),
mock.patch.object(self.agent.tun_br, "add_flow")
) as (add_tun_port_fn, add_flow_fn):
self.agent._setup_tunnel_port(self.agent.tun_br, 'portname',
'1.2.3.4', 'vxlan')
self.assertTrue(add_tun_port_fn.called)
def test_port_unbound(self):
with mock.patch.object(self.agent, "reclaim_local_vlan") as reclvl_fn:
self.agent.enable_tunneling = True
lvm = mock.Mock()
lvm.network_type = "gre"
lvm.vif_ports = {"vif1": mock.Mock()}
self.agent.local_vlan_map["netuid12345"] = lvm
self.agent.port_unbound("vif1", "netuid12345")
self.assertTrue(reclvl_fn.called)
reclvl_fn.called = False
lvm.vif_ports = {}
self.agent.port_unbound("vif1", "netuid12345")
self.assertEqual(reclvl_fn.call_count, 2)
lvm.vif_ports = {"vif1": mock.Mock()}
self.agent.port_unbound("vif3", "netuid12345")
self.assertEqual(reclvl_fn.call_count, 2)
def _prepare_l2_pop_ofports(self):
lvm1 = mock.Mock()
lvm1.network_type = 'gre'
lvm1.vlan = 'vlan1'
lvm1.segmentation_id = 'seg1'
lvm1.tun_ofports = set(['1'])
lvm2 = mock.Mock()
lvm2.network_type = 'gre'
lvm2.vlan = 'vlan2'
lvm2.segmentation_id = 'seg2'
lvm2.tun_ofports = set(['1', '2'])
self.agent.local_vlan_map = {'net1': lvm1, 'net2': lvm2}
self.agent.tun_br_ofports = {'gre':
{'1.1.1.1': '1', '2.2.2.2': '2'}}
self.agent.arp_responder_enabled = True
def test_fdb_ignore_network(self):
self._prepare_l2_pop_ofports()
fdb_entry = {'net3': {}}
with contextlib.nested(
mock.patch.object(self.agent.tun_br, 'add_flow'),
mock.patch.object(self.agent.tun_br, 'delete_flows'),
mock.patch.object(self.agent, '_setup_tunnel_port'),
mock.patch.object(self.agent, 'cleanup_tunnel_port')
) as (add_flow_fn, del_flow_fn, add_tun_fn, clean_tun_fn):
self.agent.fdb_add(None, fdb_entry)
self.assertFalse(add_flow_fn.called)
self.assertFalse(add_tun_fn.called)
self.agent.fdb_remove(None, fdb_entry)
self.assertFalse(del_flow_fn.called)
self.assertFalse(clean_tun_fn.called)
def test_fdb_ignore_self(self):
self._prepare_l2_pop_ofports()
self.agent.local_ip = 'agent_ip'
fdb_entry = {'net2':
{'network_type': 'gre',
'segment_id': 'tun2',
'ports':
{'agent_ip':
[[FAKE_MAC, FAKE_IP1],
n_const.FLOODING_ENTRY]}}}
with mock.patch.object(self.agent.tun_br,
"deferred") as defer_fn:
self.agent.fdb_add(None, fdb_entry)
self.assertFalse(defer_fn.called)
self.agent.fdb_remove(None, fdb_entry)
self.assertFalse(defer_fn.called)
def test_fdb_add_flows(self):
self._prepare_l2_pop_ofports()
fdb_entry = {'net1':
{'network_type': 'gre',
'segment_id': 'tun1',
'ports':
{'2.2.2.2':
[[FAKE_MAC, FAKE_IP1],
n_const.FLOODING_ENTRY]}}}
with contextlib.nested(
mock.patch.object(self.agent.tun_br, 'deferred'),
mock.patch.object(self.agent.tun_br, 'do_action_flows'),
mock.patch.object(self.agent, '_setup_tunnel_port'),
) as (deferred_fn, do_action_flows_fn, add_tun_fn):
deferred_fn.return_value = ovs_lib.DeferredOVSBridge(
self.agent.tun_br)
self.agent.fdb_add(None, fdb_entry)
self.assertFalse(add_tun_fn.called)
actions = (constants.ARP_RESPONDER_ACTIONS %
{'mac': netaddr.EUI(FAKE_MAC, dialect=netaddr.mac_unix),
'ip': netaddr.IPAddress(FAKE_IP1)})
expected_calls = [
mock.call('add', [dict(table=constants.ARP_RESPONDER,
priority=1,
proto='arp',
dl_vlan='vlan1',
nw_dst=FAKE_IP1,
actions=actions),
dict(table=constants.UCAST_TO_TUN,
priority=2,
dl_vlan='vlan1',
dl_dst=FAKE_MAC,
actions='strip_vlan,'
'set_tunnel:seg1,output:2')]),
mock.call('mod', [dict(table=constants.FLOOD_TO_TUN,
dl_vlan='vlan1',
actions='strip_vlan,'
'set_tunnel:seg1,output:1,2')]),
]
do_action_flows_fn.assert_has_calls(expected_calls)
def test_fdb_del_flows(self):
self._prepare_l2_pop_ofports()
fdb_entry = {'net2':
{'network_type': 'gre',
'segment_id': 'tun2',
'ports':
{'2.2.2.2':
[[FAKE_MAC, FAKE_IP1],
n_const.FLOODING_ENTRY]}}}
with contextlib.nested(
mock.patch.object(self.agent.tun_br, 'deferred'),
mock.patch.object(self.agent.tun_br, 'do_action_flows'),
) as (deferred_fn, do_action_flows_fn):
deferred_fn.return_value = ovs_lib.DeferredOVSBridge(
self.agent.tun_br)
self.agent.fdb_remove(None, fdb_entry)
expected_calls = [
mock.call('mod', [dict(table=constants.FLOOD_TO_TUN,
dl_vlan='vlan2',
actions='strip_vlan,'
'set_tunnel:seg2,output:1')]),
mock.call('del', [dict(table=constants.ARP_RESPONDER,
proto='arp',
dl_vlan='vlan2',
nw_dst=FAKE_IP1),
dict(table=constants.UCAST_TO_TUN,
dl_vlan='vlan2',
dl_dst=FAKE_MAC),
dict(in_port='2')]),
]
do_action_flows_fn.assert_has_calls(expected_calls)
def test_fdb_add_port(self):
self._prepare_l2_pop_ofports()
fdb_entry = {'net1':
{'network_type': 'gre',
'segment_id': 'tun1',
'ports': {'1.1.1.1': [[FAKE_MAC, FAKE_IP1]]}}}
with contextlib.nested(
mock.patch.object(self.agent.tun_br, 'deferred'),
mock.patch.object(self.agent.tun_br, 'do_action_flows'),
mock.patch.object(self.agent, '_setup_tunnel_port')
) as (deferred_fn, do_action_flows_fn, add_tun_fn):
deferred_br = ovs_lib.DeferredOVSBridge(self.agent.tun_br)
deferred_fn.return_value = deferred_br
self.agent.fdb_add(None, fdb_entry)
self.assertFalse(add_tun_fn.called)
fdb_entry['net1']['ports']['10.10.10.10'] = [[FAKE_MAC, FAKE_IP1]]
self.agent.fdb_add(None, fdb_entry)
add_tun_fn.assert_called_with(
deferred_br, 'gre-0a0a0a0a', '10.10.10.10', 'gre')
def test_fdb_del_port(self):
self._prepare_l2_pop_ofports()
fdb_entry = {'net2':
{'network_type': 'gre',
'segment_id': 'tun2',
'ports': {'2.2.2.2': [n_const.FLOODING_ENTRY]}}}
with contextlib.nested(
mock.patch.object(self.agent.tun_br, 'deferred'),
mock.patch.object(self.agent.tun_br, 'do_action_flows'),
mock.patch.object(self.agent.tun_br, 'delete_port')
) as (deferred_fn, do_action_flows_fn, delete_port_fn):
deferred_br = ovs_lib.DeferredOVSBridge(self.agent.tun_br)
deferred_fn.return_value = deferred_br
self.agent.fdb_remove(None, fdb_entry)
delete_port_fn.assert_called_once_with('gre-02020202')
def test_fdb_update_chg_ip(self):
self._prepare_l2_pop_ofports()
fdb_entries = {'chg_ip':
{'net1':
{'agent_ip':
{'before': [[FAKE_MAC, FAKE_IP1]],
'after': [[FAKE_MAC, FAKE_IP2]]}}}}
with contextlib.nested(
mock.patch.object(self.agent.tun_br, 'deferred'),
mock.patch.object(self.agent.tun_br, 'do_action_flows'),
) as (deferred_fn, do_action_flows_fn):
deferred_br = ovs_lib.DeferredOVSBridge(self.agent.tun_br)
deferred_fn.return_value = deferred_br
self.agent.fdb_update(None, fdb_entries)
actions = (constants.ARP_RESPONDER_ACTIONS %
{'mac': netaddr.EUI(FAKE_MAC, dialect=netaddr.mac_unix),
'ip': netaddr.IPAddress(FAKE_IP2)})
expected_calls = [
mock.call('add', [dict(table=constants.ARP_RESPONDER,
priority=1,
proto='arp',
dl_vlan='vlan1',
nw_dst=FAKE_IP2,
actions=actions)]),
mock.call('del', [dict(table=constants.ARP_RESPONDER,
proto='arp',
dl_vlan='vlan1',
nw_dst=FAKE_IP1)])
]
do_action_flows_fn.assert_has_calls(expected_calls)
self.assertEqual(len(expected_calls),
len(do_action_flows_fn.mock_calls))
def test_recl_lv_port_to_preserve(self):
self._prepare_l2_pop_ofports()
self.agent.l2_pop = True
self.agent.enable_tunneling = True
with mock.patch.object(
self.agent.tun_br, 'cleanup_tunnel_port'
) as clean_tun_fn:
self.agent.reclaim_local_vlan('net1')
self.assertFalse(clean_tun_fn.called)
def test_recl_lv_port_to_remove(self):
self._prepare_l2_pop_ofports()
self.agent.l2_pop = True
self.agent.enable_tunneling = True
with contextlib.nested(
mock.patch.object(self.agent.tun_br, 'delete_port'),
mock.patch.object(self.agent.tun_br, 'delete_flows')
) as (del_port_fn, del_flow_fn):
self.agent.reclaim_local_vlan('net2')
del_port_fn.assert_called_once_with('gre-02020202')
def test_dvr_mac_address_update(self):
self._setup_for_dvr_test()
with contextlib.nested(
mock.patch.object(self.agent.dvr_agent.int_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.int_br, 'delete_flows'),
) as (add_flow_fn, add_flow_tn_fn, del_flows_fn):
            self.agent.dvr_agent.dvr_mac_address_update(
                dvr_macs=[{'host': 'cn2',
                           'mac_address': 'aa:bb:cc:dd:ee:ff'}])
add_flow_tn_fn.assert_called_with(table=constants.DVR_NOT_LEARN,
priority=1,
dl_src='aa:bb:cc:dd:ee:ff',
actions="output:%s"
% self.agent.patch_int_ofport
)
self.assertFalse(del_flows_fn.called)
with contextlib.nested(
mock.patch.object(self.agent.dvr_agent.int_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'delete_flows'),
mock.patch.object(self.agent.dvr_agent.int_br, 'delete_flows')
) as (add_flow_fn, del_flows_tn_fn, del_flows_fn):
self.agent.dvr_agent.dvr_mac_address_update(dvr_macs=[])
del_flows_tn_fn.assert_called_with(table=constants.DVR_NOT_LEARN,
dl_src='aa:bb:cc:dd:ee:ff')
self.assertFalse(add_flow_fn.called)
def test_daemon_loop_uses_polling_manager(self):
with mock.patch(
'neutron.agent.linux.polling.get_polling_manager') as mock_get_pm:
with mock.patch.object(self.agent, 'rpc_loop') as mock_loop:
self.agent.daemon_loop()
mock_get_pm.assert_called_with(True, 'sudo',
constants.DEFAULT_OVSDBMON_RESPAWN)
mock_loop.assert_called_once_with(polling_manager=mock.ANY)
def test__setup_tunnel_port_error_negative(self):
with contextlib.nested(
mock.patch.object(self.agent.tun_br, 'add_tunnel_port',
return_value='-1'),
mock.patch.object(ovs_neutron_agent.LOG, 'error')
) as (add_tunnel_port_fn, log_error_fn):
ofport = self.agent._setup_tunnel_port(
self.agent.tun_br, 'gre-1', 'remote_ip', p_const.TYPE_GRE)
add_tunnel_port_fn.assert_called_once_with(
'gre-1', 'remote_ip', self.agent.local_ip, p_const.TYPE_GRE,
self.agent.vxlan_udp_port, self.agent.dont_fragment)
log_error_fn.assert_called_once_with(
_("Failed to set-up %(type)s tunnel port to %(ip)s"),
{'type': p_const.TYPE_GRE, 'ip': 'remote_ip'})
self.assertEqual(ofport, 0)
def test__setup_tunnel_port_error_not_int(self):
with contextlib.nested(
mock.patch.object(self.agent.tun_br, 'add_tunnel_port',
return_value=None),
mock.patch.object(ovs_neutron_agent.LOG, 'exception'),
mock.patch.object(ovs_neutron_agent.LOG, 'error')
) as (add_tunnel_port_fn, log_exc_fn, log_error_fn):
ofport = self.agent._setup_tunnel_port(
self.agent.tun_br, 'gre-1', 'remote_ip', p_const.TYPE_GRE)
add_tunnel_port_fn.assert_called_once_with(
'gre-1', 'remote_ip', self.agent.local_ip, p_const.TYPE_GRE,
self.agent.vxlan_udp_port, self.agent.dont_fragment)
log_exc_fn.assert_called_once_with(
_("ofport should have a value that can be "
"interpreted as an integer"))
log_error_fn.assert_called_once_with(
_("Failed to set-up %(type)s tunnel port to %(ip)s"),
{'type': p_const.TYPE_GRE, 'ip': 'remote_ip'})
self.assertEqual(ofport, 0)
def test__setup_tunnel_port_error_negative_df_disabled(self):
with contextlib.nested(
mock.patch.object(self.agent.tun_br, 'add_tunnel_port',
return_value='-1'),
mock.patch.object(ovs_neutron_agent.LOG, 'error')
) as (add_tunnel_port_fn, log_error_fn):
self.agent.dont_fragment = False
ofport = self.agent._setup_tunnel_port(
self.agent.tun_br, 'gre-1', 'remote_ip', p_const.TYPE_GRE)
add_tunnel_port_fn.assert_called_once_with(
'gre-1', 'remote_ip', self.agent.local_ip, p_const.TYPE_GRE,
self.agent.vxlan_udp_port, self.agent.dont_fragment)
log_error_fn.assert_called_once_with(
_("Failed to set-up %(type)s tunnel port to %(ip)s"),
{'type': p_const.TYPE_GRE, 'ip': 'remote_ip'})
self.assertEqual(ofport, 0)
def test_tunnel_sync_with_ovs_plugin(self):
fake_tunnel_details = {'tunnels': [{'id': '42',
'ip_address': '100.101.102.103'}]}
with contextlib.nested(
mock.patch.object(self.agent.plugin_rpc, 'tunnel_sync',
return_value=fake_tunnel_details),
mock.patch.object(self.agent, '_setup_tunnel_port')
) as (tunnel_sync_rpc_fn, _setup_tunnel_port_fn):
self.agent.tunnel_types = ['gre']
self.agent.tunnel_sync()
expected_calls = [mock.call(self.agent.tun_br, 'gre-42',
'100.101.102.103', 'gre')]
_setup_tunnel_port_fn.assert_has_calls(expected_calls)
def test_tunnel_sync_with_ml2_plugin(self):
fake_tunnel_details = {'tunnels': [{'ip_address': '100.101.31.15'}]}
with contextlib.nested(
mock.patch.object(self.agent.plugin_rpc, 'tunnel_sync',
return_value=fake_tunnel_details),
mock.patch.object(self.agent, '_setup_tunnel_port')
) as (tunnel_sync_rpc_fn, _setup_tunnel_port_fn):
self.agent.tunnel_types = ['vxlan']
self.agent.tunnel_sync()
expected_calls = [mock.call(self.agent.tun_br, 'vxlan-64651f0f',
'100.101.31.15', 'vxlan')]
_setup_tunnel_port_fn.assert_has_calls(expected_calls)
def test_tunnel_sync_invalid_ip_address(self):
fake_tunnel_details = {'tunnels': [{'ip_address': '300.300.300.300'},
{'ip_address': '100.100.100.100'}]}
with contextlib.nested(
mock.patch.object(self.agent.plugin_rpc, 'tunnel_sync',
return_value=fake_tunnel_details),
mock.patch.object(self.agent, '_setup_tunnel_port')
) as (tunnel_sync_rpc_fn, _setup_tunnel_port_fn):
self.agent.tunnel_types = ['vxlan']
self.agent.tunnel_sync()
_setup_tunnel_port_fn.assert_called_once_with(self.agent.tun_br,
'vxlan-64646464',
'100.100.100.100',
'vxlan')
def test_tunnel_update(self):
kwargs = {'tunnel_ip': '10.10.10.10',
'tunnel_type': 'gre'}
self.agent._setup_tunnel_port = mock.Mock()
self.agent.enable_tunneling = True
self.agent.tunnel_types = ['gre']
self.agent.l2_pop = False
self.agent.tunnel_update(context=None, **kwargs)
expected_calls = [
mock.call(self.agent.tun_br, 'gre-0a0a0a0a', '10.10.10.10', 'gre')]
self.agent._setup_tunnel_port.assert_has_calls(expected_calls)
def test_ovs_restart(self):
reply2 = {'current': set(['tap0']),
'added': set(['tap2']),
'removed': set([])}
reply3 = {'current': set(['tap2']),
'added': set([]),
'removed': set(['tap0'])}
with contextlib.nested(
mock.patch.object(async_process.AsyncProcess, "_spawn"),
mock.patch.object(log.ContextAdapter, 'exception'),
mock.patch.object(ovs_neutron_agent.OVSNeutronAgent,
'scan_ports'),
mock.patch.object(ovs_neutron_agent.OVSNeutronAgent,
'process_network_ports'),
mock.patch.object(ovs_neutron_agent.OVSNeutronAgent,
'check_ovs_restart'),
mock.patch.object(ovs_neutron_agent.OVSNeutronAgent,
'setup_integration_br'),
mock.patch.object(ovs_neutron_agent.OVSNeutronAgent,
'setup_physical_bridges')
) as (spawn_fn, log_exception, scan_ports, process_network_ports,
check_ovs_restart, setup_int_br, setup_phys_br):
log_exception.side_effect = Exception(
'Fake exception to get out of the loop')
scan_ports.side_effect = [reply2, reply3]
process_network_ports.side_effect = [
False, Exception('Fake exception to get out of the loop')]
check_ovs_restart.side_effect = [False, True]
# This will exit after the second loop
try:
self.agent.daemon_loop()
except Exception:
pass
scan_ports.assert_has_calls([
mock.call(set(), set()),
mock.call(set(), set())
])
process_network_ports.assert_has_calls([
mock.call({'current': set(['tap0']),
'removed': set([]),
'added': set(['tap2'])}, False),
mock.call({'current': set(['tap2']),
'removed': set(['tap0']),
'added': set([])}, True)
])
# Verify the second time through the loop we triggered an
# OVS restart and re-setup the bridges
setup_int_br.assert_has_calls([mock.call()])
setup_phys_br.assert_has_calls([mock.call({})])
class AncillaryBridgesTest(base.BaseTestCase):
def setUp(self):
super(AncillaryBridgesTest, self).setUp()
notifier_p = mock.patch(NOTIFIER)
notifier_cls = notifier_p.start()
self.notifier = mock.Mock()
notifier_cls.return_value = self.notifier
cfg.CONF.set_default('firewall_driver',
'neutron.agent.firewall.NoopFirewallDriver',
group='SECURITYGROUP')
cfg.CONF.set_override('report_interval', 0, 'AGENT')
self.kwargs = ovs_neutron_agent.create_agent_config_map(cfg.CONF)
def _test_ancillary_bridges(self, bridges, ancillary):
device_ids = ancillary[:]
def pullup_side_effect(self, *args):
result = device_ids.pop(0)
return result
with contextlib.nested(
mock.patch('neutron.plugins.openvswitch.agent.ovs_neutron_agent.'
'OVSNeutronAgent.setup_integration_br',
return_value=mock.Mock()),
mock.patch('neutron.agent.linux.utils.get_interface_mac',
return_value='00:00:00:00:00:01'),
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'get_local_port_mac',
return_value='00:00:00:00:00:01'),
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'set_secure_mode'),
mock.patch('neutron.agent.linux.ovs_lib.get_bridges',
return_value=bridges),
mock.patch(
'neutron.agent.linux.ovs_lib.get_bridge_external_bridge_id',
side_effect=pullup_side_effect)):
self.agent = ovs_neutron_agent.OVSNeutronAgent(**self.kwargs)
self.assertEqual(len(ancillary), len(self.agent.ancillary_brs))
if ancillary:
bridges = [br.br_name for br in self.agent.ancillary_brs]
for br in ancillary:
self.assertIn(br, bridges)
def test_ancillary_bridges_single(self):
bridges = ['br-int', 'br-ex']
self._test_ancillary_bridges(bridges, ['br-ex'])
def test_ancillary_bridges_none(self):
bridges = ['br-int']
self._test_ancillary_bridges(bridges, [])
def test_ancillary_bridges_multiple(self):
bridges = ['br-int', 'br-ex1', 'br-ex2']
self._test_ancillary_bridges(bridges, ['br-ex1', 'br-ex2'])<|fim▁end|> | notifier_cls.return_value = self.notifier
cfg.CONF.set_default('firewall_driver', |
<|file_name|>DynamicScriptExecutor.java<|end_file_name|><|fim▁begin|>package org.webbuilder.web.service.script;
import org.springframework.stereotype.Service;
import org.webbuilder.utils.script.engine.DynamicScriptEngine;
import org.webbuilder.utils.script.engine.DynamicScriptEngineFactory;
import org.webbuilder.utils.script.engine.ExecuteResult;
import org.webbuilder.web.po.script.DynamicScript;
import javax.annotation.Resource;
import java.util.Map;
/**
* Created by 浩 on 2015-10-29 0029.
*/
@Service
public class DynamicScriptExecutor {
@Resource
private DynamicScriptService dynamicScriptService;
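    /**
     * Looks up a stored {@link DynamicScript} by primary key and runs it with
     * the engine registered for the script's type. A hypothetical invocation
     * (the script id and parameter names are illustrative assumptions):
     * <pre>
     *     Map&lt;String, Object&gt; param = new HashMap&lt;&gt;();
     *     param.put("userId", 42);
     *     ExecuteResult result = dynamicScriptExecutor.exec("my-script-id", param);
     * </pre>
     * @param id    primary key of the script to execute
     * @param param variables made available to the script at execution time
     * @return the engine's result, or a failed {@link ExecuteResult} when no
     *         script with the given id exists
     */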
public ExecuteResult exec(String id, Map<String, Object> param) throws Exception {
DynamicScript data = dynamicScriptService.selectByPk(id);
if (data == null) {
ExecuteResult result = new ExecuteResult();
result.setResult(String.format("script %s not found!", id));
result.setSuccess(false);
return result;
}
DynamicScriptEngine engine = DynamicScriptEngineFactory.getEngine(data.getType());
return engine.execute(id, param);<|fim▁hole|>}<|fim▁end|> | } |
<|file_name|>rejuv_histogram_overheal.rs<|end_file_name|><|fim▁begin|>extern crate wow_combat_log;
extern crate chrono;
use std::fs::File;
use std::io::BufReader;
use std::collections::HashMap;
use std::fmt;
use chrono::Duration;
static REJUV_AURAS: &'static [u32] = &[
774, // Rejuv
155777, // Rejuv (Germ)
];
#[derive(Debug, Clone, Default)]
struct RestoComputation<'a> {
    /// Per-target tick position for each of the two rejuvenation auras
    /// (Rejuv, Germination), keyed by destination unit id.
    map: HashMap<&'a str, [usize; 2]>,
    /// Per-tick accumulators of (overheal, total healing, heal-event count).
    histo: [(u64, u64, u64); 32],
player: &'a str,
}
impl<'a> RestoComputation<'a> {
fn new(player: &'a str) -> Self {
        RestoComputation { player, ..Default::default() }
}
fn reset_stats(&mut self) {
self.histo = Default::default();
}
fn parse_entry(&mut self, log: &wow_combat_log::Entry<'a>, filter_start_time: Duration) {
use wow_combat_log::Entry::*;
use wow_combat_log::AuraType::*;
if log.base().is_none() {
return;
}
let base = log.base().unwrap();
if base.src.name != self.player {
return;
}
        let entry = self.map.entry(base.dst.id).or_insert([0, 0]);
match *log {
Aura { ty, id, .. } if REJUV_AURAS.contains(&id) && ty != Remove => {
let i = if id == REJUV_AURAS[0] { 0 } else { 1 };
entry[i] = 0;
},
Heal { id, heal: total_heal, overheal, .. } if REJUV_AURAS.contains(&id) => {
if log.timestamp() < filter_start_time {
return;
}
let i = if id == REJUV_AURAS[0] { 0 } else { 1 };
                self.histo[entry[i]].0 += overheal;
                self.histo[entry[i]].1 += total_heal;
                self.histo[entry[i]].2 += 1;
                // Clamp at the last bucket so ticks past the histogram length
                // accumulate there instead of indexing out of bounds.
                if entry[i] + 1 < self.histo.len() {
                    entry[i] += 1;
                }
},
_ => ()
}
}
}
<|fim▁hole|>impl<'a> fmt::Display for RestoComputation<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let overheal = self.histo.iter().map(|x| x.0).sum::<u64>() as f64;
let total = self.histo.iter().map(|x| x.1).sum::<u64>() as f64;
writeln!(f, "total rejuv healing {} ({:.2}% overheal)", total, overheal/total * 100.)?;
write!(f, "Overheal by tick: ")?;
for i in &self.histo {
if i.1 == 0 { break }
write!(f, "{:.2} ({}), ", i.0 as f64/i.1 as f64 * 100., i.2)?;
}
Ok(())
}
}
impl<'a, 'b, 'c> std::ops::SubAssign<&'b RestoComputation<'c>> for RestoComputation<'a> {
fn sub_assign(&mut self, rhs: &'b RestoComputation<'c>) {
for (i, j) in self.histo.iter_mut().zip(rhs.histo.iter()) {
i.0 -= j.0;
i.1 -= j.1;
i.2 -= j.2;
}
}
}
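// Invocation sketch (binary name taken from this file; the player name and
// the optional start/end offsets in seconds are illustrative assumptions):
//
//     rejuv_histogram_overheal WoWCombatLog.txt Restodruid 120 600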
fn main() {
let read = BufReader::new(File::open(std::env::args().nth(1).unwrap()).unwrap());
let player = std::env::args().nth(2).unwrap();
let intern = wow_combat_log::Interner::default();
let start = std::env::args().nth(3).map(|x| Duration::seconds(x.parse().unwrap())).unwrap_or(Duration::zero());
let end = std::env::args().nth(4).map(|x| Duration::seconds(x.parse().unwrap())).unwrap_or(Duration::max_value());
let iter = wow_combat_log::iter(&intern, read);
let iter = iter.take_while(|x| x.timestamp() < end);
let mut encounter_start = None;
let mut total = RestoComputation::new(&player);
let mut encounter = total.clone();
let mut kills = total.clone();
let mut bosses = total.clone();
for log in iter {
use wow_combat_log::Entry::*;
match log {
EncounterStart {..} => {
encounter_start = Some(log.timestamp());
bosses -= &encounter;
kills -= &encounter;
encounter.reset_stats();
},
EncounterEnd {name, kill, ..} => {
if let Some(s) = encounter_start {
println!("duration: {}, start: {}, {}, kill: {}", (log.timestamp() - s).num_seconds(), s.num_seconds(), name, kill);
println!("{}", encounter);
println!("");
encounter_start = None;
}
if !kill {
kills -= &encounter;
}
encounter.reset_stats();
},
_ => ()
}
encounter.parse_entry(&log, start);
total.parse_entry(&log, start);
kills.parse_entry(&log, start);
bosses.parse_entry(&log, start);
}
bosses -= &encounter;
kills -= &encounter;
println!("-------");
println!("");
println!("Log total:");
println!("{}", total);
println!("");
println!("Boss total:");
println!("{}", bosses);
println!("");
println!("Kill total:");
println!("{}", kills);
}<|fim▁end|> | |
<|file_name|>event.rs<|end_file_name|><|fim▁begin|>use crate::Token;
use super::Ready;
pub type SysEvent = Event;
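/// A readiness event pairing a `Token` (identifying the registered source)
/// with the `Ready` state observed for it. A minimal consumption sketch (the
/// surrounding polling loop is an assumption, not part of this module):
///
/// ```ignore
/// for event in &events {
///     if event.is_readable() {
///         // read from whatever was registered under event.token()
///     }
/// }
/// ```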
#[derive(Debug, Clone)]
pub struct Event {
token: Token,
readiness: Ready,
}
impl Event {
pub(crate) fn new(readiness: Ready, token: Token) -> Event {
Event { token, readiness }
}
pub fn token(&self) -> Token {
self.token
}
pub fn is_readable(&self) -> bool {
self.readiness.is_readable()
}
<|fim▁hole|> pub fn is_writable(&self) -> bool {
self.readiness.is_writable()
}
pub fn is_error(&self) -> bool {
self.readiness.is_error()
}
pub fn is_hup(&self) -> bool {
self.readiness.is_hup()
}
pub fn is_priority(&self) -> bool {
self.readiness.is_priority()
}
pub fn is_aio(&self) -> bool {
self.readiness.is_aio()
}
pub fn is_lio(&self) -> bool {
self.readiness.is_lio()
}
}<|fim▁end|> | |
<|file_name|>encoder.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 Adam Greig
// Licensed under the MIT license, see LICENSE for details.
//! This module provides the encoding function for turning data into codewords.
//!
//! Please refer to the `encode` and `copy_encode` methods on
//! [`LDPCCode`](../codes/enum.LDPCCode.html) for more details.
// We have a couple of expressions with +0 for clarity of where the 0 comes from
#![allow(clippy::identity_op)]
use core::slice;
use crate::codes::LDPCCode;
/// Trait for the types of codeword we can encode into.
///
/// We implement this for u8 (the standard but slow option), and u32 and u64 which give speedups.
pub trait EncodeInto {
/// Given `codeword` which has the first k bits set to the data to transmit,
/// sets the remaining n-k parity bits.
///
/// Returns a `&mut [u8]` view on `codeword`.
fn encode<'a>(code: &LDPCCode, codeword: &'a mut[Self]) -> &'a mut [u8]
where Self: Sized;
/// First copies `data` into the first k bits of `codeword`, then calls `encode`.
fn copy_encode<'a>(code: &LDPCCode, data: &[u8], codeword: &'a mut[Self]) -> &'a mut [u8]
where Self: Sized;
/// Returns the bit length for this type
fn bitlength() -> usize;
}
impl EncodeInto for u8 {
fn encode<'a>(code: &LDPCCode, codeword: &'a mut[Self]) -> &'a mut [u8] {
let k = code.k();
let r = code.n() - code.k();
let b = code.circulant_size();
let gc = code.compact_generator();
let row_len = r/64;
// Scope the split of codeword into (data, parity)
{
// Split codeword into data and parity sections and then zero the parity bits
let (data, parity) = codeword.split_at_mut(k / 8);
for x in parity.iter_mut() { *x = 0; }
// For each rotation of the generator circulants
for offset in 0..b {
// For each row of circulants
for crow in 0..k/b {
// Data bit (row of full generator matrix)
let bit = crow*b + offset;
if data[bit/8] >> (7-(bit%8)) & 1 == 1 {
// If bit is set, XOR the generator constant in
for (idx, circ) in gc[crow*row_len..(crow+1)*row_len].iter().enumerate() {
parity[idx*8 + 7] ^= (*circ >> 0) as u8;
parity[idx*8 + 6] ^= (*circ >> 8) as u8;
parity[idx*8 + 5] ^= (*circ >> 16) as u8;
parity[idx*8 + 4] ^= (*circ >> 24) as u8;
parity[idx*8 + 3] ^= (*circ >> 32) as u8;
parity[idx*8 + 2] ^= (*circ >> 40) as u8;
parity[idx*8 + 1] ^= (*circ >> 48) as u8;
parity[idx*8 + 0] ^= (*circ >> 56) as u8;
}
}
}
// Now simulate the right-rotation of the generator by left-rotating the parity
for block in 0..r/b {
let parityblock = &mut parity[block*b/8 .. (block+1)*b/8];
let mut carry = parityblock[0] >> 7;
for x in parityblock.iter_mut().rev() {
let c = *x >> 7;
*x = (*x<<1) | carry;
carry = c;
}
}
}
}
// Return a &mut [u8] view on the codeword
codeword
}
fn copy_encode<'a>(code: &LDPCCode, data: &[u8], codeword: &'a mut[Self]) -> &'a mut [u8] {
codeword[..data.len()].copy_from_slice(data);
Self::encode(code, codeword)
}
fn bitlength() -> usize { 8 }
}
impl EncodeInto for u32 {
fn encode<'a>(code: &LDPCCode, codeword: &'a mut[Self]) -> &'a mut [u8] {
let k = code.k();
let r = code.n() - code.k();
let b = code.circulant_size();
let gc = code.compact_generator();
let row_len = r/64;
// Scope the split of codeword into (data, parity)
{
// Split codeword into data and parity sections and then zero the parity bits
let (data, parity) = codeword.split_at_mut(k / 32);
for x in parity.iter_mut() { *x = 0; }
// We treat data as a &[u8] so we bit-index it correctly despite endianness
let data = unsafe { slice::from_raw_parts(data.as_ptr() as *const u8, data.len()*4) };
// For each rotation of the generator circulants
for offset in 0..b {
// For each row of circulants
for crow in 0..k/b {
// Data bit (row of full generator matrix)
let bit = crow*b + offset;
if data[bit/8] >> (7-(bit%8)) & 1 == 1 {
// If bit is set, XOR the generator constant in
for (idx, circ) in gc[crow*row_len..(crow+1)*row_len].iter().enumerate() {
parity[idx*2 + 1] ^= (*circ >> 0) as u32;
parity[idx*2 + 0] ^= (*circ >> 32) as u32;
}
}
}
// Now simulate the right-rotation of the generator by left-rotating the parity
if b >= 32 {
for block in 0..r/b {
let parityblock = &mut parity[block*b/32 .. (block+1)*b/32];
let mut carry = parityblock[0] >> 31;
for x in parityblock.iter_mut().rev() {
let c = *x >> 31;
*x = (*x<<1) | carry;
carry = c;
}
}
} else if b == 16 {
// For small blocks we must rotate inside each parity word instead
for x in parity.iter_mut() {
let block1 = *x & 0xFFFF_0000;
let block2 = *x & 0x0000_FFFF;
*x = (((block1<<1)|(block1>>15)) & 0xFFFF_0000)
| (((block2<<1)|(block2>>15)) & 0x0000_FFFF);
}
}
}
// Need to compensate for endianness
for x in parity.iter_mut() {
*x = x.to_be();
}
}
// Return a &mut [u8] view on the codeword
unsafe {
slice::from_raw_parts_mut::<'a>(codeword.as_mut_ptr() as *mut u8, codeword.len() * 4)
}
}
fn copy_encode<'a>(code: &LDPCCode, data: &[u8], codeword: &'a mut[Self]) -> &'a mut [u8] {
let codeword_u8 = unsafe {
slice::from_raw_parts_mut::<'a>(codeword.as_mut_ptr() as *mut u8, codeword.len() * 4)
};
codeword_u8[..data.len()].copy_from_slice(data);
Self::encode(code, codeword)
}
fn bitlength() -> usize { 32 }
}
impl EncodeInto for u64 {
fn encode<'a>(code: &LDPCCode, codeword: &'a mut[Self]) -> &'a mut [u8] {
let k = code.k();
let r = code.n() - code.k();
let b = code.circulant_size();
let gc = code.compact_generator();
let row_len = r/64;
// Scope the split of codeword into (data, parity)
{
// Split codeword into data and parity sections and then zero the parity bits
let (data, parity) = codeword.split_at_mut(k / 64);
for x in parity.iter_mut() { *x = 0; }
// We treat data as a &[u8] so we bit-index it correctly despite endianness
let data = unsafe { slice::from_raw_parts(data.as_ptr() as *const u8, data.len()*8) };
// For each rotation of the generator circulants
for offset in 0..b {
// For each row of circulants
for crow in 0..k/b {
// Data bit (row of full generator matrix)
let bit = crow*b + offset;
if data[bit/8] >> (7-(bit%8)) & 1 == 1 {
// If bit is set, XOR the generator constant in
for (idx, circ) in gc[crow*row_len..(crow+1)*row_len].iter().enumerate() {
parity[idx] ^= *circ;
}
}
}
// Now simulate the right-rotation of the generator by left-rotating the parity
if b >= 64 {
for block in 0..r/b {
let parityblock = &mut parity[block*b/64 .. (block+1)*b/64];
let mut carry = parityblock[0] >> 63;<|fim▁hole|> let c = *x >> 63;
*x = (*x<<1) | carry;
carry = c;
}
}
} else if b == 32 {
// For small blocks we must rotate inside each parity word instead
for x in parity.iter_mut() {
let block1 = *x & 0xFFFFFFFF_00000000;
let block2 = *x & 0x00000000_FFFFFFFF;
*x = (((block1<<1)|(block1>>31)) & 0xFFFFFFFF_00000000)
| (((block2<<1)|(block2>>31)) & 0x00000000_FFFFFFFF);
}
} else if b == 16 {
for x in parity.iter_mut() {
let block1 = *x & 0xFFFF_0000_0000_0000;
let block2 = *x & 0x0000_FFFF_0000_0000;
let block3 = *x & 0x0000_0000_FFFF_0000;
let block4 = *x & 0x0000_0000_0000_FFFF;
*x = (((block1<<1)|(block1>>15)) & 0xFFFF_0000_0000_0000)
| (((block2<<1)|(block2>>15)) & 0x0000_FFFF_0000_0000)
| (((block3<<1)|(block3>>15)) & 0x0000_0000_FFFF_0000)
| (((block4<<1)|(block4>>15)) & 0x0000_0000_0000_FFFF);
}
}
}
// Need to compensate for endianness
for x in parity.iter_mut() {
*x = x.to_be();
}
}
// Return a &mut [u8] view on the codeword
unsafe {
slice::from_raw_parts_mut::<'a>(codeword.as_mut_ptr() as *mut u8, codeword.len() * 8)
}
}
fn copy_encode<'a>(code: &LDPCCode, data: &[u8], codeword: &'a mut[Self]) -> &'a mut [u8] {
let codeword_u8 = unsafe {
slice::from_raw_parts_mut::<'a>(codeword.as_mut_ptr() as *mut u8, codeword.len() * 8)
};
codeword_u8[..data.len()].copy_from_slice(data);
Self::encode(code, codeword)
}
fn bitlength() -> usize { 64 }
}
impl LDPCCode {
/// Encode a codeword. This function assumes the first k bits of `codeword` have already
/// been set to your data, and will set the remaining n-k bits appropriately.
///
/// `codeword` must be exactly n bits long.
///
/// You can give `codeword` in `u8`, `u32`, or `u64`.
/// The larger types are faster and are interpreted as packed bytes in little endian.
///
/// Returns a view of `codeword` in &mut [u8] which may be convenient if you
/// passed in a larger type but want to use the output as bytes. You can just
/// not use the return value if you wish to keep your original view on `codeword`.
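    ///
    /// A minimal sketch (using the `TC128` code, as in the tests below; `data`
    /// stands in for your k bits of payload):
    ///
    /// ```ignore
    /// let code = LDPCCode::TC128;
    /// let mut codeword = vec![0u8; code.n() / 8];
    /// codeword[..code.k() / 8].copy_from_slice(&data);
    /// let codeword_bytes = code.encode(&mut codeword);
    /// ```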
pub fn encode<'a, T>(&self, codeword: &'a mut [T]) -> &'a mut [u8]
where T: EncodeInto
{
assert_eq!(codeword.len() * T::bitlength(), self.n(), "codeword must be n bits long");
EncodeInto::encode(self, codeword)
}
/// Encode a codeword, first copying in the data.
///
/// This is the same as `encode` except you can pass the data which must be k bits long in as
/// `&[u8]` and it will be copied into the first part of `codeword`, which must be n bits long.
///
/// Returns a view of `codeword` in &mut [u8] which may be convenient if you
/// passed in a larger type but want to use the output as bytes. You can just
/// not use the return value if you wish to keep your original view on `codeword`.
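    ///
    /// A minimal sketch (again assuming `TC128`, with a `u64` codeword to take
    /// the faster packed path):
    ///
    /// ```ignore
    /// let code = LDPCCode::TC128;
    /// let mut codeword = vec![0u64; code.n() / 64];
    /// let codeword_bytes = code.copy_encode(&data, &mut codeword);
    /// ```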
pub fn copy_encode<'a, T>(&self, data: &[u8], codeword: &'a mut [T]) -> &'a mut [u8]
where T: EncodeInto
{
assert_eq!(data.len() * 8, self.k(), "data must be k bits long");
assert_eq!(codeword.len() * T::bitlength(), self.n(), "codeword must be n bits long");
EncodeInto::copy_encode(self, data, codeword)
}
}
#[cfg(test)]
mod tests {
use std::prelude::v1::*;
use crate::codes::LDPCCode;
macro_rules! test_encode {
($code:path, $parity:expr) => {
let code = $code;
let parity = $parity;
let txdata: Vec<u8> = (0..code.k()/8).map(|i| i as u8).collect();
// First check we can encode OK in the totally normal way
let mut txcode = vec![0u8; code.n()/8];
txcode[..code.k()/8].copy_from_slice(&txdata);
let rxcode = code.encode(&mut txcode);
let (rxdata, rxparity) = rxcode.split_at(code.k()/8);
assert_eq!(rxdata, &txdata[..]);
assert_eq!(rxparity, &parity[..]);
// Now check copy_encode works
let mut txcode = vec![0u8; code.n()/8];
let rxcode = code.copy_encode(&txdata, &mut txcode);
let (rxdata, rxparity) = rxcode.split_at(code.k()/8);
assert_eq!(rxdata, &txdata[..]);
assert_eq!(rxparity, &parity[..]);
// Now check for u32 version
let mut txcode = vec![0u32; code.n()/32];
let rxcode = code.copy_encode(&txdata, &mut txcode);
let (rxdata, rxparity) = rxcode.split_at(code.k()/8);
assert_eq!(rxdata, &txdata[..]);
assert_eq!(rxparity, &parity[..]);
// Now check for u64 version
let mut txcode = vec![0u64; code.n()/64];
let rxcode = code.copy_encode(&txdata, &mut txcode);
let (rxdata, rxparity) = rxcode.split_at(code.k()/8);
assert_eq!(rxdata, &txdata[..]);
assert_eq!(rxparity, &parity[..]);
};
}
#[test]
fn test_encode() {
test_encode!(LDPCCode::TC128,
[0x34, 0x99, 0x98, 0x87, 0x94, 0xE1, 0x62, 0x56]);
test_encode!(LDPCCode::TC256,
[0x8C, 0x99, 0x21, 0x34, 0xAD, 0xB0, 0xCF, 0xD2,
0x2D, 0xA5, 0xF7, 0x7F, 0xBB, 0x42, 0x34, 0xCD]);
test_encode!(LDPCCode::TC512,
[0xBC, 0x92, 0x1C, 0x98, 0xCC, 0xE2, 0x6C, 0xE8,
0x12, 0x3A, 0x97, 0xFF, 0x73, 0x5B, 0xF6, 0x9E,
0x08, 0xCB, 0x48, 0xC4, 0xC3, 0x00, 0x83, 0x0F,
0x30, 0xE0, 0x98, 0x59, 0xD6, 0x06, 0x7E, 0xBF]);
test_encode!(LDPCCode::TM1280,
[0xF1, 0x68, 0xE0, 0x79, 0x45, 0xE3, 0x08, 0xAE,
0xEF, 0xD1, 0x68, 0x56, 0x60, 0x0A, 0x90, 0xFA,
0xF6, 0x55, 0xA2, 0x01, 0x60, 0x77, 0xF7, 0xE0,
0xFA, 0xB5, 0x49, 0x06, 0xDD, 0x6D, 0xCD, 0x7D]);
test_encode!(LDPCCode::TM1536,
[0x99, 0x4D, 0x02, 0x17, 0x53, 0x87, 0xC8, 0xDD,
0x42, 0x2E, 0x46, 0x29, 0x06, 0x6A, 0x02, 0x6D,
0xE1, 0xAB, 0xB9, 0xA2, 0xAA, 0xE0, 0xF2, 0xE9,
0xF6, 0xAA, 0xE6, 0xF0, 0x42, 0x1E, 0x52, 0x44,
0x5F, 0x62, 0xD1, 0xA8, 0x8F, 0xB2, 0x01, 0x78,
0xB1, 0xD6, 0x2D, 0x0B, 0xD6, 0xB1, 0x4A, 0x6C,
0x93, 0x26, 0x69, 0xAA, 0xE0, 0x55, 0x1A, 0xD9,
0x9B, 0x94, 0x35, 0x27, 0x3F, 0x30, 0x91, 0x83]);
test_encode!(LDPCCode::TM2048,
[0xEE, 0xA9, 0xAA, 0xAF, 0x98, 0xD9, 0x16, 0xCE,
0x6C, 0x2B, 0x28, 0x2D, 0x1A, 0x5B, 0x94, 0x4C,
0xA4, 0xF1, 0xB3, 0xD3, 0x1A, 0xEC, 0x58, 0x5A,
0xB3, 0xE6, 0xA4, 0xC4, 0x0D, 0xFB, 0x4F, 0x4D,
0xD8, 0x07, 0xA1, 0xAD, 0x0A, 0xE9, 0x62, 0xC4,
0xD4, 0x0B, 0xAD, 0xA1, 0x06, 0xE5, 0x6E, 0xC8,
0xA6, 0x68, 0x6A, 0xD5, 0xE6, 0xAC, 0x09, 0xBE,
0x3F, 0xF1, 0xF3, 0x4C, 0x7F, 0x35, 0x90, 0x27,
0xF2, 0x64, 0x69, 0x03, 0x83, 0x37, 0x42, 0x91,
0x21, 0xB7, 0xBA, 0xD0, 0x50, 0xE4, 0x91, 0x42,
0xE4, 0x0D, 0x64, 0x19, 0x70, 0x84, 0xA5, 0xB7,
0x86, 0x6F, 0x06, 0x7B, 0x12, 0xE6, 0xC7, 0xD5,
0xAB, 0x10, 0xDB, 0x03, 0x4F, 0xF6, 0x8A, 0xFE,
0x17, 0xAC, 0x67, 0xBF, 0xF3, 0x4A, 0x36, 0x42,
0x04, 0xAE, 0x85, 0xB3, 0xB6, 0x47, 0xCE, 0xC4,
0x0F, 0xA5, 0x8E, 0xB8, 0xBD, 0x4C, 0xC5, 0xCF]);
test_encode!(LDPCCode::TM5120,
[0x4A, 0xA9, 0xB6, 0x89, 0x47, 0xB9, 0xAA, 0x41,
0x0E, 0xED, 0xF2, 0xCD, 0x03, 0xFD, 0xEE, 0x05,
0x40, 0xD1, 0x74, 0x9E, 0xD5, 0x99, 0x69, 0x47,
0x2C, 0xBD, 0x18, 0xF2, 0xB9, 0xF5, 0x05, 0x2B,
0x35, 0x4B, 0xB6, 0x02, 0x30, 0xBE, 0xE2, 0x24,
0x3A, 0x44, 0xB9, 0x0D, 0x3F, 0xB1, 0xED, 0x2B,
0x93, 0xF7, 0xE3, 0x6C, 0x0A, 0x66, 0xF8, 0x2D,
0xB4, 0xD0, 0xC4, 0x4B, 0x2D, 0x41, 0xDF, 0x0A,
0xAC, 0xCB, 0xF4, 0xD7, 0xC8, 0x0E, 0x3A, 0x9A,
0xBF, 0xD8, 0xE7, 0xC4, 0xDB, 0x1D, 0x29, 0x89,
0x4A, 0x65, 0xEB, 0x61, 0xE2, 0x2F, 0xC0, 0x33,
0x22, 0x0D, 0x83, 0x09, 0x8A, 0x47, 0xA8, 0x5B,
0xE7, 0x0F, 0xFA, 0xC7, 0x12, 0x35, 0x24, 0xEF,
0x4F, 0xA7, 0x52, 0x6F, 0xBA, 0x9D, 0x8C, 0x47,
0x31, 0xB1, 0x14, 0x3F, 0xB1, 0x14, 0x7F, 0x5D,
0x95, 0x15, 0xB0, 0x9B, 0x15, 0xB0, 0xDB, 0xF9]);
test_encode!(LDPCCode::TM6144,
[0xA7, 0x42, 0xB8, 0x6D, 0x95, 0x7B, 0x9C, 0x90,
0xA8, 0x18, 0x30, 0xC2, 0x95, 0x08, 0x7F, 0xD7,
0xAB, 0x4E, 0xB4, 0x61, 0x99, 0x77, 0x90, 0x9C,
0xA4, 0x14, 0x3C, 0xCE, 0x99, 0x04, 0x73, 0xDB,
0x04, 0x59, 0xF4, 0xC0, 0xAD, 0x9F, 0xBA, 0x49,
0x1E, 0x53, 0x0A, 0x05, 0x5A, 0x9D, 0x1F, 0xDF,
0x67, 0x3A, 0x97, 0xA3, 0xCE, 0xFC, 0xD9, 0x2A,
0x7D, 0x30, 0x69, 0x66, 0x39, 0xFE, 0x7C, 0xBC,
0x70, 0x17, 0x15, 0x08, 0x56, 0xBD, 0x0B, 0xAC,
0x89, 0x12, 0x85, 0xB0, 0xF0, 0x88, 0x7F, 0x07,
0x1F, 0x78, 0x7A, 0x67, 0x39, 0xD2, 0x64, 0xC3,
0xE6, 0x7D, 0xEA, 0xDF, 0x9F, 0xE7, 0x10, 0x68,
0x17, 0x89, 0x95, 0x5C, 0x41, 0x92, 0x42, 0x05,
0xBD, 0x62, 0x80, 0x2B, 0x67, 0x59, 0xB2, 0xEB,
0x17, 0x89, 0x95, 0x5C, 0x41, 0x92, 0x42, 0x05,
0xBD, 0x62, 0x80, 0x2B, 0x67, 0x59, 0xB2, 0xEB,
0x3B, 0x15, 0xFB, 0xBE, 0xF2, 0x9B, 0xCE, 0x22,
0x77, 0xDC, 0xEB, 0x28, 0x03, 0xA7, 0x83, 0xAD,
0x7D, 0x53, 0xBD, 0xF8, 0xB4, 0xDD, 0x88, 0x64,
0x31, 0x9A, 0xAD, 0x6E, 0x45, 0xE1, 0xC5, 0xEB,
0x92, 0x3E, 0xBA, 0x63, 0x0F, 0x0F, 0x74, 0x3B,
0x96, 0x7F, 0x2F, 0xA5, 0x09, 0x43, 0xD0, 0xA6,
0x9C, 0x30, 0xB4, 0x6D, 0x01, 0x01, 0x7A, 0x35,
0x98, 0x71, 0x21, 0xAB, 0x07, 0x4D, 0xDE, 0xA8,
0xD0, 0xC9, 0x86, 0x0D, 0x68, 0xA3, 0xDA, 0x41,
0x50, 0xCF, 0x10, 0x6A, 0xA9, 0x24, 0xD0, 0x06,
0x12, 0x0B, 0x44, 0xCF, 0xAA, 0x61, 0x18, 0x83,
0x92, 0x0D, 0xD2, 0xA8, 0x6B, 0xE6, 0x12, 0xC4,
0x1B, 0x75, 0x76, 0xF7, 0xC3, 0xAF, 0x84, 0xE2,
0x16, 0xA6, 0xE4, 0x44, 0x06, 0x4F, 0x54, 0x98,
0xDC, 0xB2, 0xB1, 0x30, 0x04, 0x68, 0x43, 0x25,
0xD1, 0x61, 0x23, 0x83, 0xC1, 0x88, 0x93, 0x5F]);
test_encode!(LDPCCode::TM8192,
[0xF6, 0x00, 0x56, 0xCD, 0x23, 0x63, 0x1A, 0xED,
0x7D, 0x7C, 0xF0, 0x17, 0x7C, 0xF1, 0x96, 0x73,
0x8C, 0xB7, 0xE0, 0xF4, 0x34, 0xF6, 0xB7, 0x3C,
0x89, 0x44, 0x85, 0x74, 0xA3, 0x27, 0x44, 0xF7,
0x0A, 0xFC, 0xAA, 0x31, 0xDF, 0x9F, 0xE6, 0x11,
0x81, 0x80, 0x0C, 0xEB, 0x80, 0x0D, 0x6A, 0x8F,
0x70, 0x4B, 0x1C, 0x08, 0xC8, 0x0A, 0x4B, 0xC0,
0x75, 0xB8, 0x79, 0x88, 0x5F, 0xDB, 0xB8, 0x0B,
0x5E, 0x53, 0x18, 0x0C, 0xB4, 0x32, 0x45, 0x92,
0x71, 0x93, 0xFE, 0xAD, 0xDF, 0x98, 0x55, 0xE8,
0x62, 0xB5, 0xFB, 0xBB, 0x4D, 0x94, 0x01, 0x2D,
0x22, 0xAD, 0x21, 0x55, 0x44, 0xED, 0x44, 0xC2,
0x61, 0x6C, 0x27, 0x33, 0x8B, 0x0D, 0x7A, 0xAD,
0x4E, 0xAC, 0xC1, 0x92, 0xE0, 0xA7, 0x6A, 0xD7,
0x5D, 0x8A, 0xC4, 0x84, 0x72, 0xAB, 0x3E, 0x12,
0x1D, 0x92, 0x1E, 0x6A, 0x7B, 0xD2, 0x7B, 0xFD,
0x59, 0x80, 0xA5, 0x02, 0xF2, 0xDD, 0x10, 0xCD,
0x8B, 0x8F, 0x52, 0xC3, 0x00, 0x65, 0xAD, 0xF7,
0xFB, 0xDF, 0x35, 0xB9, 0xCB, 0xB1, 0x90, 0x75,
0x68, 0xFC, 0x36, 0x33, 0x9D, 0x79, 0x18, 0xD0,
0x3A, 0xE3, 0xC6, 0x61, 0x91, 0xBE, 0x73, 0xAE,
0xE8, 0xEC, 0x31, 0xA0, 0x63, 0x06, 0xCE, 0x94,
0x98, 0xBC, 0x56, 0xDA, 0xA8, 0xD2, 0xF3, 0x16,
0x0B, 0x9F, 0x55, 0x50, 0xFE, 0x1A, 0x7B, 0xB3,
0x2D, 0xF0, 0xEA, 0x30, 0x5C, 0x71, 0xE6, 0xD8,
0x21, 0xE7, 0xC4, 0x1F, 0x68, 0xA5, 0x95, 0xAC,
0x3B, 0x2F, 0x62, 0xBC, 0x72, 0xF3, 0x2F, 0x9C,
0xB5, 0x0F, 0x9A, 0x27, 0x42, 0x5B, 0x0B, 0x49,
0x50, 0x8D, 0x97, 0x4D, 0x21, 0x0C, 0x9B, 0xA5,
0x5C, 0x9A, 0xB9, 0x62, 0x15, 0xD8, 0xE8, 0xD1,
0x46, 0x52, 0x1F, 0xC1, 0x0F, 0x8E, 0x52, 0xE1,
0xC8, 0x72, 0xE7, 0x5A, 0x3F, 0x26, 0x76, 0x34,
0x2C, 0xFB, 0x43, 0xA9, 0xBD, 0x5C, 0x53, 0x87,
0xD3, 0xF9, 0x40, 0x32, 0xED, 0x43, 0x7F, 0x96,
0x85, 0xAD, 0xC9, 0x0F, 0xE4, 0x5E, 0x11, 0xFA,
0xAF, 0x32, 0xBC, 0xD2, 0xC4, 0x2B, 0xC6, 0xE6,
0x65, 0xB2, 0x0A, 0xE0, 0xF4, 0x15, 0x1A, 0xCE,
0x9A, 0xB0, 0x09, 0x7B, 0xA4, 0x0A, 0x36, 0xDF,
0xCC, 0xE4, 0x80, 0x46, 0xAD, 0x17, 0x58, 0xB3,
0xE6, 0x7B, 0xF5, 0x9B, 0x8D, 0x62, 0x8F, 0xAF,
0x35, 0x97, 0x39, 0x80, 0xDC, 0x57, 0x52, 0xD1,
0xDB, 0x00, 0x55, 0x0B, 0x5E, 0x8E, 0x5C, 0xB6,
0x15, 0xB4, 0xF8, 0x9F, 0xAB, 0xF6, 0xCD, 0x60,
0xD0, 0xE1, 0x13, 0x02, 0x1B, 0x61, 0x87, 0x73,
0x9E, 0x3C, 0x92, 0x2B, 0x77, 0xFC, 0xF9, 0x7A,
0x70, 0xAB, 0xFE, 0xA0, 0xF5, 0x25, 0xF7, 0x1D,
0xBE, 0x1F, 0x53, 0x34, 0x00, 0x5D, 0x66, 0xCB,
0x7B, 0x4A, 0xB8, 0xA9, 0xB0, 0xCA, 0x2C, 0xD8,
0x31, 0xAD, 0x0C, 0x43, 0xE1, 0xE1, 0x81, 0x9F,
0xA3, 0x69, 0x90, 0x4C, 0xE6, 0x1F, 0x41, 0x61,
0xFB, 0x82, 0xD7, 0x97, 0x29, 0xC3, 0x30, 0x15,
0xA2, 0x72, 0xE7, 0x88, 0x85, 0xAD, 0x0F, 0x98,
0x9A, 0x06, 0xA7, 0xE8, 0x4A, 0x4A, 0x2A, 0x34,
0x08, 0xC2, 0x3B, 0xE7, 0x4D, 0xB4, 0xEA, 0xCA,
0x50, 0x29, 0x7C, 0x3C, 0x82, 0x68, 0x9B, 0xBE,
0x09, 0xD9, 0x4C, 0x23, 0x2E, 0x06, 0xA4, 0x33,
0xED, 0xA5, 0x15, 0xF4, 0x69, 0x70, 0x0B, 0xFF,
0x35, 0x9C, 0x94, 0x0B, 0x2E, 0xB3, 0x47, 0xCD,
0xEB, 0xC4, 0x3F, 0xF5, 0x82, 0x98, 0xCD, 0x72,
0x78, 0xF9, 0xEA, 0x4F, 0xF7, 0x49, 0x4B, 0xC3,
0x5E, 0x16, 0xA6, 0x47, 0xDA, 0xC3, 0xB8, 0x4C,
0x86, 0x2F, 0x27, 0xB8, 0x9D, 0x00, 0xF4, 0x7E,
0x58, 0x77, 0x8C, 0x46, 0x31, 0x2B, 0x7E, 0xC1,
0xCB, 0x4A, 0x59, 0xFC, 0x44, 0xFA, 0xF8, 0x70]);
}
}<|fim▁end|> | for x in parityblock.iter_mut().rev() { |
<|file_name|>DelimitedPayloadTokenFilterFactory.java<|end_file_name|><|fim▁begin|>/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.payloads.*;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.assistedinject.Assisted;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.settings.IndexSettings;
/**<|fim▁hole|>public class DelimitedPayloadTokenFilterFactory extends AbstractTokenFilterFactory {
public static final char DEFAULT_DELIMITER = '|';
public static final PayloadEncoder DEFAULT_ENCODER = new FloatEncoder();
static final String ENCODING = "encoding";
static final String DELIMITER = "delimiter";
char delimiter;
PayloadEncoder encoder;
@Inject
public DelimitedPayloadTokenFilterFactory(Index index, @IndexSettings Settings indexSettings, Environment env, @Assisted String name,
@Assisted Settings settings) {
super(index, indexSettings, name, settings);
String delimiterConf = settings.get(DELIMITER);
if (delimiterConf != null) {
delimiter = delimiterConf.charAt(0);
} else {
delimiter = DEFAULT_DELIMITER;
}
        String encodingConf = settings.get(ENCODING);
        if (encodingConf != null) {
            if ("float".equals(encodingConf)) {
                encoder = new FloatEncoder();
            } else if ("int".equals(encodingConf)) {
                encoder = new IntegerEncoder();
            } else if ("identity".equals(encodingConf)) {
                encoder = new IdentityEncoder();
            } else {
                // Fall back to the default rather than leaving the encoder
                // null for an unrecognized value.
                encoder = DEFAULT_ENCODER;
            }
        } else {
            encoder = DEFAULT_ENCODER;
        }
}
@Override
public TokenStream create(TokenStream tokenStream) {
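        // Example: with the default '|' delimiter and the float encoder, the
        // token "engineering|2.5" is indexed as the term "engineering"
        // carrying the payload 2.5f.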
        return new DelimitedPayloadTokenFilter(tokenStream, delimiter, encoder);
}
}<|fim▁end|> | *
*/ |
<|file_name|>UntagResourceResult.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.iotevents.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/iotevents-2018-07-27/UntagResource" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UntagResourceResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {<|fim▁hole|> if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof UntagResourceResult == false)
return false;
UntagResourceResult other = (UntagResourceResult) obj;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
return hashCode;
}
@Override
public UntagResourceResult clone() {
try {
return (UntagResourceResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
}<|fim▁end|> | |
<|file_name|>chunkwriting.go<|end_file_name|><|fim▁begin|>package azblob
import (
"bytes"
"context"
"encoding/base64"
"encoding/binary"
"errors"
"fmt"
"io"
"sync"
"sync/atomic"
guuid "github.com/google/uuid"
)
// blockWriter provides methods to upload blocks that represent a file to a server and commit them.
// This allows us to provide a local implementation that fakes the server for hermetic testing.
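// A hermetic test double might simply record staged blocks in memory, e.g.
// (a sketch; the type below is an assumption, not part of this package):
//
//	type fakeBlockWriter struct {
//		staged map[string][]byte
//	}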
type blockWriter interface {
StageBlock(context.Context, string, io.ReadSeeker, LeaseAccessConditions, []byte, ClientProvidedKeyOptions) (*BlockBlobStageBlockResponse, error)
CommitBlockList(context.Context, []string, BlobHTTPHeaders, Metadata, BlobAccessConditions, AccessTierType, BlobTagsMap, ClientProvidedKeyOptions) (*BlockBlobCommitBlockListResponse, error)
}
// copyFromReader copies a source io.Reader to blob storage using concurrent uploads.<|fim▁hole|>// well, 4 MiB or 8 MiB, and autoscale to as many goroutines within the memory limit. This gives a single dial to tweak and we can
// choose a max value for the memory setting based on internal transfers within Azure (which will give us the maximum throughput model).
// We can even provide a utility to dial this number in for customer networks to optimize their copies.
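//
// A minimal internal usage sketch (the static-buffer TransferManager
// constructor and its arguments are assumptions for illustration):
//
//	tm, err := NewStaticBuffer(4*1024*1024, 16) // 4 MiB chunks, up to 16 concurrent writers
//	if err != nil {
//		// handle the error
//	}
//	defer tm.Close()
//	resp, err := copyFromReader(ctx, file, blockBlobURL,
//		UploadStreamToBlockBlobOptions{TransferManager: tm})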
func copyFromReader(ctx context.Context, from io.Reader, to blockWriter, o UploadStreamToBlockBlobOptions) (*BlockBlobCommitBlockListResponse, error) {
if err := o.defaults(); err != nil {
return nil, err
}
ctx, cancel := context.WithCancel(ctx)
defer cancel()
cp := &copier{
ctx: ctx,
cancel: cancel,
reader: from,
to: to,
id: newID(),
o: o,
errCh: make(chan error, 1),
}
// Send all our chunks until we get an error.
var err error
for {
if err = cp.sendChunk(); err != nil {
break
}
}
// If the error is not EOF, then we have a problem.
if err != nil && !errors.Is(err, io.EOF) {
_ = cp.waitForFinish()
return nil, err
}
// Close out our upload.
if err := cp.close(); err != nil {
return nil, err
}
return cp.result, nil
}
// copier streams a file via chunks in parallel from a reader representing a file.
// Do not use directly, instead use copyFromReader().
type copier struct {
	// ctx holds the context of a copier. Storing a Context in a struct is normally a faux pas. In this case,
	// the copier has the lifetime of a single function call, so it's fine.
ctx context.Context
cancel context.CancelFunc
// o contains our options for uploading.
o UploadStreamToBlockBlobOptions
// id provides the ids for each chunk.
id *id
// reader is the source to be written to storage.
reader io.Reader
// to is the location we are writing our chunks to.
to blockWriter
// errCh is used to hold the first error from our concurrent writers.
errCh chan error
// wg provides a count of how many writers we are waiting to finish.
wg sync.WaitGroup
// result holds the final result from blob storage after we have submitted all chunks.
result *BlockBlobCommitBlockListResponse
}
type copierChunk struct {
buffer []byte
id string
}
// getErr returns an error by priority. First, if a function set an error, it returns that error. Next, if the Context has an error
// it returns that error. Otherwise it is nil. getErr supports only returning an error once per copier.
func (c *copier) getErr() error {
select {
case err := <-c.errCh:
return err
default:
}
return c.ctx.Err()
}
// sendChunk reads data from our internal reader, creates a chunk, and sends it to be written via a channel.
// sendChunk returns io.EOF when the reader returns an io.EOF or io.ErrUnexpectedEOF.
func (c *copier) sendChunk() error {
if err := c.getErr(); err != nil {
return err
}
buffer := c.o.TransferManager.Get()
if len(buffer) == 0 {
return fmt.Errorf("TransferManager returned a 0 size buffer, this is a bug in the manager")
}
n, err := io.ReadFull(c.reader, buffer)
switch {
case err == nil && n == 0:
return nil
case err == nil:
id := c.id.next()
c.wg.Add(1)
c.o.TransferManager.Run(
func() {
defer c.wg.Done()
c.write(copierChunk{buffer: buffer[0:n], id: id})
},
)
return nil
case err != nil && (err == io.EOF || err == io.ErrUnexpectedEOF) && n == 0:
return io.EOF
}
if err == io.EOF || err == io.ErrUnexpectedEOF {
id := c.id.next()
c.wg.Add(1)
c.o.TransferManager.Run(
func() {
defer c.wg.Done()
c.write(copierChunk{buffer: buffer[0:n], id: id})
},
)
return io.EOF
}
if err := c.getErr(); err != nil {
return err
}
return err
}
// write uploads a chunk to blob storage.
func (c *copier) write(chunk copierChunk) {
defer c.o.TransferManager.Put(chunk.buffer)
if err := c.ctx.Err(); err != nil {
return
}
_, err := c.to.StageBlock(c.ctx, chunk.id, bytes.NewReader(chunk.buffer), c.o.AccessConditions.LeaseAccessConditions, nil, c.o.ClientProvidedKeyOptions)
if err != nil {
c.errCh <- fmt.Errorf("write error: %w", err)
return
}
}
// close commits our blocks to blob storage and closes our writer.
func (c *copier) close() error {
if err := c.waitForFinish(); err != nil {
return err
}
var err error
c.result, err = c.to.CommitBlockList(c.ctx, c.id.issued(), c.o.BlobHTTPHeaders, c.o.Metadata, c.o.AccessConditions, c.o.BlobAccessTier, c.o.BlobTagsMap, c.o.ClientProvidedKeyOptions)
return err
}
// waitForFinish waits for all writes to complete while combining errors from errCh
func (c *copier) waitForFinish() error {
var err error
done := make(chan struct{})
go func() {
// when write latencies are long, several errors might have occurred
// drain them all as we wait for writes to complete.
err = c.drainErrs(done)
}()
c.wg.Wait()
close(done)
return err
}
// drainErrs drains all outstanding errors from writes
func (c *copier) drainErrs(done chan struct{}) error {
var err error
for {
select {
case <-done:
return err
default:
if writeErr := c.getErr(); writeErr != nil {
err = combineErrs(err, writeErr)
}
}
}
}
// combineErrs combines err with newErr so multiple errors can be represented
func combineErrs(err, newErr error) error {
if err == nil {
return newErr
}
return fmt.Errorf("%s, %w", err.Error(), newErr)
}
// id allows the creation of unique IDs based on UUID4 + an int32. This auto-increments.
type id struct {
u [64]byte
num uint32
all []string
}
// newID constructs a new id.
func newID() *id {
uu := guuid.New()
u := [64]byte{}
copy(u[:], uu[:])
return &id{u: u}
}
// next returns the next ID.
func (id *id) next() string {
defer atomic.AddUint32(&id.num, 1)
binary.BigEndian.PutUint32((id.u[len(guuid.UUID{}):]), atomic.LoadUint32(&id.num))
str := base64.StdEncoding.EncodeToString(id.u[:])
id.all = append(id.all, str)
return str
}
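// Illustrative note: each ID is the base64 encoding of a fixed 64-byte buffer
// (the 16-byte UUID followed by the big-endian counter), so every ID in an
// upload has the same length -- an assumption that matches the Azure docs,
// which require block IDs within a blob to be of equal length.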
// issued returns all ids that have been issued. This returned value shares the internal slice so it is not safe to modify the return.
// The value is only valid until the next time next() is called.
func (id *id) issued() []string {
return id.all
}<|fim▁end|> | // TODO(someone): The existing model provides a buffer size and buffer limit as limiting factors. The buffer size is probably
// useless other than needing to be above some number, as the network stack is going to hack up the buffer over some size. The
// max buffers is providing a cap on how much memory we use (by multiplying it times the buffer size) and how many go routines can upload
// at a time. I think having a single max memory dial would be more efficient. We can choose an internal buffer size that works |
<|file_name|>app.js<|end_file_name|><|fim▁begin|>/*
* Copyright 2016 e-UCM (http://www.e-ucm.es/)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* This project has received funding from the European Union’s Horizon
* 2020 research and innovation programme under grant agreement No 644187.
* You may obtain a copy of the License at
*
 *     http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
// Declare app level module which depends on filters, and services
angular.module('myApp', [
'ngRoute', 'toolbarApp', 'signupApp', 'loginApp', 'loginPluginApp', 'classApp', 'participantsApp', 'classesApp', 'activitiesApp',
    'activityApp', 'gameApp', 'analysisApp', 'kibanaApp', 'gamesApp', 'analyticsApp', 'devVisualizatorApp',
'services', 'xeditable', 'env-vars', 'ui.router', 'blockUI'
]).run(function (editableOptions, $localStorage, $cookies) {
editableOptions.theme = 'bs3';
if ($localStorage.user) {
$cookies.put('rageUserCookie', $localStorage.user.token, {
path: '/'
});
}
}).filter('prettyDateId', function () {
return function (_id) {
if (_id) {
return $.format.prettyDate(new Date(parseInt(_id.slice(0, 8), 16) * 1000));
}
};
}).filter('prettyDate', function () {
return function (date) {
if (date) {
return $.format.prettyDate(new Date(date));
}
};
}).filter('list', function () {
return function (list) {
if (!list || list.length === 0) {
return 'Empty list';
}
var result = '';
list.forEach(function (v) {
result += v + ', ';
});
return result;
};
}).filter('object2array', function () {
return function (input) {
var out = [];
for (var i in input) {
out.push(input[i]);
}
return out;
};
}).factory('httpRequestInterceptor', ['$localStorage',
function ($localStorage) {
return {
request: function (config) {
config.headers.Accept = 'application/json';
if ($localStorage.user) {
config.headers.Authorization = 'Bearer ' + $localStorage.user.token;
}
return config;
}
};
}
]).config(['$routeProvider', '$httpProvider', '$locationProvider', '$stateProvider', 'blockUIConfig',
function ($routeProvider, $httpProvider, $locationProvider, $stateProvider, blockUIConfig) {
$httpProvider.interceptors.push('httpRequestInterceptor');
$locationProvider.html5Mode({enabled: true, requireBase: false});
$stateProvider.state({
name: 'default',
url: '/',
templateUrl: 'view/home'
});
$stateProvider.state({
name: 'home',
url: '/home',
templateUrl: 'view/home'
});
$stateProvider.state({
name: 'login',
url: '/login',
templateUrl: 'view/login'
});
$stateProvider.state({
name: 'signup',
url: '/signup',
templateUrl: 'view/signup'
});
$stateProvider.state({
name: 'class',
url: '/class',
templateUrl: 'view/classactivity'
});
$stateProvider.state({
name: 'data',
url: '/data',
templateUrl: 'view/data'
});
$stateProvider.state({
name: 'game',
url: '/game',
templateUrl: 'view/gameactivity'
});
blockUIConfig.autoBlock = false;
blockUIConfig.message = 'Please wait...';
}
]).controller('AppCtrl', ['$rootScope', '$scope', '$location', '$http', '$timeout', '$localStorage', '$window',
'Games', 'Classes', 'Activities', 'Versions', 'Analysis', 'Role', 'CONSTANTS', 'QueryParams',
function ($rootScope, $scope, $location, $http, $timeout, $localStorage,
$window, Games, Classes, Activities, Versions, Analysis, Role, CONSTANTS, QueryParams) {
$scope.$storage = $localStorage;
$scope.DOCS = CONSTANTS.DOCS;
// Role determination
$scope.isUser = function () {
return Role.isUser();
};
$scope.isAdmin = function () {
return Role.isAdmin();
};
$scope.isStudent = function () {
return Role.isStudent();
};
$scope.isTeacher = function () {
return Role.isTeacher();
};
$scope.isOfflineActivity = function () {
return $scope.isOfflineActivityParam($scope.selectedActivity);
};
$scope.isOnlineActivity = function () {
return $scope.isOnlineActivityParam($scope.selectedActivity);
};
$scope.isOfflineActivityParam = function (activity) {
return activity && activity.offline;
};
$scope.isOnlineActivityParam = function (activity) {
return activity && !activity.offline;
};
$scope.isDeveloper = function () {
return Role.isDeveloper();
};
$scope.goToClass = function(c) {
$scope.$emit('selectClass', { class: c});
};
$scope.goToGame = function(game) {
$scope.$emit('selectGame', { game: game});
};
$scope.goToActivity = function(activity) {
$scope.$emit('selectActivity', { activity: activity});
};
var checkLogin = function() {
$scope.username = $scope.isUser() ? $scope.$storage.user.username : '';
};
checkLogin();
$scope.$on('login', checkLogin);
$scope.href = function (href) {
$window.location.href = href;
};
$scope.logout = function () {
$http.delete(CONSTANTS.APIPATH + '/logout').success(function () {
delete $scope.$storage.user;
$timeout(function () {
$location.url('login');
}, 50);
}).error(function (data, status) {
delete $scope.$storage.user;
console.error('Error on get /logout ' + JSON.stringify(data) + ', status: ' + status);
});
};
$scope.testIndex = 'default';
$scope.statementSubmitted = false;
$scope.submitStatementsFile = function () {
$scope.loadingDashboard = true;
$scope.statementsFile.contents = JSON.parse($scope.statementsFile.contents);
if ($scope.statementsFile.contents) {
$http.post(CONSTANTS.PROXY + '/activities/test/' + $scope.selectedGame._id, $scope.statementsFile.contents)
.success(function (data) {
$scope.testIndex = data.id;
$scope.statementSubmitted = true;
$scope.generateTestVisualization();
$scope.loadingDashboard = false;
}).error(function (data, status) {
$scope.statementSubmitted = true;
$scope.generateTestVisualization();
console.error('Error on post /activities/test/' + $scope.selectedGame._id + ' ' + JSON.stringify(data) + ', status: ' + status);
$scope.loadingDashboard = false;
});
}
};
if (!$scope.selectedConfigView) {
$scope.selectedConfigView = 'stormAnalysis';
}
$scope.getActiveClass = function (id) {
if (id === $scope.selectedConfigView) {
return 'active';
}
return null;
};
$scope.templateButtonMsg = function (opened) {
if (opened) {
return 'Hide default JSON';
}
return 'Show JSON';
};<|fim▁hole|> if (params.game) {
$scope.selectedGame = params.game;
Versions.forGame({gameId: params.game._id}).$promise.then(function(versions) {
$scope.selectedVersion = versions[0];
if (Role.isDeveloper()) {
$location.url('data');
} else {
$location.url('game');
}
$location.search('game', params.game._id);
$location.search('version', $scope.selectedVersion._id);
});
}
});
$scope.$on('selectClass', function (event, params) {
if (params.class) {
$scope.selectedClass = params.class;
$location.url('class');
$location.search('class', params.class._id);
}
});
$scope.$on('selectActivity', function (event, params) {
if (params.activity) {
$scope.selectedActivity = params.activity;
$scope.selectedClass = Classes.get({classId: params.activity.classId});
$scope.selectedVersion = Versions.get({gameId: gameId, versionId: params.activity.versionId});
$scope.selectedGame = Games.get({gameId: params.activity.gameId});
$location.url('data');
$location.search('activity', params.activity._id);
}
});
$scope.developer = {
name: ''
};
// Load
if ($scope.isUser()) {
var gameId = QueryParams.getQueryParam('game');
if (gameId) {
$scope.selectedGame = Games.get({gameId: gameId});
}
var versionId = QueryParams.getQueryParam('version');
if (gameId && versionId) {
$scope.selectedVersion = Versions.get({gameId: gameId, versionId: versionId});
}
var classId = QueryParams.getQueryParam('class');
if (classId) {
$scope.selectedClass = Classes.get({classId: classId});
}
var activityId = QueryParams.getQueryParam('activity');
if (activityId) {
Activities.get({activityId: activityId}).$promise.then(function(activity) {
$scope.selectedActivity = activity;
$scope.selectedClass = Classes.get({classId: activity.classId});
$scope.selectedVersion = Versions.get({gameId: gameId, versionId: activity.versionId});
$scope.selectedGame = Games.get({gameId: activity.gameId});
});
}
} else if (!$window.location.pathname.endsWith('loginbyplugin')) {
$location.url('login');
}
}
]);<|fim▁end|> |
$scope.$on('selectGame', function (event, params) { |
<|file_name|>app.js<|end_file_name|><|fim▁begin|>var app = angular.module('AtWork', [
'atwork.system',
'atwork.users',
'atwork.posts',
'atwork.streams',
'atwork.chats',
'atwork.activities',
'atwork.notifications',
'atwork.settings',
'ngMaterial']);
app.controller('AppCtrl', [
'$scope',
'$route',
'$rootScope',
'$mdSidenav',
'$mdBottomSheet',
'$location',
'$timeout',
'appLocation',
'appAuth',
'appWebSocket',
'appSettings',
'appSettingsValid',
'appToast',
function($scope, $route, $rootScope, $mdSidenav, $mdBottomSheet, $location, $timeout, appLocation, appAuth, appWebSocket, appSettings, appSettingsValid, appToast) {
$scope.barTitle = '';
$scope.search = '';
$scope.toggleSidenav = function(menuId) {
$mdSidenav(menuId).toggle();<|fim▁hole|>
$scope.updateLoginStatus = function() {
$scope.isLoggedIn = appAuth.isLoggedIn();
$scope.user = appAuth.getUser();
};
$scope.goHome = function() {
appLocation.url('/');
};
$scope.showUserActions = function($event) {
$mdBottomSheet.show({
templateUrl: '/modules/users/views/user-list.html',
controller: 'UserSheet',
targetEvent: $event
}).then(function(clickedItem) {
$scope.alert = clickedItem.name + ' clicked!';
});
};
var initiateSettings = function(cb) {
appSettings.fetch(function(settings) {
$rootScope.systemSettings = settings;
if (cb) {
cb();
}
});
};
/**
* Scroll the view to top on route change
*/
$scope.$on('$routeChangeSuccess', function() {
angular.element('*[md-scroll-y]').animate({scrollTop: 0}, 300);
$mdSidenav('left').close();
});
$scope.$on('loggedIn', function() {
$scope.updateLoginStatus();
$scope.barTitle = '';
$scope.$broadcast('updateNotifications');
appWebSocket.conn.emit('online', {token: appAuth.getToken()});
appAuth.refreshUser(function(user) {
$scope.user = user;
});
/**
* Fetch settings and get the app ready
*/
initiateSettings(function() {
$scope.$on('$routeChangeStart', function (event, toState) {
var valid = appSettingsValid();
if (!valid) {
appToast('Please complete the setup first.');
}
});
$scope.appReady = true;
$scope.barTitle = $rootScope.systemSettings.tagline;
$timeout(appSettingsValid);
});
});
$scope.$on('loggedOut', function() {
$scope.updateLoginStatus();
appWebSocket.conn.emit('logout', {token: appAuth.getToken()});
});
appWebSocket.conn.on('connect', function() {
if (appAuth.isLoggedIn()) {
appWebSocket.conn.emit('online', {token: appAuth.getToken()});
}
});
$scope.updateLoginStatus();
$timeout(function() {
if (!appAuth.isLoggedIn()) {
if (window.location.href.indexOf('/activate/') == -1 && window.location.href.indexOf('/changePassword/') == -1) {
appLocation.url('/login');
}
initiateSettings();
$scope.appReady = true;
} else {
$scope.barTitle = '';
$scope.$broadcast('loggedIn');
}
});
}
]);<|fim▁end|> | }; |
<|file_name|>cmp.rs<|end_file_name|><|fim▁begin|>#![feature(core)]
extern crate core;
// macro_rules! e {<|fim▁hole|> // ($(
// $Tuple:ident {
// $(($idx:tt) -> $T:ident)+
// }
// )+) => {
// $(
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<$($T:Clone),+> Clone for ($($T,)+) {
// fn clone(&self) -> ($($T,)+) {
// ($(e!(self.$idx.clone()),)+)
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<$($T:PartialEq),+> PartialEq for ($($T,)+) {
// #[inline]
// fn eq(&self, other: &($($T,)+)) -> bool {
// e!($(self.$idx == other.$idx)&&+)
// }
// #[inline]
// fn ne(&self, other: &($($T,)+)) -> bool {
// e!($(self.$idx != other.$idx)||+)
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<$($T:Eq),+> Eq for ($($T,)+) {}
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<$($T:PartialOrd + PartialEq),+> PartialOrd for ($($T,)+) {
// #[inline]
// fn partial_cmp(&self, other: &($($T,)+)) -> Option<Ordering> {
// lexical_partial_cmp!($(self.$idx, other.$idx),+)
// }
// #[inline]
// fn lt(&self, other: &($($T,)+)) -> bool {
// lexical_ord!(lt, $(self.$idx, other.$idx),+)
// }
// #[inline]
// fn le(&self, other: &($($T,)+)) -> bool {
// lexical_ord!(le, $(self.$idx, other.$idx),+)
// }
// #[inline]
// fn ge(&self, other: &($($T,)+)) -> bool {
// lexical_ord!(ge, $(self.$idx, other.$idx),+)
// }
// #[inline]
// fn gt(&self, other: &($($T,)+)) -> bool {
// lexical_ord!(gt, $(self.$idx, other.$idx),+)
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<$($T:Ord),+> Ord for ($($T,)+) {
// #[inline]
// fn cmp(&self, other: &($($T,)+)) -> Ordering {
// lexical_cmp!($(self.$idx, other.$idx),+)
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<$($T:Default),+> Default for ($($T,)+) {
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// fn default() -> ($($T,)+) {
// ($({ let x: $T = Default::default(); x},)+)
// }
// }
// )+
// }
// }
// // Constructs an expression that performs a lexical ordering using method $rel.
// // The values are interleaved, so the macro invocation for
// // `(a1, a2, a3) < (b1, b2, b3)` would be `lexical_ord!(lt, a1, b1, a2, b2,
// // a3, b3)` (and similarly for `lexical_cmp`)
// macro_rules! lexical_ord {
// ($rel: ident, $a:expr, $b:expr, $($rest_a:expr, $rest_b:expr),+) => {
// if $a != $b { lexical_ord!($rel, $a, $b) }
// else { lexical_ord!($rel, $($rest_a, $rest_b),+) }
// };
// ($rel: ident, $a:expr, $b:expr) => { ($a) . $rel (& $b) };
// }
// macro_rules! lexical_partial_cmp {
// ($a:expr, $b:expr, $($rest_a:expr, $rest_b:expr),+) => {
// match ($a).partial_cmp(&$b) {
// Some(Equal) => lexical_partial_cmp!($($rest_a, $rest_b),+),
// ordering => ordering
// }
// };
// ($a:expr, $b:expr) => { ($a).partial_cmp(&$b) };
// }
// macro_rules! lexical_cmp {
// ($a:expr, $b:expr, $($rest_a:expr, $rest_b:expr),+) => {
// match ($a).cmp(&$b) {
// Equal => lexical_cmp!($($rest_a, $rest_b),+),
// ordering => ordering
// }
// };
// ($a:expr, $b:expr) => { ($a).cmp(&$b) };
// }
// tuple_impls! {
// Tuple1 {
// (0) -> A
// }
// Tuple2 {
// (0) -> A
// (1) -> B
// }
// Tuple3 {
// (0) -> A
// (1) -> B
// (2) -> C
// }
// Tuple4 {
// (0) -> A
// (1) -> B
// (2) -> C
// (3) -> D
// }
// Tuple5 {
// (0) -> A
// (1) -> B
// (2) -> C
// (3) -> D
// (4) -> E
// }
// Tuple6 {
// (0) -> A
// (1) -> B
// (2) -> C
// (3) -> D
// (4) -> E
// (5) -> F
// }
// Tuple7 {
// (0) -> A
// (1) -> B
// (2) -> C
// (3) -> D
// (4) -> E
// (5) -> F
// (6) -> G
// }
// Tuple8 {
// (0) -> A
// (1) -> B
// (2) -> C
// (3) -> D
// (4) -> E
// (5) -> F
// (6) -> G
// (7) -> H
// }
// Tuple9 {
// (0) -> A
// (1) -> B
// (2) -> C
// (3) -> D
// (4) -> E
// (5) -> F
// (6) -> G
// (7) -> H
// (8) -> I
// }
// Tuple10 {
// (0) -> A
// (1) -> B
// (2) -> C
// (3) -> D
// (4) -> E
// (5) -> F
// (6) -> G
// (7) -> H
// (8) -> I
// (9) -> J
// }
// Tuple11 {
// (0) -> A
// (1) -> B
// (2) -> C
// (3) -> D
// (4) -> E
// (5) -> F
// (6) -> G
// (7) -> H
// (8) -> I
// (9) -> J
// (10) -> K
// }
// Tuple12 {
// (0) -> A
// (1) -> B
// (2) -> C
// (3) -> D
// (4) -> E
// (5) -> F
// (6) -> G
// (7) -> H
// (8) -> I
// (9) -> J
// (10) -> K
// (11) -> L
// }
// }
#[cfg(test)]
mod tests {
use core::cmp::Ordering::{self, Less, Equal, Greater};
macro_rules! cmp_test {
(
$($T:ident)+
) => (
{
let left: ($($T,)+) = ($($T::default(),)+);
let right: ($($T,)+) = ($($T::default() + 1 as $T,)+);
let result: Ordering = left.cmp(&right);
assert_eq!(result, Less);
}
{
let left: ($($T,)+) = ($($T::default(),)+);
let right: ($($T,)+) = ($($T::default(),)+);
let result: Ordering = left.cmp(&right);
assert_eq!(result, Equal);
}
{
let left: ($($T,)+) = ($($T::default() + 1 as $T,)+);
let right: ($($T,)+) = ($($T::default(),)+);
let result: Ordering = left.cmp(&right);
assert_eq!(result, Greater);
}
)
}
type A = u8;
type B = u16;
type C = u32;
type D = u64;
#[test]
fn cmp_test1() {
cmp_test! { A B C D };
}
}<|fim▁end|> | // ($e:expr) => { $e }
// }
// macro_rules! tuple_impls { |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![crate_name = "mockstream"]
#![crate_type = "lib"]
//! Reader/writer streams for mocking real streams in tests.
use std::cell::RefCell;
use std::io::{Cursor, Error, ErrorKind, Read, Result, Write};
use std::mem::swap;
use std::rc::Rc;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::{Arc, Mutex};
use std::thread::sleep;
use std::time;
#[cfg(test)]
mod tests;
fn find_subsequence<T>(haystack: &[T], needle: &[T]) -> Option<usize>
where
for<'a> &'a [T]: PartialEq,
{
haystack
.windows(needle.len())
.position(|window| window == needle)
}
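// For example (sketch): find_subsequence(b"abcde", b"cd") == Some(2), while
// find_subsequence(b"abcde", b"xy") == None.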
/// MockStream is a Read+Write stream that stores the data written and provides the data to be read.
#[derive(Clone)]
pub struct MockStream {
reader: Cursor<Vec<u8>>,
writer: Cursor<Vec<u8>>,
}
impl Default for MockStream {
fn default() -> Self {
MockStream::new()
}
}
fn new_cursor() -> Cursor<Vec<u8>> {
Cursor::new(Vec::new())
}
impl MockStream {
/// Create new empty stream
pub fn new() -> MockStream {
MockStream {
reader: new_cursor(),
writer: new_cursor(),
}
}
    /// Peek at all bytes written by Write trait calls, without consuming them.
    pub fn peek_bytes_written(&mut self) -> &Vec<u8> {
        self.writer.get_ref()
    }
/// Extract all bytes written by Write trait calls.
pub fn pop_bytes_written(&mut self) -> Vec<u8> {
let mut result = Vec::new();
swap(&mut result, self.writer.get_mut());
self.writer.set_position(0);
result
}
/// Provide data to be read by Read trait calls.
pub fn push_bytes_to_read(&mut self, bytes: &[u8]) {
let avail = self.reader.get_ref().len();
if self.reader.position() == avail as u64 {
self.reader = new_cursor();
}
self.reader.get_mut().extend(bytes.iter().copied());
}
}
impl Read for MockStream {
fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
self.reader.read(buf)
}
}
impl Write for MockStream {
fn write(&mut self, buf: &[u8]) -> Result<usize> {
self.writer.write(buf)
}
fn flush(&mut self) -> Result<()> {
self.writer.flush()
}
}
/// Reference-counted stream.
#[derive(Clone, Default)]
pub struct SharedMockStream {
pimpl: Rc<RefCell<MockStream>>,
}
impl SharedMockStream {
/// Create empty stream
pub fn new() -> SharedMockStream {
SharedMockStream::default()
}
    /// Provide data to be read by Read trait calls.
    pub fn push_bytes_to_read(&mut self, bytes: &[u8]) {
        self.pimpl.borrow_mut().push_bytes_to_read(bytes)
    }
    /// Extract all bytes written by Write trait calls.
    pub fn pop_bytes_written(&mut self) -> Vec<u8> {
self.pimpl.borrow_mut().pop_bytes_written()
}
}
impl Read for SharedMockStream {
fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
self.pimpl.borrow_mut().read(buf)
}
}
impl Write for SharedMockStream {
fn write(&mut self, buf: &[u8]) -> Result<usize> {
self.pimpl.borrow_mut().write(buf)
}
fn flush(&mut self) -> Result<()> {
self.pimpl.borrow_mut().flush()
}
}
/// Thread-safe stream.
#[derive(Clone, Default)]
pub struct SyncMockStream {
pimpl: Arc<Mutex<MockStream>>,
pub waiting_for_write: Arc<AtomicBool>,
pub expected_bytes: Vec<u8>,
}<|fim▁hole|>
impl SyncMockStream {
/// Create empty stream
pub fn new() -> SyncMockStream {
SyncMockStream::default()
}
/// Block reads until expected bytes are written.
pub fn wait_for(&mut self, expected_bytes: &[u8]) {
self.expected_bytes = expected_bytes.to_vec();
self.waiting_for_write.store(true, Ordering::Relaxed);
}
    /// Provide data to be read by Read trait calls.
    pub fn push_bytes_to_read(&mut self, bytes: &[u8]) {
        self.pimpl.lock().unwrap().push_bytes_to_read(bytes)
    }
    /// Extract all bytes written by Write trait calls.
    pub fn pop_bytes_written(&mut self) -> Vec<u8> {
self.pimpl.lock().unwrap().pop_bytes_written()
}
}
impl Read for SyncMockStream {
fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
while self.waiting_for_write.load(Ordering::Relaxed) {
sleep(time::Duration::from_millis(10));
}
self.pimpl.lock().unwrap().read(buf)
}
}
impl Write for SyncMockStream {
fn write(&mut self, buf: &[u8]) -> Result<usize> {
let mut x = self.pimpl.lock().unwrap();
match x.write(buf) {
Ok(rv) => {
if self.waiting_for_write.load(Ordering::Relaxed)
&& find_subsequence(x.peek_bytes_written(), &self.expected_bytes).is_some()
{
self.waiting_for_write.store(false, Ordering::Relaxed);
}
Ok(rv)
}
Err(rv) => Err(rv),
}
}
fn flush(&mut self) -> Result<()> {
self.pimpl.lock().unwrap().flush()
}
}
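// Usage sketch (illustrative, not part of the published API docs): coordinate a
// reader with a writer thread by blocking reads until the expected bytes arrive.
//
//	let mut s = SyncMockStream::new();
//	s.push_bytes_to_read(b"response");
//	s.wait_for(b"request");              // read() now blocks...
//	let mut peer = s.clone();
//	std::thread::spawn(move || {
//		use std::io::Write;
//		peer.write_all(b"request").unwrap(); // ...until this write lands
//	});
//	use std::io::Read;
//	let mut buf = [0u8; 8];
//	s.read_exact(&mut buf).unwrap();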
/// `FailingMockStream` mocks a stream which will fail upon read or write
///
/// # Examples
///
/// ```
/// use std::io::{Cursor, ErrorKind, Read};
/// use mockstream::FailingMockStream;
///
/// struct CountIo {}
///
/// impl CountIo {
/// fn read_data(&self, r: &mut Read) -> usize {
/// let mut count: usize = 0;
/// let mut retries = 3;
///
/// loop {
/// let mut buffer = [0; 5];
/// match r.read(&mut buffer) {
/// Err(_) => {
/// if retries == 0 { break; }
/// retries -= 1;
/// },
/// Ok(0) => break,
/// Ok(n) => count += n,
/// }
/// }
/// count
/// }
/// }
///
/// #[test]
/// fn test_io_retries() {
/// let mut c = Cursor::new(&b"1234"[..])
/// .chain(FailingMockStream::new(ErrorKind::Other, "Failing", 3))
/// .chain(Cursor::new(&b"5678"[..]));
///
/// let sut = CountIo {};
/// // this will fail unless read_data performs at least 3 retries on I/O errors
/// assert_eq!(8, sut.read_data(&mut c));
/// }
/// ```
#[derive(Clone)]
pub struct FailingMockStream {
kind: ErrorKind,
message: &'static str,
repeat_count: i32,
}
impl FailingMockStream {
/// Creates a FailingMockStream
///
/// When `read` or `write` is called, it will return an error `repeat_count` times.
/// `kind` and `message` can be specified to define the exact error.
pub fn new(kind: ErrorKind, message: &'static str, repeat_count: i32) -> FailingMockStream {
FailingMockStream {
kind,
message,
repeat_count,
}
}
fn error(&mut self) -> Result<usize> {
if self.repeat_count == 0 {
Ok(0)
} else {
if self.repeat_count > 0 {
self.repeat_count -= 1;
}
Err(Error::new(self.kind, self.message))
}
}
}
impl Read for FailingMockStream {
fn read(&mut self, _: &mut [u8]) -> Result<usize> {
self.error()
}
}
impl Write for FailingMockStream {
fn write(&mut self, _: &[u8]) -> Result<usize> {
self.error()
}
fn flush(&mut self) -> Result<()> {
Ok(())
}
}<|fim▁end|> | |
<|file_name|>select_node.go<|end_file_name|><|fim▁begin|>/*
Copyright 2018 The OpenEBS Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v1alpha2
import (
ndmapis "github.com/openebs/maya/pkg/apis/openebs.io/ndm/v1alpha1"
apis "github.com/openebs/maya/pkg/apis/openebs.io/v1alpha1"
bd "github.com/openebs/maya/pkg/blockdevice/v1alpha2"
bdc "github.com/openebs/maya/pkg/blockdeviceclaim/v1alpha1"
cspc "github.com/openebs/maya/pkg/cstor/poolcluster/v1alpha1"
csp "github.com/openebs/maya/pkg/cstor/poolinstance/v1alpha3"
nodeapis "github.com/openebs/maya/pkg/kubernetes/node/v1alpha1"
"github.com/openebs/maya/pkg/volume"
"github.com/pkg/errors"
k8serror "k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/klog"
)
// SelectNode returns a node where pool should be created.
func (ac *Config) SelectNode() (*apis.PoolSpec, string, error) {
usedNodes, err := ac.GetUsedNode()
if err != nil {
return nil, "", errors.Wrapf(err, "could not get used nodes list for pool creation")
}
for _, pool := range ac.CSPC.Spec.Pools {
// pin it
pool := pool
nodeName, err := GetNodeFromLabelSelector(pool.NodeSelector)
if err != nil || nodeName == "" {
klog.Errorf("could not use node for selectors {%v}", pool.NodeSelector)
continue
}
if ac.VisitedNodes[nodeName] {
continue
} else {
ac.VisitedNodes[nodeName] = true
if !usedNodes[nodeName] {
return &pool, nodeName, nil
}
}
}
return nil, "", errors.New("no node qualified for pool creation")
}
// GetNodeFromLabelSelector returns the node name selected by provided labels
// TODO : Move it to node package
func GetNodeFromLabelSelector(labels map[string]string) (string, error) {
nodeList, err := nodeapis.NewKubeClient().List(metav1.ListOptions{LabelSelector: getLabelSelectorString(labels)})
if err != nil {
return "", errors.Wrap(err, "failed to get node list from the node selector")
}
if len(nodeList.Items) != 1 {
return "", errors.Errorf("invalid no.of nodes %d from the given node selectors", len(nodeList.Items))
}
return nodeList.Items[0].Name, nil
}
// getLabelSelectorString returns a string of label selector form label map to be used in
// list options.
// TODO : Move it to node package
func getLabelSelectorString(selector map[string]string) string {
var selectorString string
for key, value := range selector {
selectorString = selectorString + key + "=" + value + ","
}
selectorString = selectorString[:len(selectorString)-len(",")]
return selectorString
}
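// For example (illustrative): a selector map {"kubernetes.io/hostname": "node-1"}
// yields the string "kubernetes.io/hostname=node-1". Note the function assumes a
// non-empty map, since the trailing-comma trim would panic on an empty selector.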
// GetUsedNode returns a map of node for which pool has already been created.
// Note: the Filter function from the node builder package is not used, as it needs
// the CSP builder package, which can cause import loops.
func (ac *Config) GetUsedNode() (map[string]bool, error) {
usedNode := make(map[string]bool)
cspList, err := csp.
NewKubeClient().
WithNamespace(ac.Namespace).
List(
metav1.
ListOptions{LabelSelector: string(apis.CStorPoolClusterCPK) + "=" + ac.CSPC.Name},
)
if err != nil {
return nil, errors.Wrap(err, "could not list already created csp(s)")
}
for _, cspObj := range cspList.Items {
usedNode[cspObj.Labels[string(apis.HostNameCPK)]] = true
}
return usedNode, nil
}
// GetBDListForNode returns a list of BD from the pool spec.
// TODO : Move it to CStorPoolCluster packgage
func (ac *Config) GetBDListForNode(pool *apis.PoolSpec) []string {
var BDList []string
for _, group := range pool.RaidGroups {
for _, bd := range group.BlockDevices {
BDList = append(BDList, bd.BlockDeviceName)
}
}<|fim▁hole|> return BDList
}
// ClaimBDsForNode claims a given BlockDevice for node
// If the block device(s) is/are already claimed for any other CSPC it returns error.
// If the block device(s) is/are already claimed for the same CSPC -- it is left as it is and can be used for
// pool provisioning.
// If the block device(s) is/are unclaimed, then those are claimed.
func (ac *Config) ClaimBDsForNode(BD []string) error {
pendingClaim := 0
for _, bdName := range BD {
bdAPIObj, err := bd.NewKubeClient().WithNamespace(ac.Namespace).Get(bdName, metav1.GetOptions{})
if err != nil {
return errors.Wrapf(err, "error in getting details for BD {%s} whether it is claimed", bdName)
}
if bd.BuilderForAPIObject(bdAPIObj).BlockDevice.IsClaimed() {
IsClaimedBDUsable, errBD := ac.IsClaimedBDUsable(bdAPIObj)
if errBD != nil {
				return errors.Wrapf(errBD, "error in getting details for BD {%s} for usability", bdName)
}
if !IsClaimedBDUsable {
return errors.Errorf("BD {%s} already in use", bdName)
}
continue
}
err = ac.ClaimBD(bdAPIObj)
if err != nil {
return errors.Wrapf(err, "Failed to claim BD {%s}", bdName)
}
pendingClaim++
}
if pendingClaim > 0 {
return errors.Errorf("%d block device claims are pending", pendingClaim)
}
return nil
}
// ClaimBD claims a given BlockDevice
func (ac *Config) ClaimBD(bdObj *ndmapis.BlockDevice) error {
newBDCObj, err := bdc.NewBuilder().
WithName("bdc-cstor-" + string(bdObj.UID)).
WithNamespace(ac.Namespace).
WithLabels(map[string]string{string(apis.CStorPoolClusterCPK): ac.CSPC.Name}).
WithBlockDeviceName(bdObj.Name).
WithHostName(bdObj.Labels[string(apis.HostNameCPK)]).
WithCapacity(volume.ByteCount(bdObj.Spec.Capacity.Storage)).
WithCSPCOwnerReference(ac.CSPC).
WithFinalizer(cspc.CSPCFinalizer).
Build()
if err != nil {
return errors.Wrapf(err, "failed to build block device claim for bd {%s}", bdObj.Name)
}
_, err = bdc.NewKubeClient().WithNamespace(ac.Namespace).Create(newBDCObj.Object)
if k8serror.IsAlreadyExists(err) {
klog.Infof("BDC for BD {%s} already created", bdObj.Name)
return nil
}
if err != nil {
return errors.Wrapf(err, "failed to create block device claim for bd {%s}", bdObj.Name)
}
return nil
}
// IsClaimedBDUsable returns true if the passed BD is already claimed and can be
// used for provisioning
func (ac *Config) IsClaimedBDUsable(bdAPIObj *ndmapis.BlockDevice) (bool, error) {
bdObj := bd.BuilderForAPIObject(bdAPIObj)
if bdObj.BlockDevice.IsClaimed() {
bdcName := bdObj.BlockDevice.Object.Spec.ClaimRef.Name
bdcAPIObject, err := bdc.NewKubeClient().WithNamespace(ac.Namespace).Get(bdcName, metav1.GetOptions{})
if err != nil {
return false, errors.Wrapf(err, "could not get block device claim for block device {%s}", bdAPIObj.Name)
}
bdcObj := bdc.BuilderForAPIObject(bdcAPIObject)
if bdcObj.BDC.HasLabel(string(apis.CStorPoolClusterCPK), ac.CSPC.Name) {
return true, nil
}
} else {
return false, errors.Errorf("block device {%s} is not claimed", bdAPIObj.Name)
}
return false, nil
}
// ValidatePoolSpec validates the pool spec.
// TODO: Fix the following function (currently a mock only)
func ValidatePoolSpec(pool *apis.PoolSpec) bool {
return true
}<|fim▁end|> | |
<|file_name|>object.py<|end_file_name|><|fim▁begin|>"""
Copyright (C) 2014 Maruf Maniruzzaman
Website: http://cosmosframework.com<|fim▁hole|><|fim▁end|> | Author: Maruf Maniruzzaman
License :: OSI Approved :: MIT License
""" |
<|file_name|>func.js<|end_file_name|><|fim▁begin|>function timenow(){
    var timenow1 = new Date().getHours();<|fim▁hole|>
<|file_name|>api-functions.js<|end_file_name|><|fim▁begin|>const request = require('request-promise');
const oauth = require('./config').auth;
const rootUrl = 'https://api.twitter.com/1.1';
let allItems = [];
<|fim▁hole|> /**
* Search for tweets
* @param options {Object} Options object containing:
* - text (Required) : String
* - count (optional) : Number
* - result_type (optional) : String
* - geocode (optional) : String (lat long radius_in_miles)
* - since_id (optional) : Number - start search from this ID
* - max_id (optional) : Number - end search on this ID
*/
search: (options) => {
return new Promise((resolve, reject) => {
const {text, count = 100, result_type = 'popular', since_id = 0, max_id, geocode} = options;
let params =
`?q=${encodeURIComponent(options.text)}` +
`&count=${count}` +
`&result_type=${result_type}` +
`&since_id=${since_id}`;
if (max_id) {
params += `&max_id=${max_id}`;
}
if (geocode) {
params += `&geocode=${encodeURIComponent(geocode)}`;
}
allItems = [];
API.searchByStringParam(params).then((items) => resolve(items)).catch((err) => reject(err));
});
},
/**
* Search w/ params
* @param stringParams {String} Params as string
*/
searchByStringParam: (stringParams) =>
new Promise((resolve, reject) => {
const searchCallback = (res) => {
const result = JSON.parse(res);
if (result && result.statuses) {
result.statuses.forEach(item => allItems.push(item));
console.log('[Search] So far we have', allItems.length, 'items');
// If we have the next_results, search again for the rest (sort of a pagination)
const nextRes = result.search_metadata.next_results;
if (nextRes) {
API.searchByStringParam(nextRes).then((items) => resolve(items));
} else {
resolve(allItems);
}
} else {
resolve(null);
}
};
request.get({url: `${rootUrl}/search/tweets.json${stringParams}`, oauth})
.then(res => searchCallback(res))
.catch(err => reject(err));
})
,
/**
* Retweet a tweet
* @param tweetId {String} identifier for the tweet
*/
retweet: (tweetId) =>
new Promise((resolve, reject) =>
request.post({url: `${rootUrl}/statuses/retweet/${tweetId}.json`, oauth})
.then((res) => resolve(res))
.catch((err) => reject(err))
)
,
/**
* Like (aka favorite) a tweet
* @param tweetId {String} identifier for the tweet
*/
like: (tweetId) =>
new Promise((resolve, reject) =>
request.post({url: `${rootUrl}/favorites/create.json?id=${tweetId}`, oauth})
.then((res) => resolve(res))
.catch((err) => reject(err))
)
,
/**
* Follow a user by username
* @param userId {String} identifier for the user
*/
follow: (userId) =>
new Promise((resolve, reject) =>
request.post({url: `${rootUrl}/friendships/create.json?user_id=${userId}`, oauth})
.then((res) => resolve(res))
.catch((err) => reject(err))
)
,
/**
* Follow a user by username
* @param userName {String} username identifier for the user
*/
followByUsername: (userName) =>
new Promise((resolve, reject) =>
request.post({url: `${rootUrl}/friendships/create.json?screen_name=${userName}`, oauth})
.then((res) => resolve(res))
.catch((err) => reject(err))
)
,
/**
* Block a user
* @param userId {String} ID of the user to block
*/
blockUser: (userId) =>
new Promise((resolve, reject) =>
request.post({url: `${rootUrl}/blocks/create.json?user_id=${userId}`, oauth})
.then((res) => resolve(res))
.catch((err) => reject(err))
)
,
/** Get list of blocked users for the current user */
getBlockedUsers: () =>
new Promise((resolve, reject) =>
request.get({url: `${rootUrl}/blocks/list.json`, oauth})
.then((res) => resolve(JSON.parse(res).users.map((user) => user.id)))
.catch((err) => reject(err))
)
,
/**
* Get a user's tweets
* @param userId {String} identifier for the user
* @param count {Number} max tweets to retrieve
*/
getTweetsForUser: (userId, count) =>
new Promise((resolve, reject) =>
request.get({url: `${rootUrl}/statuses/user_timeline.json?user_id=${userId}&count=${count}`, oauth})
.then((response) => resolve(response))
.catch((err) => reject(err))
)
,
/**
* Delete a tweet
* @param tweetId {String} identifier for the tweet
*/
deleteTweet: (tweetId) =>
new Promise((resolve, reject) =>
request.post({url: `${rootUrl}/statuses/destroy/${tweetId}.json`, oauth})
.then(() => {
console.log('Deleted tweet', tweetId);
resolve();
})
.catch((err) => reject(err))
)
,
/**
* Reply to a tweet
* (The Reply on Twitter is basically a Status Update containing @username, where username is author of the original tweet)
* @param tweet {Object} The full Tweet we want to reply to
*/
replyToTweet: (tweet) =>
new Promise((resolve, reject) => {
try {
const text = encodeURIComponent(`@${tweet.user.screen_name} `);
request.post({
url: `${rootUrl}/statuses/update.json?status=${text}&in_reply_to_status_id=${tweet.id}`,
oauth
})
.then(() => resolve())
.catch(err => reject(err))
} catch (err) {
reject(err);
}
})
};
module.exports = API;<|fim▁end|> | /* API methods */
const API = {
|
<|file_name|>main.js<|end_file_name|><|fim▁begin|>if (Meteor.isClient) {
// counter starts at 0
Template.main.helpers({
});
Template.main.events({<|fim▁hole|>if (Meteor.isServer) {
Meteor.startup(function () {
// code to run on server at startup
});
}<|fim▁end|> |
});
}
|
<|file_name|>gen_rst.py<|end_file_name|><|fim▁begin|>"""
Example generation for scikit-learn
Generate the rst files for the examples by iterating over the python
example files.
Files that generate images should start with 'plot'
"""
from __future__ import division, print_function
from time import time
import ast
import os
import re
import shutil
import traceback
import glob
import sys
import gzip
import posixpath
import subprocess
import warnings
from sklearn.externals import six
# Try Python 2 first, otherwise load from Python 3
try:
from StringIO import StringIO
import cPickle as pickle
import urllib2 as urllib
from urllib2 import HTTPError, URLError
except ImportError:
from io import StringIO
import pickle
import urllib.request
import urllib.error
import urllib.parse
from urllib.error import HTTPError, URLError
try:
# Python 2 built-in
execfile
except NameError:
def execfile(filename, global_vars=None, local_vars=None):
with open(filename, encoding='utf-8') as f:
code = compile(f.read(), filename, 'exec')
exec(code, global_vars, local_vars)
try:
basestring
except NameError:
basestring = str
import token
import tokenize
import numpy as np
try:
# make sure that the Agg backend is set before importing any
# matplotlib
import matplotlib
matplotlib.use('Agg')
except ImportError:
# this script can be imported by nosetest to find tests to run: we should not
# impose the matplotlib requirement in that case.
pass
from sklearn.externals import joblib
###############################################################################
# A tee object to redirect streams to multiple outputs
class Tee(object):
def __init__(self, file1, file2):
self.file1 = file1
self.file2 = file2
def write(self, data):
self.file1.write(data)
self.file2.write(data)
def flush(self):
self.file1.flush()
self.file2.flush()
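# Usage sketch (illustrative): mirror a stream into two sinks at once, e.g.
#   my_stdout = Tee(sys.stdout, StringIO())
# so example output can be captured for the generated rst while still printing.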
###############################################################################
# Documentation link resolver objects
def _get_data(url):
"""Helper function to get data over http or from a local file"""
if url.startswith('http://'):
# Try Python 2, use Python 3 on exception
try:
resp = urllib.urlopen(url)
encoding = resp.headers.dict.get('content-encoding', 'plain')
except AttributeError:
resp = urllib.request.urlopen(url)
encoding = resp.headers.get('content-encoding', 'plain')
data = resp.read()
if encoding == 'plain':
pass
elif encoding == 'gzip':
data = StringIO(data)
data = gzip.GzipFile(fileobj=data).read()
else:
raise RuntimeError('unknown encoding')
else:
with open(url, 'r') as fid:
data = fid.read()
fid.close()
return data
mem = joblib.Memory(cachedir='_build')
get_data = mem.cache(_get_data)
def parse_sphinx_searchindex(searchindex):
"""Parse a Sphinx search index
Parameters
----------
searchindex : str
The Sphinx search index (contents of searchindex.js)
Returns
-------
filenames : list of str
The file names parsed from the search index.
objects : dict
The objects parsed from the search index.
"""
def _select_block(str_in, start_tag, end_tag):
"""Select first block delimited by start_tag and end_tag"""
start_pos = str_in.find(start_tag)
if start_pos < 0:
raise ValueError('start_tag not found')
depth = 0
for pos in range(start_pos, len(str_in)):
if str_in[pos] == start_tag:
depth += 1
elif str_in[pos] == end_tag:
depth -= 1
if depth == 0:
break
sel = str_in[start_pos + 1:pos]
return sel
def _parse_dict_recursive(dict_str):
"""Parse a dictionary from the search index"""
dict_out = dict()
pos_last = 0
pos = dict_str.find(':')
while pos >= 0:
key = dict_str[pos_last:pos]
if dict_str[pos + 1] == '[':
# value is a list
pos_tmp = dict_str.find(']', pos + 1)
if pos_tmp < 0:
raise RuntimeError('error when parsing dict')
value = dict_str[pos + 2: pos_tmp].split(',')
# try to convert elements to int
for i in range(len(value)):
try:
value[i] = int(value[i])
except ValueError:
pass
elif dict_str[pos + 1] == '{':
# value is another dictionary
subdict_str = _select_block(dict_str[pos:], '{', '}')
value = _parse_dict_recursive(subdict_str)
pos_tmp = pos + len(subdict_str)
else:
raise ValueError('error when parsing dict: unknown elem')
key = key.strip('"')
if len(key) > 0:
dict_out[key] = value
pos_last = dict_str.find(',', pos_tmp)
if pos_last < 0:
break
pos_last += 1
pos = dict_str.find(':', pos_last)
return dict_out
# Make sure searchindex uses UTF-8 encoding
if hasattr(searchindex, 'decode'):
searchindex = searchindex.decode('UTF-8')
# parse objects
query = 'objects:'
pos = searchindex.find(query)
if pos < 0:
raise ValueError('"objects:" not found in search index')
sel = _select_block(searchindex[pos:], '{', '}')
objects = _parse_dict_recursive(sel)
# parse filenames
query = 'filenames:'
pos = searchindex.find(query)
if pos < 0:
raise ValueError('"filenames:" not found in search index')
filenames = searchindex[pos + len(query) + 1:]
filenames = filenames[:filenames.find(']')]
filenames = [f.strip('"') for f in filenames.split(',')]
return filenames, objects
class SphinxDocLinkResolver(object):
""" Resolve documentation links using searchindex.js generated by Sphinx
Parameters
----------
doc_url : str
The base URL of the project website.
searchindex : str
Filename of searchindex, relative to doc_url.
extra_modules_test : list of str
List of extra module names to test.
relative : bool
Return relative links (only useful for links to documentation of this
package).
"""
def __init__(self, doc_url, searchindex='searchindex.js',
extra_modules_test=None, relative=False):
self.doc_url = doc_url
self.relative = relative
self._link_cache = {}
self.extra_modules_test = extra_modules_test
self._page_cache = {}
if doc_url.startswith('http://'):
if relative:
raise ValueError('Relative links are only supported for local '
'URLs (doc_url cannot start with "http://)"')
searchindex_url = doc_url + '/' + searchindex
else:
searchindex_url = os.path.join(doc_url, searchindex)
# detect if we are using relative links on a Windows system
if os.name.lower() == 'nt' and not doc_url.startswith('http://'):
if not relative:
raise ValueError('You have to use relative=True for the local'
' package on a Windows system.')
self._is_windows = True
else:
self._is_windows = False
# download and initialize the search index
sindex = get_data(searchindex_url)
filenames, objects = parse_sphinx_searchindex(sindex)
self._searchindex = dict(filenames=filenames, objects=objects)
def _get_link(self, cobj):
"""Get a valid link, False if not found"""
fname_idx = None
full_name = cobj['module_short'] + '.' + cobj['name']
if full_name in self._searchindex['objects']:
value = self._searchindex['objects'][full_name]
if isinstance(value, dict):
value = value[next(iter(value.keys()))]
fname_idx = value[0]
elif cobj['module_short'] in self._searchindex['objects']:
value = self._searchindex['objects'][cobj['module_short']]
if cobj['name'] in value.keys():
fname_idx = value[cobj['name']][0]
if fname_idx is not None:
fname = self._searchindex['filenames'][fname_idx] + '.html'
if self._is_windows:
fname = fname.replace('/', '\\')
link = os.path.join(self.doc_url, fname)
else:
link = posixpath.join(self.doc_url, fname)
if hasattr(link, 'decode'):
link = link.decode('utf-8', 'replace')
if link in self._page_cache:
html = self._page_cache[link]
else:
html = get_data(link)
self._page_cache[link] = html
# test if cobj appears in page
comb_names = [cobj['module_short'] + '.' + cobj['name']]
if self.extra_modules_test is not None:
for mod in self.extra_modules_test:
comb_names.append(mod + '.' + cobj['name'])
url = False
if hasattr(html, 'decode'):
# Decode bytes under Python 3
html = html.decode('utf-8', 'replace')
for comb_name in comb_names:
if hasattr(comb_name, 'decode'):
# Decode bytes under Python 3
comb_name = comb_name.decode('utf-8', 'replace')
if comb_name in html:
url = link + u'#' + comb_name
link = url
else:
link = False
return link
def resolve(self, cobj, this_url):
"""Resolve the link to the documentation, returns None if not found
Parameters
----------
cobj : dict
Dict with information about the "code object" for which we are
resolving a link.
            cobj['name'] : function or class name (str)
cobj['module_short'] : shortened module name (str)
cobj['module'] : module name (str)
this_url: str
URL of the current page. Needed to construct relative URLs
(only used if relative=True in constructor).
Returns
-------
link : str | None
The link (URL) to the documentation.
"""
full_name = cobj['module_short'] + '.' + cobj['name']
link = self._link_cache.get(full_name, None)
if link is None:
# we don't have it cached
link = self._get_link(cobj)
# cache it for the future
self._link_cache[full_name] = link
if link is False or link is None:
# failed to resolve
return None
if self.relative:
link = os.path.relpath(link, start=this_url)
if self._is_windows:
                # replace '\' with '/' so it works on the web
link = link.replace('\\', '/')
# for some reason, the relative link goes one directory too high up
link = link[3:]
return link
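# Usage sketch (illustrative values): build a resolver against the published
# docs and ask it for the page anchor of a known estimator, e.g.
#   resolver = SphinxDocLinkResolver('http://scikit-learn.org/stable')
#   cobj = {'name': 'SVC', 'module_short': 'sklearn.svm', 'module': 'sklearn.svm'}
#   link = resolver.resolve(cobj, this_url='auto_examples/plot_foo.html')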
###############################################################################
rst_template = """
.. _example_%(short_fname)s:
%(docstring)s
**Python source code:** :download:`%(fname)s <%(fname)s>`
.. literalinclude:: %(fname)s
:lines: %(end_row)s-
"""
plot_rst_template = """
.. _example_%(short_fname)s:
%(docstring)s
%(image_list)s
%(stdout)s
**Python source code:** :download:`%(fname)s <%(fname)s>`
.. literalinclude:: %(fname)s
:lines: %(end_row)s-
**Total running time of the example:** %(time_elapsed) .2f seconds
(%(time_m) .0f minutes %(time_s) .2f seconds)
"""
# The following strings are used when we have several pictures: we use
# an html div tag that our CSS uses to turn the lists into horizontal
# lists.
HLIST_HEADER = """
.. rst-class:: horizontal
"""
HLIST_IMAGE_TEMPLATE = """
*
.. image:: images/%s
:scale: 47
"""
SINGLE_IMAGE = """
.. image:: images/%s
:align: center
"""
# The following dictionary contains the information used to create the
# thumbnails for the front page of the scikit-learn home page.
# key: first image in set
# values: (number of plot in set, height of thumbnail)
carousel_thumbs = {'plot_classifier_comparison_001.png': (1, 600),
'plot_outlier_detection_001.png': (3, 372),
'plot_gp_regression_001.png': (2, 250),
'plot_adaboost_twoclass_001.png': (1, 372),
'plot_compare_methods_001.png': (1, 349)}
def extract_docstring(filename, ignore_heading=False):
""" Extract a module-level docstring, if any
"""
if six.PY2:
lines = open(filename).readlines()
else:
lines = open(filename, encoding='utf-8').readlines()
start_row = 0
if lines[0].startswith('#!'):
lines.pop(0)
start_row = 1
docstring = ''
first_par = ''
line_iterator = iter(lines)
tokens = tokenize.generate_tokens(lambda: next(line_iterator))
for tok_type, tok_content, _, (erow, _), _ in tokens:
tok_type = token.tok_name[tok_type]
if tok_type in ('NEWLINE', 'COMMENT', 'NL', 'INDENT', 'DEDENT'):
continue
elif tok_type == 'STRING':
docstring = eval(tok_content)
# If the docstring is formatted with several paragraphs, extract
# the first one:
paragraphs = '\n'.join(
line.rstrip() for line
in docstring.split('\n')).split('\n\n')
if paragraphs:
if ignore_heading:
if len(paragraphs) > 1:
first_par = re.sub('\n', ' ', paragraphs[1])
first_par = ((first_par[:95] + '...')
if len(first_par) > 95 else first_par)
else:
raise ValueError("Docstring not found by gallery.\n"
"Please check the layout of your"
" example file:\n {}\n and make sure"
" it's correct".format(filename))
else:
first_par = paragraphs[0]
break
return docstring, first_par, erow + 1 + start_row
def generate_example_rst(app):
""" Generate the list of examples, as well as the contents of
examples.
"""
root_dir = os.path.join(app.builder.srcdir, 'auto_examples')
example_dir = os.path.abspath(os.path.join(app.builder.srcdir, '..',
'examples'))
generated_dir = os.path.abspath(os.path.join(app.builder.srcdir,
'modules', 'generated'))
try:
plot_gallery = eval(app.builder.config.plot_gallery)
except TypeError:
plot_gallery = bool(app.builder.config.plot_gallery)
if not os.path.exists(example_dir):
os.makedirs(example_dir)
if not os.path.exists(root_dir):
os.makedirs(root_dir)
if not os.path.exists(generated_dir):
os.makedirs(generated_dir)
# we create an index.rst with all examples
fhindex = open(os.path.join(root_dir, 'index.rst'), 'w')
# Note: The sidebar button has been removed from the examples page for now
# due to how it messes up the layout. Will be fixed at a later point
fhindex.write("""\
.. raw:: html
<style type="text/css">
div#sidebarbutton {
/* hide the sidebar collapser, while ensuring vertical arrangement */
display: none;
}
</style>
.. _examples-index:
Examples
========
""")
# Here we don't use an os.walk, but we recurse only twice: flat is
# better than nested.
seen_backrefs = set()
generate_dir_rst('.', fhindex, example_dir, root_dir, plot_gallery, seen_backrefs)
for directory in sorted(os.listdir(example_dir)):
if os.path.isdir(os.path.join(example_dir, directory)):
generate_dir_rst(directory, fhindex, example_dir, root_dir, plot_gallery, seen_backrefs)
fhindex.flush()
def extract_line_count(filename, target_dir):
# Extract the line count of a file
example_file = os.path.join(target_dir, filename)
if six.PY2:
lines = open(example_file).readlines()
else:
lines = open(example_file, encoding='utf-8').readlines()
start_row = 0
if lines and lines[0].startswith('#!'):
lines.pop(0)
start_row = 1
line_iterator = iter(lines)
tokens = tokenize.generate_tokens(lambda: next(line_iterator))
check_docstring = True
erow_docstring = 0
for tok_type, _, _, (erow, _), _ in tokens:
tok_type = token.tok_name[tok_type]
if tok_type in ('NEWLINE', 'COMMENT', 'NL', 'INDENT', 'DEDENT'):
continue
elif (tok_type == 'STRING') and check_docstring:
erow_docstring = erow
check_docstring = False
return erow_docstring+1+start_row, erow+1+start_row
def line_count_sort(file_list, target_dir):
# Sort the list of examples by line-count
new_list = [x for x in file_list if x.endswith('.py')]
unsorted = np.zeros(shape=(len(new_list), 2))<|fim▁hole|> unsorted[count][0] = exmpl
index = np.lexsort((unsorted[:, 0].astype(np.str),
unsorted[:, 1].astype(np.float)))
if not len(unsorted):
return []
return np.array(unsorted[index][:, 0]).tolist()
def _thumbnail_div(subdir, full_dir, fname, snippet, is_backref=False):
"""Generates RST to place a thumbnail in a gallery"""
thumb = os.path.join(full_dir, 'images', 'thumb', fname[:-3] + '.png')
link_name = os.path.join(full_dir, fname).replace(os.path.sep, '_')
ref_name = os.path.join(subdir, fname).replace(os.path.sep, '_')
if ref_name.startswith('._'):
ref_name = ref_name[2:]
out = []
out.append("""
.. raw:: html
<div class="thumbnailContainer" tooltip="{}">
""".format(snippet))
out.append('.. only:: html\n\n')
out.append(' .. figure:: %s\n' % thumb)
if link_name.startswith('._'):
link_name = link_name[2:]
if full_dir != '.':
out.append(' :target: ./%s/%s.html\n\n' % (full_dir, fname[:-3]))
else:
out.append(' :target: ./%s.html\n\n' % link_name[:-3])
out.append(""" :ref:`example_%s`
.. raw:: html
</div>
""" % (ref_name))
if is_backref:
out.append('.. only:: not html\n\n * :ref:`example_%s`' % ref_name)
return ''.join(out)
def generate_dir_rst(directory, fhindex, example_dir, root_dir, plot_gallery, seen_backrefs):
""" Generate the rst file for an example directory.
"""
if not directory == '.':
target_dir = os.path.join(root_dir, directory)
src_dir = os.path.join(example_dir, directory)
else:
target_dir = root_dir
src_dir = example_dir
if not os.path.exists(os.path.join(src_dir, 'README.txt')):
raise ValueError('Example directory %s does not have a README.txt' %
src_dir)
fhindex.write("""
%s
""" % open(os.path.join(src_dir, 'README.txt')).read())
if not os.path.exists(target_dir):
os.makedirs(target_dir)
sorted_listdir = line_count_sort(os.listdir(src_dir),
src_dir)
if not os.path.exists(os.path.join(directory, 'images', 'thumb')):
os.makedirs(os.path.join(directory, 'images', 'thumb'))
for fname in sorted_listdir:
if fname.endswith('py'):
backrefs = generate_file_rst(fname, target_dir, src_dir, root_dir, plot_gallery)
new_fname = os.path.join(src_dir, fname)
_, snippet, _ = extract_docstring(new_fname, True)
fhindex.write(_thumbnail_div(directory, directory, fname, snippet))
fhindex.write("""
.. toctree::
:hidden:
%s/%s
""" % (directory, fname[:-3]))
for backref in backrefs:
include_path = os.path.join(root_dir, '../modules/generated/%s.examples' % backref)
seen = backref in seen_backrefs
with open(include_path, 'a' if seen else 'w') as ex_file:
if not seen:
# heading
print(file=ex_file)
print('Examples using ``%s``' % backref, file=ex_file)
print('-----------------%s--' % ('-' * len(backref)),
file=ex_file)
print(file=ex_file)
rel_dir = os.path.join('../../auto_examples', directory)
ex_file.write(_thumbnail_div(directory, rel_dir, fname, snippet, is_backref=True))
seen_backrefs.add(backref)
fhindex.write("""
.. raw:: html
<div class="clearer"></div>
""") # clear at the end of the section
# modules for which we embed links into example code
DOCMODULES = ['sklearn', 'matplotlib', 'numpy', 'scipy']
def make_thumbnail(in_fname, out_fname, width, height):
"""Make a thumbnail with the same aspect ratio centered in an
image with a given width and height
"""
# local import to avoid testing dependency on PIL:
try:
from PIL import Image
except ImportError:
import Image
img = Image.open(in_fname)
width_in, height_in = img.size
scale_w = width / float(width_in)
scale_h = height / float(height_in)
if height_in * scale_w <= height:
scale = scale_w
else:
scale = scale_h
width_sc = int(round(scale * width_in))
height_sc = int(round(scale * height_in))
# resize the image
img.thumbnail((width_sc, height_sc), Image.ANTIALIAS)
# insert centered
thumb = Image.new('RGB', (width, height), (255, 255, 255))
pos_insert = ((width - width_sc) // 2, (height - height_sc) // 2)
thumb.paste(img, pos_insert)
thumb.save(out_fname)
# Use optipng to perform lossless compression on the resized image if
# software is installed
if os.environ.get('SKLEARN_DOC_OPTIPNG', False):
try:
subprocess.call(["optipng", "-quiet", "-o", "9", out_fname])
except Exception:
warnings.warn('Install optipng to reduce the size of the generated images')
def get_short_module_name(module_name, obj_name):
""" Get the shortest possible module name """
parts = module_name.split('.')
short_name = module_name
for i in range(len(parts) - 1, 0, -1):
short_name = '.'.join(parts[:i])
try:
exec('from %s import %s' % (short_name, obj_name))
except ImportError:
# get the last working module name
short_name = '.'.join(parts[:(i + 1)])
break
return short_name
class NameFinder(ast.NodeVisitor):
"""Finds the longest form of variable names and their imports in code
Only retains names from imported modules.
"""
def __init__(self):
super(NameFinder, self).__init__()
self.imported_names = {}
self.accessed_names = set()
def visit_Import(self, node, prefix=''):
for alias in node.names:
local_name = alias.asname or alias.name
self.imported_names[local_name] = prefix + alias.name
def visit_ImportFrom(self, node):
self.visit_Import(node, node.module + '.')
def visit_Name(self, node):
self.accessed_names.add(node.id)
def visit_Attribute(self, node):
attrs = []
while isinstance(node, ast.Attribute):
attrs.append(node.attr)
node = node.value
if isinstance(node, ast.Name):
# This is a.b, not e.g. a().b
attrs.append(node.id)
self.accessed_names.add('.'.join(reversed(attrs)))
else:
# need to get a in a().b
self.visit(node)
def get_mapping(self):
for name in self.accessed_names:
local_name = name.split('.', 1)[0]
remainder = name[len(local_name):]
if local_name in self.imported_names:
# Join import path to relative path
full_name = self.imported_names[local_name] + remainder
yield name, full_name
def identify_names(code):
"""Builds a codeobj summary by identifying and resovles used names
>>> code = '''
... from a.b import c
... import d as e
... print(c)
... e.HelloWorld().f.g
... '''
>>> for name, o in sorted(identify_names(code).items()):
... print(name, o['name'], o['module'], o['module_short'])
c c a.b a.b
e.HelloWorld HelloWorld d d
"""
finder = NameFinder()
finder.visit(ast.parse(code))
example_code_obj = {}
for name, full_name in finder.get_mapping():
# name is as written in file (e.g. np.asarray)
# full_name includes resolved import path (e.g. numpy.asarray)
module, attribute = full_name.rsplit('.', 1)
# get shortened module name
module_short = get_short_module_name(module, attribute)
cobj = {'name': attribute, 'module': module,
'module_short': module_short}
example_code_obj[name] = cobj
return example_code_obj
def generate_file_rst(fname, target_dir, src_dir, root_dir, plot_gallery):
""" Generate the rst file for a given example.
Returns the set of sklearn functions/classes imported in the example.
"""
base_image_name = os.path.splitext(fname)[0]
image_fname = '%s_%%03d.png' % base_image_name
this_template = rst_template
last_dir = os.path.split(src_dir)[-1]
# to avoid leading . in file names, and wrong names in links
if last_dir == '.' or last_dir == 'examples':
last_dir = ''
else:
last_dir += '_'
short_fname = last_dir + fname
src_file = os.path.join(src_dir, fname)
example_file = os.path.join(target_dir, fname)
shutil.copyfile(src_file, example_file)
# The following is a list containing all the figure names
figure_list = []
image_dir = os.path.join(target_dir, 'images')
thumb_dir = os.path.join(image_dir, 'thumb')
if not os.path.exists(image_dir):
os.makedirs(image_dir)
if not os.path.exists(thumb_dir):
os.makedirs(thumb_dir)
image_path = os.path.join(image_dir, image_fname)
stdout_path = os.path.join(image_dir,
'stdout_%s.txt' % base_image_name)
time_path = os.path.join(image_dir,
'time_%s.txt' % base_image_name)
thumb_file = os.path.join(thumb_dir, base_image_name + '.png')
time_elapsed = 0
if plot_gallery and fname.startswith('plot'):
# generate the plot as a png image if the file name
# starts with plot and the source file is more recent
# than the existing image (if any).
first_image_file = image_path % 1
if os.path.exists(stdout_path):
stdout = open(stdout_path).read()
else:
stdout = ''
if os.path.exists(time_path):
time_elapsed = float(open(time_path).read())
if not os.path.exists(first_image_file) or \
os.stat(first_image_file).st_mtime <= os.stat(src_file).st_mtime:
# We need to execute the code
print('plotting %s' % fname)
t0 = time()
import matplotlib.pyplot as plt
plt.close('all')
cwd = os.getcwd()
try:
# First CD in the original example dir, so that any file
# created by the example get created in this directory
orig_stdout = sys.stdout
os.chdir(os.path.dirname(src_file))
my_buffer = StringIO()
my_stdout = Tee(sys.stdout, my_buffer)
sys.stdout = my_stdout
my_globals = {'pl': plt}
execfile(os.path.basename(src_file), my_globals)
time_elapsed = time() - t0
sys.stdout = orig_stdout
my_stdout = my_buffer.getvalue()
if '__doc__' in my_globals:
# The __doc__ is often printed in the example, we
# don't wish to echo it
my_stdout = my_stdout.replace(
my_globals['__doc__'],
'')
my_stdout = my_stdout.strip().expandtabs()
if my_stdout:
stdout = '**Script output**::\n\n %s\n\n' % (
'\n '.join(my_stdout.split('\n')))
open(stdout_path, 'w').write(stdout)
open(time_path, 'w').write('%f' % time_elapsed)
os.chdir(cwd)
# In order to save every figure we have two solutions :
# * iterate from 1 to infinity and call plt.fignum_exists(n)
# (this requires the figures to be numbered
# incrementally: 1, 2, 3 and not 1, 2, 5)
# * iterate over [fig_mngr.num for fig_mngr in
# matplotlib._pylab_helpers.Gcf.get_all_fig_managers()]
fig_managers = matplotlib._pylab_helpers.Gcf.get_all_fig_managers()
for fig_mngr in fig_managers:
# Set the fig_num figure as the current figure as we can't
# save a figure that's not the current figure.
fig = plt.figure(fig_mngr.num)
kwargs = {}
to_rgba = matplotlib.colors.colorConverter.to_rgba
for attr in ['facecolor', 'edgecolor']:
fig_attr = getattr(fig, 'get_' + attr)()
default_attr = matplotlib.rcParams['figure.' + attr]
if to_rgba(fig_attr) != to_rgba(default_attr):
kwargs[attr] = fig_attr
fig.savefig(image_path % fig_mngr.num, **kwargs)
figure_list.append(image_fname % fig_mngr.num)
except:
print(80 * '_')
print('%s is not compiling:' % fname)
traceback.print_exc()
print(80 * '_')
finally:
os.chdir(cwd)
sys.stdout = orig_stdout
print(" - time elapsed : %.2g sec" % time_elapsed)
else:
figure_list = [f[len(image_dir):]
for f in glob.glob(image_path.replace("%03d",
'[0-9][0-9][0-9]'))]
figure_list.sort()
# generate thumb file
this_template = plot_rst_template
car_thumb_path = os.path.join(os.path.split(root_dir)[0], '_build/html/stable/_images/')
# Note: normally, make_thumbnail is used to write to the path contained in `thumb_file`,
# which is within `auto_examples/../images/thumbs` depending on the example.
# Because the carousel has different dimensions than those of the examples gallery,
# I did not simply reuse them all, as some contained whitespace due to their default gallery
# thumbnail size. Below, for a few cases, separate thumbnails are created (the originals can't
# just be overwritten with the carousel dimensions as it messes up the examples gallery layout).
# The special carousel thumbnails are written directly to _build/html/stable/_images/,
# as for some reason unknown to me, Sphinx refuses to copy my 'extra' thumbnails from the
# auto examples gallery to the _build folder. This works fine as is, but it would be cleaner to
# have it happen with the rest. Ideally they should be written to 'thumb_file' as well, and then
# copied to the _images folder during the `Copying Downloadable Files` step like the rest.
if not os.path.exists(car_thumb_path):
os.makedirs(car_thumb_path)
if os.path.exists(first_image_file):
# We generate extra special thumbnails for the carousel
carousel_tfile = os.path.join(car_thumb_path, base_image_name + '_carousel.png')
first_img = image_fname % 1
if first_img in carousel_thumbs:
make_thumbnail((image_path % carousel_thumbs[first_img][0]),
carousel_tfile, carousel_thumbs[first_img][1], 190)
make_thumbnail(first_image_file, thumb_file, 400, 280)
if not os.path.exists(thumb_file):
# create something to replace the thumbnail
make_thumbnail('images/no_image.png', thumb_file, 200, 140)
docstring, short_desc, end_row = extract_docstring(example_file)
# Depending on whether we have one or more figures, we're using a
# horizontal list or a single rst call to 'image'.
if len(figure_list) == 1:
figure_name = figure_list[0]
image_list = SINGLE_IMAGE % figure_name.lstrip('/')
else:
image_list = HLIST_HEADER
for figure_name in figure_list:
image_list += HLIST_IMAGE_TEMPLATE % figure_name.lstrip('/')
time_m, time_s = divmod(time_elapsed, 60)
f = open(os.path.join(target_dir, base_image_name + '.rst'), 'w')
f.write(this_template % locals())
f.flush()
# save variables so we can later add links to the documentation
if six.PY2:
example_code_obj = identify_names(open(example_file).read())
else:
example_code_obj = \
identify_names(open(example_file, encoding='utf-8').read())
if example_code_obj:
codeobj_fname = example_file[:-3] + '_codeobj.pickle'
with open(codeobj_fname, 'wb') as fid:
pickle.dump(example_code_obj, fid, pickle.HIGHEST_PROTOCOL)
backrefs = set('{module_short}.{name}'.format(**entry)
for entry in example_code_obj.values()
if entry['module'].startswith('sklearn'))
return backrefs
def embed_code_links(app, exception):
"""Embed hyperlinks to documentation into example code"""
if exception is not None:
return
print('Embedding documentation hyperlinks in examples...')
if app.builder.name == 'latex':
# Don't embed hyperlinks when a latex builder is used.
return
# Add resolvers for the packages for which we want to show links
doc_resolvers = {}
doc_resolvers['sklearn'] = SphinxDocLinkResolver(app.builder.outdir,
relative=True)
resolver_urls = {
'matplotlib': 'http://matplotlib.org',
'numpy': 'http://docs.scipy.org/doc/numpy-1.6.0',
'scipy': 'http://docs.scipy.org/doc/scipy-0.11.0/reference',
}
for this_module, url in resolver_urls.items():
try:
doc_resolvers[this_module] = SphinxDocLinkResolver(url)
except HTTPError as e:
print("The following HTTP Error has occurred:\n")
print(e.code)
except URLError as e:
print("\n...\n"
"Warning: Embedding the documentation hyperlinks requires "
"internet access.\nPlease check your network connection.\n"
"Unable to continue embedding `{0}` links due to a URL "
"Error:\n".format(this_module))
print(e.args)
example_dir = os.path.join(app.builder.srcdir, 'auto_examples')
html_example_dir = os.path.abspath(os.path.join(app.builder.outdir,
'auto_examples'))
# patterns for replacement
link_pattern = '<a href="%s">%s</a>'
orig_pattern = '<span class="n">%s</span>'
period = '<span class="o">.</span>'
for dirpath, _, filenames in os.walk(html_example_dir):
for fname in filenames:
print('\tprocessing: %s' % fname)
full_fname = os.path.join(html_example_dir, dirpath, fname)
subpath = dirpath[len(html_example_dir) + 1:]
pickle_fname = os.path.join(example_dir, subpath,
fname[:-5] + '_codeobj.pickle')
if os.path.exists(pickle_fname):
# we have a pickle file with the objects to embed links for
with open(pickle_fname, 'rb') as fid:
example_code_obj = pickle.load(fid)
str_repl = {}
# generate replacement strings with the links
for name, cobj in example_code_obj.items():
this_module = cobj['module'].split('.')[0]
if this_module not in doc_resolvers:
continue
try:
link = doc_resolvers[this_module].resolve(cobj,
full_fname)
except (HTTPError, URLError) as e:
print("The following error has occurred:\n")
print(repr(e))
continue
if link is not None:
parts = name.split('.')
name_html = period.join(orig_pattern % part
for part in parts)
str_repl[name_html] = link_pattern % (link, name_html)
# do the replacement in the html file
# sort longest-first so fully-qualified names are replaced before any of their prefixes
names = sorted(str_repl, key=len, reverse=True)
expr = re.compile(r'(?<!\.)\b' + # don't follow . or word
'|'.join(re.escape(name)
for name in names))
def substitute_link(match):
return str_repl[match.group()]
if len(str_repl) > 0:
with open(full_fname, 'rb') as fid:
lines_in = fid.readlines()
with open(full_fname, 'wb') as fid:
for line in lines_in:
line = line.decode('utf-8')
line = expr.sub(substitute_link, line)
fid.write(line.encode('utf-8'))
print('[done]')
def setup(app):
app.connect('builder-inited', generate_example_rst)
app.add_config_value('plot_gallery', True, 'html')
# embed links after build is finished
app.connect('build-finished', embed_code_links)
# Sphinx hack: sphinx copies generated images to the build directory
# each time the docs are made. If the desired image name already
# exists, it appends a digit to prevent overwrites. The problem is,
# the directory is never cleared. This means that each time you build
# the docs, the number of images in the directory grows.
#
# This question has been asked on the sphinx development list, but there
# was no response: http://osdir.com/ml/sphinx-dev/2011-02/msg00123.html
#
# The following is a hack that prevents this behavior by clearing the
# image build directory each time the docs are built. If sphinx
# changes their layout between versions, this will not work (though
# it should probably not cause a crash). Tested successfully
# on Sphinx 1.0.7
build_image_dir = '_build/html/_images'
if os.path.exists(build_image_dir):
filelist = os.listdir(build_image_dir)
for filename in filelist:
if filename.endswith('png'):
os.remove(os.path.join(build_image_dir, filename))
def setup_module():
# HACK: Stop nosetests running setup() above
pass<|fim▁end|> | unsorted = unsorted.astype(np.object)
for count, exmpl in enumerate(new_list):
docstr_lines, total_lines = extract_line_count(exmpl, target_dir)
unsorted[count][1] = total_lines - docstr_lines |
<|file_name|>TAnimateColor.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import string
import logging<|fim▁hole|>from .HTMLElement import HTMLElement
log = logging.getLogger("Thug")
class TAnimateColor(HTMLElement):
def __init__(self, doc, tag):
self.doc = doc
self.tag = tag
self._values = ""
def get_values(self):
return self._values
def set_values(self, values):
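# Heuristic: non-printable bytes in the SMIL `values` attribute are a strong
# indicator of an embedded shellcode payload, as used against the CButton
# use-after-free (CVE-2012-4792) logged below.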
if not all(c in string.printable for c in values):
log.ThugLogging.log_exploit_event(self.doc.window.url,
"Microsoft Internet Explorer",
"Microsoft Internet Explorer CButton Object Use-After-Free Vulnerability (CVE-2012-4792)",
cve = 'CVE-2012-4792',
forward = True)
log.DFT.check_shellcode(values)
self._values = values
values = property(get_values, set_values)<|fim▁end|> | |
<|file_name|>main.js<|end_file_name|><|fim▁begin|>//{block name="backend/create_backend_order/controller/main"}
//
Ext.define('Shopware.apps.SwagBackendOrder.controller.Main', {
/**
* extends from the standard ExtJs Controller
*/
extend: 'Ext.app.Controller',
snippets: {
error: {
customer: '{s namespace="backend/swag_backend_order/view/main" name="swagbackendorder/error/customer"}Please select a customer.{/s}',
billing: '{s namespace="backend/swag_backend_order/view/main" name="swagbackendorder/error/billing"}Please select a billing address.{/s}',
payment: '{s namespace="backend/swag_backend_order/view/main" name="swagbackendorder/error/payment"}Please select a payment method.{/s}',
shippingArt: '{s namespace="backend/swag_backend_order/view/main" name="swagbackendorder/error/shipping_art"}Please select a shipping art.{/s}',
positions: '{s namespace="backend/swag_backend_order/view/main" name="swagbackendorder/error/positions"}Please add positions.{/s}',
textInvalidArticle: '{s namespace="backend/swag_backend_order/view/main" name="swagbackendorder/error/invalid_article"}Invalid article: {/s}',
invalidArticleTitle: '{s namespace="backend/swag_backend_order/view/main" name="swagbackendorder/error/invalid_article_title"}Error!{/s}',
instanceText: '{s namespace="backend/swag_backend_order/view/main" name="swagbackendorder/error/instanceText"}You can not create more than one order at the same time.{/s}',
instanceTitle: '{s namespace="backend/swag_backend_order/view/main" name="swagbackendorder/error/instanceTitle"}Error!{/s}',
title: '{s namespace="backend/swag_backend_order/view/main" name="swagbackendorder/error/title"}Error! Couldn\'t create order{/s}',
mailTitle: '{s namespace="backend/swag_backend_order/view/main" name="swagbackendorder/error/mail_title"}Error! Couldn\'t send mail{/s}'
},
success: {
text: '{s namespace="backend/swag_backend_order/view/main" name="swagbackendorder/success/text"}Order was created successfully. Ordernumber: {/s}',
title: '{s namespace="backend/swag_backend_order/view/main" name="swagbackendorder/success/title"}Success!{/s}'
},
title: '{s namespace="backend/swag_backend_order/view/main" name="swagbackendorder/title/selected_user"}Create order for{/s}'
},
/**
* A template method that is called when your application boots.
* It is called before the Application's launch function is executed
* so gives a hook point to run any code before your Viewport is created.
*
* @return void
*/
init: function () {
var me = this;
//checks if a window is already open
var createOrderWindow = Ext.getCmp('swagBackendOrderWindow');
if (createOrderWindow instanceof Ext.window.Window) {
Ext.Msg.alert(me.snippets.error.instanceTitle, me.snippets.error.instanceText);
return;
}
me.control({
'createbackendorder-customer-billing': {
selectBillingAddress: me.onSelectBillingAddress
},
'createbackendorder-customer-shipping': {
selectShippingAddress: me.onSelectShippingAddress,
selectBillingAsShippingAddress: me.onSelectBillingAsShippingAddress,
calculateTax: me.onCalculateTax
},
'createbackendorder-customer-payment': {
selectPayment: me.onSelectPayment
},
'createbackendorder-additional': {
changeAttrField: me.onChangeAttrField,
changeDesktopType: me.onChangeDesktopType
},
'createbackendorder-customersearch': {
selectCustomer: me.onSelectCustomer
},
'createbackendorder-toolbar': {
openCustomer: me.onOpenCustomer,
createCustomer: me.onCreateCustomer,
changeCurrency: me.onChangeCurrency,
changeCustomer: me.onChangeCustomer,
changeLanguage: me.onChangeLanguage
},
'createbackendorder-mainwindow': {
createOrder: me.onCreateOrder
},
'createbackendorder-shippingcosts': {
addShippingCosts: me.onAddShippingCosts
},
'createbackendorder-position-grid': {
openArticle: me.onOpenArticle,
articleNameSelect: me.onArticleSelect,
articleNumberSelect: me.onArticleSelect,
cancelEdit: me.onCancelEdit
},
'createbackendorder-totalcostsoverview': {
calculateTax: me.onCalculateTax,
changeNetCheckbox: me.onChangeNetCheckbox
}
});
me.getPluginConfig();
/**
* holds the actual orderData
*/
me.orderModel = Ext.create('Shopware.apps.SwagBackendOrder.model.CreateBackendOrder', {});
me.orderAttributeModel = Ext.create('Shopware.apps.SwagBackendOrder.model.OrderAttribute', {});
me.createBackendOrderStore = me.subApplication.getStore('CreateBackendOrder');
me.orderModel.set('currencyFactor', 1);
me.currencyStore = me.subApplication.getStore('Currency').load();
//passed a user id
if (typeof me.subApplication.params !== 'undefined') {
if (me.subApplication.params.userId) {
me.onSelectCustomer(null, me.subApplication.params.userId);
}
}
/**
* initializes the window
*/
me.window = me.getView('main.Window').create({
id: 'swagBackendOrderWindow',
subApplication: me.subApplication,
orderModel: me.orderModel,
createBackendOrderStore: me.createBackendOrderStore
}).show();
me.callParent(arguments);
},
/**
* creates the order
*/
onCreateOrder: function (positionsGridContainer, modus) {
var me = this,
errmsg = "";
me.modus = modus;
me.window.disable(true);
/**
* get the grid component for the position listing
*/
me.positionsGrid = positionsGridContainer.getComponent('positionsGrid');
/**
* checks if all required fields was setted
*/
var customerStore = me.subApplication.getStore('Customer');
if (customerStore.count() > 0) {
if (me.orderModel.get('billingAddressId') == 0) {
errmsg += me.snippets.error.billing + '<br>';
}
if (me.orderModel.get('paymentId') == 0) {
errmsg += me.snippets.error.payment + '<br>';
}
if (me.orderModel.get('dispatchId') == 0) {
errmsg += me.snippets.error.shippingArt + '<br>';
}
} else {
errmsg += me.snippets.error.customer + '<br>';
}
var positionsStore = me.positionsGrid.getStore();
if (positionsStore.count() == 0) {
errmsg += me.snippets.error.positions + '<br>';
}
if (errmsg.length > 0) {
me.window.enable(true);
Shopware.Notification.createGrowlMessage(me.snippets.error.title, errmsg);
return;
}
//gets the positionModel which belongs to the actual orderModel
var positionOrderStore = me.orderModel.position();
if (positionOrderStore.count() > 0) {
positionOrderStore.removeAll();
}
//iterates the created positions and adds every record to the positionModel
positionsStore.each(
function (record) {
positionOrderStore.add(record);
}
);
var orderAttributeStore = me.orderModel.orderAttribute();
orderAttributeStore.add(me.orderAttributeModel);
/**
* sends the request to the php controller
*/
me.orderModel.set('total', me.totalCostsModel.get('total'));
me.orderModel.set('totalWithoutTax', me.totalCostsModel.get('totalWithoutTax'));
me.createBackendOrderStore.sync({
success: function (response) {
me.orderId = response.proxy.reader.rawData.orderId;
me.ordernumber = response.proxy.reader.rawData.ordernumber;
me.mailErrorMessage = response.proxy.reader.rawData.mail;
switch (me.modus) {
case 'new':
me.window.close();
if (response.proxy.reader.rawData.mail) {
Shopware.Notification.createGrowlMessage(me.snippets.error.mailTitle, me.mailErrorMessage);
}
Shopware.Notification.createGrowlMessage(me.snippets.success.title, me.snippets.success.text + me.ordernumber);
Shopware.app.Application.addSubApplication({
name: 'Shopware.apps.SwagBackendOrder',
action: 'detail'
});
break;
case 'close':
me.window.close();
break;
case 'detail':
if (me.orderId > 0) {
Shopware.app.Application.addSubApplication({
name: 'Shopware.apps.Order',
action: 'detail',
params: {
orderId: me.orderId
}
});
}
if (response.proxy.reader.rawData.mail) {
Shopware.Notification.createGrowlMessage(me.snippets.error.mailTitle, me.mailErrorMessage);
}
Shopware.Notification.createGrowlMessage(me.snippets.success.title, me.snippets.success.text + ' - ' + me.ordernumber);
me.window.close();
break;
default:
break;
}
},
failure: function (response) {
var article = response.proxy.reader.rawData.article;
if (article || article == "") {
Shopware.Notification.createGrowlMessage(me.snippets.error.title, me.snippets.error.textInvalidArticle + ' ' + article);
} else {
Shopware.Notification.createGrowlMessage(me.snippets.error.title, response.proxy.reader.rawData.message);
}
me.window.enable(true);
}
});
},
/**
* event which is fired by the shipping combobox and the number fields
*
* @param shippingCosts
* @param shippingCostsNet
* @param dispatchId
* @param shippingCostsFields
*/
onAddShippingCosts: function (shippingCosts, shippingCostsNet, dispatchId, shippingCostsFields) {
var me = this;
if (shippingCostsFields !== undefined) {
me.shippingCostsFields = shippingCostsFields;
shippingCosts = me.calculateCurrency(shippingCosts);
shippingCostsNet = me.calculateCurrency(shippingCostsNet);
shippingCostsFields[0].setValue(shippingCosts);
shippingCostsFields[1].setValue(shippingCostsNet);
}
me.orderModel.set('shippingCosts', shippingCosts);
me.orderModel.set('shippingCostsNet', shippingCostsNet);
if (me.orderModel.get('dispatchId') != undefined && me.orderModel.get('dispatchId') != dispatchId) {
me.orderModel.set('dispatchId', dispatchId);
}
me.createBackendOrderStore.add(me.orderModel);
me.totalCostsModel = me.subApplication.getStore('TotalCosts').getAt(0);
me.totalCostsModel.set('shippingCosts', shippingCosts);
},
/**
* selects the correct billing address and updates it to the default address
*
* @param record
* @param billing
*/
onSelectBillingAddress: function (record, billing) {
var me = this;
record = record[0].data;
Ext.Ajax.request({
url: '{url action="setBillingAddress"}',
params: {
salutation: record.salutation,
company: record.company,
firstName: record.firstName,
lastName: record.lastName,
city: record.city,
zipCode: record.zipCode,
countyId: record.countryId,
phone: record.phone,
street: record.street,
vatId: record.vatId,
additionalAddressLine1: record.additionalAddressLine1,
additionalAddressLine2: record.additionalAddressLine2,
department: record.department,
userId: me.customerStore.getAt(0).get('id')
},
success: function (response) {
var responseObj = Ext.JSON.decode(response.responseText);
me.orderModel.set('billingAddressId', responseObj.billingAddressId);
}
});
},
/**
* selects the correct shipping address and updates it to the default address
*
* @param record false for no selected record, otherwise a single data model
*/
onSelectShippingAddress: function (record) {
var me = this;
if (record === false) { // No shipping address selected.
var EMPTY_SHIPPING_ADDRESS_ID = 0; // Magic constant
me.orderModel.set('shippingAddressId', EMPTY_SHIPPING_ADDRESS_ID);
return;
}
record = record.data;
Ext.Ajax.request({
url: '{url action="setShippingAddress"}',
params: {
salutation: record.salutation,
company: record.company,
firstName: record.firstName,
lastName: record.lastName,
city: record.city,
zipCode: record.zipCode,
countyId: record.countryId,
street: record.street,
additionalAddressLine1: record.additionalAddressLine1,
additionalAddressLine2: record.additionalAddressLine2,
department: record.department,
userId: me.customerStore.getAt(0).get('id')
},
success: function (response) {
var responseObj = Ext.JSON.decode(response.responseText);
me.orderModel.set('shippingAddressId', responseObj.shippingAddressId);
}
});
},
/**
* fired when the user selects a payment
* sets the payment in the orderModel
*
* @param record
*/
onSelectPayment: function (record) {
var me = this;
me.orderModel.set('paymentId', record[0].data.id);
},
/**
* Event will be fired when the user search for an article number in the row editor
* and selects an article in the drop down menu.
*
* @param [object] editor - Ext.grid.plugin.RowEditing
* @param [string] value - Value of the Ext.form.field.Trigger
* @param [object] record - Selected record
*/
onArticleSelect: function (editor, value, record) {
var me = this;
var columns = editor.editor.items.items,
updateButton = editor.editor.floatingButtons.items.items[0];
updateButton.setDisabled(true);
//sends a request to get the price for the customer group
Ext.Ajax.request({
url: '{url action="getCustomerGroupPriceByOrdernumber"}',
params: {
ordernumber: record.get('number'),
customerId: me.orderModel.get('customerId')
},
success: function (response) {
var responseObj = Ext.JSON.decode(response.responseText);
var result = responseObj.data;
var price = 0;
if (responseObj.success == true) {
price = me.calculateCurrency(result.price);
} else {
price = me.calculateCurrency(record.get('price'));
}
if (me.orderModel.get('net')) {
price = me.calculateNetPrice(price, record.get('tax'));
}
/**
* columns[0] -> selected
* columns[1] -> articlenumber
* columns[2] -> articlename
* columns[3] -> quantity
* columns[4] -> price
* columns[5] -> total
* columns[6] -> tax
* columns[7] -> instock
*/
columns[1].setValue(record.get('number'));
columns[2].setValue(record.get('name'));
columns[3].setValue(1);
columns[4].setValue(price);
columns[7].setValue(record.get('inStock'));
var taxComboStore = columns[6].store;<|fim▁hole|>
var recordNumber = taxComboStore.findExact(valueField, record.get('taxId'), 0);
var displayValue = taxComboStore.getAt(recordNumber).data[displayField];
columns[6].setValue(record.get('taxRate'));
columns[6].setRawValue(displayValue);
columns[6].selectedIndex = recordNumber;
updateButton.setDisabled(false);
}
});
},
/**
* Event listener method which is fired when the user cancel the row editing in the position grid
* on the detail page. If the edited record is a new position, the position will be removed.
*
* @param grid
* @param eOpts
*/
onCancelEdit: function (grid, eOpts) {
var record = eOpts.record,
store = eOpts.store;
if (!(record instanceof Ext.data.Model) || !(store instanceof Ext.data.Store)) {
return;
}
if (record.get('articleId') === 0 && record.get('articleNumber') === '') {
store.remove(record);
}
},
/**
* fires only when a new value was selected from the drop down menu to select the correct customer by his id
*
* @param newValue
* @param customerId
*/
onSelectCustomer: function (newValue, customerId) {
var me = this;
me.customerStore = me.subApplication.getStore('Customer').load({
params: { searchParam: customerId }
});
me.customerStore.on('load', function () {
if (Ext.isObject(me.customerStore)) {
me.orderModel.set('customerId', customerId);
me.customerSelected = true;
if (typeof me.customerStore.getAt(0) === 'undefined') {
return;
}
var billingModel = me.customerStore.getAt(0).billing().getAt(0);
var title = me.snippets.title + ' ' +
billingModel.get('firstName') +
' ' +
billingModel.get('lastName');
if (billingModel.get('number')) {
title += ' - ' + billingModel.get('number');
}
if (billingModel.get('company')) {
title += ' - ' + billingModel.get('company');
}
me.window.setTitle(title);
}
});
},
/**
* opens an article from the positions grid
*
* @param record
*/
onOpenArticle: function (record) {
Shopware.app.Application.addSubApplication({
name: 'Shopware.apps.Article',
action: 'detail',
params: {
articleId: record.get('articleId')
}
});
},
/**
* opens the selected customer
*/
onOpenCustomer: function () {
var me = this,
customerId = me.subApplication.getStore('Customer').getAt(0).get('id');
Shopware.app.Application.addSubApplication({
name: 'Shopware.apps.Customer',
action: 'detail',
params: {
customerId: customerId
}
});
},
/**
* @param createGuest
*/
onCreateCustomer: function (createGuest) {
var me = this,
email = '',
guest = false;
if (createGuest) {
email = me.validationMail;
guest = true;
}
Shopware.app.Application.addSubApplication({
name: 'Shopware.apps.Customer',
action: 'detail',
params: {
guest: guest,
email: email
}
});
},
/**
* calculates the new price for another currency
*
* @param comboBox
* @param newValue
* @param oldValue
* @param eOpts
*/
onChangeCurrency: function (comboBox, newValue, oldValue, eOpts) {
var me = this,
shippingCosts = 0,
shippingCostsNet = 0;
me.orderModel.set('currencyId', newValue);
var currencyIndex = me.currencyStore.findExact('id', newValue);
var newCurrency = me.currencyStore.getAt(currencyIndex);
newCurrency.set('selected', 1);
if (oldValue !== undefined) {
currencyIndex = me.currencyStore.findExact('id', oldValue);
var oldCurrency = me.currencyStore.getAt(currencyIndex);
oldCurrency.set('selected', 0);
}
var positionJsonString = '';
if (me.positionStore instanceof Ext.data.Store) {
var positionArray = [];
me.positionStore.each(function (record) {
positionArray.push(record.data);
});
positionJsonString = Ext.JSON.encode(positionArray);
}
Ext.Ajax.request({
url: '{url action="calculateCurrency"}',
params: {
positions: positionJsonString,
oldCurrencyId: oldValue,
newCurrencyId: newValue,
shippingCosts: me.orderModel.get('shippingCosts'),
shippingCostsNet: me.orderModel.get('shippingCostsNet')
},
success: function (response) {
var responseObj = Ext.JSON.decode(response.responseText).data;
if (typeof responseObj === 'undefined') {
return;
}
for (var i = 0; i < responseObj.positions.length; i++) {
var position = me.positionStore.getAt(i);
position.set('price', responseObj.positions[i].price);
position.set('total', responseObj.positions[i].total);
}
me.orderModel.set('shippingCosts', responseObj.shippingCosts);
me.orderModel.set('shippingCostsNet', responseObj.shippingCostsNet);
if (me.shippingCostsFields !== undefined) {
me.shippingCostsFields[0].setValue(me.orderModel.get('shippingCosts'));
me.shippingCostsFields[1].setValue(me.orderModel.get('shippingCostsNet'));
}
}
});
},
/**
* @param price
* @returns
*/
calculateCurrency: function (price) {
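// Example: with a selected currency factor of 1.1, calculateCurrency(100) returns 110.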
var me = this;
var index = me.currencyStore.findExact('selected', 1);
price = price * me.currencyStore.getAt(index).get('factor');
return price;
},
/**
* saves the attribute fields in the correct store field
*
* @param field
* @param newValue
* @param oldValue
*/
onChangeAttrField: function (field, newValue, oldValue) {
var me = this;
switch (field.name) {
case 'attr1TxtBox':
me.orderAttributeModel.set('attribute1', field.getValue());
break;
case 'attr2TxtBox':
me.orderAttributeModel.set('attribute2', field.getValue());
break;
case 'attr3TxtBox':
me.orderAttributeModel.set('attribute3', field.getValue());
break;
case 'attr4TxtBox':
me.orderAttributeModel.set('attribute4', field.getValue());
break;
case 'attr5TxtBox':
me.orderAttributeModel.set('attribute5', field.getValue());
break;
case 'attr6TxtBox':
me.orderAttributeModel.set('attribute6', field.getValue());
break;
default:
break;
}
me.subApplication.getStore("OrderAttribute").add(me.orderAttributeModel);
},
/**
* event fires when the desktop type combox changes the data index
*
* @param comboBox
* @param newValue
*/
onChangeDesktopType: function (comboBox, newValue) {
var me = this;
var desktopType = comboBox.findRecordByValue(newValue);
me.orderModel.set('desktopType', desktopType.data.name);
},
/**
* reads the plugin configuration
*/
getPluginConfig: function () {
var me = this;
Ext.Ajax.request({
url: '{url action=getPluginConfig}',
success: function (response, opts) {
var pluginConfigObj = Ext.decode(response.responseText);
me.validationMail = pluginConfigObj.data.validationMail;
me.desktopTypes = pluginConfigObj.data.desktopTypes;
me.subApplication.getStore('DesktopTypes').loadData(me.desktopTypes, false);
}
});
},
/**
* deselects the shipping address
*/
onSelectBillingAsShippingAddress: function () {
var me = this;
me.orderModel.set('shippingAddressId', null);
},
/**
* calculates the tax costs for every tax rate and the shipping tax
*/
onCalculateTax: function () {
var me = this;
me.positionStore = me.subApplication.getStore('Position');
me.totalCostsStore = me.subApplication.getStore('TotalCosts');
me.totalCostsModel = me.totalCostsStore.getAt(0);
var positionArray = [];
me.positionStore.each(function (record) {
positionArray.push(record.data);
});
var positionJsonString = Ext.JSON.encode(positionArray);
Ext.Ajax.request({
url: '{url action="calculateTax"}',
params: {
positions: positionJsonString,
shippingCosts: me.orderModel.get('shippingCosts'),
net: me.orderModel.get('net')
},
success: function (response) {
var totalCostsJson = Ext.JSON.decode(response.responseText);
var record = totalCostsJson.data;
me.totalCostsModel.beginEdit();
try {
me.totalCostsModel.set('totalWithoutTax', record.totalWithoutTax);
me.totalCostsModel.set('sum', record.sum);
me.totalCostsModel.set('total', record.total);
me.totalCostsModel.set('shippingCosts', record.shippingCosts);
me.totalCostsModel.set('shippingCostsNet', record.shippingCostsNet);
me.totalCostsModel.set('taxSum', record.taxSum);
} finally {
me.totalCostsModel.endEdit();
}
me.orderModel.set('shippingCostsNet', record.shippingCostsNet);
}
});
},
/**
*
* @param value
* @param taxRate
* @returns number
*/
calculateNetPrice: function (value, taxRate) {
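// Example: a gross value of 119 at a 19% tax rate yields 119 / 1.19 = 100 net.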
taxRate = parseInt(taxRate, 10);
return value / ((100 + taxRate) / 100);
},
/**
* resets all setted data which belongs to the customer which was selected by the user
*/
onChangeCustomer: function () {
var me = this;
me.orderModel.set('billingAddressId', null);
me.orderModel.set('shippingAddressId', null);
me.orderModel.set('shippingCosts', null);
me.orderModel.set('shippingCostsNet', null);
me.orderModel.set('paymentId', null);
},
/**
* calculates the new prices and sets the net flag to true
* @param net
*/
onChangeNetCheckbox: function (net) {
var me = this;
me.orderModel.set('net', net);
if (me.totalCostsModel) {
me.totalCostsModel.set('net', net);
}
var positionStore = me.subApplication.getStore('Position');
if (!(positionStore instanceof Ext.data.Store)) return;
var positionArray = [];
positionStore.each(function (record) {
positionArray.push(record.data);
});
var positionJsonString = Ext.JSON.encode(positionArray);
/**
* ajax request to calculate the new prices
*/
Ext.Ajax.request({
url: '{url action="changedNetBox"}',
params: {
positions: positionJsonString,
net: net
},
success: function (response) {
var responseObj = Ext.JSON.decode(response.responseText);
var positions = responseObj.data;
for (var index = 0; index < positionStore.count(); index++) {
var actualPosition = positionStore.getAt(index);
actualPosition.beginEdit();
try {
actualPosition.set('price', positions[index].price);
actualPosition.set('total', positions[index].total);
} finally {
actualPosition.endEdit();
}
}
}
});
},
/**
* changes the actual language for the confirmation mail
*
* @param languageShopId
*/
onChangeLanguage: function (languageShopId) {
var me = this;
me.orderModel.set('languageShopId', languageShopId);
}
});
//
//{/block}<|fim▁end|> | var valueField = columns[6].valueField;
var displayField = columns[6].displayField; |
<|file_name|>executor_test.go<|end_file_name|><|fim▁begin|>package gb
import (
"errors"
"fmt"
"io"
"path/filepath"
"reflect"
"testing"
)
func TestExecuteBuildAction(t *testing.T) {
tests := []struct {
pkg string
err error
}{{
pkg: "a",
err: nil,
}, {
pkg: "b", // actually command
err: nil,
}, {
pkg: "c",
err: nil,
}, {
pkg: "d.v1",
err: nil,
}, {
pkg: "x",
err: errors.New("import cycle detected: x -> y -> x"),
}, {
pkg: "h", // imports "blank", which is blank, see issue #131
err: fmt.Errorf("no buildable Go source files in %s", filepath.Join(getwd(t), "testdata", "src", "blank")),
}}
for _, tt := range tests {
ctx := testContext(t)
pkg, err := ctx.ResolvePackage(tt.pkg)
if !sameErr(err, tt.err) {
t.Errorf("ctx.ResolvePackage(%v): want %v, got %v", tt.pkg, tt.err, err)
continue
}
if err != nil {
continue
}
action, err := BuildPackages(pkg)
if err != nil {
t.Errorf("BuildAction(%v): ", tt.pkg, err)
continue
}
if err := Execute(action); !sameErr(err, tt.err) {
t.Errorf("Execute(%v): want: %v, got %v", action.Name, tt.err, err)
}
ctx.Destroy()
}
}
var niltask = TaskFn(func() error { return nil })
var executorTests = []struct {
action *Action // root action
err error // expected error
}{{
action: &Action{
Name: "no error",
Task: niltask,
},
}, {
action: &Action{
Name: "root error",
Task: TaskFn(func() error {
return io.EOF
}),
},
err: io.EOF,
}, {
action: &Action{
Name: "child, child, error",
Task: TaskFn(func() error {
return fmt.Errorf("I should not have been called")
}),
Deps: []*Action{&Action{
Name: "child, error",
Task: niltask,
Deps: []*Action{&Action{
Name: "error",
Task: TaskFn(func() error {
return io.EOF
}),
}},
}},
},
err: io.EOF,
}, {
action: &Action{
Name: "once only",
Task: TaskFn(func() error {
if c1 != 1 || c2 != 1 || c3 != 1 {
return fmt.Errorf("unexpected count, c1: %v, c2: %v, c3: %v", c1, c2, c3)
}
return nil
}),
Deps: []*Action{createDag()},
},
}, {
action: &Action{
Name: "failure count",
Task: TaskFn(func() error {
return fmt.Errorf("I should not have been called")
}),
Deps: []*Action{createFailDag()},
},
err: fmt.Errorf("task3 called 1 time"),
}}
func createDag() *Action {
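// The graph is a diamond: c1 depends on c2 and c3, and c2 depends on c3 again.
// A correct executor must still run each task exactly once.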
task1 := TaskFn(func() error { c1++; return nil })
task2 := TaskFn(func() error { c2++; return nil })
task3 := TaskFn(func() error { c3++; return nil })
action1 := Action{Name: "c1", Task: task1}
action2 := Action{Name: "c2", Task: task2}
action3 := Action{Name: "c3", Task: task3}
action1.Deps = append(action1.Deps, &action2, &action3)
action2.Deps = append(action2.Deps, &action3)<|fim▁hole|> return &action1
}
func createFailDag() *Action {
task1 := TaskFn(func() error { c1++; return nil })
task2 := TaskFn(func() error { c2++; return fmt.Errorf("task2 called %v time", c2) })
task3 := TaskFn(func() error { c3++; return fmt.Errorf("task3 called %v time", c3) })
action1 := Action{Name: "c1", Task: task1}
action2 := Action{Name: "c2", Task: task2}
action3 := Action{Name: "c3", Task: task3}
action1.Deps = append(action1.Deps, &action2, &action3)
action2.Deps = append(action2.Deps, &action3)
return &action1
}
var c1, c2, c3 int
func executeReset() {
c1 = 0
c2 = 0
c3 = 0
// reset executor test variables
}
func TestExecute(t *testing.T) {
for _, tt := range executorTests {
executeReset()
got := Execute(tt.action)
if !reflect.DeepEqual(got, tt.err) {
t.Errorf("Execute: %v: want err: %v, got err %v", tt.action.Name, tt.err, got)
}
}
}
func testExecuteConcurrentN(t *testing.T, n int) {
for _, tt := range executorTests {
executeReset()
got := ExecuteConcurrent(tt.action, n)
if !reflect.DeepEqual(got, tt.err) {
t.Errorf("ExecuteConcurrent(%v): %v: want err: %v, got err %v", n, tt.action.Name, tt.err, got)
}
}
}
func TestExecuteConcurrent1(t *testing.T) { testExecuteConcurrentN(t, 1) }
func TestExecuteConcurrent2(t *testing.T) { testExecuteConcurrentN(t, 2) }
func TestExecuteConcurrent4(t *testing.T) { testExecuteConcurrentN(t, 4) }
func TestExecuteConcurrent7(t *testing.T) { testExecuteConcurrentN(t, 7) }<|fim▁end|> | |
<|file_name|>get_universe_planets_planet_id_position.go<|end_file_name|><|fim▁begin|>/*
* EVE Swagger Interface
*
* An OpenAPI for EVE Online
*
* OpenAPI spec version: 0.5.5
*
* Generated by: https://github.com/swagger-api/swagger-codegen.git
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<|fim▁hole|>
/* A list of GetUniversePlanetsPlanetIdPosition. */
//easyjson:json
type GetUniversePlanetsPlanetIdPositionList []GetUniversePlanetsPlanetIdPosition
/* position object */
//easyjson:json
type GetUniversePlanetsPlanetIdPosition struct {
X float32 `json:"x,omitempty"` /* x number */
Y float32 `json:"y,omitempty"` /* y number */
Z float32 `json:"z,omitempty"` /* z number */
}<|fim▁end|> | package esi |
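// Example of the JSON this struct decodes (values are illustrative only):
// {"x": 1.5e11, "y": -3.2e10, "z": 4.1e9}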
<|file_name|>test_comb_factorials.py<|end_file_name|><|fim▁begin|>from sympy import (Symbol, symbols, factorial, factorial2, binomial,
rf, ff, gamma, polygamma, EulerGamma, O, pi, nan,
oo, simplify, expand_func)
from sympy.functions.combinatorial.factorials import subfactorial
from sympy.utilities.pytest import XFAIL, raises
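# rf(x, k) is the rising factorial (Pochhammer symbol) x*(x+1)*...*(x+k-1);
# ff(x, k) is the falling factorial x*(x-1)*...*(x-k+1).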
def test_rf_eval_apply():
x, y = symbols('x,y')
assert rf(nan, y) == nan
assert rf(x, y) == rf(x, y)
assert rf(oo, 0) == 1
assert rf(-oo, 0) == 1
assert rf(oo, 6) == oo
assert rf(-oo, 7) == -oo
assert rf(oo, -6) == oo
assert rf(-oo, -7) == oo
assert rf(x, 0) == 1
assert rf(x, 1) == x
assert rf(x, 2) == x*(x + 1)
assert rf(x, 3) == x*(x + 1)*(x + 2)
assert rf(x, 5) == x*(x + 1)*(x + 2)*(x + 3)*(x + 4)
assert rf(x, -1) == 1/(x - 1)
assert rf(x, -2) == 1/((x - 1)*(x - 2))
assert rf(x, -3) == 1/((x - 1)*(x - 2)*(x - 3))
assert rf(1, 100) == factorial(100)
def test_ff_eval_apply():
x, y = symbols('x,y')
assert ff(nan, y) == nan
assert ff(x, y) == ff(x, y)
assert ff(oo, 0) == 1
assert ff(-oo, 0) == 1
assert ff(oo, 6) == oo
assert ff(-oo, 7) == -oo<|fim▁hole|> assert ff(oo, -6) == oo
assert ff(-oo, -7) == oo
assert ff(x, 0) == 1
assert ff(x, 1) == x
assert ff(x, 2) == x*(x - 1)
assert ff(x, 3) == x*(x - 1)*(x - 2)
assert ff(x, 5) == x*(x - 1)*(x - 2)*(x - 3)*(x - 4)
assert ff(x, -1) == 1/(x + 1)
assert ff(x, -2) == 1/((x + 1)*(x + 2))
assert ff(x, -3) == 1/((x + 1)*(x + 2)*(x + 3))
assert ff(100, 100) == factorial(100)
def test_factorial():
n = Symbol('n', integer=True)
k = Symbol('k', integer=True, positive=True)
assert factorial(-2) == 0
assert factorial(0) == 1
assert factorial(7) == 5040
assert factorial(n).func == factorial
assert factorial(2*n).func == factorial
assert factorial(n).is_integer
assert factorial(n).is_positive is None
assert factorial(k).is_positive
def test_factorial_diff():
n = Symbol('n', integer=True)
assert factorial(n).diff(n) == \
gamma(1 + n)*polygamma(0, 1 + n)
assert factorial(n**2).diff(n) == \
2*n*gamma(1 + n**2)*polygamma(0, 1 + n**2)
def test_factorial_series():
n = Symbol('n', integer=True)
assert factorial(n).series(n, 0, 3) == \
1 - n*EulerGamma + n**2*(EulerGamma**2/2 + pi**2/12) + O(n**3)
def test_factorial_rewrite():
n = Symbol('n', integer=True)
assert factorial(n).rewrite(gamma) == gamma(n + 1)
def test_factorial2():
n = Symbol('n', integer=True)
assert factorial2(-1) == 1
assert factorial2(0) == 1
assert factorial2(7) == 105
assert factorial2(8) == 384
assert factorial2(n).func == factorial2
def test_binomial():
n = Symbol('n', integer=True)
k = Symbol('k', integer=True)
u = Symbol('u', negative=True)
v = Symbol('v', positive=True)
assert binomial(0, 0) == 1
assert binomial(1, 1) == 1
assert binomial(10, 10) == 1
assert binomial(1, 2) == 0
assert binomial(1, -1) == 0
assert binomial(-1, 1) == -1
assert binomial(-10, 1) == -10
assert binomial(-10, 7) == -11440
assert binomial(n, -1) == 0
assert binomial(n, 0) == 1
assert expand_func(binomial(n, 1)) == n
assert expand_func(binomial(n, 2)) == n*(n - 1)/2
assert expand_func(binomial(n, n - 2)) == n*(n - 1)/2
assert expand_func(binomial(n, n - 1)) == n
assert binomial(n, 3).func == binomial
assert binomial(n, 3).expand(func=True) == n**3/6 - n**2/2 + n/3
assert expand_func(binomial(n, 3)) == n*(n - 2)*(n - 1)/6
assert binomial(n, n) == 1
assert binomial(n, n + 1) == 0
assert binomial(n, u) == 0
assert binomial(n, v).func == binomial
assert binomial(n, k).func == binomial
assert binomial(n, n + v) == 0
assert expand_func(binomial(n, n-3)) == n*(n - 2)*(n - 1)/6
def test_binomial_diff():
n = Symbol('n', integer=True)
k = Symbol('k', integer=True)
assert binomial(n, k).diff(n) == \
(-polygamma(0, 1 + n - k) + polygamma(0, 1 + n))*binomial(n, k)
assert binomial(n**2, k**3).diff(n) == \
2*n*(-polygamma(
0, 1 + n**2 - k**3) + polygamma(0, 1 + n**2))*binomial(n**2, k**3)
assert binomial(n, k).diff(k) == \
(-polygamma(0, 1 + k) + polygamma(0, 1 + n - k))*binomial(n, k)
assert binomial(n**2, k**3).diff(k) == \
3*k**2*(-polygamma(
0, 1 + k**3) + polygamma(0, 1 + n**2 - k**3))*binomial(n**2, k**3)
def test_binomial_rewrite():
n = Symbol('n', integer=True)
k = Symbol('k', integer=True)
assert binomial(n, k).rewrite(
factorial) == factorial(n)/(factorial(k)*factorial(n - k))
assert binomial(
n, k).rewrite(gamma) == gamma(n + 1)/(gamma(k + 1)*gamma(n - k + 1))
@XFAIL
def test_factorial_simplify_fail():
# simplify(factorial(x + 1).diff(x) - ((x + 1)*factorial(x)).diff(x))) == 0
from sympy.abc import x
assert simplify(x*polygamma(0, x + 1) - x*polygamma(0, x + 2) +
polygamma(0, x + 1) - polygamma(0, x + 2) + 1) == 0
def test_subfactorial():
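# subfactorial(n) counts the derangements of n elements, i.e. permutations
# that leave no element in its original position.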
assert all(subfactorial(i) == ans for i, ans in enumerate(
[1, 0, 1, 2, 9, 44, 265, 1854, 14833, 133496]))
raises(ValueError, lambda: subfactorial(0.1))
raises(ValueError, lambda: subfactorial(-2))<|fim▁end|> | |
<|file_name|>js_epbrEI_0goJi4R57mtzv1h04XFGpS-jRxZzMGykmego.js<|end_file_name|><|fim▁begin|>(function ($) {
/**
* Drag and drop table rows with field manipulation.
*
* Using the drupal_add_tabledrag() function, any table with weights or parent
* relationships may be made into draggable tables. Columns containing a field
* may optionally be hidden, providing a better user experience.
*
* Created tableDrag instances may be modified with custom behaviors by
* overriding the .onDrag, .onDrop, .row.onSwap, and .row.onIndent methods.
* See blocks.js for an example of adding additional functionality to tableDrag.
*/
Drupal.behaviors.tableDrag = {
attach: function (context, settings) {
for (var base in settings.tableDrag) {
$('#' + base, context).once('tabledrag', function () {
// Create the new tableDrag instance. Save in the Drupal variable
// to allow other scripts access to the object.
Drupal.tableDrag[base] = new Drupal.tableDrag(this, settings.tableDrag[base]);
});
}
}
};
/**
* Constructor for the tableDrag object. Provides table and field manipulation.
*
* @param table
* DOM object for the table to be made draggable.
* @param tableSettings
* Settings for the table added via drupal_add_dragtable().
*/
Drupal.tableDrag = function (table, tableSettings) {
var self = this;
// Required object variables.
this.table = table;
this.tableSettings = tableSettings;
this.dragObject = null; // Used to hold information about a current drag operation.
this.rowObject = null; // Provides operations for row manipulation.
this.oldRowElement = null; // Remember the previous element.
this.oldY = 0; // Used to determine up or down direction from last mouse move.
this.changed = false; // Whether anything in the entire table has changed.
this.maxDepth = 0; // Maximum amount of allowed parenting.
this.rtl = $(this.table).css('direction') == 'rtl' ? -1 : 1; // Direction of the table.
// Configure the scroll settings.
this.scrollSettings = { amount: 4, interval: 50, trigger: 70 };
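// amount: pixels scrolled per tick; interval: milliseconds between ticks;
// trigger: distance (px) from the viewport edge within which auto-scrolling
// engages (assumed semantics, inferred from the names).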
this.scrollInterval = null;
this.scrollY = 0;
this.windowHeight = 0;
// Check this table's settings to see if there are parent relationships in
// this table. For efficiency, large sections of code can be skipped if we
// don't need to track horizontal movement and indentations.
this.indentEnabled = false;
for (var group in tableSettings) {
for (var n in tableSettings[group]) {
if (tableSettings[group][n].relationship == 'parent') {
this.indentEnabled = true;
}
if (tableSettings[group][n].limit > 0) {
this.maxDepth = tableSettings[group][n].limit;
}
}
}
if (this.indentEnabled) {
this.indentCount = 1; // Total width of indents, set in makeDraggable.
// Find the width of indentations to measure mouse movements against.
// Because the table doesn't need to start with any indentations, we
// manually append 2 indentations in the first draggable row, measure
// the offset, then remove.
var indent = Drupal.theme('tableDragIndentation');
var testRow = $('<tr/>').addClass('draggable').appendTo(table);
var testCell = $('<td/>').appendTo(testRow).prepend(indent).prepend(indent);
this.indentAmount = $('.indentation', testCell).get(1).offsetLeft - $('.indentation', testCell).get(0).offsetLeft;
testRow.remove();
}
// Make each applicable row draggable.
// Match immediate children of the parent element to allow nesting.
$('> tr.draggable, > tbody > tr.draggable', table).each(function () { self.makeDraggable(this); });
// Add a link before the table for users to show or hide weight columns.
$(table).before($('<a href="#" class="tabledrag-toggle-weight"></a>')
.attr('title', Drupal.t('Re-order rows by numerical weight instead of dragging.'))
.click(function () {
if ($.cookie('Drupal.tableDrag.showWeight') == 1) {
self.hideColumns();
}
else {
self.showColumns();
}
return false;
})
.wrap('<div class="tabledrag-toggle-weight-wrapper"></div>')
.parent()
);
// Initialize the specified columns (for example, weight or parent columns)
// to show or hide according to user preference. This aids accessibility
// so that, e.g., screen reader users can choose to enter weight values and
// manipulate form elements directly, rather than using drag-and-drop.
self.initColumns();
// Add mouse bindings to the document. The self variable is passed along
// as event handlers do not have direct access to the tableDrag object.
$(document).bind('mousemove', function (event) { return self.dragRow(event, self); });
$(document).bind('mouseup', function (event) { return self.dropRow(event, self); });
};
/**
* Initialize columns containing form elements to be hidden by default,
* according to the settings for this tableDrag instance.
*
* Identify and mark each cell with a CSS class so we can easily toggle
* show/hide it. Finally, hide columns if user does not have a
* 'Drupal.tableDrag.showWeight' cookie.
*/
Drupal.tableDrag.prototype.initColumns = function () {
for (var group in this.tableSettings) {
// Find the first field in this group.
for (var d in this.tableSettings[group]) {
var field = $('.' + this.tableSettings[group][d].target + ':first', this.table);
if (field.size() && this.tableSettings[group][d].hidden) {
var hidden = this.tableSettings[group][d].hidden;
var cell = field.parents('td:first');
break;
}
}
// Mark the column containing this field so it can be hidden.
if (hidden && cell[0]) {
// Add 1 to our indexes. The nth-child selector is 1 based, not 0 based.
// Match immediate children of the parent element to allow nesting.
var columnIndex = $('> td', cell.parent()).index(cell.get(0)) + 1;
$('> thead > tr, > tbody > tr, > tr', this.table).each(function () {
// Get the columnIndex and adjust for any colspans in this row.
var index = columnIndex;
var cells = $(this).children();
cells.each(function (n) {
if (n < index && this.colSpan && this.colSpan > 1) {
index -= this.colSpan - 1;
}
});
if (index > 0) {
cell = cells.filter(':nth-child(' + index + ')');
if (cell[0].colSpan && cell[0].colSpan > 1) {
// If this cell has a colspan, mark it so we can reduce the colspan.
cell.addClass('tabledrag-has-colspan');
}
else {
// Mark this cell so we can hide it.
cell.addClass('tabledrag-hide');
}
}
});
}
}
// Now hide cells and reduce colspans unless cookie indicates previous choice.
// Set a cookie if it is not already present.
if ($.cookie('Drupal.tableDrag.showWeight') === null) {
$.cookie('Drupal.tableDrag.showWeight', 0, {
path: Drupal.settings.basePath,
// The cookie expires in one year.
expires: 365
});
this.hideColumns();
}
// Check cookie value and show/hide weight columns accordingly.
else {
if ($.cookie('Drupal.tableDrag.showWeight') == 1) {
this.showColumns();
}
else {
this.hideColumns();
}
}
};
/**
* Hide the columns containing weight/parent form elements.
* Undo showColumns().
*/
Drupal.tableDrag.prototype.hideColumns = function () {
// Hide weight/parent cells and headers.
$('.tabledrag-hide', 'table.tabledrag-processed').css('display', 'none');
// Show TableDrag handles.
$('.tabledrag-handle', 'table.tabledrag-processed').css('display', '');
// Reduce the colspan of any affected multi-span columns.
$('.tabledrag-has-colspan', 'table.tabledrag-processed').each(function () {
this.colSpan = this.colSpan - 1;
});
// Change link text.<|fim▁hole|> // Change cookie.
$.cookie('Drupal.tableDrag.showWeight', 0, {
path: Drupal.settings.basePath,
// The cookie expires in one year.
expires: 365
});
};
/**
* Show the columns containing weight/parent form elements
* Undo hideColumns().
*/
Drupal.tableDrag.prototype.showColumns = function () {
// Show weight/parent cells and headers.
$('.tabledrag-hide', 'table.tabledrag-processed').css('display', '');
// Hide TableDrag handles.
$('.tabledrag-handle', 'table.tabledrag-processed').css('display', 'none');
// Increase the colspan for any columns where it was previously reduced.
$('.tabledrag-has-colspan', 'table.tabledrag-processed').each(function () {
this.colSpan = this.colSpan + 1;
});
// Change link text.
$('.tabledrag-toggle-weight').text(Drupal.t('Hide row weights'));
// Change cookie.
$.cookie('Drupal.tableDrag.showWeight', 1, {
path: Drupal.settings.basePath,
// The cookie expires in one year.
expires: 365
});
};
/**
* Find the target used within a particular row and group.
*/
Drupal.tableDrag.prototype.rowSettings = function (group, row) {
var field = $('.' + group, row);
for (var delta in this.tableSettings[group]) {
var targetClass = this.tableSettings[group][delta].target;
if (field.is('.' + targetClass)) {
// Return a copy of the row settings.
var rowSettings = {};
for (var n in this.tableSettings[group][delta]) {
rowSettings[n] = this.tableSettings[group][delta][n];
}
return rowSettings;
}
}
};
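// For reference, each entry in this.tableSettings[group] consulted here is
// expected to look roughly like the following sketch (keys as provided by
// Drupal.settings.tableDrag; the concrete values are hypothetical):
// { target: 'menu-weight', source: 'menu-weight', relationship: 'sibling',
//   action: 'order', hidden: true, limit: 0 }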
/**
* Take an item and add event handlers to make it become draggable.
*/
Drupal.tableDrag.prototype.makeDraggable = function (item) {
var self = this;
// Create the handle.
var handle = $('<a href="#" class="tabledrag-handle"><div class="handle"> </div></a>').attr('title', Drupal.t('Drag to re-order'));
// Insert the handle after indentations (if any).
if ($('td:first .indentation:last', item).length) {
$('td:first .indentation:last', item).after(handle);
// Update the total width of indentation in this entire table.
self.indentCount = Math.max($('.indentation', item).size(), self.indentCount);
}
else {
$('td:first', item).prepend(handle);
}
// Add hover action for the handle.
handle.hover(function () {
self.dragObject == null ? $(this).addClass('tabledrag-handle-hover') : null;
}, function () {
self.dragObject == null ? $(this).removeClass('tabledrag-handle-hover') : null;
});
// Add the mousedown action for the handle.
handle.mousedown(function (event) {
// Create a new dragObject recording the event information.
self.dragObject = {};
self.dragObject.initMouseOffset = self.getMouseOffset(item, event);
self.dragObject.initMouseCoords = self.mouseCoords(event);
if (self.indentEnabled) {
self.dragObject.indentMousePos = self.dragObject.initMouseCoords;
}
// If there's a lingering row object from the keyboard, remove its focus.
if (self.rowObject) {
$('a.tabledrag-handle', self.rowObject.element).blur();
}
// Create a new rowObject for manipulation of this row.
self.rowObject = new self.row(item, 'mouse', self.indentEnabled, self.maxDepth, true);
// Save the position of the table.
self.table.topY = $(self.table).offset().top;
self.table.bottomY = self.table.topY + self.table.offsetHeight;
// Add classes to the handle and row.
$(this).addClass('tabledrag-handle-hover');
$(item).addClass('drag');
// Set the document to use the move cursor during drag.
$('body').addClass('drag');
if (self.oldRowElement) {
$(self.oldRowElement).removeClass('drag-previous');
}
// Hack for IE6 that flickers uncontrollably if select lists are moved.
if (navigator.userAgent.indexOf('MSIE 6.') != -1) {
$('select', this.table).css('display', 'none');
}
// Hack for Konqueror, prevent the blur handler from firing.
// Konqueror always gives links focus, even after returning false on mousedown.
self.safeBlur = false;
// Call optional placeholder function.
self.onDrag();
return false;
});
// Prevent the anchor tag from jumping us to the top of the page.
handle.click(function () {
return false;
});
// Similar to the hover event, add a class when the handle is focused.
handle.focus(function () {
$(this).addClass('tabledrag-handle-hover');
self.safeBlur = true;
});
// Remove the handle class on blur and fire the same function as a mouseup.
handle.blur(function (event) {
$(this).removeClass('tabledrag-handle-hover');
if (self.rowObject && self.safeBlur) {
self.dropRow(event, self);
}
});
// Add arrow-key support to the handle.
handle.keydown(function (event) {
// If a rowObject doesn't yet exist and this isn't the tab key.
if (event.keyCode != 9 && !self.rowObject) {
self.rowObject = new self.row(item, 'keyboard', self.indentEnabled, self.maxDepth, true);
}
var keyChange = false;
switch (event.keyCode) {
case 37: // Left arrow.
case 63234: // Safari left arrow.
keyChange = true;
self.rowObject.indent(-1 * self.rtl);
break;
case 38: // Up arrow.
case 63232: // Safari up arrow.
var previousRow = $(self.rowObject.element).prev('tr').get(0);
while (previousRow && $(previousRow).is(':hidden')) {
previousRow = $(previousRow).prev('tr').get(0);
}
if (previousRow) {
self.safeBlur = false; // Do not allow the onBlur cleanup.
self.rowObject.direction = 'up';
keyChange = true;
if ($(item).is('.tabledrag-root')) {
// Swap with the previous top-level row.
var groupHeight = 0;
while (previousRow && $('.indentation', previousRow).size()) {
previousRow = $(previousRow).prev('tr').get(0);
              groupHeight += (previousRow && !$(previousRow).is(':hidden')) ? previousRow.offsetHeight : 0;
}
if (previousRow) {
self.rowObject.swap('before', previousRow);
// No need to check for indentation, 0 is the only valid one.
window.scrollBy(0, -groupHeight);
}
}
else if (self.table.tBodies[0].rows[0] != previousRow || $(previousRow).is('.draggable')) {
// Swap with the previous row (unless previous row is the first one
// and undraggable).
self.rowObject.swap('before', previousRow);
self.rowObject.interval = null;
self.rowObject.indent(0);
window.scrollBy(0, -parseInt(item.offsetHeight, 10));
}
handle.get(0).focus(); // Regain focus after the DOM manipulation.
}
break;
case 39: // Right arrow.
case 63235: // Safari right arrow.
keyChange = true;
self.rowObject.indent(1 * self.rtl);
break;
case 40: // Down arrow.
case 63233: // Safari down arrow.
var nextRow = $(self.rowObject.group).filter(':last').next('tr').get(0);
while (nextRow && $(nextRow).is(':hidden')) {
nextRow = $(nextRow).next('tr').get(0);
}
if (nextRow) {
self.safeBlur = false; // Do not allow the onBlur cleanup.
self.rowObject.direction = 'down';
keyChange = true;
if ($(item).is('.tabledrag-root')) {
// Swap with the next group (necessarily a top-level one).
var groupHeight = 0;
            var nextGroup = new self.row(nextRow, 'keyboard', self.indentEnabled, self.maxDepth, false);
if (nextGroup) {
$(nextGroup.group).each(function () {
groupHeight += $(this).is(':hidden') ? 0 : this.offsetHeight;
});
              var nextGroupRow = $(nextGroup.group).filter(':last').get(0);
self.rowObject.swap('after', nextGroupRow);
// No need to check for indentation, 0 is the only valid one.
window.scrollBy(0, parseInt(groupHeight, 10));
}
}
else {
// Swap with the next row.
self.rowObject.swap('after', nextRow);
self.rowObject.interval = null;
self.rowObject.indent(0);
window.scrollBy(0, parseInt(item.offsetHeight, 10));
}
handle.get(0).focus(); // Regain focus after the DOM manipulation.
}
break;
}
if (self.rowObject && self.rowObject.changed == true) {
$(item).addClass('drag');
if (self.oldRowElement) {
$(self.oldRowElement).removeClass('drag-previous');
}
self.oldRowElement = item;
self.restripeTable();
self.onDrag();
}
// Returning false if we have an arrow key to prevent scrolling.
if (keyChange) {
return false;
}
});
// Compatibility addition, return false on keypress to prevent unwanted scrolling.
// IE and Safari will suppress scrolling on keydown, but all other browsers
// need to return false on keypress. http://www.quirksmode.org/js/keys.html
handle.keypress(function (event) {
switch (event.keyCode) {
case 37: // Left arrow.
case 38: // Up arrow.
case 39: // Right arrow.
case 40: // Down arrow.
return false;
}
});
};
/**
* Mousemove event handler, bound to document.
*/
Drupal.tableDrag.prototype.dragRow = function (event, self) {
if (self.dragObject) {
self.currentMouseCoords = self.mouseCoords(event);
var y = self.currentMouseCoords.y - self.dragObject.initMouseOffset.y;
var x = self.currentMouseCoords.x - self.dragObject.initMouseOffset.x;
// Check for row swapping and vertical scrolling.
if (y != self.oldY) {
self.rowObject.direction = y > self.oldY ? 'down' : 'up';
self.oldY = y; // Update the old value.
// Check if the window should be scrolled (and how fast).
var scrollAmount = self.checkScroll(self.currentMouseCoords.y);
// Stop any current scrolling.
clearInterval(self.scrollInterval);
// Continue scrolling if the mouse has moved in the scroll direction.
if (scrollAmount > 0 && self.rowObject.direction == 'down' || scrollAmount < 0 && self.rowObject.direction == 'up') {
self.setScroll(scrollAmount);
}
// If we have a valid target, perform the swap and restripe the table.
var currentRow = self.findDropTargetRow(x, y);
if (currentRow) {
if (self.rowObject.direction == 'down') {
self.rowObject.swap('after', currentRow, self);
}
else {
self.rowObject.swap('before', currentRow, self);
}
self.restripeTable();
}
}
// Similar to row swapping, handle indentations.
if (self.indentEnabled) {
var xDiff = self.currentMouseCoords.x - self.dragObject.indentMousePos.x;
// Set the number of indentations the mouse has been moved left or right.
var indentDiff = Math.round(xDiff / self.indentAmount * self.rtl);
// Indent the row with our estimated diff, which may be further
// restricted according to the rows around this row.
var indentChange = self.rowObject.indent(indentDiff);
// Update table and mouse indentations.
self.dragObject.indentMousePos.x += self.indentAmount * indentChange * self.rtl;
self.indentCount = Math.max(self.indentCount, self.rowObject.indents);
}
return false;
}
};
/**
* Mouseup event handler, bound to document.
* Blur event handler, bound to drag handle for keyboard support.
*/
Drupal.tableDrag.prototype.dropRow = function (event, self) {
// Drop row functionality shared between mouseup and blur events.
if (self.rowObject != null) {
var droppedRow = self.rowObject.element;
// The row is already in the right place so we just release it.
if (self.rowObject.changed == true) {
// Update the fields in the dropped row.
self.updateFields(droppedRow);
// If a setting exists for affecting the entire group, update all the
// fields in the entire dragged group.
for (var group in self.tableSettings) {
var rowSettings = self.rowSettings(group, droppedRow);
if (rowSettings.relationship == 'group') {
for (var n in self.rowObject.children) {
self.updateField(self.rowObject.children[n], group);
}
}
}
self.rowObject.markChanged();
if (self.changed == false) {
$(Drupal.theme('tableDragChangedWarning')).insertBefore(self.table).hide().fadeIn('slow');
self.changed = true;
}
}
if (self.indentEnabled) {
self.rowObject.removeIndentClasses();
}
if (self.oldRowElement) {
$(self.oldRowElement).removeClass('drag-previous');
}
$(droppedRow).removeClass('drag').addClass('drag-previous');
self.oldRowElement = droppedRow;
self.onDrop();
self.rowObject = null;
}
// Functionality specific only to mouseup event.
if (self.dragObject != null) {
$('.tabledrag-handle', droppedRow).removeClass('tabledrag-handle-hover');
self.dragObject = null;
$('body').removeClass('drag');
clearInterval(self.scrollInterval);
// Hack for IE6 that flickers uncontrollably if select lists are moved.
if (navigator.userAgent.indexOf('MSIE 6.') != -1) {
$('select', this.table).css('display', 'block');
}
}
};
/**
* Get the mouse coordinates from the event (allowing for browser differences).
*/
Drupal.tableDrag.prototype.mouseCoords = function (event) {
if (event.pageX || event.pageY) {
return { x: event.pageX, y: event.pageY };
}
return {
x: event.clientX + document.body.scrollLeft - document.body.clientLeft,
y: event.clientY + document.body.scrollTop - document.body.clientTop
};
};
/**
* Given a target element and a mouse event, get the mouse offset from that
* element. To do this we need the element's position and the mouse position.
*/
Drupal.tableDrag.prototype.getMouseOffset = function (target, event) {
var docPos = $(target).offset();
var mousePos = this.mouseCoords(event);
return { x: mousePos.x - docPos.left, y: mousePos.y - docPos.top };
};
/**
* Find the row the mouse is currently over. This row is then taken and swapped
* with the one being dragged.
*
* @param x
* The x coordinate of the mouse on the page (not the screen).
* @param y
* The y coordinate of the mouse on the page (not the screen).
*/
Drupal.tableDrag.prototype.findDropTargetRow = function (x, y) {
var rows = $(this.table.tBodies[0].rows).not(':hidden');
for (var n = 0; n < rows.length; n++) {
var row = rows[n];
var indentDiff = 0;
var rowY = $(row).offset().top;
// Because Safari does not report offsetHeight on table rows, but does on
// table cells, grab the firstChild of the row and use that instead.
// http://jacob.peargrove.com/blog/2006/technical/table-row-offsettop-bug-in-safari.
if (row.offsetHeight == 0) {
var rowHeight = parseInt(row.firstChild.offsetHeight, 10) / 2;
}
// Other browsers.
else {
var rowHeight = parseInt(row.offsetHeight, 10) / 2;
}
// Because we always insert before, we need to offset the height a bit.
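    // For example (hypothetical numbers): a row with rowY 100 and a computed
    // rowHeight of 10 matches any pointer y strictly between 90 and 110.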
if ((y > (rowY - rowHeight)) && (y < (rowY + rowHeight))) {
if (this.indentEnabled) {
// Check that this row is not a child of the row being dragged.
for (var n in this.rowObject.group) {
if (this.rowObject.group[n] == row) {
return null;
}
}
}
else {
// Do not allow a row to be swapped with itself.
if (row == this.rowObject.element) {
return null;
}
}
// Check that swapping with this row is allowed.
if (!this.rowObject.isValidSwap(row)) {
return null;
}
// We may have found the row the mouse just passed over, but it doesn't
// take into account hidden rows. Skip backwards until we find a draggable
// row.
while ($(row).is(':hidden') && $(row).prev('tr').is(':hidden')) {
row = $(row).prev('tr').get(0);
}
return row;
}
}
return null;
};
/**
* After the row is dropped, update the table fields according to the settings
* set for this table.
*
* @param changedRow
* DOM object for the row that was just dropped.
*/
Drupal.tableDrag.prototype.updateFields = function (changedRow) {
for (var group in this.tableSettings) {
// Each group may have a different setting for relationship, so we find
// the source rows for each separately.
this.updateField(changedRow, group);
}
};
/**
* After the row is dropped, update a single table field according to specific
* settings.
*
* @param changedRow
* DOM object for the row that was just dropped.
* @param group
* The settings group on which field updates will occur.
*/
Drupal.tableDrag.prototype.updateField = function (changedRow, group) {
var rowSettings = this.rowSettings(group, changedRow);
// Set the row as its own target.
if (rowSettings.relationship == 'self' || rowSettings.relationship == 'group') {
var sourceRow = changedRow;
}
// Siblings are easy, check previous and next rows.
else if (rowSettings.relationship == 'sibling') {
var previousRow = $(changedRow).prev('tr').get(0);
var nextRow = $(changedRow).next('tr').get(0);
var sourceRow = changedRow;
if ($(previousRow).is('.draggable') && $('.' + group, previousRow).length) {
if (this.indentEnabled) {
        if ($('.indentations', previousRow).size() == $('.indentations', changedRow).size()) {
sourceRow = previousRow;
}
}
else {
sourceRow = previousRow;
}
}
else if ($(nextRow).is('.draggable') && $('.' + group, nextRow).length) {
if (this.indentEnabled) {
        if ($('.indentations', nextRow).size() == $('.indentations', changedRow).size()) {
sourceRow = nextRow;
}
}
else {
sourceRow = nextRow;
}
}
}
// Parents, look up the tree until we find a field not in this group.
// Go up as many parents as indentations in the changed row.
else if (rowSettings.relationship == 'parent') {
var previousRow = $(changedRow).prev('tr');
while (previousRow.length && $('.indentation', previousRow).length >= this.rowObject.indents) {
previousRow = previousRow.prev('tr');
}
// If we found a row.
if (previousRow.length) {
sourceRow = previousRow[0];
}
// Otherwise we went all the way to the left of the table without finding
// a parent, meaning this item has been placed at the root level.
else {
// Use the first row in the table as source, because it's guaranteed to
// be at the root level. Find the first item, then compare this row
// against it as a sibling.
sourceRow = $(this.table).find('tr.draggable:first').get(0);
if (sourceRow == this.rowObject.element) {
sourceRow = $(this.rowObject.group[this.rowObject.group.length - 1]).next('tr.draggable').get(0);
}
var useSibling = true;
}
}
// Because we may have moved the row from one category to another,
// take a look at our sibling and borrow its sources and targets.
this.copyDragClasses(sourceRow, changedRow, group);
rowSettings = this.rowSettings(group, changedRow);
// In the case that we're looking for a parent, but the row is at the top
// of the tree, copy our sibling's values.
if (useSibling) {
rowSettings.relationship = 'sibling';
rowSettings.source = rowSettings.target;
}
var targetClass = '.' + rowSettings.target;
var targetElement = $(targetClass, changedRow).get(0);
// Check if a target element exists in this row.
if (targetElement) {
var sourceClass = '.' + rowSettings.source;
var sourceElement = $(sourceClass, sourceRow).get(0);
switch (rowSettings.action) {
case 'depth':
// Get the depth of the target row.
targetElement.value = $('.indentation', $(sourceElement).parents('tr:first')).size();
break;
case 'match':
// Update the value.
targetElement.value = sourceElement.value;
break;
case 'order':
var siblings = this.rowObject.findSiblings(rowSettings);
if ($(targetElement).is('select')) {
// Get a list of acceptable values.
var values = [];
$('option', targetElement).each(function () {
values.push(this.value);
});
var maxVal = values[values.length - 1];
// Populate the values in the siblings.
$(targetClass, siblings).each(function () {
// If there are more items than possible values, assign the maximum value to the row.
if (values.length > 0) {
this.value = values.shift();
}
else {
this.value = maxVal;
}
});
}
else {
// Assume a numeric input field.
var weight = parseInt($(targetClass, siblings[0]).val(), 10) || 0;
$(targetClass, siblings).each(function () {
this.value = weight;
weight++;
});
}
break;
}
}
};
/**
* Copy all special tableDrag classes from one row's form elements to a
* different one, removing any special classes that the destination row
* may have had.
*/
Drupal.tableDrag.prototype.copyDragClasses = function (sourceRow, targetRow, group) {
var sourceElement = $('.' + group, sourceRow);
var targetElement = $('.' + group, targetRow);
if (sourceElement.length && targetElement.length) {
targetElement[0].className = sourceElement[0].className;
}
};
Drupal.tableDrag.prototype.checkScroll = function (cursorY) {
var de = document.documentElement;
var b = document.body;
var windowHeight = this.windowHeight = window.innerHeight || (de.clientHeight && de.clientWidth != 0 ? de.clientHeight : b.offsetHeight);
var scrollY = this.scrollY = (document.all ? (!de.scrollTop ? b.scrollTop : de.scrollTop) : (window.pageYOffset ? window.pageYOffset : window.scrollY));
var trigger = this.scrollSettings.trigger;
var delta = 0;
// Return a scroll speed relative to the edge of the screen.
if (cursorY - scrollY > windowHeight - trigger) {
delta = trigger / (windowHeight + scrollY - cursorY);
delta = (delta > 0 && delta < trigger) ? delta : trigger;
return delta * this.scrollSettings.amount;
}
else if (cursorY - scrollY < trigger) {
delta = trigger / (cursorY - scrollY);
delta = (delta > 0 && delta < trigger) ? delta : trigger;
return -delta * this.scrollSettings.amount;
}
};
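// Rough sketch of the math above with hypothetical numbers: given trigger 70,
// windowHeight 600, scrollY 0 and cursorY 590, delta is 70 / (600 + 0 - 590) = 7,
// so the page scrolls down by 7 * scrollSettings.amount pixels per tick.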
Drupal.tableDrag.prototype.setScroll = function (scrollAmount) {
var self = this;
this.scrollInterval = setInterval(function () {
// Update the scroll values stored in the object.
self.checkScroll(self.currentMouseCoords.y);
var aboveTable = self.scrollY > self.table.topY;
var belowTable = self.scrollY + self.windowHeight < self.table.bottomY;
if (scrollAmount > 0 && belowTable || scrollAmount < 0 && aboveTable) {
window.scrollBy(0, scrollAmount);
}
}, this.scrollSettings.interval);
};
Drupal.tableDrag.prototype.restripeTable = function () {
// :even and :odd are reversed because jQuery counts from 0 and
// we count from 1, so we're out of sync.
// Match immediate children of the parent element to allow nesting.
$('> tbody > tr.draggable:visible, > tr.draggable:visible', this.table)
.removeClass('odd even')
.filter(':odd').addClass('even').end()
.filter(':even').addClass('odd');
};
/**
* Stub function. Allows a custom handler when a row begins dragging.
*/
Drupal.tableDrag.prototype.onDrag = function () {
return null;
};
/**
* Stub function. Allows a custom handler when a row is dropped.
*/
Drupal.tableDrag.prototype.onDrop = function () {
return null;
};
/**
* Constructor to make a new object to manipulate a table row.
*
* @param tableRow
* The DOM element for the table row we will be manipulating.
* @param method
* The method in which this row is being moved. Either 'keyboard' or 'mouse'.
* @param indentEnabled
* Whether the containing table uses indentations. Used for optimizations.
* @param maxDepth
* The maximum amount of indentations this row may contain.
* @param addClasses
* Whether we want to add classes to this row to indicate child relationships.
*/
Drupal.tableDrag.prototype.row = function (tableRow, method, indentEnabled, maxDepth, addClasses) {
this.element = tableRow;
this.method = method;
this.group = [tableRow];
this.groupDepth = $('.indentation', tableRow).size();
this.changed = false;
this.table = $(tableRow).parents('table:first').get(0);
this.indentEnabled = indentEnabled;
this.maxDepth = maxDepth;
this.direction = ''; // Direction the row is being moved.
if (this.indentEnabled) {
this.indents = $('.indentation', tableRow).size();
this.children = this.findChildren(addClasses);
this.group = $.merge(this.group, this.children);
// Find the depth of this entire group.
for (var n = 0; n < this.group.length; n++) {
this.groupDepth = Math.max($('.indentation', this.group[n]).size(), this.groupDepth);
}
}
};
/**
* Find all children of rowObject by indentation.
*
* @param addClasses
* Whether we want to add classes to this row to indicate child relationships.
*/
Drupal.tableDrag.prototype.row.prototype.findChildren = function (addClasses) {
var parentIndentation = this.indents;
var currentRow = $(this.element, this.table).next('tr.draggable');
var rows = [];
var child = 0;
while (currentRow.length) {
var rowIndentation = $('.indentation', currentRow).length;
// A greater indentation indicates this is a child.
if (rowIndentation > parentIndentation) {
child++;
rows.push(currentRow[0]);
if (addClasses) {
$('.indentation', currentRow).each(function (indentNum) {
if (child == 1 && (indentNum == parentIndentation)) {
$(this).addClass('tree-child-first');
}
if (indentNum == parentIndentation) {
$(this).addClass('tree-child');
}
else if (indentNum > parentIndentation) {
$(this).addClass('tree-child-horizontal');
}
});
}
}
else {
break;
}
currentRow = currentRow.next('tr.draggable');
}
if (addClasses && rows.length) {
$('.indentation:nth-child(' + (parentIndentation + 1) + ')', rows[rows.length - 1]).addClass('tree-child-last');
}
return rows;
};
/**
* Ensure that two rows are allowed to be swapped.
*
* @param row
* DOM object for the row being considered for swapping.
*/
Drupal.tableDrag.prototype.row.prototype.isValidSwap = function (row) {
if (this.indentEnabled) {
var prevRow, nextRow;
if (this.direction == 'down') {
prevRow = row;
nextRow = $(row).next('tr').get(0);
}
else {
prevRow = $(row).prev('tr').get(0);
nextRow = row;
}
this.interval = this.validIndentInterval(prevRow, nextRow);
// We have an invalid swap if the valid indentations interval is empty.
if (this.interval.min > this.interval.max) {
return false;
}
}
// Do not let an un-draggable first row have anything put before it.
if (this.table.tBodies[0].rows[0] == row && $(row).is(':not(.draggable)')) {
return false;
}
return true;
};
/**
* Perform the swap between two rows.
*
* @param position
* Whether the swap will occur 'before' or 'after' the given row.
* @param row
 *   DOM element that will be swapped with the row group.
*/
Drupal.tableDrag.prototype.row.prototype.swap = function (position, row) {
Drupal.detachBehaviors(this.group, Drupal.settings, 'move');
$(row)[position](this.group);
Drupal.attachBehaviors(this.group, Drupal.settings);
this.changed = true;
this.onSwap(row);
};
/**
* Determine the valid indentations interval for the row at a given position
* in the table.
*
* @param prevRow
* DOM object for the row before the tested position
* (or null for first position in the table).
* @param nextRow
* DOM object for the row after the tested position
* (or null for last position in the table).
*/
Drupal.tableDrag.prototype.row.prototype.validIndentInterval = function (prevRow, nextRow) {
var minIndent, maxIndent;
// Minimum indentation:
// Do not orphan the next row.
minIndent = nextRow ? $('.indentation', nextRow).size() : 0;
// Maximum indentation:
if (!prevRow || $(prevRow).is(':not(.draggable)') || $(this.element).is('.tabledrag-root')) {
// Do not indent:
// - the first row in the table,
// - rows dragged below a non-draggable row,
// - 'root' rows.
maxIndent = 0;
}
else {
// Do not go deeper than as a child of the previous row.
maxIndent = $('.indentation', prevRow).size() + ($(prevRow).is('.tabledrag-leaf') ? 0 : 1);
// Limit by the maximum allowed depth for the table.
if (this.maxDepth) {
maxIndent = Math.min(maxIndent, this.maxDepth - (this.groupDepth - this.indents));
}
}
return { 'min': minIndent, 'max': maxIndent };
};
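// Example with hypothetical rows: if prevRow is a draggable non-leaf row with
// two indentations and nextRow carries one indentation, the result is
// { min: 1, max: 3 } (maxDepth permitting): the dragged row may sit anywhere
// from a sibling of nextRow up to a child of prevRow.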
/**
* Indent a row within the legal bounds of the table.
*
* @param indentDiff
* The number of additional indentations proposed for the row (can be
* positive or negative). This number will be adjusted to nearest valid
* indentation level for the row.
*/
Drupal.tableDrag.prototype.row.prototype.indent = function (indentDiff) {
// Determine the valid indentations interval if not available yet.
if (!this.interval) {
    var prevRow = $(this.element).prev('tr').get(0);
    var nextRow = $(this.group).filter(':last').next('tr').get(0);
this.interval = this.validIndentInterval(prevRow, nextRow);
}
// Adjust to the nearest valid indentation.
var indent = this.indents + indentDiff;
indent = Math.max(indent, this.interval.min);
indent = Math.min(indent, this.interval.max);
indentDiff = indent - this.indents;
for (var n = 1; n <= Math.abs(indentDiff); n++) {
// Add or remove indentations.
if (indentDiff < 0) {
$('.indentation:first', this.group).remove();
this.indents--;
}
else {
$('td:first', this.group).prepend(Drupal.theme('tableDragIndentation'));
this.indents++;
}
}
if (indentDiff) {
// Update indentation for this row.
this.changed = true;
this.groupDepth += indentDiff;
this.onIndent();
}
return indentDiff;
};
/**
* Find all siblings for a row, either according to its subgroup or indentation.
* Note that the passed-in row is included in the list of siblings.
*
 * @param rowSettings
 *   The field settings we're using to identify what constitutes a sibling.
*/
Drupal.tableDrag.prototype.row.prototype.findSiblings = function (rowSettings) {
var siblings = [];
var directions = ['prev', 'next'];
var rowIndentation = this.indents;
for (var d = 0; d < directions.length; d++) {
var checkRow = $(this.element)[directions[d]]();
while (checkRow.length) {
// Check that the sibling contains a similar target field.
      if ($('.' + rowSettings.target, checkRow).length) {
// Either add immediately if this is a flat table, or check to ensure
// that this row has the same level of indentation.
if (this.indentEnabled) {
var checkRowIndentation = $('.indentation', checkRow).length;
}
if (!(this.indentEnabled) || (checkRowIndentation == rowIndentation)) {
siblings.push(checkRow[0]);
}
else if (checkRowIndentation < rowIndentation) {
// No need to keep looking for siblings when we get to a parent.
break;
}
}
else {
break;
}
checkRow = $(checkRow)[directions[d]]();
}
// Since siblings are added in reverse order for previous, reverse the
// completed list of previous siblings. Add the current row and continue.
if (directions[d] == 'prev') {
siblings.reverse();
siblings.push(this.element);
}
}
return siblings;
};
/**
* Remove indentation helper classes from the current row group.
*/
Drupal.tableDrag.prototype.row.prototype.removeIndentClasses = function () {
for (var n in this.children) {
$('.indentation', this.children[n])
.removeClass('tree-child')
.removeClass('tree-child-first')
.removeClass('tree-child-last')
.removeClass('tree-child-horizontal');
}
};
/**
* Add an asterisk or other marker to the changed row.
*/
Drupal.tableDrag.prototype.row.prototype.markChanged = function () {
var marker = Drupal.theme('tableDragChangedMarker');
var cell = $('td:first', this.element);
if ($('span.tabledrag-changed', cell).length == 0) {
cell.append(marker);
}
};
/**
* Stub function. Allows a custom handler when a row is indented.
*/
Drupal.tableDrag.prototype.row.prototype.onIndent = function () {
return null;
};
/**
* Stub function. Allows a custom handler when a row is swapped.
*/
Drupal.tableDrag.prototype.row.prototype.onSwap = function (swappedRow) {
return null;
};
Drupal.theme.prototype.tableDragChangedMarker = function () {
return '<span class="warning tabledrag-changed">*</span>';
};
Drupal.theme.prototype.tableDragIndentation = function () {
return '<div class="indentation"> </div>';
};
Drupal.theme.prototype.tableDragChangedWarning = function () {
return '<div class="tabledrag-changed-warning messages warning">' + Drupal.theme('tableDragChangedMarker') + ' ' + Drupal.t('Changes made in this table will not be saved until the form is submitted.') + '</div>';
};
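// A theme or module may override these markup builders; a minimal sketch
// (hypothetical markup) would be:
// Drupal.theme.prototype.tableDragChangedMarker = function () {
//   return '<abbr class="warning tabledrag-changed" title="Changed">*</abbr>';
// };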
})(jQuery);
;
(function ($) {
Drupal.viewsSlideshow = Drupal.viewsSlideshow || {};
/**
* Views Slideshow Controls
*/
Drupal.viewsSlideshowControls = Drupal.viewsSlideshowControls || {};
/**
* Implement the play hook for controls.
*/
Drupal.viewsSlideshowControls.play = function (options) {
// Route the control call to the correct control type.
// Need to use try catch so we don't have to check to make sure every part
// of the object is defined.
try {
if (typeof Drupal.settings.viewsSlideshowControls[options.slideshowID].top.type != "undefined" && typeof Drupal[Drupal.settings.viewsSlideshowControls[options.slideshowID].top.type].play == 'function') {
Drupal[Drupal.settings.viewsSlideshowControls[options.slideshowID].top.type].play(options);
}
}
catch(err) {
// Don't need to do anything on error.
}
try {
if (typeof Drupal.settings.viewsSlideshowControls[options.slideshowID].bottom.type != "undefined" && typeof Drupal[Drupal.settings.viewsSlideshowControls[options.slideshowID].bottom.type].play == 'function') {
Drupal[Drupal.settings.viewsSlideshowControls[options.slideshowID].bottom.type].play(options);
}
}
catch(err) {
// Don't need to do anything on error.
}
};
/**
* Implement the pause hook for controls.
*/
Drupal.viewsSlideshowControls.pause = function (options) {
// Route the control call to the correct control type.
// Need to use try catch so we don't have to check to make sure every part
// of the object is defined.
try {
if (typeof Drupal.settings.viewsSlideshowControls[options.slideshowID].top.type != "undefined" && typeof Drupal[Drupal.settings.viewsSlideshowControls[options.slideshowID].top.type].pause == 'function') {
Drupal[Drupal.settings.viewsSlideshowControls[options.slideshowID].top.type].pause(options);
}
}
catch(err) {
// Don't need to do anything on error.
}
try {
if (typeof Drupal.settings.viewsSlideshowControls[options.slideshowID].bottom.type != "undefined" && typeof Drupal[Drupal.settings.viewsSlideshowControls[options.slideshowID].bottom.type].pause == 'function') {
Drupal[Drupal.settings.viewsSlideshowControls[options.slideshowID].bottom.type].pause(options);
}
}
catch(err) {
// Don't need to do anything on error.
}
};
/**
* Views Slideshow Text Controls
*/
  // Add views slideshow api calls for views slideshow text controls.
Drupal.behaviors.viewsSlideshowControlsText = {
attach: function (context) {
// Process previous link
$('.views_slideshow_controls_text_previous:not(.views-slideshow-controls-text-previous-processed)', context).addClass('views-slideshow-controls-text-previous-processed').each(function() {
var uniqueID = $(this).attr('id').replace('views_slideshow_controls_text_previous_', '');
$(this).click(function() {
Drupal.viewsSlideshow.action({ "action": 'previousSlide', "slideshowID": uniqueID });
return false;
});
});
// Process next link
$('.views_slideshow_controls_text_next:not(.views-slideshow-controls-text-next-processed)', context).addClass('views-slideshow-controls-text-next-processed').each(function() {
var uniqueID = $(this).attr('id').replace('views_slideshow_controls_text_next_', '');
$(this).click(function() {
Drupal.viewsSlideshow.action({ "action": 'nextSlide', "slideshowID": uniqueID });
return false;
});
});
// Process pause link
$('.views_slideshow_controls_text_pause:not(.views-slideshow-controls-text-pause-processed)', context).addClass('views-slideshow-controls-text-pause-processed').each(function() {
var uniqueID = $(this).attr('id').replace('views_slideshow_controls_text_pause_', '');
$(this).click(function() {
if (Drupal.settings.viewsSlideshow[uniqueID].paused) {
Drupal.viewsSlideshow.action({ "action": 'play', "slideshowID": uniqueID, "force": true });
}
else {
Drupal.viewsSlideshow.action({ "action": 'pause', "slideshowID": uniqueID, "force": true });
}
return false;
});
});
}
};
Drupal.viewsSlideshowControlsText = Drupal.viewsSlideshowControlsText || {};
/**
* Implement the pause hook for text controls.
*/
Drupal.viewsSlideshowControlsText.pause = function (options) {
var pauseText = Drupal.theme.prototype['viewsSlideshowControlsPause'] ? Drupal.theme('viewsSlideshowControlsPause') : '';
$('#views_slideshow_controls_text_pause_' + options.slideshowID + ' a').text(pauseText);
};
/**
* Implement the play hook for text controls.
*/
Drupal.viewsSlideshowControlsText.play = function (options) {
var playText = Drupal.theme.prototype['viewsSlideshowControlsPlay'] ? Drupal.theme('viewsSlideshowControlsPlay') : '';
$('#views_slideshow_controls_text_pause_' + options.slideshowID + ' a').text(playText);
};
// Theme the resume control.
Drupal.theme.prototype.viewsSlideshowControlsPause = function () {
return Drupal.t('Resume');
};
// Theme the pause control.
Drupal.theme.prototype.viewsSlideshowControlsPlay = function () {
return Drupal.t('Pause');
};
/**
* Views Slideshow Pager
*/
Drupal.viewsSlideshowPager = Drupal.viewsSlideshowPager || {};
/**
* Implement the transitionBegin hook for pagers.
*/
Drupal.viewsSlideshowPager.transitionBegin = function (options) {
// Route the pager call to the correct pager type.
// Need to use try catch so we don't have to check to make sure every part
// of the object is defined.
try {
if (typeof Drupal.settings.viewsSlideshowPager[options.slideshowID].top.type != "undefined" && typeof Drupal[Drupal.settings.viewsSlideshowPager[options.slideshowID].top.type].transitionBegin == 'function') {
Drupal[Drupal.settings.viewsSlideshowPager[options.slideshowID].top.type].transitionBegin(options);
}
}
catch(err) {
// Don't need to do anything on error.
}
try {
if (typeof Drupal.settings.viewsSlideshowPager[options.slideshowID].bottom.type != "undefined" && typeof Drupal[Drupal.settings.viewsSlideshowPager[options.slideshowID].bottom.type].transitionBegin == 'function') {
Drupal[Drupal.settings.viewsSlideshowPager[options.slideshowID].bottom.type].transitionBegin(options);
}
}
catch(err) {
// Don't need to do anything on error.
}
};
/**
* Implement the goToSlide hook for pagers.
*/
Drupal.viewsSlideshowPager.goToSlide = function (options) {
// Route the pager call to the correct pager type.
// Need to use try catch so we don't have to check to make sure every part
// of the object is defined.
try {
if (typeof Drupal.settings.viewsSlideshowPager[options.slideshowID].top.type != "undefined" && typeof Drupal[Drupal.settings.viewsSlideshowPager[options.slideshowID].top.type].goToSlide == 'function') {
Drupal[Drupal.settings.viewsSlideshowPager[options.slideshowID].top.type].goToSlide(options);
}
}
catch(err) {
// Don't need to do anything on error.
}
try {
if (typeof Drupal.settings.viewsSlideshowPager[options.slideshowID].bottom.type != "undefined" && typeof Drupal[Drupal.settings.viewsSlideshowPager[options.slideshowID].bottom.type].goToSlide == 'function') {
Drupal[Drupal.settings.viewsSlideshowPager[options.slideshowID].bottom.type].goToSlide(options);
}
}
catch(err) {
// Don't need to do anything on error.
}
};
/**
* Implement the previousSlide hook for pagers.
*/
Drupal.viewsSlideshowPager.previousSlide = function (options) {
// Route the pager call to the correct pager type.
// Need to use try catch so we don't have to check to make sure every part
// of the object is defined.
try {
if (typeof Drupal.settings.viewsSlideshowPager[options.slideshowID].top.type != "undefined" && typeof Drupal[Drupal.settings.viewsSlideshowPager[options.slideshowID].top.type].previousSlide == 'function') {
Drupal[Drupal.settings.viewsSlideshowPager[options.slideshowID].top.type].previousSlide(options);
}
}
catch(err) {
// Don't need to do anything on error.
}
try {
if (typeof Drupal.settings.viewsSlideshowPager[options.slideshowID].bottom.type != "undefined" && typeof Drupal[Drupal.settings.viewsSlideshowPager[options.slideshowID].bottom.type].previousSlide == 'function') {
Drupal[Drupal.settings.viewsSlideshowPager[options.slideshowID].bottom.type].previousSlide(options);
}
}
catch(err) {
// Don't need to do anything on error.
}
};
/**
* Implement the nextSlide hook for pagers.
*/
Drupal.viewsSlideshowPager.nextSlide = function (options) {
// Route the pager call to the correct pager type.
// Need to use try catch so we don't have to check to make sure every part
// of the object is defined.
try {
if (typeof Drupal.settings.viewsSlideshowPager[options.slideshowID].top.type != "undefined" && typeof Drupal[Drupal.settings.viewsSlideshowPager[options.slideshowID].top.type].nextSlide == 'function') {
Drupal[Drupal.settings.viewsSlideshowPager[options.slideshowID].top.type].nextSlide(options);
}
}
catch(err) {
// Don't need to do anything on error.
}
try {
if (typeof Drupal.settings.viewsSlideshowPager[options.slideshowID].bottom.type != "undefined" && typeof Drupal[Drupal.settings.viewsSlideshowPager[options.slideshowID].bottom.type].nextSlide == 'function') {
Drupal[Drupal.settings.viewsSlideshowPager[options.slideshowID].bottom.type].nextSlide(options);
}
}
catch(err) {
// Don't need to do anything on error.
}
};
/**
* Views Slideshow Pager Fields
*/
  // Add views slideshow api calls for views slideshow pager fields.
Drupal.behaviors.viewsSlideshowPagerFields = {
attach: function (context) {
// Process pause on hover.
$('.views_slideshow_pager_field:not(.views-slideshow-pager-field-processed)', context).addClass('views-slideshow-pager-field-processed').each(function() {
// Parse out the location and unique id from the full id.
var pagerInfo = $(this).attr('id').split('_');
var location = pagerInfo[2];
pagerInfo.splice(0, 3);
var uniqueID = pagerInfo.join('_');
// Add the activate and pause on pager hover event to each pager item.
if (Drupal.settings.viewsSlideshowPagerFields[uniqueID][location].activatePauseOnHover) {
$(this).children().each(function(index, pagerItem) {
var mouseIn = function() {
Drupal.viewsSlideshow.action({ "action": 'goToSlide', "slideshowID": uniqueID, "slideNum": index });
Drupal.viewsSlideshow.action({ "action": 'pause', "slideshowID": uniqueID });
          };
var mouseOut = function() {
Drupal.viewsSlideshow.action({ "action": 'play', "slideshowID": uniqueID });
          };
if (jQuery.fn.hoverIntent) {
$(pagerItem).hoverIntent(mouseIn, mouseOut);
}
else {
$(pagerItem).hover(mouseIn, mouseOut);
}
});
}
else {
$(this).children().each(function(index, pagerItem) {
$(pagerItem).click(function() {
Drupal.viewsSlideshow.action({ "action": 'goToSlide', "slideshowID": uniqueID, "slideNum": index });
});
});
}
});
}
};
Drupal.viewsSlideshowPagerFields = Drupal.viewsSlideshowPagerFields || {};
/**
* Implement the transitionBegin hook for pager fields pager.
*/
Drupal.viewsSlideshowPagerFields.transitionBegin = function (options) {
    for (var pagerLocation in Drupal.settings.viewsSlideshowPager[options.slideshowID]) {
// Remove active class from pagers
$('[id^="views_slideshow_pager_field_item_' + pagerLocation + '_' + options.slideshowID + '"]').removeClass('active');
// Add active class to active pager.
$('#views_slideshow_pager_field_item_'+ pagerLocation + '_' + options.slideshowID + '_' + options.slideNum).addClass('active');
}
};
/**
* Implement the goToSlide hook for pager fields pager.
*/
Drupal.viewsSlideshowPagerFields.goToSlide = function (options) {
    for (var pagerLocation in Drupal.settings.viewsSlideshowPager[options.slideshowID]) {
// Remove active class from pagers
$('[id^="views_slideshow_pager_field_item_' + pagerLocation + '_' + options.slideshowID + '"]').removeClass('active');
// Add active class to active pager.
$('#views_slideshow_pager_field_item_' + pagerLocation + '_' + options.slideshowID + '_' + options.slideNum).addClass('active');
}
};
/**
* Implement the previousSlide hook for pager fields pager.
*/
Drupal.viewsSlideshowPagerFields.previousSlide = function (options) {
    for (var pagerLocation in Drupal.settings.viewsSlideshowPager[options.slideshowID]) {
// Get the current active pager.
var pagerNum = $('[id^="views_slideshow_pager_field_item_' + pagerLocation + '_' + options.slideshowID + '"].active').attr('id').replace('views_slideshow_pager_field_item_' + pagerLocation + '_' + options.slideshowID + '_', '');
// If we are on the first pager then activate the last pager.
// Otherwise activate the previous pager.
if (pagerNum == 0) {
        pagerNum = $('[id^="views_slideshow_pager_field_item_' + pagerLocation + '_' + options.slideshowID + '"]').length - 1;
}
else {
pagerNum--;
}
// Remove active class from pagers
$('[id^="views_slideshow_pager_field_item_' + pagerLocation + '_' + options.slideshowID + '"]').removeClass('active');
// Add active class to active pager.
$('#views_slideshow_pager_field_item_' + pagerLocation + '_' + options.slideshowID + '_' + pagerNum).addClass('active');
}
};
/**
* Implement the nextSlide hook for pager fields pager.
*/
Drupal.viewsSlideshowPagerFields.nextSlide = function (options) {
    for (var pagerLocation in Drupal.settings.viewsSlideshowPager[options.slideshowID]) {
// Get the current active pager.
var pagerNum = $('[id^="views_slideshow_pager_field_item_' + pagerLocation + '_' + options.slideshowID + '"].active').attr('id').replace('views_slideshow_pager_field_item_' + pagerLocation + '_' + options.slideshowID + '_', '');
      var totalPagers = $('[id^="views_slideshow_pager_field_item_' + pagerLocation + '_' + options.slideshowID + '"]').length;
// If we are on the last pager then activate the first pager.
// Otherwise activate the next pager.
pagerNum++;
if (pagerNum == totalPagers) {
pagerNum = 0;
}
// Remove active class from pagers
$('[id^="views_slideshow_pager_field_item_' + pagerLocation + '_' + options.slideshowID + '"]').removeClass('active');
// Add active class to active pager.
      $('#views_slideshow_pager_field_item_' + pagerLocation + '_' + options.slideshowID + '_' + pagerNum).addClass('active');
}
};
/**
* Views Slideshow Slide Counter
*/
Drupal.viewsSlideshowSlideCounter = Drupal.viewsSlideshowSlideCounter || {};
/**
* Implement the transitionBegin for the slide counter.
*/
Drupal.viewsSlideshowSlideCounter.transitionBegin = function (options) {
$('#views_slideshow_slide_counter_' + options.slideshowID + ' .num').text(options.slideNum + 1);
};
/**
* This is used as a router to process actions for the slideshow.
*/
Drupal.viewsSlideshow.action = function (options) {
// Set default values for our return status.
var status = {
'value': true,
'text': ''
    };
// If an action isn't specified return false.
if (typeof options.action == 'undefined' || options.action == '') {
status.value = false;
status.text = Drupal.t('There was no action specified.');
      return status;
}
// If we are using pause or play switch paused state accordingly.
if (options.action == 'pause') {
Drupal.settings.viewsSlideshow[options.slideshowID].paused = 1;
// If the calling method is forcing a pause then mark it as such.
if (options.force) {
Drupal.settings.viewsSlideshow[options.slideshowID].pausedForce = 1;
}
}
else if (options.action == 'play') {
// If the slideshow isn't forced pause or we are forcing a play then play
// the slideshow.
// Otherwise return telling the calling method that it was forced paused.
if (!Drupal.settings.viewsSlideshow[options.slideshowID].pausedForce || options.force) {
Drupal.settings.viewsSlideshow[options.slideshowID].paused = 0;
Drupal.settings.viewsSlideshow[options.slideshowID].pausedForce = 0;
}
else {
status.value = false;
status.text += ' ' + Drupal.t('This slideshow is forced paused.');
return status;
}
}
// We use a switch statement here mainly just to limit the type of actions
// that are available.
switch (options.action) {
case "goToSlide":
case "transitionBegin":
case "transitionEnd":
        // The three methods above require a slide number. Check that it is
        // defined and is an integer.
        if (typeof options.slideNum == 'undefined' || typeof options.slideNum !== 'number' || parseInt(options.slideNum, 10) != (options.slideNum - 0)) {
status.value = false;
status.text = Drupal.t('An invalid integer was specified for slideNum.');
}
case "pause":
case "play":
case "nextSlide":
case "previousSlide":
// Grab our list of methods.
var methods = Drupal.settings.viewsSlideshow[options.slideshowID]['methods'];
// if the calling method specified methods that shouldn't be called then
// exclude calling them.
var excludeMethodsObj = {};
if (typeof options.excludeMethods !== 'undefined') {
// We need to turn the excludeMethods array into an object so we can use the in
// function.
          for (var i = 0; i < options.excludeMethods.length; i++) {
            excludeMethodsObj[options.excludeMethods[i]] = '';
}
}
// Call every registered method and don't call excluded ones.
for (i = 0; i < methods[options.action].length; i++) {
if (Drupal[methods[options.action][i]] != undefined && typeof Drupal[methods[options.action][i]][options.action] == 'function' && !(methods[options.action][i] in excludeMethodsObj)) {
Drupal[methods[options.action][i]][options.action](options);
}
}
break;
// If it gets here it's because it's an invalid action.
default:
status.value = false;
status.text = Drupal.t('An invalid action "!action" was specified.', { "!action": options.action });
}
return status;
};
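  // Minimal usage sketch (the slideshow ID below is hypothetical); this mirrors
  // how the controls above drive the router:
  // Drupal.viewsSlideshow.action({ "action": 'goToSlide', "slideshowID": 'my_view-block', "slideNum": 2 });
  // Drupal.viewsSlideshow.action({ "action": 'pause', "slideshowID": 'my_view-block', "force": true });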
})(jQuery);
;<|fim▁end|> | $('.tabledrag-toggle-weight').text(Drupal.t('Show row weights')); |
<|file_name|>test_script_optimizer2.py<|end_file_name|><|fim▁begin|>from FireGirlOptimizer import *
FGPO = FireGirlPolicyOptimizer()
###To create, uncomment the following two lines:
FGPO.createFireGirlPathways(10,50)
#FGPO.saveFireGirlPathways("FG_pathways_20x50.fgl")
<|fim▁hole|>
#Setting Flags
FGPO.NORMALIZED_WEIGHTS_OBJ_FN = False
FGPO.NORMALIZED_WEIGHTS_F_PRIME = False
FGPO.AVERAGED_WEIGHTS_OBJ_FN = True
FGPO.AVERAGED_WEIGHTS_F_PRIME = True
print(" ")
print("Initial Values")
print("objfn: " + str(FGPO.calcObjFn()))
print("fprme: " + str(FGPO.calcObjFPrime()))
print("weights: " + str(FGPO.pathway_weights))
print("net values: " + str(FGPO.pathway_net_values))
#setting new policy
b = [0,0,0,0,0,0,0,0,0,0,0]
pol = FireGirlPolicy(b)
FGPO.setPolicy(pol)
print(" ")
###To Optimize, uncomment the following
print("Beginning Optimization Routine")
FGPO.USE_AVE_PROB = False
output=FGPO.optimizePolicy()
FGPO.printOptOutput(output)
print(" ")
print("Final Values")
print("objfn: " + str(FGPO.calcObjFn()))
print("fprme: " + str(FGPO.calcObjFPrime()))
print("weights: " + str(FGPO.pathway_weights))
print("net values: " + str(FGPO.pathway_net_values))<|fim▁end|> | ###To load (already created data), uncomment the following line
#FGPO.loadFireGirlPathways("FG_pathways_20x50.fgl") |
<|file_name|>package-info.java<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | package net.sf.esfinge.metadata.validate.minValue; |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import url
<|fim▁hole|>urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^list/$', views.list, name='list'),
url(r'^search/$', views.search, name='search'),
url(r'^add/$', views.add, name='add'),
url(r'^restaurant/(?P<id>[0-9]+)$', views.restaurant, name='restaurant'),
url(r'^images/(?P<id>[0-9]+)$', views.show_image, name='show_image')
]<|fim▁end|> | from . import views
|
<|file_name|>line.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The CGMath Developers. For a full listing of the authors,
// refer to the Cargo.toml file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate cgmath;
use cgmath::*;
#[test]
fn test_line_intersection() {<|fim▁hole|> let r1 = Ray::new(Point2::new(0.0f32, 0.0), Vector2::new(0.25, 0.0));
let l1 = Line::new(Point2::new(1.5f32, 0.0), Point2::new(0.5, 0.0));
assert_eq!((r1, l1).intersection(), Some(Point2::new(0.5, 0.0)));
// collinear, intersection is at ray origin
let r2 = Ray::new(Point2::new(0.0f32, 0.0), Vector2::new(5.0, 0.0));
let l2 = Line::new(Point2::new(-11.0f32, 0.0), Point2::new(1.0, 0.0));
assert_eq!((r2, l2).intersection(), Some(Point2::new(0.0, 0.0)));
// collinear, intersection is line origin
let r3 = Ray::new(Point2::new(0.0f32, 1.0), Vector2::new(0.0, -0.25));
let l3 = Line::new(Point2::new(0.0f32, 0.5), Point2::new(0.0, -0.5));
assert_eq!((r3, l3).intersection(), Some(Point2::new(0.0, 0.5)));
// collinear, no overlap
let r4 = Ray::new(Point2::new(0.0f32, 0.0), Vector2::new(3.0, 0.0));
let l4 = Line::new(Point2::new(-10.0f32, 0.0), Point2::new(-5.0, 0.0));
assert_eq!((r4, l4).intersection(), None);
// no intersection
let r5 = Ray::new(Point2::new(5.0f32, 5.0), Vector2::new(40.0, 8.0));
let l5 = Line::new(Point2::new(5.0f32, 4.8), Point2::new(10.0, 4.1));
assert_eq!((r5, l5).intersection(), None); // no intersection
// non-collinear intersection
let r6 = Ray::new(Point2::new(0.0f32, 0.0), Vector2::new(10.0, 10.0));
let l6 = Line::new(Point2::new(0.0f32, 10.0), Point2::new(10.0, 0.0));
assert_eq!((r6, l6).intersection(), Some(Point2::new(5.0, 5.0)));
// line is a point that does not intersect
let r7 = Ray::new(Point2::new(0.0f32, 0.0), Vector2::new(1.0, 1.0));
let l7 = Line::new(Point2::new(1.0f32, 0.0), Point2::new(1.0, 0.0));
assert_eq!((r7, l7).intersection(), None);
// line is a point that does intersect
let r8 = Ray::new(Point2::new(0.0f32, 0.0), Vector2::new(1.0, 0.0));
let l8 = Line::new(Point2::new(3.0f32, 0.0), Point2::new(3.0, 0.0));
assert_eq!((r8, l8).intersection(), Some(Point2::new(3.0, 0.0)));
// line is a collinear point but no intersection
let r9 = Ray::new(Point2::new(0.0f32, 0.0), Vector2::new(1.0, 0.0));
let l9 = Line::new(Point2::new(-1.0f32, 0.0), Point2::new(-1.0, 0.0));
assert_eq!((r9, l9).intersection(), None);
}<|fim▁end|> | // collinear, intersection is line dest |
<|file_name|>util.js<|end_file_name|><|fim▁begin|>"use strict";
var GLOBAL_MOUNT_POINT_MAX = Math.pow(2, 53);
var util = {
uuid: function(){
return Math.ceil(Math.random() * GLOBAL_MOUNT_POINT_MAX);
}
};
<|fim▁hole|>module.exports = util;<|fim▁end|> | |
<|file_name|>plugin.js<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2013, Yahoo! Inc. All rights reserved.
* Copyrights licensed under the New BSD License.
* See the accompanying LICENSE.txt file for terms.
*/
/*jslint nomen:true, node:true */
'use strict';
var core = require('./core');
module.exports = {
describe: {
summary: 'Compile dust templates to yui modules',
extensions: ['dust'],
nameParser: core.name
},
fileUpdated: function (evt, api) {
var self = this,
file = evt.file,
source_path = file.fullPath,
bundleName = file.bundleName,
templateName = this.describe.nameParser(source_path),
moduleName = bundleName + '-templates-' + templateName,
destination_path = moduleName + '.js';
return api.promise(function (fulfill, reject) {
var compiled,
partials;
try {
partials = core.partials(source_path);
compiled = core.compile(source_path, templateName);
} catch (e) {
reject(e);
}<|fim▁hole|> self._wrapAsYUI(bundleName, templateName, moduleName, compiled, partials))
.then(function () {
// provisioning the module to be used on the server side automatically
evt.bundle.useServerModules = evt.bundle.useServerModules || [];
evt.bundle.useServerModules.push(moduleName);
// we are now ready to roll
fulfill();
}, reject);
});
},
_wrapAsYUI: function (bundleName, templateName, moduleName, compiled, partials) {
// base dependency
var dependencies = ["template-base", "template-dust"];
// each partial should be provisioned thru another yui module
// and the name of the partial should translate into a yui module
// to become a dependency
partials = partials || [];
partials.forEach(function (name) {
// adding prefix to each partial
dependencies.push(bundleName + '-templates-' + name);
});
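        // For instance (hypothetical names): a bundle "app" whose template uses
        // the partial "header" ends up depending on module "app-templates-header".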
return [
'YUI.add("' + moduleName + '",function(Y, NAME){',
' var dust = Y.config.global.dust;',
'',
compiled,
'',
' Y.Template.register("' + bundleName + '/' + templateName + '", function (data) {',
            '        var result;',
' dust.render("' + templateName + '", data, function (err, content) {',
' result = content;',
' });',
' return result; // hack to make dust sync',
' });',
'}, "", {requires: ' + JSON.stringify(dependencies) + '});'
].join('\n');
}
};<|fim▁end|> |
// trying to write the destination file which will fulfill or reject the initial promise
api.writeFileInBundle(bundleName, destination_path, |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup, find_packages
import sys, os
setup(name='nanoweb',
version="1.0",
description="The nano web framework",
long_description="""\
The nano framework provides some glue for Webob and Routes.""",
classifiers=[],
keywords='WSGI',<|fim▁hole|> packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
# -*- Extra requirements: -*-
"routes",
"webob",
"json-schema-validator",
],
entry_points="""
# -*- Entry points: -*-
""",
)<|fim▁end|> | author='Eric Moritz',
author_email='[email protected]',
url='https://github.com/ericmoritz/nanoweb/',
license='BSD', |
<|file_name|>char.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::{char,str};
use std::convert::TryFrom;
use std::str::FromStr;
#[test]
fn test_convert() {
assert_eq!(u32::from('a'), 0x61);
assert_eq!(char::from(b'\0'), '\0');
assert_eq!(char::from(b'a'), 'a');
assert_eq!(char::from(b'\xFF'), '\u{FF}');
assert_eq!(char::try_from(0_u32), Ok('\0'));
assert_eq!(char::try_from(0x61_u32), Ok('a'));
assert_eq!(char::try_from(0xD7FF_u32), Ok('\u{D7FF}'));
assert!(char::try_from(0xD800_u32).is_err());
assert!(char::try_from(0xDFFF_u32).is_err());
assert_eq!(char::try_from(0xE000_u32), Ok('\u{E000}'));
assert_eq!(char::try_from(0x10FFFF_u32), Ok('\u{10FFFF}'));
assert!(char::try_from(0x110000_u32).is_err());
assert!(char::try_from(0xFFFF_FFFF_u32).is_err());
}
#[test]
fn test_from_str() {
assert_eq!(char::from_str("a").unwrap(), 'a');
assert_eq!(char::from_str("\0").unwrap(), '\0');
assert_eq!(char::from_str("\u{D7FF}").unwrap(), '\u{d7FF}');
assert!(char::from_str("").is_err());
assert!(char::from_str("abc").is_err());
}
#[test]
fn test_is_lowercase() {
assert!('a'.is_lowercase());
assert!('ö'.is_lowercase());
assert!('ß'.is_lowercase());
assert!(!'Ü'.is_lowercase());
assert!(!'P'.is_lowercase());
}
#[test]
fn test_is_uppercase() {
assert!(!'h'.is_uppercase());
assert!(!'ä'.is_uppercase());
assert!(!'ß'.is_uppercase());
assert!('Ö'.is_uppercase());
assert!('T'.is_uppercase());
}
#[test]
fn test_is_whitespace() {
assert!(' '.is_whitespace());
assert!('\u{2007}'.is_whitespace());
assert!('\t'.is_whitespace());
assert!('\n'.is_whitespace());
assert!(!'a'.is_whitespace());
assert!(!'_'.is_whitespace());
assert!(!'\u{0}'.is_whitespace());
}
#[test]
fn test_to_digit() {
assert_eq!('0'.to_digit(10), Some(0));
assert_eq!('1'.to_digit(2), Some(1));
assert_eq!('2'.to_digit(3), Some(2));
assert_eq!('9'.to_digit(10), Some(9));
assert_eq!('a'.to_digit(16), Some(10));
assert_eq!('A'.to_digit(16), Some(10));
assert_eq!('b'.to_digit(16), Some(11));
assert_eq!('B'.to_digit(16), Some(11));
assert_eq!('z'.to_digit(36), Some(35));
assert_eq!('Z'.to_digit(36), Some(35));
assert_eq!(' '.to_digit(10), None);
assert_eq!('$'.to_digit(36), None);
}
#[test]
fn test_to_lowercase() {
fn lower(c: char) -> String {
let iter: String = c.to_lowercase().collect();
let disp: String = c.to_lowercase().to_string();
assert_eq!(iter, disp);
iter
}
assert_eq!(lower('A'), "a");
assert_eq!(lower('Ö'), "ö");
assert_eq!(lower('ß'), "ß");
assert_eq!(lower('Ü'), "ü");
assert_eq!(lower('💩'), "💩");
assert_eq!(lower('Σ'), "σ");
assert_eq!(lower('Τ'), "τ");
assert_eq!(lower('Ι'), "ι");
assert_eq!(lower('Γ'), "γ");
assert_eq!(lower('Μ'), "μ");
assert_eq!(lower('Α'), "α");
assert_eq!(lower('Σ'), "σ");
assert_eq!(lower('Dž'), "dž");
assert_eq!(lower('fi'), "fi");
assert_eq!(lower('İ'), "i\u{307}");
}
#[test]
fn test_to_uppercase() {
fn upper(c: char) -> String {
let iter: String = c.to_uppercase().collect();<|fim▁hole|> let disp: String = c.to_uppercase().to_string();
assert_eq!(iter, disp);
iter
}
assert_eq!(upper('a'), "A");
assert_eq!(upper('ö'), "Ö");
assert_eq!(upper('ß'), "SS"); // not ẞ: Latin capital letter sharp s
assert_eq!(upper('ü'), "Ü");
assert_eq!(upper('💩'), "💩");
assert_eq!(upper('σ'), "Σ");
assert_eq!(upper('τ'), "Τ");
assert_eq!(upper('ι'), "Ι");
assert_eq!(upper('γ'), "Γ");
assert_eq!(upper('μ'), "Μ");
assert_eq!(upper('α'), "Α");
assert_eq!(upper('ς'), "Σ");
assert_eq!(upper('Dž'), "DŽ");
assert_eq!(upper('fi'), "FI");
assert_eq!(upper('ᾀ'), "ἈΙ");
}
#[test]
fn test_is_control() {
assert!('\u{0}'.is_control());
assert!('\u{3}'.is_control());
assert!('\u{6}'.is_control());
assert!('\u{9}'.is_control());
assert!('\u{7f}'.is_control());
assert!('\u{92}'.is_control());
assert!(!'\u{20}'.is_control());
assert!(!'\u{55}'.is_control());
assert!(!'\u{68}'.is_control());
}
#[test]
fn test_is_numeric() {
assert!('2'.is_numeric());
assert!('7'.is_numeric());
assert!('¾'.is_numeric());
assert!(!'c'.is_numeric());
assert!(!'i'.is_numeric());
assert!(!'z'.is_numeric());
assert!(!'Q'.is_numeric());
}
#[test]
fn test_escape_debug() {
fn string(c: char) -> String {
let iter: String = c.escape_debug().collect();
let disp: String = c.escape_debug().to_string();
assert_eq!(iter, disp);
iter
}
assert_eq!(string('\n'), "\\n");
assert_eq!(string('\r'), "\\r");
assert_eq!(string('\''), "\\'");
assert_eq!(string('"'), "\\\"");
assert_eq!(string(' '), " ");
assert_eq!(string('a'), "a");
assert_eq!(string('~'), "~");
assert_eq!(string('é'), "é");
assert_eq!(string('文'), "文");
assert_eq!(string('\x00'), "\\u{0}");
assert_eq!(string('\x1f'), "\\u{1f}");
assert_eq!(string('\x7f'), "\\u{7f}");
assert_eq!(string('\u{80}'), "\\u{80}");
assert_eq!(string('\u{ff}'), "\u{ff}");
assert_eq!(string('\u{11b}'), "\u{11b}");
assert_eq!(string('\u{1d4b6}'), "\u{1d4b6}");
assert_eq!(string('\u{301}'), "\\u{301}"); // combining character
assert_eq!(string('\u{200b}'),"\\u{200b}"); // zero width space
assert_eq!(string('\u{e000}'), "\\u{e000}"); // private use 1
assert_eq!(string('\u{100000}'), "\\u{100000}"); // private use 2
}
#[test]
fn test_escape_default() {
fn string(c: char) -> String {
let iter: String = c.escape_default().collect();
let disp: String = c.escape_default().to_string();
assert_eq!(iter, disp);
iter
}
assert_eq!(string('\n'), "\\n");
assert_eq!(string('\r'), "\\r");
assert_eq!(string('\''), "\\'");
assert_eq!(string('"'), "\\\"");
assert_eq!(string(' '), " ");
assert_eq!(string('a'), "a");
assert_eq!(string('~'), "~");
assert_eq!(string('é'), "\\u{e9}");
assert_eq!(string('\x00'), "\\u{0}");
assert_eq!(string('\x1f'), "\\u{1f}");
assert_eq!(string('\x7f'), "\\u{7f}");
assert_eq!(string('\u{80}'), "\\u{80}");
assert_eq!(string('\u{ff}'), "\\u{ff}");
assert_eq!(string('\u{11b}'), "\\u{11b}");
assert_eq!(string('\u{1d4b6}'), "\\u{1d4b6}");
assert_eq!(string('\u{200b}'), "\\u{200b}"); // zero width space
assert_eq!(string('\u{e000}'), "\\u{e000}"); // private use 1
assert_eq!(string('\u{100000}'), "\\u{100000}"); // private use 2
}
#[test]
fn test_escape_unicode() {
fn string(c: char) -> String {
let iter: String = c.escape_unicode().collect();
let disp: String = c.escape_unicode().to_string();
assert_eq!(iter, disp);
iter
}
assert_eq!(string('\x00'), "\\u{0}");
assert_eq!(string('\n'), "\\u{a}");
assert_eq!(string(' '), "\\u{20}");
assert_eq!(string('a'), "\\u{61}");
assert_eq!(string('\u{11b}'), "\\u{11b}");
assert_eq!(string('\u{1d4b6}'), "\\u{1d4b6}");
}
#[test]
fn test_encode_utf8() {
fn check(input: char, expect: &[u8]) {
let mut buf = [0; 4];
let ptr = buf.as_ptr();
let s = input.encode_utf8(&mut buf);
assert_eq!(s.as_ptr() as usize, ptr as usize);
assert!(str::from_utf8(s.as_bytes()).is_ok());
assert_eq!(s.as_bytes(), expect);
}
check('x', &[0x78]);
check('\u{e9}', &[0xc3, 0xa9]);
check('\u{a66e}', &[0xea, 0x99, 0xae]);
check('\u{1f4a9}', &[0xf0, 0x9f, 0x92, 0xa9]);
}
#[test]
fn test_encode_utf16() {
fn check(input: char, expect: &[u16]) {
let mut buf = [0; 2];
let ptr = buf.as_mut_ptr();
let b = input.encode_utf16(&mut buf);
assert_eq!(b.as_mut_ptr() as usize, ptr as usize);
assert_eq!(b, expect);
}
check('x', &[0x0078]);
check('\u{e9}', &[0x00e9]);
check('\u{a66e}', &[0xa66e]);
check('\u{1f4a9}', &[0xd83d, 0xdca9]);
}
#[test]
fn test_len_utf16() {
assert!('x'.len_utf16() == 1);
assert!('\u{e9}'.len_utf16() == 1);
assert!('\u{a66e}'.len_utf16() == 1);
assert!('\u{1f4a9}'.len_utf16() == 2);
}
#[test]
fn test_decode_utf16() {
fn check(s: &[u16], expected: &[Result<char, u16>]) {
let v = char::decode_utf16(s.iter().cloned())
.map(|r| r.map_err(|e| e.unpaired_surrogate()))
.collect::<Vec<_>>();
assert_eq!(v, expected);
}
check(&[0xD800, 0x41, 0x42], &[Err(0xD800), Ok('A'), Ok('B')]);
check(&[0xD800, 0], &[Err(0xD800), Ok('\0')]);
}
#[test]
fn ed_iterator_specializations() {
// Check counting
assert_eq!('\n'.escape_default().count(), 2);
assert_eq!('c'.escape_default().count(), 1);
assert_eq!(' '.escape_default().count(), 1);
assert_eq!('\\'.escape_default().count(), 2);
assert_eq!('\''.escape_default().count(), 2);
// Check nth
// Check that OoB is handled correctly
assert_eq!('\n'.escape_default().nth(2), None);
assert_eq!('c'.escape_default().nth(1), None);
assert_eq!(' '.escape_default().nth(1), None);
assert_eq!('\\'.escape_default().nth(2), None);
assert_eq!('\''.escape_default().nth(2), None);
// Check the first char
assert_eq!('\n'.escape_default().nth(0), Some('\\'));
assert_eq!('c'.escape_default().nth(0), Some('c'));
assert_eq!(' '.escape_default().nth(0), Some(' '));
assert_eq!('\\'.escape_default().nth(0), Some('\\'));
assert_eq!('\''.escape_default().nth(0), Some('\\'));
// Check the second char
assert_eq!('\n'.escape_default().nth(1), Some('n'));
assert_eq!('\\'.escape_default().nth(1), Some('\\'));
assert_eq!('\''.escape_default().nth(1), Some('\''));
// Check the last char
assert_eq!('\n'.escape_default().last(), Some('n'));
assert_eq!('c'.escape_default().last(), Some('c'));
assert_eq!(' '.escape_default().last(), Some(' '));
assert_eq!('\\'.escape_default().last(), Some('\\'));
assert_eq!('\''.escape_default().last(), Some('\''));
}
#[test]
fn eu_iterator_specializations() {
fn check(c: char) {
let len = c.escape_unicode().count();
// Check OoB
assert_eq!(c.escape_unicode().nth(len), None);
// For all possible in-bound offsets
let mut iter = c.escape_unicode();
for offset in 0..len {
// Check last
assert_eq!(iter.clone().last(), Some('}'));
// Check len
assert_eq!(iter.len(), len - offset);
// Check size_hint (= len in ExactSizeIterator)
assert_eq!(iter.size_hint(), (iter.len(), Some(iter.len())));
// Check counting
assert_eq!(iter.clone().count(), len - offset);
// Check nth
assert_eq!(c.escape_unicode().nth(offset), iter.next());
}
// Check post-last
assert_eq!(iter.clone().last(), None);
assert_eq!(iter.clone().count(), 0);
}
check('\u{0}');
check('\u{1}');
check('\u{12}');
check('\u{123}');
check('\u{1234}');
check('\u{12340}');
check('\u{10FFFF}');
}<|fim▁end|> | |
<|file_name|>nugetrestore.ts<|end_file_name|><|fim▁begin|>import * as path from "path";
import * as tl from "vsts-task-lib/task";
import {IExecOptions, IExecSyncResult} from "vsts-task-lib/toolrunner";
import * as auth from "packaging-common/nuget/Authentication";<|fim▁hole|>import peParser = require("packaging-common/pe-parser/index");
import {VersionInfo} from "packaging-common/pe-parser/VersionResource";
import * as nutil from "packaging-common/nuget/Utility";
import * as pkgLocationUtils from "packaging-common/locationUtilities";
import * as telemetry from "utility-common/telemetry";
import INuGetCommandOptions from "packaging-common/nuget/INuGetCommandOptions2";
class RestoreOptions implements INuGetCommandOptions {
constructor(
public nuGetPath: string,
public configFile: string,
public noCache: boolean,
public disableParallelProcessing: boolean,
public verbosity: string,
public packagesDirectory: string,
public environment: ngToolRunner.NuGetEnvironmentSettings,
public authInfo: auth.NuGetExtendedAuthInfo,
) { }
}
export async function run(nuGetPath: string): Promise<void> {
let packagingLocation: pkgLocationUtils.PackagingLocation;
try {
packagingLocation = await pkgLocationUtils.getPackagingUris(pkgLocationUtils.ProtocolType.NuGet);
} catch (error) {
tl.debug("Unable to get packaging URIs, using default collection URI");
tl.debug(JSON.stringify(error));
const collectionUrl = tl.getVariable("System.TeamFoundationCollectionUri");
packagingLocation = {
PackagingUris: [collectionUrl],
DefaultPackagingUri: collectionUrl};
}
const buildIdentityDisplayName: string = null;
const buildIdentityAccount: string = null;
try {
nutil.setConsoleCodePage();
// Reading inputs
const solutionPattern = tl.getPathInput("solution", true, false);
const useLegacyFind: boolean = tl.getVariable("NuGet.UseLegacyFindFiles") === "true";
let filesList: string[] = [];
if (!useLegacyFind) {
const findOptions: tl.FindOptions = <tl.FindOptions>{};
const matchOptions: tl.MatchOptions = <tl.MatchOptions>{};
const searchPatterns: string[] = nutil.getPatternsArrayFromInput(solutionPattern);
filesList = tl.findMatch(undefined, searchPatterns, findOptions, matchOptions);
}
else {
filesList = nutil.resolveFilterSpec(
solutionPattern,
tl.getVariable("System.DefaultWorkingDirectory") || process.cwd());
}
filesList.forEach((solutionFile) => {
if (!tl.stats(solutionFile).isFile()) {
throw new Error(tl.loc("NotARegularFile", solutionFile));
}
});
const noCache = tl.getBoolInput("noCache");
const disableParallelProcessing = tl.getBoolInput("disableParallelProcessing");
const verbosity = tl.getInput("verbosityRestore");
let packagesDirectory = tl.getPathInput("packagesDirectory");
if (!tl.filePathSupplied("packagesDirectory")) {
packagesDirectory = null;
}
const nuGetVersion: VersionInfo = await peParser.getFileVersionInfoAsync(nuGetPath);
// Discovering NuGet quirks based on the version
tl.debug("Getting NuGet quirks");
const quirks = await ngToolRunner.getNuGetQuirksAsync(nuGetPath);
// Clauses ordered in this way to avoid short-circuit evaluation, so the debug info printed by the functions
// is unconditionally displayed
const useV1CredProvider: boolean = ngToolRunner.isCredentialProviderEnabled(quirks);
const useV2CredProvider: boolean = ngToolRunner.isCredentialProviderV2Enabled(quirks);
const credProviderPath: string = nutil.locateCredentialProvider(useV2CredProvider);
const useCredConfig = ngToolRunner.isCredentialConfigEnabled(quirks)
&& (!useV1CredProvider && !useV2CredProvider);
// Setting up auth-related variables
tl.debug("Setting up auth");
let urlPrefixes = packagingLocation.PackagingUris;
tl.debug(`Discovered URL prefixes: ${urlPrefixes}`);
// Note to readers: This variable will be going away once we have a fix for the location service for
// customers behind proxies
const testPrefixes = tl.getVariable("NuGetTasks.ExtraUrlPrefixesForTesting");
if (testPrefixes) {
urlPrefixes = urlPrefixes.concat(testPrefixes.split(";"));
tl.debug(`All URL prefixes: ${urlPrefixes}`);
}
const accessToken = pkgLocationUtils.getSystemAccessToken();
const externalAuthArr: auth.ExternalAuthInfo[] = commandHelper.GetExternalAuthInfoArray("externalEndpoints");
const authInfo = new auth.NuGetExtendedAuthInfo(
new auth.InternalAuthInfo(
urlPrefixes,
accessToken,
((useV1CredProvider || useV2CredProvider) ? credProviderPath : null),
useCredConfig),
externalAuthArr);
const environmentSettings: ngToolRunner.NuGetEnvironmentSettings = {
credProviderFolder: useV2CredProvider === false ? credProviderPath : null,
V2CredProviderPath: useV2CredProvider === true ? credProviderPath : null,
extensionsDisabled: true,
};
// Setting up sources, either from provided config file or from feed selection
tl.debug("Setting up sources");
let nuGetConfigPath : string = undefined;
let configFile: string = undefined;
let selectOrConfig = tl.getInput("selectOrConfig");
// This IF is here in order to provide a value to nuGetConfigPath (if option selected, if user provided it)
// and then pass it into the config helper
if (selectOrConfig === "config") {
nuGetConfigPath = tl.getPathInput("nugetConfigPath", false, true);
if (!tl.filePathSupplied("nugetConfigPath")) {
nuGetConfigPath = undefined;
}
// If using NuGet version 4.8 or greater and nuget.config was provided,
// do not create temp config file
if (useV2CredProvider && nuGetConfigPath) {
configFile = nuGetConfigPath;
}
}
// If there was no nuGetConfigPath, NuGetConfigHelper will create a temp one
const nuGetConfigHelper = new NuGetConfigHelper2(
nuGetPath,
nuGetConfigPath,
authInfo,
environmentSettings,
null);
let credCleanup = () => { return; };
// Now that the NuGetConfigHelper was initialized with all the known information we can proceed
// and check if the user picked the 'select' option to fill out the config file if needed
if (selectOrConfig === "select") {
const sources: auth.IPackageSource[] = new Array<auth.IPackageSource>();
const feed = tl.getInput("feedRestore");
if (feed) {
const feedUrl: string = await nutil.getNuGetFeedRegistryUrl(
packagingLocation.DefaultPackagingUri,
feed,
nuGetVersion,
accessToken);
sources.push({
feedName: feed,
feedUri: feedUrl,
isInternal: true,
});
}
const includeNuGetOrg = tl.getBoolInput("includeNuGetOrg", false);
if (includeNuGetOrg) {
const nuGetSource: auth.IPackageSource = nuGetVersion.productVersion.a < 3
? auth.NuGetOrgV2PackageSource
                : auth.NuGetOrgV3PackageSource;
sources.push(nuGetSource);
}
// Creating NuGet.config for the user
if (sources.length > 0)
{
// tslint:disable-next-line:max-line-length
tl.debug(`Adding the following sources to the config file: ${sources.map((x) => x.feedName).join(";")}`);
nuGetConfigHelper.addSourcesToTempNuGetConfig(sources);
credCleanup = () => tl.rmRF(nuGetConfigHelper.tempNugetConfigPath);
nuGetConfigPath = nuGetConfigHelper.tempNugetConfigPath;
}
else {
tl.debug("No sources were added to the temp NuGet.config file");
}
}
if (!useV2CredProvider && !configFile) {
// Setting creds in the temp NuGet.config if needed
await nuGetConfigHelper.setAuthForSourcesInTempNuGetConfigAsync();
tl.debug('Setting nuget.config auth');
} else {
// In case of !!useV2CredProvider, V2 credential provider will handle external credentials
tl.debug('No temp nuget.config auth');
}
// if configfile has already been set, let it be
if (!configFile) {
// Use config file if:
// - User selected "Select feeds" option
// - User selected "NuGet.config" option and the nuGetConfig input has a value
let useConfigFile: boolean = selectOrConfig === "select" || (selectOrConfig === "config" && !!nuGetConfigPath);
configFile = useConfigFile ? nuGetConfigHelper.tempNugetConfigPath : undefined;
if (useConfigFile)
{
credCleanup = () => tl.rmRF(nuGetConfigHelper.tempNugetConfigPath);
}
}
tl.debug(`ConfigFile: ${configFile}`);
try {
const restoreOptions = new RestoreOptions(
nuGetPath,
configFile,
noCache,
disableParallelProcessing,
verbosity,
packagesDirectory,
environmentSettings,
authInfo);
for (const solutionFile of filesList) {
restorePackages(solutionFile, restoreOptions);
}
} finally {
credCleanup();
}
tl.setResult(tl.TaskResult.Succeeded, tl.loc("PackagesInstalledSuccessfully"));
} catch (err) {
tl.error(err);
if (buildIdentityDisplayName || buildIdentityAccount) {
tl.warning(tl.loc("BuildIdentityPermissionsHint", buildIdentityDisplayName, buildIdentityAccount));
}
tl.setResult(tl.TaskResult.Failed, tl.loc("PackagesFailedToInstall"));
}
}
function restorePackages(solutionFile: string, options: RestoreOptions): IExecSyncResult {
const nugetTool = ngToolRunner.createNuGetToolRunner(options.nuGetPath, options.environment, options.authInfo);
nugetTool.arg("restore");
nugetTool.arg(solutionFile);
if (options.packagesDirectory) {
nugetTool.arg("-PackagesDirectory");
nugetTool.arg(options.packagesDirectory);
}
if (options.noCache) {
nugetTool.arg("-NoCache");
}
if (options.disableParallelProcessing) {
nugetTool.arg("-DisableParallelProcessing");
}
if (options.verbosity && options.verbosity !== "-") {
nugetTool.arg("-Verbosity");
nugetTool.arg(options.verbosity);
}
nugetTool.arg("-NonInteractive");
if (options.configFile) {
nugetTool.arg("-ConfigFile");
nugetTool.arg(options.configFile);
}
const execResult = nugetTool.execSync({ cwd: path.dirname(solutionFile) } as IExecOptions);
if (execResult.code !== 0) {
telemetry.logResult("Packaging", "NuGetCommand", execResult.code);
throw tl.loc("Error_NugetFailedWithCodeAndErr",
execResult.code,
execResult.stderr ? execResult.stderr.trim() : execResult.stderr);
}
return execResult;
}<|fim▁end|> | import * as commandHelper from "packaging-common/nuget/CommandHelper";
import {NuGetConfigHelper2} from "packaging-common/nuget/NuGetConfigHelper2";
import * as ngToolRunner from "packaging-common/nuget/NuGetToolRunner2"; |
<|file_name|>product.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# © 2017 Houssine BAKKALI - Coop IT Easy
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp import fields, models
class ProductTemplate(models.Model):
_inherit = "product.template"
<|fim▁hole|><|fim▁end|> | default_code = fields.Char(related='product_variant_ids.default_code', string='Internal Reference', store=True) |
<|file_name|>views.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from django.http import HttpResponsePermanentRedirect
from django.conf import settings
from django.core.urlresolvers import reverse
from readthedocs.builds.models import Build, Version
from readthedocs.builds.filters import BuildFilter
from readthedocs.projects.models import Project
from redis import Redis, ConnectionError
log = logging.getLogger(__name__)
class BuildBase(object):
model = Build
def get_queryset(self):
self.project_slug = self.kwargs.get('project_slug', None)
self.project = get_object_or_404(
Project.objects.protected(self.request.user),
slug=self.project_slug
)
queryset = Build.objects.public(user=self.request.user, project=self.project)
return queryset
class BuildList(BuildBase, ListView):
def get_context_data(self, **kwargs):
context = super(BuildList, self).get_context_data(**kwargs)
        build_filter = BuildFilter(self.request.GET, queryset=self.get_queryset())
active_builds = self.get_queryset().exclude(state="finished").values('id')
context['project'] = self.project
        context['filter'] = build_filter
context['active_builds'] = active_builds
context['versions'] = Version.objects.public(user=self.request.user, project=self.project)
try:
redis = Redis.from_url(settings.BROKER_URL)
context['queue_length'] = redis.llen('celery')
except ConnectionError:
context['queue_length'] = None
return context
class BuildDetail(BuildBase, DetailView):
pk_url_kwarg = 'build_pk'
def get_context_data(self, **kwargs):
context = super(BuildDetail, self).get_context_data(**kwargs)
context['project'] = self.project
return context
# Old build view redirects
def builds_redirect_list(request, project_slug):
return HttpResponsePermanentRedirect(reverse('builds_project_list', args=[project_slug]))
def builds_redirect_detail(request, project_slug, pk):
return HttpResponsePermanentRedirect(reverse('builds_detail', args=[project_slug, pk]))<|fim▁end|> | import logging
from django.shortcuts import get_object_or_404
from django.views.generic import ListView, DetailView |
<|file_name|>List.react.js<|end_file_name|><|fim▁begin|><|fim▁hole|> render: function() {
var listItems = [];
_.each(this.props.value, function(data, index) {
      listItems.push(<li key={index}>{JSON.stringify(data)}</li>);
});
return (
<div>
<strong>{this.props.title}:</strong>
<ol>{listItems}</ol>
</div>
);
}
});
module.exports = List;<|fim▁end|> | var React = require('react');
var _ = require('underscore');
var List = React.createClass({ |
<|file_name|>comb_sort.py<|end_file_name|><|fim▁begin|>"""
This is a pure Python implementation of the comb sort algorithm.
Comb sort is a relatively simple sorting algorithm originally designed by
Wlodzimierz Dobosiewicz in 1980. It was rediscovered by Stephen Lacey and
Richard Box in 1991. Comb sort improves on the bubble sort algorithm.
In bubble sort, the distance (or gap) between two compared elements is always one.
Comb sort's improvement is that the gap can be much larger than 1, so the small
values near the end of the list that slow bubble sort down are moved forward early.
More info on: https://en.wikipedia.org/wiki/Comb_sort
For doctests, run the following command:
python -m doctest -v comb_sort.py
or
python3 -m doctest -v comb_sort.py
For manual testing run:
python comb_sort.py
"""
def comb_sort(data: list) -> list:
"""Pure implementation of comb sort algorithm in Python
:param data: mutable collection with comparable items
:return: the same collection in ascending order
Examples:
>>> comb_sort([0, 5, 3, 2, 2])
[0, 2, 2, 3, 5]
>>> comb_sort([])
[]
>>> comb_sort([99, 45, -7, 8, 2, 0, -15, 3])
[-15, -7, 0, 2, 3, 8, 45, 99]
"""
shrink_factor = 1.3
gap = len(data)
completed = False
while not completed:
# Update the gap value for a next comb
gap = int(gap / shrink_factor)
if gap <= 1:
completed = True
index = 0
while index + gap < len(data):
if data[index] > data[index + gap]:
# Swap values
data[index], data[index + gap] = data[index + gap], data[index]
completed = False
index += 1
return data
<|fim▁hole|>if __name__ == "__main__":
import doctest
doctest.testmod()
user_input = input("Enter numbers separated by a comma:\n").strip()
unsorted = [int(item) for item in user_input.split(",")]
print(comb_sort(unsorted))<|fim▁end|> | |
<|file_name|>admin.py<|end_file_name|><|fim▁begin|># Django
from administrator.models import Administrator
# local Django
from django.contrib import admin<|fim▁hole|>
class AdministratorAdmin(admin.ModelAdmin):
pass
admin.site.register(Administrator, AdministratorAdmin)<|fim▁end|> | |
<|file_name|>encoder.go<|end_file_name|><|fim▁begin|>package encoding
import (
"bytes"
"encoding/binary"
"errors"
"fmt"
"io"
"math"
"github.com/SermoDigital/golang-neo4j-bolt-driver/structures"
)
const (
// NilMarker represents the encoding marker byte for a nil object
NilMarker = 0xC0
// TrueMarker represents the encoding marker byte for a true boolean object
TrueMarker = 0xC3
// FalseMarker represents the encoding marker byte for a false boolean object
FalseMarker = 0xC2
// Int8Marker represents the encoding marker byte for a int8 object
Int8Marker = 0xC8
// Int16Marker represents the encoding marker byte for a int16 object
Int16Marker = 0xC9
// Int32Marker represents the encoding marker byte for a int32 object
Int32Marker = 0xCA
// Int64Marker represents the encoding marker byte for a int64 object
Int64Marker = 0xCB
// FloatMarker represents the encoding marker byte for a float32/64 object
FloatMarker = 0xC1
// TinyStringMarker represents the encoding marker byte for a string object
TinyStringMarker = 0x80
// String8Marker represents the encoding marker byte for a string object
String8Marker = 0xD0
// String16Marker represents the encoding marker byte for a string object
String16Marker = 0xD1
// String32Marker represents the encoding marker byte for a string object
String32Marker = 0xD2
// TinySliceMarker represents the encoding marker byte for a slice object
TinySliceMarker = 0x90
// Slice8Marker represents the encoding marker byte for a slice object
Slice8Marker = 0xD4
// Slice16Marker represents the encoding marker byte for a slice object
Slice16Marker = 0xD5
// Slice32Marker represents the encoding marker byte for a slice object
Slice32Marker = 0xD6
// TinyMapMarker represents the encoding marker byte for a map object
TinyMapMarker = 0xA0
// Map8Marker represents the encoding marker byte for a map object
Map8Marker = 0xD8
// Map16Marker represents the encoding marker byte for a map object
Map16Marker = 0xD9
// Map32Marker represents the encoding marker byte for a map object
Map32Marker = 0xDA
// TinyStructMarker represents the encoding marker byte for a struct object
TinyStructMarker = 0xB0
// Struct8Marker represents the encoding marker byte for a struct object
Struct8Marker = 0xDC
// Struct16Marker represents the encoding marker byte for a struct object
Struct16Marker = 0xDD
)
// EndMessage is the data to send to end a message
var EndMessage = []byte{byte(0x00), byte(0x00)}
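
// Wire-format note: writeChunk frames each chunk as a 2-byte big-endian
// length header followed by that many payload bytes, and Flush terminates
// the message with EndMessage (a zero-length chunk). For example, a 3-byte
// payload 0xC3 0xC2 0xC0 (true, false, nil) is sent as:
//
//	0x00 0x03 0xC3 0xC2 0xC0 0x00 0x00
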
// Encoder encodes objects of different types to the given stream.
// Attempts to support all builtin golang types, when it can be confidently
// mapped to a data type from:
// http://alpha.neohq.net/docs/server-manual/bolt-serialization.html#bolt-packstream-structures
// (version v3.1.0-M02 at the time of writing this).
//
// Maps and Slices are a special case, where only map[string]interface{} and
// []interface{} are supported. The interface for maps and slices may be more
// permissive in the future.
type Encoder struct {
w *chunkWriter
}
// DefaultChunkSize is the default maximum chunk size, in bytes, used by NewEncoder.
const DefaultChunkSize = math.MaxUint16
// NewEncoder initializes a new Encoder with the provided chunk size.
func NewEncoder(w io.Writer) *Encoder {
const size = DefaultChunkSize
return &Encoder{w: &chunkWriter{w: w, buf: make([]byte, size), size: size}}
}
// SetChunkSize sets the Encoder's chunk size. It flushes any pending writes
// using the new chunk size.
func (e *Encoder) SetChunkSize(size uint16) error {
if e.w.size == int(size) {
return nil
}
e.w.size = int(size)
// Create a new buffer if necessary.
if e.w.size > len(e.w.buf) {
e.w.buf = make([]byte, e.w.size)
return nil
}
	// Flush what we have so far if our current chunk is >= size.
	for e.w.n >= e.w.size {
		pending := e.w.n
		e.w.n = e.w.size
		err := e.w.writeChunk() // writeChunk resets e.w.n to 0.
		if err != nil {
			return err
		}
		// Slide the unwritten remainder down to the front of the buffer.
		e.w.n = copy(e.w.buf, e.w.buf[e.w.size:pending])
	}
return nil
}
// Marshal is used to marshal an object to the bolt interface encoded bytes.
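//
// Usage sketch (illustrative):
//
//	payload, err := Marshal(map[string]interface{}{"name": "bolt", "n": int64(2)})
//	// payload holds the chunked packstream bytes plus the end-of-message marker.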
func Marshal(v interface{}) ([]byte, error) {
var b bytes.Buffer
err := NewEncoder(&b).Encode(v)
return b.Bytes(), err
}
type chunkWriter struct {
w io.Writer
buf []byte
n int
size int
}
// Write writes to the Encoder. Writes are not necessarily written to the
// underlying Writer until Flush is called.
func (w *chunkWriter) Write(p []byte) (n int, err error) {
var m int
for n < len(p) {
m = copy(w.buf[w.n:], p[n:])
w.n += m
n += m
if w.n == w.size {
err = w.writeChunk()
if err != nil {
return n, err
}
}
}
return n, nil
}
// WriteString writes a string to the Encoder. Writes are not necessarily written to
// the underlying Writer until Flush is called.
func (w *chunkWriter) WriteString(s string) (n int, err error) {
var m int
for n < len(s) {
m = copy(w.buf[w.n:], s[n:])
w.n += m
n += m
if w.n == w.size {
err = w.writeChunk()
if err != nil {
return n, err
}
}
}
return n, nil
}
// Flush writes the existing data to the underlying writer and then ends
// the stream.
func (w *chunkWriter) Flush() error {
err := w.writeChunk()
if err != nil {
return err
}
_, err = w.w.Write(EndMessage)
return err
}
func (w *chunkWriter) writeMarker(marker uint8) error {
w.buf[w.n] = marker
w.n++
if w.n == w.size {
return w.writeChunk()
}
return nil
}
func (w *chunkWriter) writeChunk() error {
if w.n == 0 {
return nil
}
err := binary.Write(w.w, binary.BigEndian, uint16(w.n))
if err != nil {
return err
}
_, err = w.w.Write(w.buf[:w.n])
w.n = 0
return err
}
func (e *Encoder) write(v interface{}) error {
return binary.Write(e.w, binary.BigEndian, v)
}
// Encode encodes an object to the stream
func (e *Encoder) Encode(val interface{}) error {
err := e.encode(val)
if err != nil {
return err
}
return e.w.Flush()
}
// Encode encodes an object to the stream
func (e *Encoder) encode(val interface{}) error {
switch val := val.(type) {
case nil:
return e.w.writeMarker(NilMarker)
case bool:
if val {
return e.w.writeMarker(TrueMarker)
}
return e.w.writeMarker(FalseMarker)
case int:
return e.encodeInt(int64(val))
case int8:
return e.encodeInt(int64(val))
case int16:
return e.encodeInt(int64(val))
case int32:
return e.encodeInt(int64(val))
case int64:
return e.encodeInt(val)
case uint:
if ^uint(0) > math.MaxUint64 && val > math.MaxInt64 {
return fmt.Errorf("integer too big: %d. Max integer supported: %d", val, math.MaxInt64)
}
return e.encodeInt(int64(val))
case uint8:
return e.encodeInt(int64(val))
case uint16:
return e.encodeInt(int64(val))
case uint32:
return e.encodeInt(int64(val))
case uint64:
if val > math.MaxInt64 {
return fmt.Errorf("integer too big: %d. Max integer supported: %d", val, math.MaxInt64)
}
return e.encodeInt(int64(val))
case float32:
return e.encodeFloat(float64(val))
case float64:
return e.encodeFloat(val)
case string:
return e.encodeString(val)
case []interface{}:
return e.encodeSlice(val)
case map[string]interface{}:
return e.encodeMap(val)
case structures.Structure:
return e.encodeStructure(val)
default:
return fmt.Errorf("unrecognized type when encoding data for Bolt transport: %T %+v", val, val)
}<|fim▁hole|>}
func (e *Encoder) encodeInt(val int64) (err error) {
switch {
case val < math.MinInt32:
// Write as INT_64
if err = e.w.writeMarker(Int64Marker); err != nil {
return err
}
return e.write(val)
case val < math.MinInt16:
// Write as INT_32
if err = e.w.writeMarker(Int32Marker); err != nil {
return err
}
return e.write(int32(val))
case val < math.MinInt8:
// Write as INT_16
if err = e.w.writeMarker(Int16Marker); err != nil {
return err
}
return e.write(int16(val))
case val < -16:
// Write as INT_8
if err = e.w.writeMarker(Int8Marker); err != nil {
return err
}
return e.write(int8(val))
case val < math.MaxInt8:
// Write as TINY_INT
return e.write(int8(val))
case val < math.MaxInt16:
// Write as INT_16
if err = e.w.writeMarker(Int16Marker); err != nil {
return err
}
return e.write(int16(val))
case val < math.MaxInt32:
// Write as INT_32
if err = e.w.writeMarker(Int32Marker); err != nil {
return err
}
return e.write(int32(val))
case val <= math.MaxInt64:
// Write as INT_64
if err = e.w.writeMarker(Int64Marker); err != nil {
return err
}
return e.write(val)
default:
return fmt.Errorf("Int too long to write: %d", val)
}
}
func (e *Encoder) encodeFloat(val float64) error {
if err := e.w.writeMarker(FloatMarker); err != nil {
return err
}
return e.write(val)
}
func (e *Encoder) encodeString(str string) (err error) {
length := len(str)
switch {
case length <= 15:
err = e.w.writeMarker(TinyStringMarker + uint8(length))
if err != nil {
return err
}
_, err = e.w.WriteString(str)
return err
case length <= math.MaxUint8:
if err = e.w.writeMarker(String8Marker); err != nil {
return err
}
if err = e.write(uint8(length)); err != nil {
return err
}
_, err = e.w.WriteString(str)
return err
case length <= math.MaxUint16:
if err = e.w.writeMarker(String16Marker); err != nil {
return err
}
if err = e.write(uint16(length)); err != nil {
return err
}
_, err = e.w.WriteString(str)
return err
	case length <= math.MaxUint32:
if err = e.w.writeMarker(String32Marker); err != nil {
return err
}
if err = e.write(uint32(length)); err != nil {
return err
}
_, err = e.w.WriteString(str)
return err
default:
return errors.New("string too long to write")
}
}
func (e *Encoder) encodeSlice(val []interface{}) (err error) {
length := len(val)
switch {
case length <= 15:
err = e.w.writeMarker(TinySliceMarker + uint8(length))
if err != nil {
return err
}
case length <= math.MaxUint8:
if err = e.w.writeMarker(Slice8Marker); err != nil {
return err
}
if err = e.write(uint8(length)); err != nil {
return err
}
case length <= math.MaxUint16:
if err = e.w.writeMarker(Slice16Marker); err != nil {
return err
}
if err = e.write(uint16(length)); err != nil {
return err
}
case length <= math.MaxUint32:
if err := e.w.writeMarker(Slice32Marker); err != nil {
return err
}
if err = e.write(uint32(length)); err != nil {
return err
}
default:
return errors.New("slice too long to write")
}
// Encode Slice values
for _, item := range val {
err = e.encode(item)
if err != nil {
return err
}
}
return nil
}
func (e *Encoder) encodeMap(val map[string]interface{}) (err error) {
length := len(val)
switch {
case length <= 15:
err = e.w.writeMarker(TinyMapMarker + uint8(length))
if err != nil {
return err
}
case length <= math.MaxUint8:
if err = e.w.writeMarker(Map8Marker); err != nil {
return err
}
if err = e.write(uint8(length)); err != nil {
return err
}
case length <= math.MaxUint16:
if err = e.w.writeMarker(Map16Marker); err != nil {
return err
}
if err = e.write(uint16(length)); err != nil {
return err
}
case length <= math.MaxUint32:
if err = e.w.writeMarker(Map32Marker); err != nil {
return err
}
if err = e.write(uint32(length)); err != nil {
return err
}
default:
return errors.New("map too long to write")
}
// Encode Map values
for k, v := range val {
if err := e.encode(k); err != nil {
return err
}
if err := e.encode(v); err != nil {
return err
}
}
return nil
}
func (e *Encoder) encodeStructure(val structures.Structure) (err error) {
fields := val.AllFields()
length := len(fields)
switch {
case length <= 15:
err = e.w.writeMarker(TinyStructMarker + uint8(length))
if err != nil {
return err
}
case length <= math.MaxUint8:
if err = e.w.writeMarker(Struct8Marker); err != nil {
return err
}
if err = e.write(uint8(length)); err != nil {
return err
}
case length <= math.MaxUint16:
if err = e.w.writeMarker(Struct16Marker); err != nil {
return err
}
if err = e.write(uint16(length)); err != nil {
return err
}
default:
return errors.New("structure too large to write")
}
err = e.w.writeMarker(uint8(val.Signature()))
if err != nil {
return err
}
for _, field := range fields {
err = e.encode(field)
if err != nil {
return err
}
}
return nil
}<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# (c) 2014 Rajat Agarwal
import os, sys
import unittest
import sqoot
if 'PUBLIC_API_KEY' in os.environ and 'PRIVATE_API_KEY' in os.environ:
PUBLIC_API_KEY = os.environ['PUBLIC_API_KEY']
PRIVATE_API_KEY = os.environ['PRIVATE_API_KEY']
else:
try:
from _creds import *
except ImportError:
print "Please create a creds.py file in this package, based upon creds.example.py"
TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), 'testdata')
sys.path.append('/home/ragarwal/sqoot')
<|fim▁hole|>class BaseEndpointTestCase(unittest.TestCase):
def setUp(self):
self.api = sqoot.Sqoot(
privateApiKey=PRIVATE_API_KEY,
publicApiKey=PUBLIC_API_KEY,
)<|fim▁end|> | |
<|file_name|>job.go<|end_file_name|><|fim▁begin|>package module
import (
"bufio"
"encoding/base64"
"encoding/json"
"errors"
"fmt"
"io"
"io/ioutil"
"net/http"
"net/url"
"strings"
"sync"
"time"
. "github.com/logrusorgru/aurora"
homeDir "github.com/mitchellh/go-homedir"
apiv1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/api/resource"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/client-go/kubernetes"
_ "k8s.io/client-go/plugin/pkg/client/auth/gcp"
"k8s.io/client-go/tools/clientcmd"
"github.com/Huawei/containerops/common/utils"
"github.com/Huawei/containerops/pilotage/model"
)
var (
RWlock sync.RWMutex
GlobalOutputs map[string]string
)
// Job describes a single task that is executed as a Kubernetes Pod.
type Job struct {
ID int64 `json:"-" yaml:"-"`
T string `json:"type" yaml:"type"`
Name string `json:"name" yaml:"name,omitempty"`
Kubectl string `json:"kubectl" yaml:"kubectl"`
Endpoint string `json:"endpoint" yaml:"endpoint"`
Timeout int64 `json:"timeout" yaml:"timeout"`
Status string `json:"status,omitempty" yaml:"status,omitempty"`
Resources Resource `json:"resources" yaml:"resources"`
Logs []string `json:"logs,omitempty" yaml:"logs,omitempty"`
Environments []map[string]string `json:"environments" yaml:"environments"`
Outputs []string `json:"outputs,omitempty" yaml:"outputs,omitempty"`
Subscriptions []map[string]string `json:"subscriptions,omitempty" yaml:"subscriptions,omitempty"`
}
// Resources is
type Resource struct {
CPU string `json:"cpu" yaml:"cpu"`
Memory string `json:"memory" yaml:"memory"`
}
func init() {
GlobalOutputs = make(map[string]string)
}
// TODO: filter the log output with different colors.
func (j *Job) Log(log string, verbose, timestamp bool) {
j.Logs = append(j.Logs, fmt.Sprintf("[%s] %s", time.Now().String(), log))
l := new(model.LogV1)
l.Create(model.INFO, model.JOB, j.ID, log)
if verbose == true {
if timestamp == true {
fmt.Println(Cyan(fmt.Sprintf("[%s] %s", time.Now().String(), strings.TrimSpace(log))))
} else {
fmt.Println(Cyan(log))
}
}
}
func (j *Job) Run(name string, verbose, timestamp bool, f *Flow, stageIndex, actionIndex int) (string, error) {
j.SaveDatabase(verbose, timestamp, f, stageIndex, actionIndex)
randomContainerName := fmt.Sprintf("%s-%s", name, utils.RandomString(10))
podTemplate := j.PodTemplates(randomContainerName, f)
if err := j.InvokePod(podTemplate, randomContainerName, verbose, timestamp, f, stageIndex, actionIndex); err != nil {
return Failure, err
}
j.Status = Success
return Success, nil
}
func (j *Job) RunKubectl(name string, verbose, timestamp bool, f *Flow, stageIndex, actionIndex int) (string, error) {
j.SaveDatabase(verbose, timestamp, f, stageIndex, actionIndex)
originYaml := []byte{}
if u, err := url.Parse(j.Kubectl); err != nil {
return Failure, err
} else {
if u.Scheme == "" {
if utils.IsFileExist(j.Kubectl) == true {
// Read YAML file from local
data, err := ioutil.ReadFile(j.Kubectl)
if err != nil {
return Failure, err
}
originYaml = data
} else {
return Failure, errors.New("Kubectl PATH is invalid")
}
} else {
// Download YAML from URL
resp, err := http.Get(j.Kubectl)
if err != nil {
return Failure, err
}
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
return Failure, err
}
originYaml = body
}
}
base64Yaml := base64.StdEncoding.EncodeToString(originYaml)
	// TODO: the port and IP address should be configurable from settings.
home, _ := homeDir.Dir()
configFile, err := clientcmd.BuildConfigFromFlags("", fmt.Sprintf("%s/.kube/config", home))
if err != nil {
return Failure, err
}
apiServerInsecure := fmt.Sprintf("http:%s:8080", strings.Split(configFile.Host, ":")[1])
namespace := "default"
if f.Namespace != "" {
namespace = f.Namespace
}
randomContainerName := fmt.Sprintf("kubectl-create-%s", utils.RandomString(10))
podTemplate := j.KubectlPodTemplates(randomContainerName, apiServerInsecure, namespace, base64Yaml, f)
if err := j.InvokePod(podTemplate, randomContainerName, verbose, timestamp, f, stageIndex, actionIndex); err != nil {
return Failure, err
}
j.Status = Success
return Success, nil
}
func (j *Job) InvokePod(podTemplate *apiv1.Pod, randomContainerName string, verbose, timestamp bool, f *Flow, stageIndex, actionIndex int) error {
home, _ := homeDir.Dir()
if config, err := clientcmd.BuildConfigFromFlags("", fmt.Sprintf("%s/.kube/config", home)); err != nil {
return err
} else {
if clientSet, err := kubernetes.NewForConfig(config); err != nil {
return err
} else {
p := clientSet.CoreV1().Pods(apiv1.NamespaceDefault)
if _, err := p.Create(podTemplate); err != nil {
j.Status = Failure
return err
}
j.Status = Pending
time.Sleep(time.Second * 2)
start := time.Now()
ForLoop:
for {
pod, err := p.Get(randomContainerName, metav1.GetOptions{})
if err != nil {
j.Log(err.Error(), false, timestamp)
return err
}
switch pod.Status.Phase {
case apiv1.PodPending:
j.Log(fmt.Sprintf("Job %s is %s", j.Name, pod.Status.Phase), verbose, timestamp)
case apiv1.PodRunning, apiv1.PodSucceeded:
break ForLoop
case apiv1.PodUnknown:
j.Log(fmt.Sprintf("Job %s is %s, Detail:[%s] \n", j.Name, pod.Status.Phase, pod.Status.ContainerStatuses[0].State.String()), verbose, timestamp)
case apiv1.PodFailed:
j.Log(fmt.Sprintf("Job %s is %s, Detail:[%s] \n", j.Name, pod.Status.Phase, pod.Status.ContainerStatuses[0].State.String()), verbose, timestamp)
break ForLoop
}
duration := time.Now().Sub(start)
if duration.Minutes() > 3 {
return errors.New(fmt.Sprintf("Job %s Pending more than 3 minutes", j.Name))
}
time.Sleep(time.Second * 2)
}
req := p.GetLogs(randomContainerName, &apiv1.PodLogOptions{
Follow: true,
Timestamps: false,
})
if read, err := req.Stream(); err != nil {
// TODO Parse ContainerCreating error
} else {
reader := bufio.NewReader(read)
for {
line, err := reader.ReadString('\n')
if err != nil {
if err == io.EOF {
break
}
j.Status = Failure
return err
}
if strings.Contains(line, "[COUT]") && len(j.Outputs) != 0 {
j.FetchOutputs(f.Stages[stageIndex].Name, f.Stages[stageIndex].Actions[actionIndex].Name, line)
}
j.Status = Running
j.Log(line, false, timestamp)
f.Log(line, verbose, timestamp)
}
}
}
}
return nil
}
func (j *Job) SaveDatabase(verbose, timestamp bool, f *Flow, stageIndex, actionIndex int) {
// Save Job into database
job := new(model.JobV1)
resources, _ := j.Resources.JSON()
environments, _ := json.Marshal(j.Environments)
outputs, _ := json.Marshal(j.Outputs)
subscriptions, _ := json.Marshal(j.Subscriptions)
jobID, err := job.Put(f.Stages[stageIndex].Actions[actionIndex].ID, j.Timeout, j.Name, j.T, j.Endpoint, string(resources), string(environments), string(outputs), string(subscriptions))
if err != nil {
j.Log(fmt.Sprintf("Save Job [%s] errorK: %s", j.Name, err.Error()), false, timestamp)
}
j.ID = jobID
// Record Job data
jobData := new(model.JobDataV1)
startTime := time.Now()
defer func() {
currentNumber, err := jobData.GetNumbers(j.ID)
if err != nil {
j.Log(fmt.Sprintf("Get Job Data [%s] Numbers error: %s", j.Name, err.Error()), verbose, timestamp)
}
if err := jobData.Put(j.ID, currentNumber+1, j.Status, startTime, time.Now()); err != nil {
j.Log(fmt.Sprintf("Save Job Data [%s] error: %s", j.Name, err.Error()), false, timestamp)
}
}()
}
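// FetchOutputs parses a "[COUT]" output line printed by the container. For
// example (illustrative), a job that declares an output named "status" is
// expected to print:
//
//	[COUT] status = success
//
// and the value is stored in GlobalOutputs under the key
// "<stage>.<action>.<job>[status]".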
func (j *Job) FetchOutputs(stageName, actionName, log string) error {
	output := strings.TrimPrefix(log, "[COUT]")
	// Split on the first "=" only, so output values may themselves contain "=".
	splits := strings.SplitN(output, "=", 2)
	if len(splits) < 2 {
		return fmt.Errorf("malformed output line: %s", log)
	}
	for _, o := range j.Outputs {
		if strings.TrimSpace(o) == strings.TrimSpace(splits[0]) {
			key := fmt.Sprintf("%s.%s.%s[%s]", stageName, actionName, j.Name, o)
			RWlock.Lock()
			GlobalOutputs[key] = strings.TrimSpace(splits[1])
			RWlock.Unlock()
		}
	}
	return nil
}
func (j *Job) KubectlPodTemplates(randomContainerName, apiServer, namespace, yamlContent string, f *Flow) *apiv1.Pod {
result := &apiv1.Pod{
TypeMeta: metav1.TypeMeta{
Kind: "Pod",
APIVersion: "v1",
},
ObjectMeta: metav1.ObjectMeta{<|fim▁hole|> Spec: apiv1.PodSpec{
Containers: []apiv1.Container{
{
Name: randomContainerName,
					// TODO: make this image configurable from settings.
Image: "hub.opshub.sh/containerops/kubectl-create:1.7.4",
},
},
RestartPolicy: apiv1.RestartPolicyNever,
},
}
//Add api-server address, namespace & yaml content
coDataValue := fmt.Sprintf(" api-server-url=%s namespace=%s", apiServer, namespace)
result.Spec.Containers[0].Env = append(result.Spec.Containers[0].Env, apiv1.EnvVar{Name: "CO_DATA", Value: coDataValue})
result.Spec.Containers[0].Env = append(result.Spec.Containers[0].Env, apiv1.EnvVar{Name: "YAML", Value: yamlContent})
//Add user defined enviroments
if len(j.Environments) > 0 {
for _, environment := range j.Environments {
for k, v := range environment {
env := apiv1.EnvVar{
Name: k,
Value: v,
}
result.Spec.Containers[0].Env = append(result.Spec.Containers[0].Env, env)
}
}
}
//Add flow enviroments
if len(f.Environments) > 0 {
for _, environment := range f.Environments {
for k, v := range environment {
env := apiv1.EnvVar{
Name: k,
Value: v,
}
result.Spec.Containers[0].Env = append(result.Spec.Containers[0].Env, env)
}
}
}
return result
}
func (j *Job) PodTemplates(randomContainerName string, f *Flow) *apiv1.Pod {
result := &apiv1.Pod{
TypeMeta: metav1.TypeMeta{
Kind: "Pod",
APIVersion: "v1",
},
ObjectMeta: metav1.ObjectMeta{
Name: randomContainerName,
},
Spec: apiv1.PodSpec{
Containers: []apiv1.Container{
{
Name: randomContainerName,
Image: j.Endpoint,
Resources: apiv1.ResourceRequirements{
Requests: apiv1.ResourceList{
apiv1.ResourceCPU: resource.MustParse(j.Resources.CPU),
apiv1.ResourceMemory: resource.MustParse(j.Resources.Memory),
},
},
},
},
RestartPolicy: apiv1.RestartPolicyNever,
},
}
//Add user defined enviroments
if len(j.Environments) > 0 {
for _, environment := range j.Environments {
for k, v := range environment {
env := apiv1.EnvVar{
Name: k,
Value: v,
}
result.Spec.Containers[0].Env = append(result.Spec.Containers[0].Env, env)
}
}
}
//Add flow enviroments
if len(f.Environments) > 0 {
for _, environment := range f.Environments {
for k, v := range environment {
env := apiv1.EnvVar{
Name: k,
Value: v,
}
result.Spec.Containers[0].Env = append(result.Spec.Containers[0].Env, env)
}
}
}
//Add user defined subscrptions
if len(j.Subscriptions) > 0 {
for _, subscription := range j.Subscriptions {
for k, env_key := range subscription {
if env_value, ok := GlobalOutputs[k]; ok {
env := apiv1.EnvVar{
Name: env_key,
Value: env_value,
}
result.Spec.Containers[0].Env = append(result.Spec.Containers[0].Env, env)
}
}
}
}
return result
}
func (r *Resource) JSON() ([]byte, error) {
return json.Marshal(&r)
}<|fim▁end|> | Name: randomContainerName,
}, |
<|file_name|>setup.rs<|end_file_name|><|fim▁begin|>// This file is part of rust-web/twig
//
// For the copyright and license information, please view the LICENSE
// file that was distributed with this source code.
//! Stores the Twig configuration.
use std::path::Path;
use std::rc::Rc;
use extension;
use extension::api::Extension;
use engine::{Engine, options, Options, extension_registry, ExtensionRegistry};
use engine::error::TwigError;
use api::error::Traced;
#[allow(dead_code)]
pub const VERSION: &'static str = "1.18.1";
<|fim▁hole|>pub struct Setup {
opt: Options,
ext: ExtensionRegistry,
}
impl Default for Setup {
fn default() -> Setup {
let mut ext = ExtensionRegistry::default();
ext.push(extension::Core::new()).unwrap(); // core extension
Setup {
opt: Options::default(),
ext: ext,
}
}
}
/// Builds an instance of the Twig Engine, according to supplied options and engine extensions.
///
/// The following extensions will be registered by default:
/// * core
/// * escaper
/// * optimizer
///
/// # Examples
///
/// ```
/// use twig::{Setup, Engine};
/// use twig::extension::Debug;
///
/// let mut setup = Setup::default()
/// .set_strict_variables(true)
/// .add_extension(Debug::new()).unwrap();
/// let engine = Engine::new(setup).unwrap();
/// ```
#[allow(dead_code)]
impl Setup {
/// Create engine from setup.
///
/// # Examples
///
/// ```
/// use twig::Setup;
///
/// let twig = Setup::default().engine().unwrap();
/// ```
pub fn engine(mut self) -> Result<Engine, Traced<TwigError>> {
let mut c = Engine::default();
let o = self.opt;
// add default extensions
try_traced!(self.ext.push(extension::Escaper::new(o.autoescape)));
try_traced!(self.ext.push(extension::Optimizer::new(o.optimizations)));
// init extensions
try_traced!(self.ext.init(&mut c));
c.ext = Some(Rc::new(self.ext));
// TODO: register staging extension (!)
// // init staging extension
// let staging = ext::Staging::new();
// try_traced!(c.init_extension(&*staging));
// c.ext_staging = Some(staging);
return Ok(c);
}
/// Registers an extension
pub fn add_extension(mut self, extension: Box<Extension>) -> Result<Self, Traced<TwigError>> {
try_traced!(self.ext.push(extension));
Ok(self)
}
/// When set to true, it automatically set "auto_reload" to true as well
/// (default to false)
pub fn set_debug(mut self, debug: bool) -> Self {
self.opt.debug = debug;
self
}
/// The charset used by the templates (default to UTF-8)
pub fn set_charset(mut self, set_charset: options::Charset) -> Self {
self.opt.charset = set_charset;
self
}
/// Whether to ignore invalid variables in templates
/// (default to false).
pub fn set_strict_variables(mut self, strict_variables: bool) -> Self {
self.opt.strict_variables = strict_variables;
self
}
/// Whether to enable auto-escaping (default to html):
/// * false: disable auto-escaping
/// * true: equivalent to html
/// * html, js: set the autoescaping to one of the supported strategies
/// * filename: set the autoescaping strategy based on the template filename extension
    /// * callback: a callback that returns an escaping strategy based on the template filename
pub fn set_autoescape(mut self, autoescape: options::Autoescape) -> Self {
self.opt.autoescape = autoescape;
self
}
/// An absolute path where to store the compiled templates (optional)
pub fn set_cache(mut self, cache: Option<&Path>) -> Self {
self.opt.cache = cache.map(|reference| reference.to_owned());
self
}
/// Whether to reload the template if the original source changed (optional).
/// If you don't provide the auto_reload option, it will be
/// determined automatically based on the debug value.
pub fn set_auto_reload(mut self, auto_reload: Option<bool>) -> Self {
self.opt.auto_reload = auto_reload;
self
}
/// A flag that indicates whether optimizations are applied
pub fn set_optimizations(mut self, optimizations: options::Optimizations) -> Self {
self.opt.optimizations = optimizations;
self
}
/// Get all options
pub fn options(&self) -> &Options {
&self.opt
}
/// Get all registered extensions
pub fn extensions(&self) -> extension_registry::Iter {
self.ext.iter()
}
}
#[allow(dead_code)]
#[cfg(test)]
mod test {
// use super::*;
// #[test]
// pub fn get_unary_operators() {
// let mut e = Environment;
// e.get_unary_operators();
// }
// #[test]
// pub fn get_binary_operators() {
// let mut e = Environment;
// e.get_binary_operators();
// }
}<|fim▁end|> | #[derive(Debug)] |
<|file_name|>tensor_array_grad.py<|end_file_name|><|fim▁begin|># Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Gradients for operators defined in tensor_array_ops.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.ops import tensor_array_ops
# TODO(b/31222613): These ops may be differentiable, and there may be
# latent bugs here.
ops.NotDifferentiable("TensorArray")
ops.NotDifferentiable("TensorArrayGrad")
ops.NotDifferentiable("TensorArraySize")
ops.NotDifferentiable("TensorArrayClose")
ops.NotDifferentiable("TensorArrayV2")
ops.NotDifferentiable("TensorArrayGradV2")
ops.NotDifferentiable("TensorArraySizeV2")
ops.NotDifferentiable("TensorArrayCloseV2")
ops.NotDifferentiable("TensorArrayV3")
ops.NotDifferentiable("TensorArrayGradV3")
ops.NotDifferentiable("TensorArraySizeV3")
ops.NotDifferentiable("TensorArrayCloseV3")
def _GetGradSource(op_or_tensor):
"""Identify which call to tf.gradients created this gradient op or tensor.
TensorArray gradient calls use an accumulator TensorArray object. If
multiple gradients are calculated and run in the same session, the multiple
  gradient nodes may accidentally flow through the same accumulator TensorArray.
This double counting breaks the TensorArray gradient flow.
The solution is to identify which gradient call this particular
TensorArray*Grad is being called in, by looking at the input gradient
tensor's name, and create or lookup an accumulator gradient TensorArray
associated with this specific call. This solves any confusion and ensures
different gradients from the same forward graph get their own accumulators.
This function creates the unique label associated with the tf.gradients call
that is used to create the gradient TensorArray.
Args:
op_or_tensor: `Tensor` or `Operation` which is an input to a
TensorArray*Grad call.
Returns:
A python string, the unique label associated with this particular
gradients calculation.
Raises:
ValueError: If not called within a gradients calculation.
"""
name_tokens = op_or_tensor.name.split("/")
grad_pos = [i for i, x in enumerate(name_tokens) if x.startswith("gradients")]
if not grad_pos:
raise ValueError(
"Expected op/tensor name to start with gradients (excluding scope)"
", got: %s" % op_or_tensor.name)
return "/".join(name_tokens[:grad_pos[-1] + 1])
@ops.RegisterGradient("TensorArrayRead")
@ops.RegisterGradient("TensorArrayReadV2")
@ops.RegisterGradient("TensorArrayReadV3")
def _TensorArrayReadGrad(op, grad):
"""Gradient for TensorArrayRead.
Args:
op: Forward TensorArrayRead op.
grad: Gradient `Tensor` to TensorArrayRead.
Returns:
A flow `Tensor`, which can be used in control dependencies to
force the write of `grad` to the gradient `TensorArray`.
"""
# Note: the forward flow dependency in the call to grad() is necessary for
# the case of dynamic sized TensorArrays. When creating the gradient
# TensorArray, the final size of the forward array must be known.
# For this we need to wait until it has been created by depending on
# the input flow of the original op.
handle = op.inputs[0]
index = op.inputs[1]
flow = op.inputs[2]
dtype = op.get_attr("dtype")
grad_source = _GetGradSource(grad)
g = (tensor_array_ops.TensorArray(dtype=dtype, handle=handle, flow=flow,
colocate_with_first_write_call=False)
.grad(source=grad_source, flow=flow))
w_g = g.write(index, grad)
return [None, None, w_g.flow]
@ops.RegisterGradient("TensorArrayWrite")
@ops.RegisterGradient("TensorArrayWriteV2")
@ops.RegisterGradient("TensorArrayWriteV3")
def _TensorArrayWriteGrad(op, flow):
"""Gradient for TensorArrayWrite.
Args:
op: Forward TensorArrayWrite op.
flow: Gradient `Tensor` flow to TensorArrayWrite.
Returns:
A grad `Tensor`, the gradient created in an upstream ReadGrad or PackGrad.
"""
# handle is the output store_handle of TensorArrayReadGrad or
# the handle output of TensorArrayWriteGrad. we must use this one.
handle = op.inputs[0]
index = op.inputs[1]
dtype = op.get_attr("T")
grad_source = _GetGradSource(flow)
g = (tensor_array_ops.TensorArray(dtype=dtype, handle=handle, flow=flow,
colocate_with_first_write_call=False)
.grad(source=grad_source, flow=flow))
grad = g.read(index)
return [None, None, grad, flow]
@ops.RegisterGradient("TensorArrayGather")
@ops.RegisterGradient("TensorArrayGatherV2")
@ops.RegisterGradient("TensorArrayGatherV3")
def _TensorArrayGatherGrad(op, grad):
"""Gradient for TensorArrayGather.
Args:
op: Forward TensorArrayGather op.
grad: Gradient `Tensor` to TensorArrayGather.
Returns:
A flow `Tensor`, which can be used in control dependencies to
force the write of `grad` to the gradient `TensorArray`.
"""
# Note: the forward flow dependency in the call to grad() is necessary for
# the case of dynamic sized TensorArrays. When creating the gradient
# TensorArray, the final size of the forward array must be known.
# For this we need to wait until it has been created by depending on
# the input flow of the original op.
handle = op.inputs[0]
indices = op.inputs[1]
flow = op.inputs[2]
dtype = op.get_attr("dtype")
grad_source = _GetGradSource(grad)
g = (tensor_array_ops.TensorArray(dtype=dtype, handle=handle, flow=flow,
colocate_with_first_write_call=False)
.grad(source=grad_source, flow=flow))
u_g = g.scatter(indices, grad)
return [None, None, u_g.flow]
@ops.RegisterGradient("TensorArrayScatter")
@ops.RegisterGradient("TensorArrayScatterV2")
@ops.RegisterGradient("TensorArrayScatterV3")
def _TensorArrayScatterGrad(op, flow):
"""Gradient for TensorArrayScatter.
Args:
op: Forward TensorArrayScatter op.
flow: Gradient `Tensor` flow to TensorArrayScatter.
<|fim▁hole|> Returns:
A grad `Tensor`, the gradient created in upstream ReadGrads or PackGrad.
"""
handle = op.inputs[0]
indices = op.inputs[1]
dtype = op.get_attr("T")
grad_source = _GetGradSource(flow)
g = (tensor_array_ops.TensorArray(dtype=dtype, handle=handle, flow=flow,
colocate_with_first_write_call=False)
.grad(source=grad_source, flow=flow))
grad = g.gather(indices)
return [None, None, grad, flow]
@ops.RegisterGradient("TensorArrayConcat")
@ops.RegisterGradient("TensorArrayConcatV2")
@ops.RegisterGradient("TensorArrayConcatV3")
def _TensorArrayConcatGrad(op, grad, unused_lengths_grad):
"""Gradient for TensorArrayConcat.
Args:
op: Forward TensorArrayConcat op.
grad: Gradient `Tensor` to TensorArrayConcat.
Returns:
A flow `Tensor`, which can be used in control dependencies to
force the write of `grad` to the gradient `TensorArray`.
"""
# Note: the forward flow dependency in the call to grad() is necessary for
# the case of dynamic sized TensorArrays. When creating the gradient
# TensorArray, the final size of the forward array must be known.
# For this we need to wait until it has been created by depending on
# the input flow of the original op.
handle = op.inputs[0]
flow = op.inputs[1]
lengths = op.outputs[1]
dtype = op.get_attr("dtype")
grad_source = _GetGradSource(grad)
g = (tensor_array_ops.TensorArray(dtype=dtype, handle=handle, flow=flow,
colocate_with_first_write_call=False)
.grad(source=grad_source, flow=flow))
u_g = g.split(grad, lengths=lengths)
# handle, flow_in
return [None, u_g.flow]
@ops.RegisterGradient("TensorArraySplit")
@ops.RegisterGradient("TensorArraySplitV2")
@ops.RegisterGradient("TensorArraySplitV3")
def _TensorArraySplitGrad(op, flow):
"""Gradient for TensorArraySplit.
Args:
op: Forward TensorArraySplit op.
flow: Gradient `Tensor` flow to TensorArraySplit.
Returns:
A grad `Tensor`, the gradient created in upstream ReadGrads or PackGrad.
"""
handle = op.inputs[0]
dtype = op.get_attr("T")
grad_source = _GetGradSource(flow)
g = (tensor_array_ops.TensorArray(dtype=dtype, handle=handle, flow=flow,
colocate_with_first_write_call=False)
.grad(source=grad_source, flow=flow))
grad = g.concat()
# handle, value, lengths, flow_in
  return [None, grad, None, flow]
<|file_name|>filter.py<|end_file_name|>
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
# old (pre-0.8.4) location for ChangeFilter
from buildbot.changes.filter import ChangeFilter
_hush_pyflakes = ChangeFilter  # keep pyflakes happy
<|file_name|>test_blasdot.py<|end_file_name|>
from __future__ import division, absolute_import, print_function
import sys
from itertools import product
import numpy as np
from numpy.core import zeros, float64
from numpy.testing import dec, TestCase, assert_almost_equal, assert_, \
assert_raises, assert_array_equal, assert_allclose, assert_equal
from numpy.core.multiarray import inner as inner_
DECPREC = 14
class TestInner(TestCase):
def test_vecself(self):
"""Ticket 844."""
        # Inner product of a vector with itself segfaults or gives a
        # meaningless result.
a = zeros(shape = (1, 80), dtype = float64)
p = inner_(a, a)
assert_almost_equal(p, 0, decimal = DECPREC)
try:
import numpy.core._dotblas as _dotblas
except ImportError:
_dotblas = None
@dec.skipif(_dotblas is None, "Numpy is not compiled with _dotblas")
def test_blasdot_used():
from numpy.core import dot, vdot, inner, alterdot, restoredot
assert_(dot is _dotblas.dot)
assert_(vdot is _dotblas.vdot)
assert_(inner is _dotblas.inner)
assert_(alterdot is _dotblas.alterdot)
assert_(restoredot is _dotblas.restoredot)
def test_dot_2args():
from numpy.core import dot
a = np.array([[1, 2], [3, 4]], dtype=float)
b = np.array([[1, 0], [1, 1]], dtype=float)
c = np.array([[3, 2], [7, 4]], dtype=float)
d = dot(a, b)
assert_allclose(c, d)
def test_dot_3args():
np.random.seed(22)
f = np.random.random_sample((1024, 16))
v = np.random.random_sample((16, 32))
r = np.empty((1024, 32))
for i in range(12):
np.dot(f, v, r)
assert_equal(sys.getrefcount(r), 2)
r2 = np.dot(f, v, out=None)
assert_array_equal(r2, r)
assert_(r is np.dot(f, v, out=r))
v = v[:, 0].copy() # v.shape == (16,)
r = r[:, 0].copy() # r.shape == (1024,)
r2 = np.dot(f, v)
assert_(r is np.dot(f, v, r))
assert_array_equal(r2, r)
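# Illustrative note (not part of the original source): the 3-argument/out=
# form above reuses a caller-provided buffer rather than allocating a new
# array, e.g. with f of shape (1024, 16) and v of shape (16,):
#   out = np.empty((1024,))
#   assert np.dot(f, v, out) is out    # result is written in place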
def test_dot_3args_errors():
np.random.seed(22)
f = np.random.random_sample((1024, 16))
v = np.random.random_sample((16, 32))
r = np.empty((1024, 31))
assert_raises(ValueError, np.dot, f, v, r)
r = np.empty((1024,))
assert_raises(ValueError, np.dot, f, v, r)
r = np.empty((32,))
assert_raises(ValueError, np.dot, f, v, r)
r = np.empty((32, 1024))
assert_raises(ValueError, np.dot, f, v, r)
assert_raises(ValueError, np.dot, f, v, r.T)
r = np.empty((1024, 64))
assert_raises(ValueError, np.dot, f, v, r[:, ::2])
assert_raises(ValueError, np.dot, f, v, r[:, :32])
    r = np.empty((1024, 32), dtype=np.float32)
    assert_raises(ValueError, np.dot, f, v, r)
r = np.empty((1024, 32), dtype=int)
assert_raises(ValueError, np.dot, f, v, r)
def test_dot_array_order():
""" Test numpy dot with different order C, F
Comparing results with multiarray dot.
Double and single precisions array are compared using relative
precision of 7 and 5 decimals respectively.
Use 30 decimal when comparing exact operations like:
(a.b)' = b'.a'
"""
_dot = np.core.multiarray.dot
a_dim, b_dim, c_dim = 10, 4, 7
orders = ["C", "F"]
dtypes_prec = {np.float64: 7, np.float32: 5}
np.random.seed(7)
for arr_type, prec in dtypes_prec.items():
for a_order in orders:
a = np.asarray(np.random.randn(a_dim, a_dim),
dtype=arr_type, order=a_order)
assert_array_equal(np.dot(a, a), a.dot(a))
# (a.a)' = a'.a', note that mse~=1e-31 needs almost_equal
assert_almost_equal(a.dot(a), a.T.dot(a.T).T, decimal=prec)
#
# Check with making explicit copy
#
a_T = a.T.copy(order=a_order)
assert_almost_equal(a_T.dot(a_T), a.T.dot(a.T), decimal=prec)
assert_almost_equal(a.dot(a_T), a.dot(a.T), decimal=prec)
assert_almost_equal(a_T.dot(a), a.T.dot(a), decimal=prec)
#
# Compare with multiarray dot
#
assert_almost_equal(a.dot(a), _dot(a, a), decimal=prec)
assert_almost_equal(a.T.dot(a), _dot(a.T, a), decimal=prec)
assert_almost_equal(a.dot(a.T), _dot(a, a.T), decimal=prec)
assert_almost_equal(a.T.dot(a.T), _dot(a.T, a.T), decimal=prec)
for res in a.dot(a), a.T.dot(a), a.dot(a.T), a.T.dot(a.T):
assert res.flags.c_contiguous
for b_order in orders:
b = np.asarray(np.random.randn(a_dim, b_dim),
dtype=arr_type, order=b_order)
b_T = b.T.copy(order=b_order)
assert_almost_equal(a_T.dot(b), a.T.dot(b), decimal=prec)
assert_almost_equal(b_T.dot(a), b.T.dot(a), decimal=prec)
# (b'.a)' = a'.b
assert_almost_equal(b.T.dot(a), a.T.dot(b).T, decimal=prec)
assert_almost_equal(a.dot(b), _dot(a, b), decimal=prec)
assert_almost_equal(b.T.dot(a), _dot(b.T, a), decimal=prec)
for c_order in orders:
c = np.asarray(np.random.randn(b_dim, c_dim),
dtype=arr_type, order=c_order)
c_T = c.T.copy(order=c_order)
assert_almost_equal(c.T.dot(b.T), c_T.dot(b_T), decimal=prec)
assert_almost_equal(c.T.dot(b.T).T, b.dot(c), decimal=prec)
assert_almost_equal(b.dot(c), _dot(b, c), decimal=prec)
assert_almost_equal(c.T.dot(b.T), _dot(c.T, b.T), decimal=prec)
@dec.skipif(True) # ufunc override disabled for 1.9
def test_dot_override():
class A(object):
def __numpy_ufunc__(self, ufunc, method, pos, inputs, **kwargs):
return "A"
class B(object):
def __numpy_ufunc__(self, ufunc, method, pos, inputs, **kwargs):
return NotImplemented
a = A()
b = B()
c = np.array([[1]])
assert_equal(np.dot(a, b), "A")
assert_equal(c.dot(a), "A")
assert_raises(TypeError, np.dot, b, c)
assert_raises(TypeError, c.dot, b)
def test_npdot_segfault():
if sys.platform != 'darwin': return
# Test for float32 np.dot segfault
# https://github.com/numpy/numpy/issues/4007
def aligned_array(shape, align, dtype, order='C'):
# Make array shape `shape` with aligned at `align` bytes
d = dtype()
# Make array of correct size with `align` extra bytes
N = np.prod(shape)
tmp = np.zeros(N * d.nbytes + align, dtype=np.uint8)
address = tmp.__array_interface__["data"][0]
# Find offset into array giving desired alignment
for offset in range(align):
if (address + offset) % align == 0: break
tmp = tmp[offset:offset+N*d.nbytes].view(dtype=dtype)
return tmp.reshape(shape, order=order)
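    # Example (not part of the original source): aligned_array((4,), 32, np.float32)
    # yields a float32 view whose data pointer address is a multiple of 32,
    # which is what lets the checks below probe the sgemv alignment bug.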
def as_aligned(arr, align, dtype, order='C'):
# Copy `arr` into an aligned array with same shape
aligned = aligned_array(arr.shape, align, dtype, order)
aligned[:] = arr[:]
return aligned
def assert_dot_close(A, X, desired):
assert_allclose(np.dot(A, X), desired, rtol=1e-5, atol=1e-7)
m = aligned_array(100, 15, np.float32)
s = aligned_array((100, 100), 15, np.float32)
# This always segfaults when the sgemv alignment bug is present
np.dot(s, m)
# test the sanity of np.dot after applying patch
for align, m, n, a_order in product(
(15, 32),
(10000,),
(200, 89),
('C', 'F')):
# Calculation in double precision
A_d = np.random.rand(m, n)
X_d = np.random.rand(n)
desired = np.dot(A_d, X_d)
# Calculation with aligned single precision
A_f = as_aligned(A_d, align, np.float32, order=a_order)
X_f = as_aligned(X_d, align, np.float32)
assert_dot_close(A_f, X_f, desired)
# Strided A rows
A_d_2 = A_d[::2]
desired = np.dot(A_d_2, X_d)
A_f_2 = A_f[::2]
assert_dot_close(A_f_2, X_f, desired)
# Strided A columns, strided X vector
A_d_22 = A_d_2[:, ::2]
X_d_2 = X_d[::2]
desired = np.dot(A_d_22, X_d_2)
A_f_22 = A_f_2[:, ::2]
X_f_2 = X_f[::2]
assert_dot_close(A_f_22, X_f_2, desired)
# Check the strides are as expected
if a_order == 'F':
assert_equal(A_f_22.strides, (8, 8 * m))
else:
assert_equal(A_f_22.strides, (8 * n, 8))
assert_equal(X_f_2.strides, (8,))
# Strides in A rows + cols only
X_f_2c = as_aligned(X_f_2, align, np.float32)
assert_dot_close(A_f_22, X_f_2c, desired)
# Strides just in A cols
A_d_12 = A_d[:, ::2]
desired = np.dot(A_d_12, X_d_2)
A_f_12 = A_f[:, ::2]
assert_dot_close(A_f_12, X_f_2c, desired)
# Strides in A cols and X
        assert_dot_close(A_f_12, X_f_2, desired)
<|file_name|>computechecksums.py<|end_file_name|>
from nbgrader import utils
from nbgrader.preprocessors import NbGraderPreprocessor
class ComputeChecksums(NbGraderPreprocessor):
"""A preprocessor to compute checksums of grade cells."""
def preprocess_cell(self, cell, resources, cell_index):
# compute checksums of grade cell and solution cells
if utils.is_grade(cell) or utils.is_solution(cell) or utils.is_locked(cell):
checksum = utils.compute_checksum(cell)
cell.metadata.nbgrader['checksum'] = checksum
if utils.is_grade(cell) or utils.is_solution(cell):
self.log.debug(
"Checksum for '%s' is %s",
cell.metadata.nbgrader['grade_id'],
checksum)
        return cell, resources
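# Illustrative usage (not part of the original source; the `cell` object and
# call site are assumptions): after preprocessing, grade/solution cells carry
# a checksum in their metadata:
#   cell, resources = ComputeChecksums().preprocess_cell(cell, {}, 0)
#   cell.metadata.nbgrader['checksum']   # digest from utils.compute_checksum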
<|file_name|>step_3_complete.py<|end_file_name|>
#!/usr/bin/env python
import csv
import json
import requests
def main():
# We'll use a local version of this file from now on to save on
    # bandwidth.
with open('bills.json', 'r') as f:
data = json.load(f)
objects = data['objects']
# Create a csv file to output
with open('bills.csv', 'w') as o:
# Create a csv writer. This will help us format the file
# correctly.
writer = csv.writer(o)
# Write out the header row
writer.writerow([
u'title',
u'label',
u'number',
u'current_status'
])
        # Iterate through each dict in the array `objects`
for bill in objects:
writer.writerow([
bill['title_without_number'].encode('utf-8'),
bill['bill_type_label'].encode('utf-8'),
bill['number'],
bill['current_status'].encode('utf-8')
])
if __name__ == '__main__':
    main()
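# Illustrative input shape (not part of the original source; values are made
# up, keys are the ones the script reads from bills.json):
#   {"objects": [{"title_without_number": "A bill to ...",
#                 "bill_type_label": "hr",
#                 "number": 1,
#                 "current_status": "introduced"}]}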
<|file_name|>extension.py<|end_file_name|>
"""
the flask extension
"""
import warnings
from functools import wraps
import logging
from flask import request, current_app, g, Blueprint
from werkzeug.http import http_date
from limits.errors import ConfigurationError
from limits.storage import storage_from_string, MemoryStorage
from limits.strategies import STRATEGIES
from limits.util import parse_many
import six
import sys
import time
from .errors import RateLimitExceeded
from .util import get_ipaddr
class C:
ENABLED = "RATELIMIT_ENABLED"
HEADERS_ENABLED = "RATELIMIT_HEADERS_ENABLED"
STORAGE_URL = "RATELIMIT_STORAGE_URL"
STORAGE_OPTIONS = "RATELIMIT_STORAGE_OPTIONS"
STRATEGY = "RATELIMIT_STRATEGY"
GLOBAL_LIMITS = "RATELIMIT_GLOBAL"
HEADER_LIMIT = "RATELIMIT_HEADER_LIMIT"
HEADER_REMAINING = "RATELIMIT_HEADER_REMAINING"
HEADER_RESET = "RATELIMIT_HEADER_RESET"
SWALLOW_ERRORS = "RATELIMIT_SWALLOW_ERRORS"
IN_MEMORY_FALLBACK = "RATELIMIT_IN_MEMORY_FALLBACK"
HEADER_RETRY_AFTER = "RATELIMIT_HEADER_RETRY_AFTER"
HEADER_RETRY_AFTER_VALUE = "RATELIMIT_HEADER_RETRY_AFTER_VALUE"
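# Illustrative configuration (not part of the original source): these keys
# are read from ``app.config`` in ``init_app`` below, e.g.:
#   app.config["RATELIMIT_GLOBAL"] = "200 per day, 50 per hour"
#   app.config["RATELIMIT_STORAGE_URL"] = "redis://localhost:6379"
#   app.config["RATELIMIT_STRATEGY"] = "moving-window"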
class HEADERS:
RESET = 1
REMAINING = 2
LIMIT = 3
RETRY_AFTER = 4
MAX_BACKEND_CHECKS = 5
class ExtLimit(object):
"""
simple wrapper to encapsulate limits and their context
"""
def __init__(self, limit, key_func, scope, per_method, methods, error_message,
exempt_when):
self._limit = limit
self.key_func = key_func
self._scope = scope
self.per_method = per_method
self.methods = methods and [m.lower() for m in methods] or methods
self.error_message = error_message
self.exempt_when = exempt_when
@property
def limit(self):
return self._limit() if callable(self._limit) else self._limit
@property
def scope(self):
return self._scope(request.endpoint) if callable(self._scope) else self._scope
@property
def is_exempt(self):
"""Check if the limit is exempt."""
return self.exempt_when and self.exempt_when()
class Limiter(object):
"""
:param app: :class:`flask.Flask` instance to initialize the extension
with.
:param list global_limits: a variable list of strings denoting global
limits to apply to all routes. :ref:`ratelimit-string` for more details.
:param function key_func: a callable that returns the domain to rate limit by.
:param bool headers_enabled: whether ``X-RateLimit`` response headers are written.
:param str strategy: the strategy to use. refer to :ref:`ratelimit-strategy`
:param str storage_uri: the storage location. refer to :ref:`ratelimit-conf`
:param dict storage_options: kwargs to pass to the storage implementation upon
instantiation.
:param bool auto_check: whether to automatically check the rate limit in the before_request
chain of the application. default ``True``
:param bool swallow_errors: whether to swallow errors when hitting a rate limit.
An exception will still be logged. default ``False``
:param list in_memory_fallback: a variable list of strings denoting fallback
limits to apply when the storage is down.
"""
def __init__(self, app=None
, key_func=None
, global_limits=[]
, headers_enabled=False
, strategy=None
, storage_uri=None
, storage_options={}
, auto_check=True
, swallow_errors=False
, in_memory_fallback=[]
, retry_after=None
):
self.app = app
self.logger = logging.getLogger("flask-limiter")
self.enabled = True
self._global_limits = []
self._in_memory_fallback = []
self._exempt_routes = set()
self._request_filters = []
self._headers_enabled = headers_enabled
self._header_mapping = {}
self._retry_after = retry_after
self._strategy = strategy
self._storage_uri = storage_uri
self._storage_options = storage_options
self._auto_check = auto_check
self._swallow_errors = swallow_errors
if not key_func:
warnings.warn(
"Use of the default `get_ipaddr` function is discouraged."
" Please refer to https://flask-limiter.readthedocs.org/#rate-limit-domain"
" for the recommended configuration",
UserWarning
)
self._key_func = key_func or get_ipaddr
for limit in global_limits:
self._global_limits.extend(
[
ExtLimit(
limit, self._key_func, None, False, None, None, None
) for limit in parse_many(limit)
]
)
for limit in in_memory_fallback:
self._in_memory_fallback.extend(
[
ExtLimit(
limit, self._key_func, None, False, None, None, None
) for limit in parse_many(limit)
]
)
self._route_limits = {}
self._dynamic_route_limits = {}
self._blueprint_limits = {}
self._blueprint_dynamic_limits = {}
self._blueprint_exempt = set()
self._storage = self._limiter = None
self._storage_dead = False
self._fallback_limiter = None
self.__check_backend_count = 0
self.__last_check_backend = time.time()
class BlackHoleHandler(logging.StreamHandler):
def emit(*_):
return
self.logger.addHandler(BlackHoleHandler())
if app:
self.init_app(app)
def init_app(self, app):
"""
:param app: :class:`flask.Flask` instance to rate limit.
"""
self.enabled = app.config.setdefault(C.ENABLED, True)
self._swallow_errors = app.config.setdefault(
C.SWALLOW_ERRORS, self._swallow_errors
)
self._headers_enabled = (
self._headers_enabled
or app.config.setdefault(C.HEADERS_ENABLED, False)
)
self._storage_options.update(
app.config.get(C.STORAGE_OPTIONS, {})
)
self._storage = storage_from_string(
self._storage_uri
or app.config.setdefault(C.STORAGE_URL, 'memory://'),
** self._storage_options
)
strategy = (
self._strategy
or app.config.setdefault(C.STRATEGY, 'fixed-window')
)
if strategy not in STRATEGIES:
raise ConfigurationError("Invalid rate limiting strategy %s" % strategy)
self._limiter = STRATEGIES[strategy](self._storage)
self._header_mapping.update({
HEADERS.RESET : self._header_mapping.get(HEADERS.RESET,None) or app.config.setdefault(C.HEADER_RESET, "X-RateLimit-Reset"),
HEADERS.REMAINING : self._header_mapping.get(HEADERS.REMAINING,None) or app.config.setdefault(C.HEADER_REMAINING, "X-RateLimit-Remaining"),
HEADERS.LIMIT : self._header_mapping.get(HEADERS.LIMIT,None) or app.config.setdefault(C.HEADER_LIMIT, "X-RateLimit-Limit"),
HEADERS.RETRY_AFTER : self._header_mapping.get(HEADERS.RETRY_AFTER,None) or app.config.setdefault(C.HEADER_RETRY_AFTER, "Retry-After"),
})
self._retry_after = (
self._retry_after
or app.config.get(C.HEADER_RETRY_AFTER_VALUE)
)
conf_limits = app.config.get(C.GLOBAL_LIMITS, None)
if not self._global_limits and conf_limits:
self._global_limits = [
ExtLimit(
limit, self._key_func, None, False, None, None, None
) for limit in parse_many(conf_limits)
]
fallback_limits = app.config.get(C.IN_MEMORY_FALLBACK, None)
if not self._in_memory_fallback and fallback_limits:
self._in_memory_fallback = [
ExtLimit(
limit, self._key_func, None, False, None, None, None
) for limit in parse_many(fallback_limits)
]
if self._auto_check:
app.before_request(self.__check_request_limit)
app.after_request(self.__inject_headers)
if self._in_memory_fallback:
self._fallback_storage = MemoryStorage()
self._fallback_limiter = STRATEGIES[strategy](self._fallback_storage)
# purely for backward compatibility as stated in flask documentation
if not hasattr(app, 'extensions'):
app.extensions = {} # pragma: no cover
app.extensions['limiter'] = self
def __should_check_backend(self):
if self.__check_backend_count > MAX_BACKEND_CHECKS:
self.__check_backend_count = 0
if time.time() - self.__last_check_backend > pow(2, self.__check_backend_count):
self.__last_check_backend = time.time()
self.__check_backend_count += 1
return True
return False
def check(self):
"""
check the limits for the current request
:raises: RateLimitExceeded
"""
self.__check_request_limit()
def reset(self):
"""
resets the storage if it supports being reset
"""
try:
self._storage.reset()
            self.logger.info("Storage has been reset and all limits cleared")
except NotImplementedError:
self.logger.warning("This storage type does not support being reset")
@property
def limiter(self):
if self._storage_dead and self._in_memory_fallback:
return self._fallback_limiter
else:
return self._limiter
def __inject_headers(self, response):
current_limit = getattr(g, 'view_rate_limit', None)
if self.enabled and self._headers_enabled and current_limit:
window_stats = self.limiter.get_window_stats(*current_limit)
response.headers.add(
self._header_mapping[HEADERS.LIMIT],
str(current_limit[0].amount)
)
response.headers.add(
self._header_mapping[HEADERS.REMAINING],
window_stats[1]
)
response.headers.add(
self._header_mapping[HEADERS.RESET],
window_stats[0]
)
response.headers.add(
self._header_mapping[HEADERS.RETRY_AFTER],
self._retry_after == 'http-date' and http_date(window_stats[0])
or int(window_stats[0] - time.time())
)
return response
def __check_request_limit(self):
endpoint = request.endpoint or ""
view_func = current_app.view_functions.get(endpoint, None)
name = ("%s.%s" % (
view_func.__module__, view_func.__name__
) if view_func else ""
)
if (not request.endpoint
or not self.enabled
or view_func == current_app.send_static_file
or name in self._exempt_routes
or request.blueprint in self._blueprint_exempt
or any(fn() for fn in self._request_filters)
):
return
limits = (
name in self._route_limits and self._route_limits[name]
or []
)
dynamic_limits = []
if name in self._dynamic_route_limits:
for lim in self._dynamic_route_limits[name]:
try:
dynamic_limits.extend(
ExtLimit(
limit, lim.key_func, lim.scope, lim.per_method,
lim.methods, lim.error_message, lim.exempt_when
) for limit in parse_many(lim.limit)
)
except ValueError as e:
self.logger.error(
"failed to load ratelimit for view function %s (%s)"
, name, e
)
if request.blueprint:
if (request.blueprint in self._blueprint_dynamic_limits
and not dynamic_limits
):
for lim in self._blueprint_dynamic_limits[request.blueprint]:
try:
dynamic_limits.extend(
ExtLimit(
limit, lim.key_func, lim.scope, lim.per_method,
lim.methods, lim.error_message, lim.exempt_when
) for limit in parse_many(lim.limit)
)
except ValueError as e:
self.logger.error(
"failed to load ratelimit for blueprint %s (%s)"
, request.blueprint, e
)
if (request.blueprint in self._blueprint_limits
and not limits
):
limits.extend(self._blueprint_limits[request.blueprint])
failed_limit = None
limit_for_header = None
try:
all_limits = []
if self._storage_dead and self._fallback_limiter:
if self.__should_check_backend() and self._storage.check():
self.logger.info(
"Rate limit storage recovered"
)
self._storage_dead = False
self.__check_backend_count = 0
else:
all_limits = self._in_memory_fallback
if not all_limits:
all_limits = (limits + dynamic_limits or self._global_limits)
for lim in all_limits:
limit_scope = lim.scope or endpoint
if lim.is_exempt:
return
if lim.methods is not None and request.method.lower() not in lim.methods:
return
if lim.per_method:
limit_scope += ":%s" % request.method
if not limit_for_header or lim.limit < limit_for_header[0]:
limit_for_header = (lim.limit, lim.key_func(), limit_scope)
if not self.limiter.hit(lim.limit, lim.key_func(), limit_scope):
self.logger.warning(
"ratelimit %s (%s) exceeded at endpoint: %s"
, lim.limit, lim.key_func(), limit_scope
)
failed_limit = lim
limit_for_header = (lim.limit, lim.key_func(), limit_scope)
break
g.view_rate_limit = limit_for_header
if failed_limit:
if failed_limit.error_message:
exc_description = failed_limit.error_message if not callable(
failed_limit.error_message
) else failed_limit.error_message()
else:
exc_description = six.text_type(failed_limit.limit)
raise RateLimitExceeded(exc_description)
        except Exception as e:  # noqa
if isinstance(e, RateLimitExceeded):
six.reraise(*sys.exc_info())
if self._in_memory_fallback and not self._storage_dead:
self.logger.warn(
"Rate limit storage unreachable - falling back to"
" in-memory storage"
)
self._storage_dead = True
self.__check_request_limit()
else:
if self._swallow_errors:
self.logger.exception(
"Failed to rate limit. Swallowing error"
)
else:
six.reraise(*sys.exc_info())
def __limit_decorator(self, limit_value,
key_func=None, shared=False,
scope=None,
per_method=False,
methods=None,
error_message=None,
exempt_when=None):
_scope = scope if shared else None
def _inner(obj):
func = key_func or self._key_func
is_route = not isinstance(obj, Blueprint)
name = "%s.%s" % (obj.__module__, obj.__name__) if is_route else obj.name
dynamic_limit, static_limits = None, []
if callable(limit_value):
dynamic_limit = ExtLimit(limit_value, func, _scope, per_method,
methods, error_message, exempt_when)
else:
try:
static_limits = [ExtLimit(
limit, func, _scope, per_method,
methods, error_message, exempt_when
) for limit in parse_many(limit_value)]
except ValueError as e:
self.logger.error(
"failed to configure %s %s (%s)",
"view function" if is_route else "blueprint", name, e
)
if isinstance(obj, Blueprint):
if dynamic_limit:
self._blueprint_dynamic_limits.setdefault(name, []).append(
dynamic_limit
)
else:
self._blueprint_limits.setdefault(name, []).extend(
static_limits
)
else:
@wraps(obj)
def __inner(*a, **k):
return obj(*a, **k)
if dynamic_limit:
self._dynamic_route_limits.setdefault(name, []).append(
dynamic_limit
)
else:
self._route_limits.setdefault(name, []).extend(
                        static_limits
                    )
return __inner
return _inner
def limit(self, limit_value, key_func=None, per_method=False,
methods=None, error_message=None, exempt_when=None):
"""
decorator to be used for rate limiting individual routes or blueprints.
:param limit_value: rate limit string or a callable that returns a string.
:ref:`ratelimit-string` for more details.
:param function key_func: function/lambda to extract the unique identifier for
the rate limit. defaults to remote address of the request.
:param bool per_method: whether the limit is sub categorized into the http
method of the request.
:param list methods: if specified, only the methods in this list will be rate
limited (default: None).
:param error_message: string (or callable that returns one) to override the
error message used in the response.
:return:
"""
return self.__limit_decorator(limit_value, key_func, per_method=per_method,
methods=methods, error_message=error_message,
exempt_when=exempt_when)
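    # Illustrative usage (not part of the original source; the app and route
    # are assumptions):
    #
    #   limiter = Limiter(app, key_func=get_ipaddr)
    #
    #   @app.route("/ping")
    #   @limiter.limit("10 per minute")
    #   def ping():
    #       return "pong"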
def shared_limit(self, limit_value, scope, key_func=None,
error_message=None, exempt_when=None):
"""
decorator to be applied to multiple routes sharing the same rate limit.
:param limit_value: rate limit string or a callable that returns a string.
:ref:`ratelimit-string` for more details.
:param scope: a string or callable that returns a string
for defining the rate limiting scope.
:param function key_func: function/lambda to extract the unique identifier for
the rate limit. defaults to remote address of the request.
:param error_message: string (or callable that returns one) to override the
error message used in the response.
"""
return self.__limit_decorator(
limit_value, key_func, True, scope, error_message=error_message,
exempt_when=exempt_when
)
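    # Illustrative usage (not part of the original source): two routes drawing
    # from one shared bucket via a common scope string:
    #
    #   mail_limit = limiter.shared_limit("100 per hour", scope="mail")
    #
    #   @app.route("/invite")
    #   @mail_limit
    #   def invite(): ...
    #
    #   @app.route("/notify")
    #   @mail_limit
    #   def notify(): ...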
def exempt(self, obj):
"""
decorator to mark a view or all views in a blueprint as exempt from rate limits.
"""
if not isinstance(obj, Blueprint):
name = "%s.%s" % (obj.__module__, obj.__name__)
@wraps(obj)
def __inner(*a, **k):
return obj(*a, **k)
self._exempt_routes.add(name)
return __inner
else:
self._blueprint_exempt.add(obj.name)
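    # Illustrative usage (not part of the original source):
    #
    #   @app.route("/health")
    #   @limiter.exempt
    #   def health():
    #       return "ok"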
def request_filter(self, fn):
"""
decorator to mark a function as a filter to be executed
to check if the request is exempt from rate limiting.
"""
self._request_filters.append(fn)
        return fn
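    # Illustrative usage (not part of the original source; the header name is
    # an assumption): skip rate limiting for requests marked as internal:
    #
    #   @limiter.request_filter
    #   def internal_traffic():
    #       return request.headers.get("X-Internal", "") == "true"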
<|file_name|>lesson2-2.py<|end_file_name|>
# -*- coding: UTF-8 -*-
"""
Homework for lesson 2-2,
"Working with different data formats"
Completed by Ilya Martysyuk, PY-3
"""
import re
import glob
import chardet
from os.path import join
from xml.etree.cElementTree import XMLParser, parse
def open_data_file(path):
with open(path, 'rb') as encoding_detect_file:
file_text = encoding_detect_file.read()
encoding = chardet.detect(file_text)['encoding']
parser = XMLParser(encoding=encoding)
tree = parse(path, parser=parser)
root = tree.getroot()
return root
def compile_data(root):
long_dict = dict()
for i in root.iter('description'):
clean_re = re.compile(r'<.*?>|[^\w\s]+|[\d]+|[a-z]+|[A-Z]+|[\n]')
clean_text = clean_re.sub('', i.text)
temp_list = clean_text.strip().split(' ')
for t in temp_list:
if len(t) > 6:
try:
long_dict[t] += 1
except KeyError:
long_dict.update({t: 1})
long_dict = sorted(long_dict.items(), key=lambda x: x[1], reverse=True)
print(long_dict)
    return long_dict
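# Illustrative behaviour of the cleaning regex above (not part of the original
# source; the sample string is made up): HTML tags, punctuation, digits and
# Latin letters are stripped, so only Cyrillic words longer than 6 characters
# get counted:
#   clean_re.sub('', '<p>Привет, example 123 сообщество</p>')
#   # -> 'Привет  сообщество' (roughly; runs of whitespace may remain)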
def print_result(long_dict):
    print('Top 10 most frequently occurring words:')
for i in range(10):
        print('{}) The word "{}" occurs {} times'.format(i+1, long_dict[i][0], long_dict[i][1]))
path = 'lesson2-2'
files = glob.glob(join(path, '*.xml'))
for file in files:
    print('\nProcessing file {}'.format(file))
    print_result(compile_data(open_data_file(file)))
<|file_name|>dns-controller.js<|end_file_name|>
angular.module('dnsControllers', ['dnsServices', 'dnsModels'])
.controller('dnsCtrl', function($scope, $location, socket, Hosts, Zone) {
$scope.dns = {
zone : Zone.get(),
hosts : Hosts.list()
};
socket.on('new:host', function (host) {
var found = false;
        if ($scope.dns.hosts.records) {
            for (var i = 0 ; i < $scope.dns.hosts.records.length ; ++i) {
                if ($scope.dns.hosts.records[i].name === host.name) {
                    found = true;
                    $scope.dns.hosts.records[i].record = host.record;
break;
}
}
}
if (!found) {
if (!$scope.dns.hosts.records)
$scope.dns.hosts.records = [];
$scope.dns.hosts.records.push(host);
}
});
socket.on('delete:host', function (host) {
if ($scope.dns.hosts.records) {
for (var i = 0 ; i < $scope.dns.hosts.records.length ; ++i) {
if ($scope.dns.hosts.records[i].name === host.name) {
$scope.dns.hosts.records.splice(i, 1);
break;
}
}
}
});
})
;
<|file_name|>nl.js<|end_file_name|>
/*
Copyright (c) 2003-2015, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
CKEDITOR.lang['nl'] = {
"editor": "Tekstverwerker",
"editorPanel": "Tekstverwerker beheerpaneel",
"common": {
"editorHelp": "Druk ALT 0 voor hulp",
"browseServer": "Bladeren op server",
"url": "URL",
"protocol": "Protocol",
"upload": "Upload",
"uploadSubmit": "Naar server verzenden",
"image": "Afbeelding",
"flash": "Flash",
"form": "Formulier",
"checkbox": "Selectievinkje",
"radio": "Keuzerondje",
"textField": "Tekstveld",
"textarea": "Tekstvak",
"hiddenField": "Verborgen veld",
"button": "Knop",
"select": "Selectieveld",
"imageButton": "Afbeeldingsknop",
"notSet": "<niet ingevuld>",
"id": "Id",
"name": "Naam",
"langDir": "Schrijfrichting",
"langDirLtr": "Links naar rechts (LTR)",
"langDirRtl": "Rechts naar links (RTL)",
"langCode": "Taalcode",
"longDescr": "Lange URL-omschrijving",
"cssClass": "Stylesheet-klassen",
"advisoryTitle": "Adviserende titel",
"cssStyle": "Stijl",
"ok": "OK",
"cancel": "Annuleren",
"close": "Sluiten",
"preview": "Voorbeeld",
"resize": "Sleep om te herschalen",
"generalTab": "Algemeen",
"advancedTab": "Geavanceerd",
"validateNumberFailed": "Deze waarde is geen geldig getal.",
"confirmNewPage": "Alle aangebrachte wijzigingen gaan verloren. Weet u zeker dat u een nieuwe pagina wilt openen?",
"confirmCancel": "Enkele opties zijn gewijzigd. Weet u zeker dat u dit dialoogvenster wilt sluiten?",
"options": "Opties",
"target": "Doelvenster",
"targetNew": "Nieuw venster (_blank)",
"targetTop": "Hele venster (_top)",
"targetSelf": "Zelfde venster (_self)",
"targetParent": "Origineel venster (_parent)",
"langDirLTR": "Links naar rechts (LTR)",
"langDirRTL": "Rechts naar links (RTL)",
"styles": "Stijl",
"cssClasses": "Stylesheet-klassen",
"width": "Breedte",
"height": "Hoogte",
"align": "Uitlijning",
"alignLeft": "Links",
"alignRight": "Rechts",
"alignCenter": "Centreren",
"alignJustify": "Uitvullen",
"alignTop": "Boven",
"alignMiddle": "Midden",
"alignBottom": "Onder",
"alignNone": "Geen",
"invalidValue": "Ongeldige waarde.",
"invalidHeight": "De hoogte moet een getal zijn.",
"invalidWidth": "De breedte moet een getal zijn.",
"invalidCssLength": "Waarde in veld \"%1\" moet een positief nummer zijn, met of zonder een geldige CSS meeteenheid (px, %, in, cm, mm, em, ex, pt of pc).",
"invalidHtmlLength": "Waarde in veld \"%1\" moet een positief nummer zijn, met of zonder een geldige HTML meeteenheid (px of %).",
"invalidInlineStyle": "Waarde voor de online stijl moet bestaan uit een of meerdere tupels met het formaat \"naam : waarde\", gescheiden door puntkomma's.",
"cssLengthTooltip": "Geef een nummer in voor een waarde in pixels of geef een nummer in met een geldige CSS eenheid (px, %, in, cm, mm, em, ex, pt, of pc).",
"unavailable": "%1<span class=\"cke_accessibility\">, niet beschikbaar</span>"
},
"about": {
"copy": "Copyright © $1. Alle rechten voorbehouden.",
"dlgTitle": "Over CKEditor",
"help": "Bekijk de $1 voor hulp.",
"moreInfo": "Bezoek onze website voor licentieinformatie:",
"title": "Over CKEditor",
"userGuide": "CKEditor gebruiksaanwijzing"
},
"basicstyles": {
"bold": "Vet",
"italic": "Cursief",
"strike": "Doorhalen",
"subscript": "Subscript",
"superscript": "Superscript",
"underline": "Onderstrepen"
},
"bidi": {"ltr": "Schrijfrichting van links naar rechts", "rtl": "Schrijfrichting van rechts naar links"},
"blockquote": {"toolbar": "Citaatblok"},
"clipboard": {
"copy": "Kopiëren",
"copyError": "De beveiligingsinstelling van de browser verhinderen het automatisch kopiëren. Gebruik de sneltoets Ctrl/Cmd+C van het toetsenbord.",
"cut": "Knippen",
"cutError": "De beveiligingsinstelling van de browser verhinderen het automatisch knippen. Gebruik de sneltoets Ctrl/Cmd+X van het toetsenbord.",
"paste": "Plakken",
"pasteArea": "Plakgebied",
"pasteMsg": "Plak de tekst in het volgende vak gebruikmakend van uw toetsenbord (<strong>Ctrl/Cmd+V</strong>) en klik op OK.",
"securityMsg": "Door de beveiligingsinstellingen van uw browser is het niet mogelijk om direct vanuit het klembord in de editor te plakken. Middels opnieuw plakken in dit venster kunt u de tekst alsnog plakken in de editor.",
"title": "Plakken"
},
"button": {"selectedLabel": "%1 (Geselecteerd)"},
"colorbutton": {
"auto": "Automatisch",
"bgColorTitle": "Achtergrondkleur",
"colors": {
"000": "Zwart",
"800000": "Kastanjebruin",
"8B4513": "Chocoladebruin",
"2F4F4F": "Donkerleigrijs",
"008080": "Blauwgroen",
"000080": "Marine",
"4B0082": "Indigo",
"696969": "Donkergrijs",
"B22222": "Baksteen",
"A52A2A": "Bruin",
"DAA520": "Donkergeel",
"006400": "Donkergroen",
"40E0D0": "Turquoise",
"0000CD": "Middenblauw",
"800080": "Paars",
"808080": "Grijs",
"F00": "Rood",
"FF8C00": "Donkeroranje",
"FFD700": "Goud",
"008000": "Groen",
"0FF": "Cyaan",
"00F": "Blauw",
"EE82EE": "Violet",
"A9A9A9": "Donkergrijs",
"FFA07A": "Lichtzalm",
"FFA500": "Oranje",
"FFFF00": "Geel",
"00FF00": "Felgroen",
"AFEEEE": "Lichtturquoise",
"ADD8E6": "Lichtblauw",
"DDA0DD": "Pruim",
"D3D3D3": "Lichtgrijs",
"FFF0F5": "Linnen",
"FAEBD7": "Ivoor",
"FFFFE0": "Lichtgeel",
"F0FFF0": "Honingdauw",
"F0FFFF": "Azuur",
"F0F8FF": "Licht hemelsblauw",
"E6E6FA": "Lavendel",
"FFF": "Wit"
},
"more": "Meer kleuren...",
"panelTitle": "Kleuren",
"textColorTitle": "Tekstkleur"
},
"colordialog": {
"clear": "Wissen",
"highlight": "Actief",
"options": "Kleuropties",
"selected": "Geselecteerde kleur",
"title": "Selecteer kleur"
},
"templates": {
"button": "Sjablonen",
"emptyListMsg": "(Geen sjablonen gedefinieerd)",
"insertOption": "Vervang de huidige inhoud",
"options": "Template opties",
"selectPromptMsg": "Selecteer het sjabloon dat in de editor geopend moet worden (de actuele inhoud gaat verloren):",
"title": "Inhoud sjablonen"
},
"contextmenu": {"options": "Contextmenu opties"},
"div": {
"IdInputLabel": "Id",
"advisoryTitleInputLabel": "Adviserende titel",
"cssClassInputLabel": "Stylesheet klassen",
"edit": "Div wijzigen",
"inlineStyleInputLabel": "Inline stijl",
"langDirLTRLabel": "Links naar rechts (LTR)",
"langDirLabel": "Schrijfrichting",
"langDirRTLLabel": "Rechts naar links (RTL)",
"languageCodeInputLabel": " Taalcode",
"remove": "Div verwijderen",
"styleSelectLabel": "Stijl",
"title": "Div aanmaken",
"toolbar": "Div aanmaken"
},
"toolbar": {
"toolbarCollapse": "Werkbalk inklappen",
"toolbarExpand": "Werkbalk uitklappen",
"toolbarGroups": {
"document": "Document",
"clipboard": "Klembord/Ongedaan maken",
"editing": "Bewerken",
"forms": "Formulieren",
"basicstyles": "Basisstijlen",
"paragraph": "Paragraaf",
"links": "Links",
"insert": "Invoegen",
"styles": "Stijlen",
"colors": "Kleuren",
"tools": "Toepassingen"
},
"toolbars": "Werkbalken"
},
"elementspath": {"eleLabel": "Elementenpad", "eleTitle": "%1 element"},
"find": {
"find": "Zoeken",
"findOptions": "Zoekopties",
"findWhat": "Zoeken naar:",
"matchCase": "Hoofdlettergevoelig",
"matchCyclic": "Doorlopend zoeken",
"matchWord": "Hele woord moet voorkomen",
"notFoundMsg": "De opgegeven tekst is niet gevonden.",
"replace": "Vervangen",
"replaceAll": "Alles vervangen",
"replaceSuccessMsg": "%1 resultaten vervangen.",
"replaceWith": "Vervangen met:",
"title": "Zoeken en vervangen"
},
"fakeobjects": {
"anchor": "Interne link",
"flash": "Flash animatie",
"hiddenfield": "Verborgen veld",
"iframe": "IFrame",
"unknown": "Onbekend object"
},
"flash": {
"access": "Script toegang",
"accessAlways": "Altijd",
"accessNever": "Nooit",
"accessSameDomain": "Zelfde domeinnaam",
"alignAbsBottom": "Absoluut-onder",
"alignAbsMiddle": "Absoluut-midden",
"alignBaseline": "Basislijn",
"alignTextTop": "Boven tekst",
"bgcolor": "Achtergrondkleur",
"chkFull": "Schermvullend toestaan",
"chkLoop": "Herhalen",
"chkMenu": "Flashmenu's inschakelen",
"chkPlay": "Automatisch afspelen",
"flashvars": "Variabelen voor Flash",
"hSpace": "HSpace",
"properties": "Eigenschappen Flash",
"propertiesTab": "Eigenschappen",
"quality": "Kwaliteit",
"qualityAutoHigh": "Automatisch hoog",
"qualityAutoLow": "Automatisch laag",
"qualityBest": "Beste",
"qualityHigh": "Hoog",
"qualityLow": "Laag",
"qualityMedium": "Gemiddeld",
"scale": "Schaal",
"scaleAll": "Alles tonen",
"scaleFit": "Precies passend",
"scaleNoBorder": "Geen rand",
"title": "Eigenschappen Flash",
"vSpace": "VSpace",
"validateHSpace": "De HSpace moet een getal zijn.",
"validateSrc": "De URL mag niet leeg zijn.",
"validateVSpace": "De VSpace moet een getal zijn.",<|fim▁hole|> "windowModeWindow": "Venster"
},
"font": {
"fontSize": {"label": "Lettergrootte", "voiceLabel": "Lettergrootte", "panelTitle": "Lettergrootte"},
"label": "Lettertype",
"panelTitle": "Lettertype",
"voiceLabel": "Lettertype"
},
"forms": {
"button": {
"title": "Eigenschappen knop",
"text": "Tekst (waarde)",
"type": "Soort",
"typeBtn": "Knop",
"typeSbm": "Versturen",
"typeRst": "Leegmaken"
},
"checkboxAndRadio": {
"checkboxTitle": "Eigenschappen aanvinkvakje",
"radioTitle": "Eigenschappen selectievakje",
"value": "Waarde",
"selected": "Geselecteerd",
"required": "Vereist"
},
"form": {
"title": "Eigenschappen formulier",
"menu": "Eigenschappen formulier",
"action": "Actie",
"method": "Methode",
"encoding": "Codering"
},
"hidden": {"title": "Eigenschappen verborgen veld", "name": "Naam", "value": "Waarde"},
"select": {
"title": "Eigenschappen selectieveld",
"selectInfo": "Informatie",
"opAvail": "Beschikbare opties",
"value": "Waarde",
"size": "Grootte",
"lines": "Regels",
"chkMulti": "Gecombineerde selecties toestaan",
"required": "Vereist",
"opText": "Tekst",
"opValue": "Waarde",
"btnAdd": "Toevoegen",
"btnModify": "Wijzigen",
"btnUp": "Omhoog",
"btnDown": "Omlaag",
"btnSetValue": "Als geselecteerde waarde instellen",
"btnDelete": "Verwijderen"
},
"textarea": {"title": "Eigenschappen tekstvak", "cols": "Kolommen", "rows": "Rijen"},
"textfield": {
"title": "Eigenschappen tekstveld",
"name": "Naam",
"value": "Waarde",
"charWidth": "Breedte (tekens)",
"maxChars": "Maximum aantal tekens",
"required": "Vereist",
"type": "Soort",
"typeText": "Tekst",
"typePass": "Wachtwoord",
"typeEmail": "E-mail",
"typeSearch": "Zoeken",
"typeTel": "Telefoonnummer",
"typeUrl": "URL"
}
},
"format": {
"label": "Opmaak",
"panelTitle": "Opmaak",
"tag_address": "Adres",
"tag_div": "Normaal (DIV)",
"tag_h1": "Kop 1",
"tag_h2": "Kop 2",
"tag_h3": "Kop 3",
"tag_h4": "Kop 4",
"tag_h5": "Kop 5",
"tag_h6": "Kop 6",
"tag_p": "Normaal",
"tag_pre": "Met opmaak"
},
"horizontalrule": {"toolbar": "Horizontale lijn invoegen"},
"iframe": {
"border": "Framerand tonen",
"noUrl": "Vul de IFrame URL in",
"scrolling": "Scrollbalken inschakelen",
"title": "IFrame-eigenschappen",
"toolbar": "IFrame"
},
"image": {
"alt": "Alternatieve tekst",
"border": "Rand",
"btnUpload": "Naar server verzenden",
"button2Img": "Wilt u de geselecteerde afbeeldingsknop vervangen door een eenvoudige afbeelding?",
"hSpace": "HSpace",
"img2Button": "Wilt u de geselecteerde afbeelding vervangen door een afbeeldingsknop?",
"infoTab": "Informatie afbeelding",
"linkTab": "Link",
"lockRatio": "Afmetingen vergrendelen",
"menu": "Eigenschappen afbeelding",
"resetSize": "Afmetingen resetten",
"title": "Eigenschappen afbeelding",
"titleButton": "Eigenschappen afbeeldingsknop",
"upload": "Upload",
"urlMissing": "De URL naar de afbeelding ontbreekt.",
"vSpace": "VSpace",
"validateBorder": "Rand moet een heel nummer zijn.",
"validateHSpace": "HSpace moet een heel nummer zijn.",
"validateVSpace": "VSpace moet een heel nummer zijn."
},
"indent": {"indent": "Inspringing vergroten", "outdent": "Inspringing verkleinen"},
"smiley": {"options": "Smiley opties", "title": "Smiley invoegen", "toolbar": "Smiley"},
"justify": {"block": "Uitvullen", "center": "Centreren", "left": "Links uitlijnen", "right": "Rechts uitlijnen"},
"language": {"button": "Taal instellen", "remove": "Taal verwijderen"},
"link": {
"acccessKey": "Toegangstoets",
"advanced": "Geavanceerd",
"advisoryContentType": "Aanbevolen content-type",
"advisoryTitle": "Adviserende titel",
"anchor": {
"toolbar": "Interne link",
"menu": "Eigenschappen interne link",
"title": "Eigenschappen interne link",
"name": "Naam interne link",
"errorName": "Geef de naam van de interne link op",
"remove": "Interne link verwijderen"
},
"anchorId": "Op kenmerk interne link",
"anchorName": "Op naam interne link",
"charset": "Karakterset van gelinkte bron",
"cssClasses": "Stylesheet-klassen",
"emailAddress": "E-mailadres",
"emailBody": "Inhoud bericht",
"emailSubject": "Onderwerp bericht",
"id": "Id",
"info": "Linkomschrijving",
"langCode": "Taalcode",
"langDir": "Schrijfrichting",
"langDirLTR": "Links naar rechts (LTR)",
"langDirRTL": "Rechts naar links (RTL)",
"menu": "Link wijzigen",
"name": "Naam",
"noAnchors": "(Geen interne links in document gevonden)",
"noEmail": "Geef een e-mailadres",
"noUrl": "Geef de link van de URL",
"other": "<ander>",
"popupDependent": "Afhankelijk (Netscape)",
"popupFeatures": "Instellingen popupvenster",
"popupFullScreen": "Volledig scherm (IE)",
"popupLeft": "Positie links",
"popupLocationBar": "Locatiemenu",
"popupMenuBar": "Menubalk",
"popupResizable": "Herschaalbaar",
"popupScrollBars": "Schuifbalken",
"popupStatusBar": "Statusbalk",
"popupToolbar": "Werkbalk",
"popupTop": "Positie boven",
"rel": "Relatie",
"selectAnchor": "Kies een interne link",
"styles": "Stijl",
"tabIndex": "Tabvolgorde",
"target": "Doelvenster",
"targetFrame": "<frame>",
"targetFrameName": "Naam doelframe",
"targetPopup": "<popupvenster>",
"targetPopupName": "Naam popupvenster",
"title": "Link",
"toAnchor": "Interne link in pagina",
"toEmail": "E-mail",
"toUrl": "URL",
"toolbar": "Link invoegen/wijzigen",
"type": "Linktype",
"unlink": "Link verwijderen",
"upload": "Upload"
},
"list": {"bulletedlist": "Opsomming invoegen", "numberedlist": "Genummerde lijst invoegen"},
"liststyle": {
"armenian": "Armeense nummering",
"bulletedTitle": "Eigenschappen lijst met opsommingstekens",
"circle": "Cirkel",
"decimal": "Cijfers (1, 2, 3, etc.)",
"decimalLeadingZero": "Cijfers beginnen met nul (01, 02, 03, etc.)",
"disc": "Schijf",
"georgian": "Georgische nummering (an, ban, gan, etc.)",
"lowerAlpha": "Kleine letters (a, b, c, d, e, etc.)",
"lowerGreek": "Grieks kleine letters (alpha, beta, gamma, etc.)",
"lowerRoman": "Romeins kleine letters (i, ii, iii, iv, v, etc.)",
"none": "Geen",
"notset": "<niet gezet>",
"numberedTitle": "Eigenschappen genummerde lijst",
"square": "Vierkant",
"start": "Start",
"type": "Type",
"upperAlpha": "Hoofdletters (A, B, C, D, E, etc.)",
"upperRoman": "Romeinse hoofdletters (I, II, III, IV, V, etc.)",
"validateStartNumber": "Startnummer van de lijst moet een heel nummer zijn."
},
"magicline": {"title": "Hier paragraaf invoeren"},
"maximize": {"maximize": "Maximaliseren", "minimize": "Minimaliseren"},
"newpage": {"toolbar": "Nieuwe pagina"},
"pagebreak": {"alt": "Pagina-einde", "toolbar": "Pagina-einde invoegen"},
"pastetext": {"button": "Plakken als platte tekst", "title": "Plakken als platte tekst"},
"pastefromword": {
"confirmCleanup": "De tekst die u wilt plakken lijkt gekopieerd te zijn vanuit Word. Wilt u de tekst opschonen voordat deze geplakt wordt?",
"error": "Het was niet mogelijk om de geplakte tekst op te schonen door een interne fout",
"title": "Plakken vanuit Word",
"toolbar": "Plakken vanuit Word"
},
"preview": {"preview": "Voorbeeld"},
"print": {"toolbar": "Afdrukken"},
"removeformat": {"toolbar": "Opmaak verwijderen"},
"save": {"toolbar": "Opslaan"},
"selectall": {"toolbar": "Alles selecteren"},
"showblocks": {"toolbar": "Toon blokken"},
"sourcearea": {"toolbar": "Broncode"},
"specialchar": {
"options": "Speciale tekens opties",
"title": "Selecteer speciaal teken",
"toolbar": "Speciaal teken invoegen"
},
"scayt": {
"btn_about": "Over SCAYT",
"btn_dictionaries": "Woordenboeken",
"btn_disable": "SCAYT uitschakelen",
"btn_enable": "SCAYT inschakelen",
"btn_langs": "Talen",
"btn_options": "Opties",
"text_title": "Controleer de spelling tijdens het typen"
},
"stylescombo": {
"label": "Stijl",
"panelTitle": "Opmaakstijlen",
"panelTitle1": "Blok stijlen",
"panelTitle2": "Inline stijlen",
"panelTitle3": "Object stijlen"
},
"table": {
"border": "Randdikte",
"caption": "Onderschrift",
"cell": {
"menu": "Cel",
"insertBefore": "Voeg cel in voor",
"insertAfter": "Voeg cel in na",
"deleteCell": "Cellen verwijderen",
"merge": "Cellen samenvoegen",
"mergeRight": "Voeg samen naar rechts",
"mergeDown": "Voeg samen naar beneden",
"splitHorizontal": "Splits cel horizontaal",
"splitVertical": "Splits cel vertikaal",
"title": "Celeigenschappen",
"cellType": "Celtype",
"rowSpan": "Rijen samenvoegen",
"colSpan": "Kolommen samenvoegen",
"wordWrap": "Automatische terugloop",
"hAlign": "Horizontale uitlijning",
"vAlign": "Verticale uitlijning",
"alignBaseline": "Tekstregel",
"bgColor": "Achtergrondkleur",
"borderColor": "Randkleur",
"data": "Gegevens",
"header": "Kop",
"yes": "Ja",
"no": "Nee",
"invalidWidth": "De celbreedte moet een getal zijn.",
"invalidHeight": "De celhoogte moet een getal zijn.",
"invalidRowSpan": "Rijen samenvoegen moet een heel getal zijn.",
"invalidColSpan": "Kolommen samenvoegen moet een heel getal zijn.",
"chooseColor": "Kies"
},
"cellPad": "Celopvulling",
"cellSpace": "Celafstand",
"column": {
"menu": "Kolom",
"insertBefore": "Voeg kolom in voor",
"insertAfter": "Voeg kolom in na",
"deleteColumn": "Kolommen verwijderen"
},
"columns": "Kolommen",
"deleteTable": "Tabel verwijderen",
"headers": "Koppen",
"headersBoth": "Beide",
"headersColumn": "Eerste kolom",
"headersNone": "Geen",
"headersRow": "Eerste rij",
"invalidBorder": "De randdikte moet een getal zijn.",
"invalidCellPadding": "Celopvulling moet een getal zijn.",
"invalidCellSpacing": "Celafstand moet een getal zijn.",
"invalidCols": "Het aantal kolommen moet een getal zijn groter dan 0.",
"invalidHeight": "De tabelhoogte moet een getal zijn.",
"invalidRows": "Het aantal rijen moet een getal zijn groter dan 0.",
"invalidWidth": "De tabelbreedte moet een getal zijn.",
"menu": "Tabeleigenschappen",
"row": {
"menu": "Rij",
"insertBefore": "Voeg rij in voor",
"insertAfter": "Voeg rij in na",
"deleteRow": "Rijen verwijderen"
},
"rows": "Rijen",
"summary": "Samenvatting",
"title": "Tabeleigenschappen",
"toolbar": "Tabel",
"widthPc": "procent",
"widthPx": "pixels",
"widthUnit": "eenheid breedte"
},
"undo": {"redo": "Opnieuw uitvoeren", "undo": "Ongedaan maken"},
"wsc": {
"btnIgnore": "Negeren",
"btnIgnoreAll": "Alles negeren",
"btnReplace": "Vervangen",
"btnReplaceAll": "Alles vervangen",
"btnUndo": "Ongedaan maken",
"changeTo": "Wijzig in",
"errorLoading": "Er is een fout opgetreden bij het laden van de dienst: %s.",
"ieSpellDownload": "De spellingscontrole is niet geïnstalleerd. Wilt u deze nu downloaden?",
"manyChanges": "Klaar met spellingscontrole: %1 woorden aangepast",
"noChanges": "Klaar met spellingscontrole: geen woorden aangepast",
"noMispell": "Klaar met spellingscontrole: geen fouten gevonden",
"noSuggestions": "- Geen suggesties -",
"notAvailable": "Excuses, deze dienst is momenteel niet beschikbaar.",
"notInDic": "Niet in het woordenboek",
"oneChange": "Klaar met spellingscontrole: één woord aangepast",
"progress": "Bezig met spellingscontrole...",
"title": "Spellingscontrole",
"toolbar": "Spellingscontrole"
}
};
<|file_name|>lmmanalysisave_septcut4and2ifsame.py<|end_file_name|>
#!/usr/bin/python
import os,sys,glob,re
import numpy as np
import scipy
from scipy import stats
import datetime
import time
from datetime import timedelta
#import matplotlib
#matplotlib.use('Agg')
#import matplotlib.pyplot as plt
#from matplotlib import colors as c
#from matplotlib import cm
from scipy.stats.kde import gaussian_kde
from numpy import linspace
from scipy.stats import kruskal
#from scipy.stats import nanmean
#from scipy.stats import nanmedian
import pandas as pd
import statsmodels.api as sm
from scipy.stats import mstats
#freqlist = ["numberofbouts_min", "numberofbouts_10min", "dpixnumberofbouts_min", "dpixnumberofbouts_10min", "aveinterboutinterval_min", "aveinterboutinterval_10min", "avedpixinterboutinterval_min", "avedpixinterboutinterval_10min", "dpixsecpermin", "dpixminper10min", "distsecpermin", "distminper10min"]
#loclist = ["interboutcenterfrac", "interboutaverhofrac", "centerfrac", "averhofrac"]
#featlist = ["dpixavebouttime_min", "dpixavebouttime_10min", "aveboutvel_min", "aveboutvel_10min", "avebouttime_min", "avebouttime_10min", "aveboutspeed_min", "aveboutspeed_10min", "aveboutdist_min", "aveboutdist_10min", "aveboutdisp_min", "aveboutdisp_10min", "aveboutcumdpix_min", "aveboutcumdpix_10min"]
nonstimcombos = {"Frequency of movement": ["numberofbouts_min", "numberofbouts_10min", "dpixnumberofbouts_min", "dpixnumberofbouts_10min", "aveinterboutinterval_min", "aveinterboutinterval_10min", "avedpixinterboutinterval_min", "avedpixinterboutinterval_10min", "dpixsecper_min", "dpixminper_10min", "distsecper_min", "distminper_10min"], "Location in well": ["interboutcenterfrac_min", "interboutaverhofrac_min", "centerfrac_min", "averhofrac_min","interboutcenterfrac_10min", "interboutaverhofrac_10min", "centerfrac_10min", "averhofrac_10min"], "Features of movement": ["dpixavebouttime_min", "dpixavebouttime_10min", "aveboutvel_min", "aveboutvel_10min", "avebouttime_min", "avebouttime_10min", "aveboutspeed_min", "aveboutspeed_10min", "aveboutdist_min", "aveboutdist_10min", "aveboutdisp_min", "aveboutdisp_10min", "aveboutcumdpix_min", "aveboutcumdpix_10min"]}
typecombos = [["Night tap habituation", "Day tap habituation 1", "Day tap habituation 2", "Day tap habituation 3"], ["Day light flash", "Night light flash"],["Night early prepulse tap", "Day early prepulse tap"], ["Night all prepulse tap", "Day all prepulse tap"], ["Day all strong tap", "Night all strong tap"], ["Day early strong tap","Night early strong tap"],["Night early weak tap", "Day early weak tap"], ["Day all weak tap", "Night all weak tap"], ["Dark flash block 3 start","Dark flash block 3 end","Dark flash block 4 start","Dark flash block 4 end","Dark flash block 1 start","Dark flash block 1 end","Dark flash block 2 start","Dark flash block 2 end"]]
stimcombos = {
#"Day light flash and weak tap": ["106106"],
#"Night light flash and weak tap": ["night106106"],
"Night tap habituation": ["nighttaphab102", "nighttaphab1"],
"Day tap habituation 1": ["adaytaphab102", "adaytaphab1"],
"Day tap habituation 3": ["cdaytaphab102", "cdaytaphab1"],
"Day tap habituation 2": ["bdaytaphab102", "bdaytaphab1"],
"Day light flash": ["lightflash104"],
#"Day light flash": ["lightflash104", "lightflash0"],
"Night light flash": ["nightlightflash104"],
#"Night light flash": ["nightlightflash104", "nightlightflash0"],
"Night early prepulse tap": ["shortnightprepulseinhibition100b"],
#"Night early prepulse tap": ["shortnightprepulseinhibition100b", "shortnightprepulseinhibition100c"],
"Night all prepulse tap": ["nightprepulseinhibition100b"],
#"Night all prepulse tap": ["nightprepulseinhibition100b", "nightprepulseinhibition100c"],
"Day early prepulse tap": ["shortdayprepulseinhibition100b"],
#"Day early prepulse tap": ["shortdayprepulseinhibition100b", "shortdayprepulseinhibition100c"],
"Day all prepulse tap": ["dayprepulseinhibition100b"],
#"Day all prepulse tap": ["dayprepulseinhibition100b", "dayprepulseinhibition100c"],
"Day all weak tap": ["dayprepulseinhibition100a", "dayprepulseinhibition101"],
"Day early weak tap": ["shortdayprepulseinhibition100a", "shortdayprepulseinhibition101"],
"Night all weak tap": ["nightprepulseinhibition100a", "nightprepulseinhibition101"],
"Night early weak tap": ["shortnightprepulseinhibition100a", "shortnightprepulseinhibition101"],
"Day early strong tap": ["adaytappre102", "shortdayprepulseinhibition102"],
#"Day early strong tap": ["adaytappre102", "adaytappre1", "shortdayprepulseinhibition102"],
"Day all strong tap": ["dayprepulseinhibition102", "adaytappostbdaytappre102","bdaytappostcdaytappre102", "cdaytappost102"],
#"Day all strong tap": ["dayprepulseinhibition102", "adaytappostbdaytappre102","bdaytappostcdaytappre102", "bdaytappostcdaytappre1", "cdaytappost1", "cdaytappost102","adaytappostbdaytappre1"],
"Night early strong tap": ["nighttappre102"],
#"Night early strong tap": ["nighttappre1", "nighttappre102"],
"Night all strong tap": ["nightprepulseinhibition102","nighttappost102"],
#"Night all strong tap": ["nightprepulseinhibition102","nighttappost102", "nighttappost1"],
#"Dark flash all blocks": ["darkflash103", "darkflash0"],
"Dark flash block 3 start": ["cdarkflash103"],
"Dark flash block 3 end": ["c2darkflash103"],
"Dark flash block 1 start": ["adarkflash103"],
"Dark flash block 1 end": ["a2darkflash103"],
"Dark flash block 2 start": ["bdarkflash103"],
"Dark flash block 2 end": ["b2darkflash103"],
"Dark flash block 4 start": ["ddarkflash103"],
"Dark flash block 4 end": ["d2darkflash103"]}
# "Dark flash block 3 start": ["cdarkflash103", "cdarkflash0"],
# "Dark flash block 3 end": ["c2darkflash103", "c2darkflash0"],
# "Dark flash block 1 start": ["adarkflash103", "adarkflash0"],
# "Dark flash block 1 end": ["a2darkflash103", "a2darkflash0"],
# "Dark flash block 2 start": ["bdarkflash103", "bdarkflash0"],
# "Dark flash block 2 end": ["b2darkflash103", "b2darkflash0"],
# "Dark flash block 4 start": ["ddarkflash103", "ddarkflash0"],
# "Dark flash block 4 end": ["d2darkflash103", "d2darkflash0"]}
#direction = {
# "aveboutspeed": 1
# "aveboutspeed": 1
# ones that are opposite of expected
# fullboutdatamaxloc (max peak location (larger is less strong of response))
# latency (longer is less good), similar to max peak
# aveinterboutinterval
# rho or centerfrac, not sure which orientation would want
# make wall-hugging positive
# lower centerfrac means more positive, which is how it is right now I think, yes, so if I default everything to switching signs, then averhofrac is the odd one out and should be skipped
# for most, larger should mean - and should mean mutant is stronger response or more movement
# need to make most into opposite
# standard
# cumdpix, displacement, distance, speed, velocity, secpermin, numberofbouts, frequency of response, polygonarea
# unsure - fullboutdata as done with linear model, and also the dark flash ones done with linear model
#}
direction_swaps = ["rhofrac", "latency", "interboutinterval", "fullboutdatamaxloc"]
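# Illustrative sign convention (not part of the original source; values are
# made up): for most measures a positive wt-minus-mut mean flips the tagged
# p-value negative (mutant moves less); for measures in direction_swaps the
# flip is inverted, e.g. a longer mutant interboutinterval still means the
# mutant responds less:
#   svey = ['..._aveinterboutinterval_min_x.png', [0.01, -1.2]]  # wt < mut
#   # -> tagged p-value becomes -0.01 in the swap logic further below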
for file in glob.glob("*linearmodel*"): # THIS IS WHAT THE PRINT OUTPUT MUST POINT TO, CAN HAVE SOMETHING AT END, BUT MUST START THIS WAY
if "finalsorted" in file:
continue
dir = os.path.basename(os.path.dirname(os.path.realpath(__file__)))
ffile = open('finalsortedupdatedCP4or2_' + file + "_" + dir, 'w')
ofile = open(file, 'r')
lines = ofile.readlines()
pdict = {}
for line in lines:
# anova data
if line.startswith("anova:"):
pval = line.split(":")[3].strip().split()[3].strip()
#anova: ribgraph_mean_ribbon_latencyresponse_dpix_nighttappost102.png : Mean of array wt, mut, H-stat, P-value: 25.8557471264 21.4177419355 2.63243902441 0.104700765405
meanwtminmut = float(line.split(":")[3].strip().split()[0]) - float(line.split(":")[3].strip().split()[1])
name = line.split(":")[1].strip()
pdict[name] = [pval, meanwtminmut]
# ffile.write(str(pval))
# ffile.write(', ')
# ffile.write(str(meanwtminmut))
# ffile.write(', ')
# ffile.write(name.strip())
# ffile.write('\n')
# linear mixed model data - this formatting could change if I change the linear model I'm using
else:
list = []
for line in range(0, len(lines)):
#print lines[line]
if lines[line].startswith("mutornot[T.wt] "):
#print lines[line]
if len(lines[line].split()) > 3:
pvalue = lines[line].split()[4]
coef = lines[line].split()[1]
if float(pvalue) == 0:
pvalue = 0.001
list.append((float(pvalue), float(coef), lines[line-13].strip()))
#list.append((float(pvalue), lines[line-13].strip(), lines[line].split()[1:6]))
# list2 = sorted(list, key=lambda x: x[0])
for fline in list:
#pdict[str(fline[2])] = (str(fline[0])[:8], str(fline[1])[:8])
pdict[str(fline[2])] = [str(fline[0])[:8], str(fline[1])[:8]]
#ffile.write(str(fline[0])[:8])
#ffile.write(', ')
#ffile.write(str(fline[1])[:8])
#ffile.write(', ')
#ffile.write(str(fline[2]))
#ffile.write('\n')
splitdict = {}
for k in pdict:
# k = ribgraph_mean_ribbonbout_dpixavebouttime_min_day1taps.png
        # section = day1taps
        # or section = adaytappostbdaytappre102
        if k.startswith("ratio"):
            continue
        section = k.split('.')[0].split('_')[-1]
        for k2 in nonstimcombos.keys():
# k2 = "Frequency of movement"
for v2 in nonstimcombos[k2]:
# v2 = numberofbouts_min
if v2 in k:
test = False
for k3 in splitdict.keys():
if (k2 + " " + section) == k3:
test = True
if test == False:
splitdict[k2 + " " + section] = []
splitdict[k2 + " " + section].append([k,pdict[k]])
else:
splitdict[k2 + " " + section].append([k,pdict[k]])
break
for sk2 in stimcombos.keys():
# sk2 = "Night light flash"
for sv2 in stimcombos[sk2]:
# sv2 = nightlightflash104
if sv2 == k.split('.')[0].split('_')[-1]:
# combining everything for these stimuli responses
test = False
for sk3 in splitdict.keys():
if sk2 == sk3:
test = True
if test == False:
splitdict[sk2] = []
splitdict[sk2].append([k,pdict[k]])
else:
splitdict[sk2].append([k,pdict[k]])
break
for skey in splitdict.keys():
lowest = 10
listints = []
cutpoint = 0.05
cutpointnumber = 3
if skey in stimcombos.keys():
cutpointnumber = 4
else:
cutpointnumber = 3
cutlist = []
for t in typecombos:
for tt in t:
if skey == tt:
#cutpointnumber = 4
#print "TEST", skey, t
import copy
shortt = copy.copy(t)
shortt.remove(tt)
#print shortt
for svey0 in splitdict[skey]:
if abs(float(svey0[1][0])) < cutpoint:
if "bigmovesribgraph_mean_ribbon_freqresponse_dpix_" in svey0[0] and "100b.png" in svey0[0]:
cutpointnumber = 0
#print "testing1 ", skey, svey0
for ttt in shortt:
for tsvey in splitdict[ttt]:
#print "testing3", ttt, tsvey
if '_'.join(svey0[0].split('.')[0].split('_')[:-1]) == '_'.join(tsvey[0].split('.')[0].split('_')[:-1]):
#print "testing4", ttt, tsvey, '_'.join(svey0[0].split('.')[0].split('_')[:-1]), '_'.join(tsvey[0].split('.')[0].split('_')[:-1])
if abs(float(tsvey[1][0])) < cutpoint:
#print "testing5", tsvey
cutpointnumber = 2
break
for svey in splitdict[skey]:
switch = False
for x in direction_swaps:
if x in svey[0]:
switch = True
if switch == False:
if float(svey[1][1]) > 0:
# change the sign of the original data
# if wt is moving more than mutant (>0), want signs swapped so mutant is over wt (ie, mutant moving less than wt has - number)
svey[1][0] = float(svey[1][0]) * -1
# else, data is fine as is
else: # switch == True
# in the cases where a switch is needed for the sign (such as interboutinterval because it's opposite when considering frequency)
if float(svey[1][1]) < 0: # if wt has greater interboutinterval and then the number is positive (ie, mutant moves more), don't swap, do swap if <
# change the sign of the original data
svey[1][0] = float(svey[1][0]) * -1
for svey in splitdict[skey]:
#print skey, svey
listints.append(float(svey[1][0]))
if abs(float(svey[1][0])) < abs(lowest):
lowest = float(svey[1][0])
if abs(float(svey[1][0])) < cutpoint:
cutlist.append(float(svey[1][0]))
ave = np.mean(np.absolute(np.asarray(listints)))
if lowest < 0:
ave = ave * -1
if len(cutlist) > cutpointnumber:
cutave = np.mean(np.absolute(np.asarray(cutlist)))
if lowest < 0:
cutave = cutave * -1
else:
cutave = ave
ffile.write("Lowest ")
ffile.write(skey)
ffile.write(": ")
ffile.write(str(lowest))
ffile.write('\n')
ffile.write("Average ")
ffile.write(skey)
ffile.write(": ")
ffile.write(str(ave))
ffile.write('\n')
ffile.write("Lowaverage (reg if not >")#3, <0.05) ")
ffile.write(str(cutpointnumber))
ffile.write(", <0.05) ")
ffile.write(skey)
ffile.write(": ")
ffile.write(str(cutave))
ffile.write('\n')
for svey in splitdict[skey]:
ffile.write(str(svey[0]))
ffile.write(', ')
ffile.write(str(svey[1][0]))
ffile.write(', ')
ffile.write(str(svey[1][1]))
ffile.write('\n')
#print splitdict
#ffile.write(k)
#ffile.write(', ')
#ffile.write(str(pdict[k][0]))
#ffile.write(', ')
#ffile.write(str(pdict[k][1]))
#ffile.write('\n')<|fim▁end|> | # or section = adaytappostbdaytappre102
if k.startswith("ratio"):
continue
section = k.split('.')[0].split('_')[-1] |
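# A self-contained sketch of the sign convention applied above, assuming the
# same inputs: each p-value is given a sign so that a negative value always
# means "mutant moves less than wild type". Metrics listed in direction_swaps
# (e.g. interboutinterval) are flipped because larger values there mean less
# movement. All names below are illustrative only.
def signed_pvalue(pvalue, wt_minus_mut, metric,
                  swaps=("rhofrac", "latency", "interboutinterval", "fullboutdatamaxloc")):
    flipped = any(s in metric for s in swaps)
    if (not flipped and wt_minus_mut > 0) or (flipped and wt_minus_mut < 0):
        return -abs(pvalue)
    return abs(pvalue)

assert signed_pvalue(0.01, 2.5, "numberofbouts_min") == -0.01  # wt moves more
assert signed_pvalue(0.01, 2.5, "interboutinterval") == 0.01   # swapped metric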
<|file_name|>control.component.ts<|end_file_name|><|fim▁begin|>import { Component, ElementRef, ChangeDetectionStrategy, ChangeDetectorRef, OnDestroy, OnInit } from '@angular/core';
import { Control } from 'ol/control';
import { MapComponent } from './map.component';
@Component({
selector: 'ol-map > ol-control',
template: '<ng-content></ng-content>',
changeDetection: ChangeDetectionStrategy.OnPush
})
export class ControlComponent implements OnInit, OnDestroy {
protected control: Control;
public element: HTMLElement;
constructor(
protected changeDetectorRef: ChangeDetectorRef,
protected elementRef: ElementRef,
protected mapComponent: MapComponent
) {
this.changeDetectorRef.detach();<|fim▁hole|> if(this.elementRef.nativeElement) {
this.element = this.elementRef.nativeElement;
this.control = new Control({element: this.element});
this.mapComponent.getMap().addControl(this.control);
}
}
ngOnDestroy() {
if (this.control) {
this.mapComponent.getMap().removeControl(this.control);
this.control = null;
}
}
}<|fim▁end|> | }
ngOnInit() { |
<|file_name|>20170106.py<|end_file_name|><|fim▁begin|>a = "python"
print(a*2)
try:
print(a[-10])
except IndexError as e:
print("인덱스 범위를 초과 했습니다.")
print(e)
print(a[0:4])
print(a[1:-2])
# the -10 left-justifies "hi", padding to 10 characters on the right
print("%-10sjane." % "hi")
b = "Python is best choice."
print(b.find("b"))
print(b.find("B"))
try:
print(b.index("B"))
except ValueError as e:
print(e)
c = "hi"
print(c.upper())
a = " hi"
print("kk",a.lstrip())<|fim▁hole|>a = " hi "
print(a.strip())<|fim▁end|> | |
<|file_name|>Q424.py<|end_file_name|><|fim▁begin|>a=['999','1']
max_length=len(max(a,key=len))
tem=0
carry=0
for i in xrange(len(a)):
while len(a[i])<max_length:
a[i]='0'+a[i]
for x in xrange(len(a)):<|fim▁hole|> a[x]=a[x][::-1]
out=''
for x in xrange(max_length):
for i in a:
tem+=int(i[x])
print(i[x],carry)
tem+=carry
carry=0
print(tem)
    if tem>9:
        carry=tem//10
        out=str(tem%10)+out
    else:
        # no carry: still emit this position's digit
        out=str(tem)+out
    tem=0
if carry > 0:
out=str(carry)+out
print (out)<|fim▁end|> | |
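# The digit-by-digit loop above can be cross-checked against Python's
# arbitrary-precision integers; this sketch assumes the same input format
# (a list of non-negative integer strings).
def sum_digit_strings(strings):
    return str(sum(int(s) for s in strings))

assert sum_digit_strings(['999', '1']) == '1000'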
<|file_name|>textattributes.rs<|end_file_name|><|fim▁begin|>// This file is part of rgtk.
//
// rgtk is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by<|fim▁hole|>//
// rgtk is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with rgtk. If not, see <http://www.gnu.org/licenses/>.
//! GtkTextTag — A tag that can be applied to text in a GtkTextBuffer
use gtk::ffi;
pub struct TextAttributes {
pointer: *mut ffi::C_GtkTextAttributes
}
impl TextAttributes {
pub fn new() -> Option<TextAttributes> {
let tmp_pointer = unsafe { ffi::gtk_text_attributes_new() };
if tmp_pointer.is_null() {
None
} else {
Some(TextAttributes { pointer : tmp_pointer })
}
}
pub fn copy(&self) -> Option<TextAttributes> {
let tmp_pointer = unsafe { ffi::gtk_text_attributes_copy(self.pointer) };
if tmp_pointer.is_null() {
None
} else {
Some(TextAttributes { pointer : tmp_pointer })
}
}
pub fn copy_values_from(&self, src: &TextAttributes) {
unsafe { ffi::gtk_text_attributes_copy_values(src.pointer, self.pointer) }
}
pub fn unref(&self) {
unsafe { ffi::gtk_text_attributes_unref(self.pointer) }
}
pub fn _ref(&self) -> Option<TextAttributes> {
let tmp_pointer = unsafe { ffi::gtk_text_attributes_ref(self.pointer) };
if tmp_pointer.is_null() {
None
} else {
Some(TextAttributes { pointer : tmp_pointer })
}
}
}
impl_GObjectFunctions!(TextAttributes, C_GtkTextAttributes)<|fim▁end|> | // the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version. |
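# The constructors above return None when the underlying C pointer is null
# rather than wrapping an invalid handle. The same guard pattern, sketched in
# Python (new_handle stands in for the foreign allocator and is hypothetical):
class NullCheckedWrapper:
    def __init__(self, pointer):
        self.pointer = pointer

    @classmethod
    def new(cls, new_handle):
        pointer = new_handle()
        return cls(pointer) if pointer else None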
<|file_name|>index.js<|end_file_name|><|fim▁begin|>var path = require('path');
var async = require('async');
module.exports = function(content) {
var cb = this.async();
var json = JSON.parse(content);
async.mapSeries(
json.imports,
function(url, callback) {
this.loadModule(url, function(err, source, map, module) {
if (err) {
return callback(err);
}<|fim▁hole|> if (err) {
return cb(err);
}
// Combine all the results into one object and return it
cb(null, 'module.exports = ' + JSON.stringify(results.reduce(function(prev, result) {
return Object.assign({}, prev, result);
}, json)));
}
);
}<|fim▁end|> | callback(null, this.exec(source, url));
}.bind(this))
}.bind(this),
function(err, results) { |
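# The loader above resolves every entry of the JSON file's "imports" list and
# folds the results into one object, with later imports overriding earlier
# keys (mirroring the Object.assign reduce). A rough Python analogue of that
# merge, assuming the imports are plain JSON files on disk:
import json

def load_with_imports(path):
    with open(path) as f:
        merged = json.load(f)
    for imported in merged.get('imports', []):
        with open(imported) as f:
            merged.update(json.load(f))  # later imports win
    return merged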
<|file_name|>demo_CH_coords.py<|end_file_name|><|fim▁begin|>#------------------------------------------------------------------------------<|fim▁hole|>#
# Created: 1/01/2019
# Copyright: (c) ProjectChrono 2019
#------------------------------------------------------------------------------
print ("First tutorial for PyChrono: vectors, matrices etc.");
# Load the Chrono::Engine core module!
import pychrono as chrono
try:
import numpy as np
from numpy import linalg as LA
except ImportError:
print("You need NumPyto run this demo!")
# Test logging
chrono.GetLog().Bar()
chrono.GetLog() << "result is: " << 11+1.5 << "\n"
chrono.GetLog().Bar()
# Test vectors
my_vect1 = chrono.ChVectorD()
my_vect1.x=5
my_vect1.y=2
my_vect1.z=3
my_vect2 = chrono.ChVectorD(3,4,5)
my_vect4 = my_vect1*10 + my_vect2
my_len = my_vect4.Length()
print ('vect sum =', my_vect1 + my_vect2)
print ('vect cross =', my_vect1 % my_vect2)
print ('vect dot =', my_vect1 ^ my_vect2)
# Test quaternions
my_quat = chrono.ChQuaternionD(1,2,3,4)
my_qconjugate = ~my_quat
print ('quat. conjugate =', my_qconjugate)
print ('quat. dot product=', my_qconjugate ^ my_quat)
print ('quat. product=', my_qconjugate % my_quat)
# Test matrices and NumPy interoperability
mlist = [[1,2,3,4], [5,6,7,8], [9,10,11,12], [13,14,15,16]]
ma = chrono.ChMatrixDynamicD()
ma.SetMatr(mlist) # Create a Matrix from a list. Size is adjusted automatically.
npmat = np.asarray(ma.GetMatr()) # Create a 2D npy array from the list extracted from ChMatrixDynamic
w, v = LA.eig(npmat) # get eigenvalues and eigenvectors using numpy
mb = chrono.ChMatrixDynamicD(4,4)
prod = v * npmat # you can perform linear algebra operations with numpy and then feed results into a ChMatrixDynamicD using SetMatr
mb.SetMatr(v.tolist()) # create a ChMatrixDynamicD from the numpy eigenvectors
mr = chrono.ChMatrix33D()
mr.SetMatr([[1,2,3], [4,5,6], [7,8,9]])
print (mr*my_vect1);
# Test frames -
# create a frame representing a translation and a rotation
# of 20 degrees on X axis
my_frame = chrono.ChFrameD(my_vect2, chrono.Q_from_AngAxis(20*chrono.CH_C_DEG_TO_RAD, chrono.ChVectorD(1,0,0)))
my_vect5 = my_vect1 >> my_frame
# Print the class hierarchy of a chrono class
import inspect
inspect.getmro(chrono.ChStreamOutAsciiFile)
# Use the ChFunction classes
my_funct = chrono.ChFunction_Sine(0,0.5,3)
print ('function f(0.2)=', my_funct.Get_y(0.2) )
# Inherit from the ChFunction, from the Python side,
# (do not forget the __init__ constructor)
class MySquareFunct (chrono.ChFunction):
def __init__(self):
chrono.ChFunction.__init__(self)
def Get_y(self,x):
return x*x
my_funct2 = MySquareFunct()
print ('function f(2) =', my_funct2.Get_y(3) )
print ('function df/dx=', my_funct2.Get_y_dx(3) )<|fim▁end|> | # Name: pychrono example
# Purpose:
#
# Author: Alessandro Tasora |
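# The frame transform above (my_vect1 >> my_frame) applies a rotation followed
# by a translation. For comparison, the same 20-degree rotation about X can be
# reproduced with plain NumPy; this sketch assumes only numpy is installed.
import numpy as np

theta = np.deg2rad(20.0)
rot_x = np.array([[1.0, 0.0, 0.0],
                  [0.0, np.cos(theta), -np.sin(theta)],
                  [0.0, np.sin(theta), np.cos(theta)]])
v = np.array([5.0, 2.0, 3.0])            # components of my_vect1
translation = np.array([3.0, 4.0, 5.0])  # components of my_vect2
print(rot_x @ v + translation)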
<|file_name|>ssupplier.js<|end_file_name|><|fim▁begin|>/**
* Copyright (C) 2013 Emay Komarudin
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* @author Emay Komarudin
*<|fim▁hole|>
Ext.define('App.store.PO.ssupplier', {
extend: 'Ext.data.Store',
fields: [
'id',
'address',
"email",
"fax",
"phone",
"name"
],
proxy: {
type: 'rest',
url: getApiUrl() + '/suppliers',
reader: {
type: 'json',
root: 'results',
totalProperty: 'total'
}
}
});<|fim▁end|> | **/
|
<|file_name|>alert.js<|end_file_name|><|fim▁begin|>angular.module('ui.bootstrap.alert', [])
.controller('UibAlertController', ['$scope', '$attrs', '$timeout', function($scope, $attrs, $timeout) {
$scope.closeable = !!$attrs.close;
if (angular.isDefined($attrs.dismissOnTimeout)) {
$timeout(function() {
$scope.close();
}, parseInt($attrs.dismissOnTimeout, 10));
}
}])
.directive('uibAlert', function() {<|fim▁hole|> return attrs.templateUrl || 'template/alert/alert.html';
},
transclude: true,
replace: true,
scope: {
type: '@',
close: '&'
}
};
});
/* Deprecated alert below */
angular.module('ui.bootstrap.alert')
.value('$alertSuppressWarning', false)
.controller('AlertController', ['$scope', '$attrs', '$controller', '$log', '$alertSuppressWarning', function($scope, $attrs, $controller, $log, $alertSuppressWarning) {
if (!$alertSuppressWarning) {
$log.warn('AlertController is now deprecated. Use UibAlertController instead.');
}
angular.extend(this, $controller('UibAlertController', {
$scope: $scope,
$attrs: $attrs
}));
}])
.directive('alert', ['$log', '$alertSuppressWarning', function($log, $alertSuppressWarning) {
return {
controller: 'AlertController',
controllerAs: 'alert',
templateUrl: function(element, attrs) {
return attrs.templateUrl || 'template/alert/alert.html';
},
transclude: true,
replace: true,
scope: {
type: '@',
close: '&'
},
link: function() {
if (!$alertSuppressWarning) {
$log.warn('alert is now deprecated. Use uib-alert instead.');
}
}
};
}]);<|fim▁end|> | return {
controller: 'UibAlertController',
controllerAs: 'alert',
templateUrl: function(element, attrs) { |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import React, {Component, PropTypes} from 'react'
import style from './style.js'
import ErrorStackParser from 'error-stack-parser'
import assign from 'object-assign'
import {isFilenameAbsolute, makeUrl, makeLinkText} from './lib'
export default class RedBox extends Component {
static propTypes = {
error: PropTypes.instanceOf(Error).isRequired,
filename: PropTypes.string,
editorScheme: PropTypes.string,
useLines: PropTypes.bool,
useColumns: PropTypes.bool
}
static displayName = 'RedBox'
static defaultProps = {
useLines: true,
useColumns: true
}
render () {
const {error, filename, editorScheme, useLines, useColumns} = this.props
const {redbox, message, stack, frame, file, linkToFile} = assign({}, style, this.props.style)
const frames = ErrorStackParser.parse(error).map((f, index) => {
let text
let url
if (index === 0 && filename && !isFilenameAbsolute(f.fileName)) {
url = makeUrl(filename, editorScheme)
text = makeLinkText(filename)
} else {
let lines = useLines ? f.lineNumber : null
let columns = useColumns ? f.columnNumber : null
url = makeUrl(f.fileName, editorScheme, lines, columns)
text = makeLinkText(f.fileName, lines, columns)
}
return (
<div style={frame} key={index}>
<div>{f.functionName}</div>
<div style={file}>
<a href={url} style={linkToFile}>{text}</a>
</div><|fim▁hole|> return (
<div style={redbox}>
<div style={message}>{error.name}: {error.message}</div>
<div style={stack}>{frames}</div>
</div>
)
}
}<|fim▁end|> | </div>
)
}) |
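# RedBox renders one linkable frame per entry of the parsed stack trace.
# Python exposes the same (function, file, line) triples natively; a sketch
# of the equivalent frame list for any caught exception:
import traceback

def frames_of(exc):
    return [(f.name, f.filename, f.lineno)
            for f in traceback.extract_tb(exc.__traceback__)]

try:
    1 / 0
except ZeroDivisionError as exc:
    for name, filename, lineno in frames_of(exc):
        print('{} ({}:{})'.format(name, filename, lineno))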
<|file_name|>a301_readfile.py<|end_file_name|><|fim▁begin|>"""
download a file named filename from the atsc301 downloads directory
and save it as a local file with the same name.
command line example::
python -m a301utils.a301_readfile photon_data.csv
module example::
from a301utils.a301_readfile import download
download('photon_data.csv')
"""
import argparse
import requests
from pathlib import Path
import sys
import os
import shutil
def download(filename):
"""
copy file filename from http://clouds.eos.ubc.ca/~phil/courses/atsc301/downloads to
the local directory
Parameters
----------
filename: string
name of file to fetch from
Returns
-------
Side effect: Creates a copy of that file in the local directory
"""
url = 'https://clouds.eos.ubc.ca/~phil/courses/atsc301/downloads/{}'.format(filename)<|fim▁hole|> the_size = filepath.stat().st_size
print(('\n{} already exists\n'
'and is {} bytes\n'
'will not overwrite\n').format(filename,the_size))
return None
tempfile = str(filepath) + '_tmp'
temppath = Path(tempfile)
with open(tempfile, 'wb') as localfile:
response = requests.get(url, stream=True)
if not response.ok:
print('response: ',response)
raise Exception('Something is wrong, requests.get() failed with filename {}'.format(filename))
for block in response.iter_content(1024):
if not block:
break
localfile.write(block)
the_size=temppath.stat().st_size
if the_size < 10.e3:
print('Warning -- your file is tiny (smaller than 10 Kbyte)\nDid something go wrong?')
shutil.move(tempfile,filename)
the_size=filepath.stat().st_size
print('downloaded {}\nsize = {}'.format(filename,the_size))
return None
if __name__ == "__main__":
linebreaks=argparse.RawTextHelpFormatter
descrip=__doc__.lstrip()
parser = argparse.ArgumentParser(formatter_class=linebreaks,description=descrip)
parser.add_argument('filename',type=str,help='name of file to download')
args=parser.parse_args()
download(args.filename)<|fim▁end|> | filepath = Path('./{}'.format(filename))
if filepath.exists(): |
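# The essence of download() is streaming in fixed-size chunks through a
# temporary file so an interrupted transfer never leaves a half-written
# target; a condensed sketch of that pattern (url and destination are
# placeholders, not values from the course site):
import shutil
import requests

def stream_to(url, destination, chunk_size=1024):
    tmp = destination + '_tmp'
    with open(tmp, 'wb') as out:
        response = requests.get(url, stream=True)
        response.raise_for_status()
        for block in response.iter_content(chunk_size):
            out.write(block)
    shutil.move(tmp, destination)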
<|file_name|>list-nodes.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# LIST NODES PY
# Extract just the nodes from the JSON file for human inspection
import argparse, json
parser = argparse.ArgumentParser()
parser.add_argument('plan', type=str, help='Plan data file')
args = parser.parse_args()
<|fim▁hole|> J = json.load(fp)
except Exception as e:
print("could not read JSON in file: %s\n" % args.plan + str(e))
exit(1)
for k in J.keys():
print(k)<|fim▁end|> | try:
with open(args.plan) as fp: |
<|file_name|>test_integration_tester.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# encoding: utf-8
"""
Functional tests of the RabbitMQ Workers
"""
import mock
import json
import unittest
import ADSDeploy.app as app
from ADSDeploy.pipeline.workers import IntegrationTestWorker, \
DatabaseWriterWorker
from ADSDeploy.webapp.views import MiniRabbit
from ADSDeploy.models import Base, Deployment
RABBITMQ_URL = 'amqp://guest:[email protected]:6672/adsdeploy_test?' \
'socket_timeout=10&backpressure_detection=t'
class TestIntegrationTestWorker(unittest.TestCase):
"""
Tests the functionality of the Integration Worker
"""
def setUp(self):
# Create queue
with MiniRabbit(RABBITMQ_URL) as w:
w.make_queue('in', exchange='test')
w.make_queue('out', exchange='test')
w.make_queue('database', exchange='test')
# Create database
app.init_app({
'SQLALCHEMY_URL': 'sqlite://',
'SQLALCHEMY_ECHO': False,
})
Base.metadata.bind = app.session.get_bind()
Base.metadata.create_all()
self.app = app
def tearDown(self):
# Destroy queue
with MiniRabbit(RABBITMQ_URL) as w:
w.delete_queue('in', exchange='test')
w.delete_queue('out', exchange='test')
w.delete_queue('database', exchange='test')
# Destroy database
Base.metadata.drop_all()
self.app.close_app()
@mock.patch('ADSDeploy.pipeline.integration_tester.IntegrationTestWorker.run_test')
def test_workflow_of_integration_worker(self, mock_run_test):
"""
General work flow of the integration worker from receiving a packet,
to finishing with a packet.
"""
# Worker receives a packet, most likely from the deploy worker
# Example packet:
#
# {
# 'application': 'staging',
# 'service': 'adsws',
# 'release': '',
# 'config': {},
# }
#
#
example_packet = {
'application': 'staging',
'service': 'adsws',
'version': 'v1.0.0',
'config': {},
'action': 'test'
}
expected_packet = example_packet.copy()
expected_packet['tested'] = True
# Override the run test returned value. This means the logic of the test
# does not have to be mocked
mock_run_test.return_value = expected_packet
with MiniRabbit(RABBITMQ_URL) as w:
w.publish(route='in', exchange='test', payload=json.dumps(example_packet))
# Worker runs the tests
params = {
'RABBITMQ_URL': RABBITMQ_URL,
'exchange': 'test',
'subscribe': 'in',
'publish': 'out',<|fim▁hole|> 'status': 'database',
'TEST_RUN': True
}
test_worker = IntegrationTestWorker(params=params)
test_worker.run()
test_worker.connection.close()
# Worker sends a packet to the next worker
with MiniRabbit(RABBITMQ_URL) as w:
m_in = w.message_count(queue='in')
m_out = w.message_count(queue='out')
p = w.get_packet(queue='out')
self.assertEqual(m_in, 0)
self.assertEqual(m_out, 1)
# Remove values that are not in the starting packet
self.assertTrue(p.pop('tested'))
self.assertEqual(
p,
example_packet
)
@mock.patch('ADSDeploy.pipeline.integration_tester.IntegrationTestWorker.run_test')
def test_db_writes_on_test_pass(self, mocked_run_test):
"""
Check that the database is being written to when a test passes
"""
# Stub data
packet = {
'application': 'adsws',
'environment': 'staging',
'version': 'v1.0.0',
}
expected_packet = packet.copy()
expected_packet['tested'] = True
mocked_run_test.return_value = expected_packet
# Start the IntegrationTester worker
params = {
'RABBITMQ_URL': RABBITMQ_URL,
'exchange': 'test',
'subscribe': 'in',
'publish': 'out',
'status': 'database',
'TEST_RUN': True
}
# Push to rabbitmq
with MiniRabbit(RABBITMQ_URL) as w:
w.publish(route='in', exchange='test', payload=json.dumps(packet))
test_worker = IntegrationTestWorker(params=params)
test_worker.run()
test_worker.connection.close()
# Assert there is a packet on the publish queue
with MiniRabbit(RABBITMQ_URL) as w:
self.assertEqual(w.message_count('out'), 1)
self.assertEqual(w.message_count('database'), 1)
# Start the DB Writer worker
params = {
'RABBITMQ_URL': RABBITMQ_URL,
'exchange': 'test',
'subscribe': 'database',
'TEST_RUN': True
}
db_worker = DatabaseWriterWorker(params=params)
db_worker.app = self.app
db_worker.run()
db_worker.connection.close()
with self.app.session_scope() as session:
all_deployments = session.query(Deployment).all()
self.assertEqual(
len(all_deployments),
1,
msg='More (or less) than 1 deployment entry: {}'
.format(all_deployments)
)
deployment = all_deployments[0]
for key in packet:
self.assertEqual(
packet[key],
getattr(deployment, key)
)
self.assertEqual(deployment.tested, True)<|fim▁end|> | |
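# MiniRabbit doubles as a small test fixture in the suite above: publish a
# payload, run a worker, then assert on queue depths. A condensed sketch of
# that round-trip, using only the calls already shown in this file:
import json
from ADSDeploy.webapp.views import MiniRabbit

def roundtrip_count(url, payload):
    with MiniRabbit(url) as w:
        w.publish(route='in', exchange='test', payload=json.dumps(payload))
        return w.message_count(queue='in')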
<|file_name|>ss_3par_cpg.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# Copyright: (c) 2018, Hewlett Packard Enterprise Development LP
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
short_description: Manage HPE StoreServ 3PAR CPG
author:
- Farhan Nomani (@farhan7500)
- Gautham P Hegde (@gautamphegde)
description:
- Create and delete CPG on HPE 3PAR.
module: ss_3par_cpg
options:
cpg_name:
description:
- Name of the CPG.
required: true
disk_type:
choices:
- FC
- NL
- SSD
description:
- Specifies that physical disks must have the specified device type.
domain:
description:
- Specifies the name of the domain in which the object will reside.
growth_increment:
description:
- Specifies the growth increment(in MiB, GiB or TiB) the amount of logical disk storage
created on each auto-grow operation.
growth_limit:
description:
- Specifies that the autogrow operation is limited to the specified
storage amount that sets the growth limit(in MiB, GiB or TiB).
growth_warning:
description:
- Specifies that the threshold(in MiB, GiB or TiB) of used logical disk space when exceeded
results in a warning alert.
high_availability:
choices:
- PORT
- CAGE
- MAG
description:
- Specifies that the layout must support the failure of one port pair,
one cage, or one magazine.
raid_type:
choices:
- R0
- R1
- R5
- R6
description:
- Specifies the RAID type for the logical disk.
set_size:<|fim▁hole|> state:
choices:
- present
- absent
description:
- Whether the specified CPG should exist or not.
required: true
secure:
description:
- Specifies whether the certificate needs to be validated while communicating.
type: bool
default: no
extends_documentation_fragment: hpe3par
version_added: 2.8
'''
EXAMPLES = r'''
- name: Create CPG sample_cpg
ss_3par_cpg:
storage_system_ip: 10.10.10.1
storage_system_username: username
storage_system_password: password
state: present
cpg_name: sample_cpg
domain: sample_domain
growth_increment: 32000 MiB
growth_limit: 64000 MiB
growth_warning: 48000 MiB
raid_type: R6
set_size: 8
high_availability: MAG
disk_type: FC
secure: no
- name: Delete CPG sample_cpg
ss_3par_cpg:
storage_system_ip: 10.10.10.1
storage_system_username: username
storage_system_password: password
state: absent
cpg_name: sample_cpg
secure: no
'''
RETURN = r'''
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.storage.hpe3par import hpe3par
try:
from hpe3par_sdk import client
from hpe3parclient import exceptions
HAS_3PARCLIENT = True
except ImportError:
HAS_3PARCLIENT = False
def validate_set_size(raid_type, set_size):
if raid_type:
set_size_array = client.HPE3ParClient.RAID_MAP[raid_type]['set_sizes']
if set_size in set_size_array:
return True
return False
def cpg_ldlayout_map(ldlayout_dict):
if ldlayout_dict['RAIDType'] is not None and ldlayout_dict['RAIDType']:
ldlayout_dict['RAIDType'] = client.HPE3ParClient.RAID_MAP[
ldlayout_dict['RAIDType']]['raid_value']
if ldlayout_dict['HA'] is not None and ldlayout_dict['HA']:
ldlayout_dict['HA'] = getattr(
client.HPE3ParClient, ldlayout_dict['HA'])
return ldlayout_dict
def create_cpg(
client_obj,
cpg_name,
domain,
growth_increment,
growth_limit,
growth_warning,
raid_type,
set_size,
high_availability,
disk_type):
try:
if not validate_set_size(raid_type, set_size):
return (False, False, "Set size %s not part of RAID set %s" % (set_size, raid_type))
if not client_obj.cpgExists(cpg_name):
ld_layout = dict()
disk_patterns = []
if disk_type:
disk_type = getattr(client.HPE3ParClient, disk_type)
disk_patterns = [{'diskType': disk_type}]
ld_layout = {
'RAIDType': raid_type,
'setSize': set_size,
'HA': high_availability,
'diskPatterns': disk_patterns}
ld_layout = cpg_ldlayout_map(ld_layout)
if growth_increment is not None:
growth_increment = hpe3par.convert_to_binary_multiple(
growth_increment)
if growth_limit is not None:
growth_limit = hpe3par.convert_to_binary_multiple(
growth_limit)
if growth_warning is not None:
growth_warning = hpe3par.convert_to_binary_multiple(
growth_warning)
optional = {
'domain': domain,
'growthIncrementMiB': growth_increment,
'growthLimitMiB': growth_limit,
'usedLDWarningAlertMiB': growth_warning,
'LDLayout': ld_layout}
client_obj.createCPG(cpg_name, optional)
else:
return (True, False, "CPG already present")
except exceptions.ClientException as e:
return (False, False, "CPG creation failed | %s" % (e))
return (True, True, "Created CPG %s successfully." % cpg_name)
def delete_cpg(
client_obj,
cpg_name):
try:
if client_obj.cpgExists(cpg_name):
client_obj.deleteCPG(cpg_name)
else:
return (True, False, "CPG does not exist")
except exceptions.ClientException as e:
return (False, False, "CPG delete failed | %s" % e)
return (True, True, "Deleted CPG %s successfully." % cpg_name)
def main():
module = AnsibleModule(argument_spec=hpe3par.cpg_argument_spec(),
required_together=[['raid_type', 'set_size']])
if not HAS_3PARCLIENT:
module.fail_json(msg='the python hpe3par_sdk library is required (https://pypi.org/project/hpe3par_sdk)')
if len(module.params["cpg_name"]) < 1 or len(module.params["cpg_name"]) > 31:
module.fail_json(msg="CPG name must be at least 1 character and not more than 31 characters")
storage_system_ip = module.params["storage_system_ip"]
storage_system_username = module.params["storage_system_username"]
storage_system_password = module.params["storage_system_password"]
cpg_name = module.params["cpg_name"]
domain = module.params["domain"]
growth_increment = module.params["growth_increment"]
growth_limit = module.params["growth_limit"]
growth_warning = module.params["growth_warning"]
raid_type = module.params["raid_type"]
set_size = module.params["set_size"]
high_availability = module.params["high_availability"]
disk_type = module.params["disk_type"]
secure = module.params["secure"]
wsapi_url = 'https://%s:8080/api/v1' % storage_system_ip
try:
client_obj = client.HPE3ParClient(wsapi_url, secure)
except exceptions.SSLCertFailed:
module.fail_json(msg="SSL Certificate Failed")
except exceptions.ConnectionError:
module.fail_json(msg="Connection Error")
except exceptions.UnsupportedVersion:
module.fail_json(msg="Unsupported WSAPI version")
except Exception as e:
module.fail_json(msg="Initializing client failed. %s" % e)
if storage_system_username is None or storage_system_password is None:
module.fail_json(msg="Storage system username or password is None")
if cpg_name is None:
module.fail_json(msg="CPG Name is None")
# States
if module.params["state"] == "present":
try:
client_obj.login(storage_system_username, storage_system_password)
return_status, changed, msg = create_cpg(
client_obj,
cpg_name,
domain,
growth_increment,
growth_limit,
growth_warning,
raid_type,
set_size,
high_availability,
disk_type
)
except Exception as e:
module.fail_json(msg="CPG create failed | %s" % e)
finally:
client_obj.logout()
elif module.params["state"] == "absent":
try:
client_obj.login(storage_system_username, storage_system_password)
return_status, changed, msg = delete_cpg(
client_obj,
cpg_name
)
except Exception as e:
module.fail_json(msg="CPG create failed | %s" % e)
finally:
client_obj.logout()
if return_status:
module.exit_json(changed=changed, msg=msg)
else:
module.fail_json(msg=msg)
if __name__ == '__main__':
main()<|fim▁end|> | description:
- Specifies the set size in the number of chunklets. |
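# growth_increment, growth_limit and growth_warning accept strings such as
# "32000 MiB" or "64 GiB". A sketch of the MiB conversion a helper like
# hpe3par.convert_to_binary_multiple presumably performs (the real
# implementation may differ):
def to_mebibytes(size):
    value, unit = size.split()
    factor = {'MiB': 1, 'GiB': 1024, 'TiB': 1024 ** 2}[unit]
    return int(float(value) * factor)

assert to_mebibytes('64 GiB') == 65536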
<|file_name|>enable_feature_flag_test.go<|end_file_name|><|fim▁begin|>package featureflag_test
import (
"errors"
fakeflag "github.com/cloudfoundry/cli/cf/api/feature_flags/fakes"
"github.com/cloudfoundry/cli/cf/command_registry"
"github.com/cloudfoundry/cli/cf/configuration/core_config"
testcmd "github.com/cloudfoundry/cli/testhelpers/commands"
testconfig "github.com/cloudfoundry/cli/testhelpers/configuration"
. "github.com/cloudfoundry/cli/testhelpers/matchers"
testreq "github.com/cloudfoundry/cli/testhelpers/requirements"
testterm "github.com/cloudfoundry/cli/testhelpers/terminal"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = Describe("enable-feature-flag command", func() {
var (
ui *testterm.FakeUI
requirementsFactory *testreq.FakeReqFactory
configRepo core_config.Repository
flagRepo *fakeflag.FakeFeatureFlagRepository
deps command_registry.Dependency
)
updateCommandDependency := func(pluginCall bool) {
deps.Ui = ui
deps.RepoLocator = deps.RepoLocator.SetFeatureFlagRepository(flagRepo)
deps.Config = configRepo
command_registry.Commands.SetCommand(command_registry.Commands.FindCommand("enable-feature-flag").SetDependency(deps, pluginCall))
}
BeforeEach(func() {
ui = &testterm.FakeUI{}
configRepo = testconfig.NewRepositoryWithDefaults()
requirementsFactory = &testreq.FakeReqFactory{LoginSuccess: true}
flagRepo = &fakeflag.FakeFeatureFlagRepository{}
})
runCommand := func(args ...string) bool {
return testcmd.RunCliCommand("enable-feature-flag", args, requirementsFactory, updateCommandDependency, false)
}
Describe("requirements", func() {
It("requires the user to be logged in", func() {
requirementsFactory.LoginSuccess = false
Expect(runCommand()).ToNot(HavePassedRequirements())
})
It("fails with usage if a single feature is not specified", func() {
runCommand()
Expect(ui.Outputs).To(ContainSubstrings(
[]string{"Incorrect Usage", "Requires an argument"},
))
})
})
Describe("when logged in", func() {
BeforeEach(func() {
flagRepo.UpdateReturns(nil)<|fim▁hole|> runCommand("user_org_creation")
flag, set := flagRepo.UpdateArgsForCall(0)
Expect(flag).To(Equal("user_org_creation"))
Expect(set).To(BeTrue())
Expect(ui.Outputs).To(ContainSubstrings(
[]string{"Setting status of user_org_creation as my-user..."},
[]string{"OK"},
[]string{"Feature user_org_creation Enabled."},
))
})
Context("when an error occurs", func() {
BeforeEach(func() {
flagRepo.UpdateReturns(errors.New("An error occurred."))
})
It("fails with an error", func() {
runCommand("i-dont-exist")
Expect(ui.Outputs).To(ContainSubstrings(
[]string{"FAILED"},
[]string{"An error occurred."},
))
})
})
})
})<|fim▁end|> | })
It("Sets the flag", func() { |
<|file_name|>BaseModels.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
'''
Created on Mar 12, 2012
@author: moloch
Copyright 2012
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import re
from uuid import uuid4<|fim▁hole|>from datetime import datetime
from sqlalchemy import Column
from sqlalchemy.types import DateTime, Integer, String
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.ext.declarative import declarative_base
generate_uuid = lambda: str(uuid4())
class _DatabaseObject(object):
''' All game objects inherit from this object '''
@declared_attr
def __tablename__(self):
''' Converts name from camel case to snake case '''
name = self.__name__
return (
name[0].lower() +
re.sub(r'([A-Z])',
lambda letter: "_" + letter.group(0).lower(), name[1:]
)
)
id = Column(Integer, unique=True, primary_key=True) # lint:ok
uuid = Column(String(36), unique=True, default=generate_uuid)
created = Column(DateTime, default=datetime.now)
# Create an instance called "BaseObject"
DatabaseObject = declarative_base(cls=_DatabaseObject)<|fim▁end|> | |
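# The declared_attr above derives table names from class names; a quick check
# of the camel-case to snake-case conversion on representative names:
import re

def snake_case(name):
    return name[0].lower() + re.sub(r'([A-Z])',
                                    lambda m: '_' + m.group(0).lower(),
                                    name[1:])

assert snake_case('DatabaseObject') == 'database_object'
assert snake_case('User') == 'user'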
<|file_name|>cast_sign_loss.rs<|end_file_name|><|fim▁begin|>use clippy_utils::consts::{constant, Constant};
use clippy_utils::diagnostics::span_lint;
use clippy_utils::{method_chain_args, sext};
use if_chain::if_chain;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::LateContext;
use rustc_middle::ty::{self, Ty};
use super::CAST_SIGN_LOSS;
pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_op: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) {
if should_lint(cx, cast_op, cast_from, cast_to) {
span_lint(
cx,
CAST_SIGN_LOSS,
expr.span,
&format!(
"casting `{}` to `{}` may lose the sign of the value",
cast_from, cast_to
),
);
}
}
fn should_lint(cx: &LateContext<'_>, cast_op: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) -> bool {
match (cast_from.is_integral(), cast_to.is_integral()) {
(true, true) => {
if !cast_from.is_signed() || cast_to.is_signed() {
return false;
}<|fim▁hole|> // Don't lint for positive constants.
let const_val = constant(cx, cx.typeck_results(), cast_op);
if_chain! {
if let Some((Constant::Int(n), _)) = const_val;
if let ty::Int(ity) = *cast_from.kind();
if sext(cx.tcx, n, ity) >= 0;
then {
return false;
}
}
// Don't lint for the result of methods that always return non-negative values.
if let ExprKind::MethodCall(path, _, _, _) = cast_op.kind {
let mut method_name = path.ident.name.as_str();
let allowed_methods = ["abs", "checked_abs", "rem_euclid", "checked_rem_euclid"];
if_chain! {
if method_name == "unwrap";
if let Some(arglist) = method_chain_args(cast_op, &["unwrap"]);
if let ExprKind::MethodCall(inner_path, _, _, _) = &arglist[0][0].kind;
then {
method_name = inner_path.ident.name.as_str();
}
}
if allowed_methods.iter().any(|&name| method_name == name) {
return false;
}
}
true
},
(false, true) => !cast_to.is_signed(),
(_, _) => false,
}
}<|fim▁end|> | |
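# What the lint above guards against: reinterpreting a negative signed value
# as unsigned silently produces a huge number. The same bit pattern viewed
# both ways, demonstrated with ctypes:
import ctypes

n = ctypes.c_int32(-1)
print(ctypes.c_uint32(n.value).value)  # 4294967295 -- the sign is lost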
<|file_name|>collections.py<|end_file_name|><|fim▁begin|>from pypom import Region
from selenium.webdriver.common.by import By<|fim▁hole|>
class Collections(Base):
"""Collections page."""
_item_locator = (By.CSS_SELECTOR, '.item')
def wait_for_page_to_load(self):
self.wait.until(lambda _: len(self.collections) > 0 and
self.collections[0].name)
return self
@property
def collections(self):
collections = self.find_elements(*self._item_locator)
return [self.Collection(self, el) for el in collections]
class Collection(Region):
"""Represents an individual collection."""
_name_locator = (By.CSS_SELECTOR, '.info > h3')
@property
def name(self):
return self.find_element(*self._name_locator).text<|fim▁end|> |
from base import Base |
<|file_name|>CheckCards.tsx<|end_file_name|><|fim▁begin|>//Libraries
import React, {FunctionComponent} from 'react'
//Components
import CheckCard from 'src/alerting/components/CheckCard'
import FilterList from 'src/shared/components/Filter'
import {
EmptyState,
ResourceList,
Panel,
Gradients,
Button,
IconFont,
ComponentColor,
} from '@influxdata/clockface'
// Types
import {Check} from 'src/types'
import {ComponentSize} from '@influxdata/clockface'
interface Props {
checks: Check[]
searchTerm: string
showFirstTimeWidget: boolean
onCreateCheck: () => void
}
const CheckCards: FunctionComponent<Props> = ({
checks,
searchTerm,
showFirstTimeWidget,
onCreateCheck,
}) => {
const cards = cs => cs.map(c => <CheckCard key={c.id} check={c} />)
const body = filtered => (
<ResourceList.Body
emptyState={
<EmptyChecksList
showFirstTimeWidget={showFirstTimeWidget}
onCreateCheck={onCreateCheck}
searchTerm={searchTerm}
/><|fim▁hole|> </ResourceList.Body>
)
const filteredChecks = (
<FilterList<Check>
list={checks}
searchKeys={['name']}
searchTerm={searchTerm}
>
{filtered => body(filtered)}
</FilterList>
)
return (
<>
<ResourceList>{filteredChecks}</ResourceList>
</>
)
}
interface EmptyProps {
showFirstTimeWidget: boolean
onCreateCheck: () => void
searchTerm: string
}
const EmptyChecksList: FunctionComponent<EmptyProps> = ({
showFirstTimeWidget,
onCreateCheck,
searchTerm,
}) => {
if (searchTerm) {
return (
<EmptyState size={ComponentSize.Small} className="alert-column--empty">
<EmptyState.Text
text="No checks match your search"
highlightWords={['checks']}
/>
</EmptyState>
)
}
if (showFirstTimeWidget) {
return (
<Panel
gradient={Gradients.PolarExpress}
size={ComponentSize.Large}
className="alerting-first-time"
>
<Panel.Body>
<h1>
Looks like it's your
<br />
first time here
</h1>
<h5>
Welcome to our new Monitoring & Alerting feature!
<br />
To get started try creating a Check:
</h5>
<Button
size={ComponentSize.Medium}
color={ComponentColor.Primary}
onClick={onCreateCheck}
text="Create a Check"
icon={IconFont.Plus}
/>
</Panel.Body>
</Panel>
)
}
return (
<EmptyState size={ComponentSize.Small} className="alert-column--empty">
<EmptyState.Text
text="Looks like you have not created a Check yet LINEBREAK LINEBREAK You will need one to be notified about LINEBREAK any changes in system status"
highlightWords={['Check']}
/>
</EmptyState>
)
}
export default CheckCards<|fim▁end|> | }
>
{cards(filtered)} |
<|file_name|>_fetch_new_rt_data.py<|end_file_name|><|fim▁begin|>import json
import os
import time
from rottentomatoes import RT
BOX_OFFICE_COUNTRIES = [
"us",
"in",
"uk",
"nl",
]
LIMIT = 50 # max allowed by rotten tomatoes
OUTPUT_FILE = "download/more_movies.json"
def main():
assert os.environ["RT_KEY"], "Your Rotten Tomatoes API key should be stored in the RT_KEY env var!"
rt = RT() # NOTE: you should have your API key stored in RT_KEY before this will work
movies = []
link_template = ""
for country in BOX_OFFICE_COUNTRIES:
print "requesting box office hits for {}".format(country)
r = rt.lists('movies', 'box_office', limit=LIMIT, country=country)
movies += r['movies']
link_template = link_template or r['link_template']
time.sleep(10) # respect our API limits!
# to maintain compatibility with movies.json fields, our top level dict
# should have the following fields:
# total (int)
# movies (list)
# link_template (string)
total = len(movies)<|fim▁hole|> "link_template": link_template,
}
with open(OUTPUT_FILE, "w") as f:
json.dump(result, f, indent=2, sort_keys=True)
if __name__ == "__main__":
main()<|fim▁end|> | result = {
"total": total,
"movies": movies, |
<|file_name|>issue-2063-resource.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.<|fim▁hole|>// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(managed_boxes)]
// test that autoderef of a type like this does not
// cause compiler to loop. Note that no instances
// of such a type could ever be constructed.
struct t { //~ ERROR this type cannot be instantiated
x: x,
to_str: (),
}
struct x(@t); //~ ERROR this type cannot be instantiated
fn main() {
}<|fim▁end|> | //
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or |