<|file_name|>CMapper.py<|end_file_name|>
# -*- coding: utf-8 -*-
dCellSize = 20
WindowWidth = 400
WindowHeight = 400
class SCell(object):
    def __init__(self, xmin, xmax, ymin, ymax):
        self._iTicksSpentHere = 0
        self._left = xmin
        self._right = xmax
        self._top = ymin
        self._bottom = ymax
def Update(self):
self._iTicksSpentHere += 1
def Reset(self):
self._iTicksSpentHere = 0
class CMapper(object):
def __init__(self, MaxRangeX, MaxRangeY):
self._dCellSize = dCellSize
self._NumCellsX = (MaxRangeX/self._dCellSize) + 1
self._NumCellsY = (MaxRangeY/self._dCellSize) + 1
self._2DvecCells = []
for x in xrange(self._NumCellsX):
temp = []
for y in xrange(self._NumCellsY):
temp.append(SCell(x*self._dCellSize, (x+1)*self._dCellSize, y*self._dCellSize, (y+1)*self._dCellSize))
self._2DvecCells.append(temp)
self._iTotalCells = self._NumCellsX * self._NumCellsY
def Update(self, xPos, yPos):
if ((xPos < 0) or (xPos > WindowWidth) or (yPos < 0) or (yPos > WindowHeight)):
return
cellX = int(xPos/self._dCellSize)
cellY = int(yPos/self._dCellSize)
self._2DvecCells[cellX][cellY].Update()
def TicksLingered(self, xPos, yPos):
if ((xPos < 0) or (xPos > WindowWidth) or (yPos < 0) or (yPos > WindowHeight)):
return 999
cellX = int(xPos/self._dCellSize)
cellY = int(yPos/self._dCellSize)
return self._2DvecCells[cellX][cellY]._iTicksSpentHere
def BeenVisited(self, xPos, yPos):
print "Not implemented!"
def Render(self):
print "To be implemented"
def Reset(self):
for i in xrange(self._NumCellsX):
for j in xrange(self._NumCellsY):
self._2DvecCells[i][j].Reset()
def NumCellsVisited(self):
total = 0
for i in xrange(self._NumCellsX):
for j in xrange(self._NumCellsY):
if self._2DvecCells[i][j]._iTicksSpentHere > 0:
total += 1
        return total
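
# A minimal usage sketch for the classes above (illustrative, not part of the
# original module): exercises Update/TicksLingered/Reset on the default
# 400x400 window split into 20-pixel cells.
if __name__ == '__main__':
    mapper = CMapper(WindowWidth, WindowHeight)
    mapper.Update(50, 70)
    mapper.Update(50, 70)
    print mapper.TicksLingered(50, 70)   # 2 - cell (2, 3) was visited twice
    print mapper.NumCellsVisited()       # 1
    mapper.Reset()
    print mapper.NumCellsVisited()       # 0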
<|file_name|>test_assembly.py<|end_file_name|>
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from solum.api import auth
from solum.api.handlers import assembly_handler
from solum.common import exception
from solum.common import repo_utils
from solum.objects import assembly
from solum.openstack.common.fixture import config
from solum.tests import base
from solum.tests import fakes
from solum.tests import utils
STATES = assembly.States
@mock.patch('solum.objects.registry')
class TestAssemblyHandler(base.BaseTestCase):
def setUp(self):
super(TestAssemblyHandler, self).setUp()
self.ctx = utils.dummy_context()
self.CONF = self.useFixture(config.Config())
self.CONF.config(auth_uri='http://fakeidentity.com',
group=auth.OPT_GROUP_NAME)
self.CONF.config(keystone_version='3')
def test_assembly_get(self, mock_registry):
mock_registry.return_value.Assembly.get_by_uuid.return_value = {
'plan_id': '1234'
}
handler = assembly_handler.AssemblyHandler(self.ctx)
res = handler.get('test_id')
self.assertIsNotNone(res)
get_by_uuid = mock_registry.Assembly.get_by_uuid
get_by_uuid.assert_called_once_with(self.ctx, 'test_id')
    def test_assembly_get_all(self, mock_registry):
        mock_registry.AssemblyList.get_all.return_value = {}
        handler = assembly_handler.AssemblyHandler(self.ctx)
        res = handler.get_all()
        self.assertIsNotNone(res)
mock_registry.AssemblyList.get_all.assert_called_once_with(self.ctx)
def test_update(self, mock_registry):
data = {'user_id': 'new_user_id',
'plan_uuid': 'input_plan_uuid'}
handler = assembly_handler.AssemblyHandler(self.ctx)
handler.update('test_id', data)
mock_registry.Assembly.update_and_save.assert_called_once_with(
self.ctx, 'test_id', data)
@mock.patch('solum.worker.api.API.build_app')
@mock.patch('solum.common.clients.OpenStackClients.keystone')
def test_create(self, mock_kc, mock_pa, mock_registry):
data = {'user_id': 'new_user_id',
'uuid': 'input_uuid',
'plan_uuid': 'input_plan_uuid'}
db_obj = fakes.FakeAssembly()
mock_registry.Assembly.return_value = db_obj
fp = fakes.FakePlan()
mock_registry.Plan.get_by_id.return_value = fp
fp.raw_content = {
'name': 'theplan',
'artifacts': [{'name': 'nodeus',
'artifact_type': 'heroku',
'content': {'private': False,
'href': 'https://example.com/ex.git'},
'language_pack': 'auto'}]}
mock_registry.Image.return_value = fakes.FakeImage()
handler = assembly_handler.AssemblyHandler(self.ctx)
res = handler.create(data)
db_obj.update.assert_called_once_with(data)
db_obj.create.assert_called_once_with(self.ctx)
self.assertEqual(db_obj, res)
git_info = {
'source_url': "https://example.com/ex.git",
'commit_sha': '',
'repo_token': None,
'status_url': None,
}
mock_pa.assert_called_once_with(
verb='launch_workflow', workflow=['unittest', 'build', 'deploy'],
build_id=8, name='nodeus', assembly_id=8,
git_info=git_info, test_cmd=None, ports=[80],
base_image_id='auto', source_format='heroku',
image_format='qcow2', run_cmd=None)
@mock.patch('solum.common.clients.OpenStackClients.keystone')
def test_create_with_username_in_ctx(self, mock_kc, mock_registry):
data = {'plan_uuid': 'input_plan_uuid'}
db_obj = fakes.FakeAssembly()
mock_registry.Assembly.return_value = db_obj
fp = fakes.FakePlan()
mock_registry.Plan.get_by_id.return_value = fp
fp.raw_content = {'name': 'theplan'}
handler = assembly_handler.AssemblyHandler(self.ctx)
res = handler.create(data)
self.assertEqual(res.username, self.ctx.user_name)
@mock.patch('solum.common.clients.OpenStackClients.keystone')
def test_create_without_username_in_ctx(self, mock_kc, mock_registry):
data = {'plan_uuid': 'input_plan_uuid'}
ctx = utils.dummy_context()
ctx.user_name = ''
db_obj = fakes.FakeAssembly()
mock_registry.Assembly.return_value = db_obj
fp = fakes.FakePlan()
mock_registry.Plan.get_by_id.return_value = fp
fp.raw_content = {'name': 'theplan'}
handler = assembly_handler.AssemblyHandler(ctx)
res = handler.create(data)
self.assertEqual(res.username, '')
@mock.patch('solum.worker.api.API.build_app')
@mock.patch('solum.common.clients.OpenStackClients.keystone')
def test_create_with_private_github_repo(self, mock_kc, mock_pa,
mock_registry):
data = {'user_id': 'new_user_id',
'uuid': 'input_uuid',
'plan_uuid': 'input_plan_uuid'}
db_obj = fakes.FakeAssembly()
mock_registry.Assembly.return_value = db_obj
fp = fakes.FakePlan()
mock_registry.Plan.get_by_id.return_value = fp
fp.raw_content = {
'name': 'theplan',
'artifacts': [{'name': 'nodeus',
'artifact_type': 'heroku',
'content': {'private': True,
'href': 'https://example.com/ex.git',
'public_key': 'ssh-rsa abc'},
'language_pack': 'auto'}]}
fp.deploy_keys_uri = 'secret_ref_uri'
mock_registry.Image.return_value = fakes.FakeImage()
handler = assembly_handler.AssemblyHandler(self.ctx)
res = handler.create(data)
db_obj.update.assert_called_once_with(data)
db_obj.create.assert_called_once_with(self.ctx)
self.assertEqual(db_obj, res)
git_info = {
'source_url': "https://example.com/ex.git",
'commit_sha': '',
'repo_token': None,
'status_url': None,
}
mock_pa.assert_called_once_with(
verb='launch_workflow', workflow=['unittest', 'build', 'deploy'],
build_id=8, name='nodeus', assembly_id=8,
git_info=git_info, ports=[80],
test_cmd=None, base_image_id='auto', source_format='heroku',
image_format='qcow2', run_cmd=None)
@mock.patch('solum.common.clients.OpenStackClients.keystone')
@mock.patch('solum.deployer.api.API.destroy_assembly')
@mock.patch('solum.conductor.api.API.update_assembly')
def test_delete(self, mock_cond, mock_deploy, mock_kc, mock_registry):
db_obj = fakes.FakeAssembly()
mock_registry.Assembly.get_by_uuid.return_value = db_obj
handler = assembly_handler.AssemblyHandler(self.ctx)
handler.delete('test_id')
mock_registry.Assembly.get_by_uuid.assert_called_once_with(self.ctx,
'test_id')
mock_cond.assert_called_once_with(db_obj.id, {'status': 'DELETING'})
mock_deploy.assert_called_once_with(assem_id=db_obj.id)
@mock.patch('httplib2.Http.request')
def test_verify_artifact_raise_exp(self, http_mock, mock_registry):
artifact = {"name": "Test",
"artifact_type": "heroku",
"content": {"href": "https://github.com/some/project"},
"language_pack": "auto",
"repo_token": "abcd"}
http_mock.return_value = ({'status': '404'}, '') # Not a collaborator
collab_url = 'https://api.github.com/repos/u/r/collaborators/foo'
self.assertRaises(exception.RequestForbidden,
repo_utils.verify_artifact,
                          artifact, collab_url)
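
# A minimal, self-contained illustration (not part of the original test
# module) of the mock.patch mechanism the class decorator above relies on:
# every patched target is swapped for a mock within the patched scope.
# 'os.getcwd' is just a stand-in target, chosen because it is always
# importable.
if __name__ == '__main__':
    import os
    with mock.patch('os.getcwd') as fake_getcwd:
        fake_getcwd.return_value = '/nowhere'
        assert os.getcwd() == '/nowhere'
        fake_getcwd.assert_called_once_with()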
<|file_name|>error.js<|end_file_name|>
/* Copyright 2014 Open Ag Data Alliance
*
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
var debug = require('debug-logger')('oada-error');
var codes = {
OK: 200,
CREATED: 201,
NO_CONTENT: 204,
PARTIAL_CONTENT: 206,
MOVED_PERMANENTLY: 301,
NOT_MODIFIED: 304,
TEMPORARY_REDIRECT: 307,
BAD_REQUEST: 400,
UNAUTHORIZED: 401,
FORBIDDEN: 403,
NOT_FOUND: 404,
NOT_ACCEPTABLE: 406,
CONFLICT: 409,
LENGTH_REQUIRED: 411,
PRECONDITION_FAILED: 412,
UNSUPPORTED_MEDIA_TYPE: 415,
REQUESTED_RANGE_NOT_SATISFIABLE: 416,
TOO_MANY_REQUESTS: 429,
INTERNAL_ERROR: 500,
};
module.exports.codes = codes;

var names = {
200: 'OK',
201: 'Created',
204: 'No Content',
206: 'Partial Content',
301: 'Moved Permanently',
304: 'Not Modified',
307: 'Temporary Redirect',
400: 'Bad Request',
401: 'Unauthorized',
403: 'Forbidden',
404: 'Not Found',
406: 'Not Acceptable',
409: 'Conflict',
411: 'Length Required',
412: 'Precondition Failed',
415: 'Unsupported Media Type',
416: 'Requested Range Not Satisfiable',
429: 'Too Many Requests',
500: 'Internal Error',
};
function OADAError(message, code, userMessage, href, detail) {
var error = Error.apply(null, arguments);
// Copy Error's properties
var self = this;
Object.getOwnPropertyNames(error).forEach(function(propertyName) {
Object.defineProperty(self, propertyName,
Object.getOwnPropertyDescriptor(error, propertyName));
});
// Convert named code to numeric code
if (isNaN(parseFloat(code))) {
this.code = codes[code];
} else {
this.code = code;
}
// Make sure code is OADA compliant
    if (!names[this.code]) {
this.code = codes['INTERNAL_ERROR'];
}
this.status = names[this.code];
Object.defineProperty(this, 'message', {
configurable: true,
enumerable: false,
value: this.message || message || '',
writable: true
});
Object.defineProperty(this, 'type', {
configurable: true,
enumerable: false,
value: 'OADAError',
writable: true
});
this.title = this.message;
this.href = href || 'https://github.com/OADA/oada-docs';
if (detail) {
this.detail = detail;
}
this.userMessage = userMessage ||
'Unexpected error. Please try again or contact support.';
}
OADAError.prototype = Object.create(Error.prototype);
OADAError.prototype.name = 'OADAError';
OADAError.codes = codes;
module.exports.OADAError = OADAError;
function middleware(cb) {
return function(err, req, res, next) {
debug.trace('**** OADAError: ',err);
if (err.name === 'Error') {
debug.error(err);
            // Don't expose internal error to client
err = new OADAError('Unexpected Error', codes.INTERNAL_ERROR);
}
if (err.type !== 'OADAError') {
return next(err);
}
if (typeof cb === 'function') {
cb(err);
}
debug.error('OADAError: ' + err);
res.status(err.code).json(err);
};
}
module.exports.middleware = middleware;
<|file_name|>favoritesview.py<|end_file_name|>
# Copyright (C) 2006-2007 Red Hat, Inc.
# Copyright (C) 2008 One Laptop Per Child
# Copyright (C) 2008-2013 Sugar Labs
# Copyright (C) 2013 Daniel Francis
# Copyright (C) 2013 Walter Bender
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
from gettext import gettext as _
from gi.repository import GObject
from gi.repository import Gio
from gi.repository import GLib
from gi.repository import Gtk
from gi.repository import Gdk
from gi.repository import GdkPixbuf
from sugar3.graphics import style
from sugar3.graphics.icon import Icon
from sugar3.graphics.icon import CanvasIcon
from sugar3.graphics.palettemenu import PaletteMenuItem
from sugar3.graphics.palettemenu import PaletteMenuItemSeparator
from sugar3.graphics.alert import Alert, ErrorAlert
from sugar3.graphics.xocolor import XoColor
from sugar3.activity import activityfactory
from sugar3 import dispatch
from sugar3.datastore import datastore
from jarabe.view.palettes import JournalPalette
from jarabe.view.palettes import CurrentActivityPalette
from jarabe.view.palettes import ActivityPalette
from jarabe.view.buddyicon import BuddyIcon
from jarabe.view.buddymenu import BuddyMenu
from jarabe.model.buddy import get_owner_instance
from jarabe.model import shell
from jarabe.model import bundleregistry
from jarabe.model import desktop
from jarabe.journal import misc
from jarabe.desktop import schoolserver
from jarabe.desktop.schoolserver import RegisterError
from jarabe.desktop import favoriteslayout
from jarabe.desktop.viewcontainer import ViewContainer
from jarabe.util.normalize import normalize_string
_logger = logging.getLogger('FavoritesView')
_ICON_DND_TARGET = ('activity-icon', Gtk.TargetFlags.SAME_WIDGET, 0)
LAYOUT_MAP = {favoriteslayout.RingLayout.key: favoriteslayout.RingLayout,
# favoriteslayout.BoxLayout.key: favoriteslayout.BoxLayout,
# favoriteslayout.TriangleLayout.key:
# favoriteslayout.TriangleLayout,
# favoriteslayout.SunflowerLayout.key:
# favoriteslayout.SunflowerLayout,
favoriteslayout.RandomLayout.key: favoriteslayout.RandomLayout}
"""Map numeric layout identifiers to uninstantiated subclasses of
`FavoritesLayout` which implement the layouts. Additional information
about the layout can be accessed with fields of the class."""
_favorites_settings = None
class FavoritesBox(Gtk.VBox):
__gtype_name__ = 'SugarFavoritesBox'
def __init__(self, favorite_view):
Gtk.VBox.__init__(self)
self.favorite_view = favorite_view
self._view = FavoritesView(self)
self.pack_start(self._view, True, True, 0)
self._view.show()
self._alert = None
def set_filter(self, query):
self._view.set_filter(query)
def set_resume_mode(self, resume_mode):
self._view.set_resume_mode(resume_mode)
def grab_focus(self):
# overwrite grab focus in order to grab focus from the parent
self._view.grab_focus()
def add_alert(self, alert):
if self._alert is not None:
self.remove_alert()
self._alert = alert
self.pack_start(alert, False, True, 0)
self.reorder_child(alert, 0)
def remove_alert(self):
self.remove(self._alert)
self._alert = None
def _get_selected(self, query):
return self._view._get_selected(query)
class FavoritesView(ViewContainer):
__gtype_name__ = 'SugarFavoritesView'
def __init__(self, box):
self._box = box
self._layout = None
owner_icon = OwnerIcon(style.XLARGE_ICON_SIZE)
owner_icon.connect('register-activate', self.__register_activate_cb)
current_activity = CurrentActivityIcon()
ViewContainer.__init__(self, layout=self._layout,
owner_icon=owner_icon,
activity_icon=current_activity)
self.set_can_focus(False)
self.add_events(Gdk.EventMask.BUTTON_PRESS_MASK |
Gdk.EventMask.POINTER_MOTION_HINT_MASK)
self.drag_dest_set(0, [], 0)
# Drag and drop is set only for the Random layout. This is
# the flag that enables or disables it.
self._dragging_mode = False
self._drag_motion_hid = None
self._drag_drop_hid = None
self._drag_data_received_hid = None
self._dragging = False
self._pressed_button = None
self._press_start_x = 0
self._press_start_y = 0
self._hot_x = None
self._hot_y = None
self._last_clicked_icon = None
self._alert = None
self._resume_mode = True
GLib.idle_add(self.__connect_to_bundle_registry_cb)
favorites_settings = get_settings(self._box.favorite_view)
favorites_settings.changed.connect(self.__settings_changed_cb)
self._set_layout(favorites_settings.layout)
def __settings_changed_cb(self, **kwargs):
favorites_settings = get_settings(self._box.favorite_view)
layout_set = self._set_layout(favorites_settings.layout)
if layout_set:
self.set_layout(self._layout)
registry = bundleregistry.get_registry()
for info in registry:
if registry.is_bundle_favorite(info.get_bundle_id(),
info.get_activity_version(),
self._box.favorite_view):
self._add_activity(info)
def _set_layout(self, layout):
if layout not in LAYOUT_MAP:
logging.warn('Unknown favorites layout: %r', layout)
layout = favoriteslayout.RingLayout.key
assert layout in LAYOUT_MAP
if type(self._layout) == LAYOUT_MAP[layout]:
return False
if self._layout is not None and self._dragging_mode:
self.disconnect(self._drag_motion_hid)
self.disconnect(self._drag_drop_hid)
self.disconnect(self._drag_data_received_hid)
if layout == favoriteslayout.RandomLayout.key:
self._dragging_mode = True
self._drag_motion_hid = self.connect(
'drag-motion', self.__drag_motion_cb)
self._drag_drop_hid = self.connect(
'drag-drop', self.__drag_drop_cb)
self._drag_data_received_hid = self.connect(
'drag-data-received', self.__drag_data_received_cb)
else:
self._dragging_mode = False
self._layout = LAYOUT_MAP[layout]()
return True
layout = property(None, _set_layout)
def do_add(self, child):
if child != self._owner_icon and child != self._activity_icon:
self._children.append(child)
child.connect('button-press-event', self.__button_press_cb)
child.connect('button-release-event', self.__button_release_cb)
child.connect('motion-notify-event', self.__motion_notify_event_cb)
child.connect('drag-begin', self.__drag_begin_cb)
if child.get_realized():
child.set_parent_window(self.get_parent_window())
child.set_parent(self)
def __button_release_cb(self, widget, event):
if self._dragging:
return True
else:
return False
def __button_press_cb(self, widget, event):
if event.button == 1 and event.type == Gdk.EventType.BUTTON_PRESS:
self._last_clicked_icon = widget
self._pressed_button = event.button
self._press_start_x = event.x
self._press_start_y = event.y
return False
def __motion_notify_event_cb(self, widget, event):
if not self._dragging_mode:
return False
        # if the mouse button is not pressed, no drag should occur
if not event.get_state() & Gdk.ModifierType.BUTTON1_MASK:
self._pressed_button = None
return False
if event.is_hint:
x, y, state_ = event.window.get_pointer()
else:
x = event.x
y = event.y
if widget.drag_check_threshold(int(self._press_start_x),
int(self._press_start_y),
int(x),
int(y)):
self._dragging = True
target_entry = Gtk.TargetEntry.new(*_ICON_DND_TARGET)
target_list = Gtk.TargetList.new([target_entry])
widget.drag_begin(target_list,
Gdk.DragAction.MOVE,
1,
event)
return False
def __drag_begin_cb(self, widget, context):
if not self._dragging_mode:
return False
pixbuf = GdkPixbuf.Pixbuf.new_from_file(widget.props.file_name)
self._hot_x = pixbuf.props.width / 2
self._hot_y = pixbuf.props.height / 2
Gtk.drag_set_icon_pixbuf(context, pixbuf, self._hot_x, self._hot_y)
def __drag_motion_cb(self, widget, context, x, y, time):
if self._last_clicked_icon is not None:
Gdk.drag_status(context, context.get_suggested_action(), time)
return True
else:
return False
def __drag_drop_cb(self, widget, context, x, y, time):
if self._last_clicked_icon is not None:
target = Gdk.Atom.intern_static_string(_ICON_DND_TARGET[0])
self.drag_get_data(context, target, time)
self._layout.move_icon(self._last_clicked_icon,
x - self._hot_x, y - self._hot_y,
self.get_allocation())
self._pressed_button = None
            self._press_start_x = None
            self._press_start_y = None
            self._hot_x = None
            self._hot_y = None
            self._last_clicked_icon = None
self._dragging = False
return True
else:
return False
def __drag_data_received_cb(self, widget, context, x, y, selection_data,
info, time):
Gdk.drop_finish(context, success=True, time_=time)
def __connect_to_bundle_registry_cb(self):
registry = bundleregistry.get_registry()
for info in registry:
if registry.is_bundle_favorite(info.get_bundle_id(),
info.get_activity_version(),
self._box.favorite_view):
self._add_activity(info)
registry.connect('bundle-added', self.__activity_added_cb)
registry.connect('bundle-removed', self.__activity_removed_cb)
registry.connect('bundle-changed', self.__activity_changed_cb)
def _add_activity(self, activity_info):
if activity_info.get_bundle_id() == 'org.laptop.JournalActivity':
return
icon = ActivityIcon(activity_info)
icon.props.pixel_size = style.STANDARD_ICON_SIZE
# icon.set_resume_mode(self._resume_mode)
self.add(icon)
icon.show()
def __activity_added_cb(self, activity_registry, activity_info):
registry = bundleregistry.get_registry()
if registry.is_bundle_favorite(activity_info.get_bundle_id(),
activity_info.get_activity_version(),
self._box.favorite_view):
self._add_activity(activity_info)
def __activity_removed_cb(self, activity_registry, activity_info):
icon = self._find_activity_icon(activity_info.get_bundle_id(),
activity_info.get_activity_version())
if icon is not None:
self.remove(icon)
def _find_activity_icon(self, bundle_id, version):
for icon in self.get_children():
if isinstance(icon, ActivityIcon) and \
icon.bundle_id == bundle_id and icon.version == version:
return icon
return None
def __activity_changed_cb(self, activity_registry, activity_info):
if activity_info.get_bundle_id() == 'org.laptop.JournalActivity':
return
icon = self._find_activity_icon(activity_info.get_bundle_id(),
activity_info.get_activity_version())
if icon is not None:
self.remove(icon)
registry = bundleregistry.get_registry()
if registry.is_bundle_favorite(activity_info.get_bundle_id(),
activity_info.get_activity_version(),
self._box.favorite_view):
self._add_activity(activity_info)
def set_filter(self, query):
query = query.strip()
for icon in self.get_children():
if icon not in [self._owner_icon, self._activity_icon]:
activity_name = icon.get_activity_name().decode('utf-8')
normalized_name = normalize_string(activity_name)
if normalized_name.find(query) > -1:
icon.alpha = 1.0
else:
icon.alpha = 0.33
def _get_selected(self, query):
query = query.strip()
selected = []
for icon in self.get_children():
if icon not in [self._owner_icon, self._activity_icon]:
activity_name = icon.get_activity_name().decode('utf-8')
normalized_name = normalize_string(activity_name)
if normalized_name.find(query) > -1:
selected.append(icon)
return selected
def __register_activate_cb(self, icon):
alert = Alert()
alert.props.title = _('Registration')
alert.props.msg = _('Please wait, searching for your school server.')
self._box.add_alert(alert)
GObject.idle_add(self.__register)
def __register(self):
self._box.remove_alert()
alert = ErrorAlert()
try:
schoolserver.register_laptop()
except RegisterError, e:
alert.props.title = _('Registration Failed')
alert.props.msg = '%s' % e
else:
alert.props.title = _('Registration Successful')
alert.props.msg = _('You are now registered '
'with your school server.')
alert.connect('response', self.__register_alert_response_cb)
self._box.add_alert(alert)
return False
def __register_alert_response_cb(self, alert, response_id):
self._box.remove_alert()
def set_resume_mode(self, resume_mode):
self._resume_mode = resume_mode
for icon in self.get_children():
if hasattr(icon, 'set_resume_mode'):
icon.set_resume_mode(self._resume_mode)
class ActivityIcon(CanvasIcon):
__gtype_name__ = 'SugarFavoriteActivityIcon'
_BORDER_WIDTH = style.zoom(9)
_MAX_RESUME_ENTRIES = 5
def __init__(self, activity_info):
CanvasIcon.__init__(self, cache=True,
file_name=activity_info.get_icon())
self._activity_info = activity_info
self._journal_entries = []
self._resume_mode = True
self.connect_after('activate', self.__button_activate_cb)
datastore.updated.connect(self.__datastore_listener_updated_cb)
datastore.deleted.connect(self.__datastore_listener_deleted_cb)
self._refresh()
self._update()
def _refresh(self):
bundle_id = self._activity_info.get_bundle_id()
properties = ['uid', 'title', 'icon-color', 'activity', 'activity_id',
'mime_type', 'mountpoint']
self._get_last_activity_async(bundle_id, properties)
def __datastore_listener_updated_cb(self, **kwargs):
bundle_id = self._activity_info.get_bundle_id()
if kwargs['metadata'].get('activity', '') == bundle_id:
self._refresh()
def __datastore_listener_deleted_cb(self, **kwargs):
for entry in self._journal_entries:
if entry['uid'] == kwargs['object_id']:
self._refresh()
break
def _get_last_activity_async(self, bundle_id, properties):
query = {'activity': bundle_id}
datastore.find(query, sorting=['+timestamp'],
limit=self._MAX_RESUME_ENTRIES,
properties=properties,
reply_handler=self.__get_last_activity_reply_handler_cb,
error_handler=self.__get_last_activity_error_handler_cb)
def __get_last_activity_reply_handler_cb(self, entries, total_count):
# If there's a problem with the DS index, we may get entries not
# related to this activity.
checked_entries = []
for entry in entries:
if entry['activity'] == self.bundle_id:
checked_entries.append(entry)
self._journal_entries = checked_entries
self._update()
def __get_last_activity_error_handler_cb(self, error):
logging.error('Error retrieving most recent activities: %r', error)
def _update(self):
self.palette = None
if not self._resume_mode or not self._journal_entries:
xo_color = XoColor('%s,%s' % (style.COLOR_BUTTON_GREY.get_svg(),
style.COLOR_WHITE.get_svg()))
else:
xo_color = misc.get_icon_color(self._journal_entries[0])
self.props.xo_color = xo_color
def create_palette(self):
palette = FavoritePalette(self._activity_info, self._journal_entries)
palette.connect('activate', self.__palette_activate_cb)
palette.connect('entry-activate', self.__palette_entry_activate_cb)
self.connect_to_palette_pop_events(palette)
return palette
def __palette_activate_cb(self, palette):
self._activate()
def __palette_entry_activate_cb(self, palette, metadata):
self._resume(metadata)
def do_get_preferred_width(self):
width = CanvasIcon.do_get_preferred_width(self)[0]
width += ActivityIcon._BORDER_WIDTH * 2
return (width, width)
def do_get_preferred_height(self):
height = CanvasIcon.do_get_preferred_height(self)[0]
height += ActivityIcon._BORDER_WIDTH * 2
return (height, height)
def __button_activate_cb(self, icon):
self._activate()
def _resume(self, journal_entry):
if not journal_entry['activity_id']:
journal_entry['activity_id'] = activityfactory.create_activity_id()
misc.resume(journal_entry, self._activity_info.get_bundle_id())
def _activate(self):
if self.palette is not None:
self.palette.popdown(immediate=True)
if self._resume_mode and self._journal_entries:
self._resume(self._journal_entries[0])
else:
misc.launch(self._activity_info)
def run_activity(self):
self._activate()
def get_bundle_id(self):
return self._activity_info.get_bundle_id()
bundle_id = property(get_bundle_id, None)
def get_version(self):
return self._activity_info.get_activity_version()
version = property(get_version, None)
def get_activity_name(self):
return self._activity_info.get_name()
def _get_installation_time(self):
return self._activity_info.get_installation_time()
installation_time = property(_get_installation_time, None)
def _get_fixed_position(self):
registry = bundleregistry.get_registry()
return registry.get_bundle_position(self.bundle_id, self.version)
fixed_position = property(_get_fixed_position, None)
def set_resume_mode(self, resume_mode):
self._resume_mode = resume_mode
self._update()
class FavoritePalette(ActivityPalette):
__gtype_name__ = 'SugarFavoritePalette'
__gsignals__ = {
'entry-activate': (GObject.SignalFlags.RUN_FIRST,
None, ([object])),
}
def __init__(self, activity_info, journal_entries):
ActivityPalette.__init__(self, activity_info)
if not journal_entries:
xo_color = XoColor('%s,%s' % (style.COLOR_BUTTON_GREY.get_svg(),
style.COLOR_WHITE.get_svg()))
else:
xo_color = misc.get_icon_color(journal_entries[0])
self.props.icon = Icon(file=activity_info.get_icon(),
xo_color=xo_color,
pixel_size=style.STANDARD_ICON_SIZE)
if journal_entries:
self.props.secondary_text = journal_entries[0]['title']
menu_items = []
for entry in journal_entries:
icon_file_name = misc.get_icon_name(entry)
color = misc.get_icon_color(entry)
menu_item = PaletteMenuItem(text_label=entry['title'],
file_name=icon_file_name,
xo_color=color)
menu_item.connect('activate', self.__resume_entry_cb, entry)
menu_items.append(menu_item)
menu_item.show()
if journal_entries:
separator = PaletteMenuItemSeparator()
menu_items.append(separator)
separator.show()
for i in range(0, len(menu_items)):
self.menu_box.pack_start(menu_items[i], True, True, 0)
def __resume_entry_cb(self, menu_item, entry):
if entry is not None:
self.emit('entry-activate', entry)
class CurrentActivityIcon(CanvasIcon):
def __init__(self):
CanvasIcon.__init__(self, icon_name='activity-journal',
pixel_size=style.STANDARD_ICON_SIZE, cache=True)
self._home_model = shell.get_model()
self._home_activity = self._home_model.get_active_activity()
if self._home_activity is not None:
self._update()
self._home_model.connect('active-activity-changed',
self.__active_activity_changed_cb)
self.connect_after('activate', self.__activate_cb)
def __activate_cb(self, icon):
window = self._home_model.get_active_activity().get_window()
window.activate(Gtk.get_current_event_time())
def _update(self):
if self._home_activity is not None:
self.props.file_name = self._home_activity.get_icon_path()
self.props.xo_color = self._home_activity.get_icon_color()
if self._home_activity.is_journal():
if self._unbusy():
GLib.timeout_add(100, self._unbusy)
self.props.pixel_size = style.STANDARD_ICON_SIZE
if self.palette is not None:
self.palette.destroy()
self.palette = None
def _unbusy(self):
if self.get_window():
import jarabe.desktop.homewindow
jarabe.desktop.homewindow.get_instance().unbusy()
return False
return True
def create_palette(self):
if self._home_activity is not None:
if self._home_activity.is_journal():
palette = JournalPalette(self._home_activity)
else:
palette = CurrentActivityPalette(self._home_activity)
self.connect_to_palette_pop_events(palette)
else:
palette = None
return palette
def __active_activity_changed_cb(self, home_model, home_activity):
self._home_activity = home_activity
self._update()
class OwnerIcon(BuddyIcon):
__gtype_name__ = 'SugarFavoritesOwnerIcon'
__gsignals__ = {
'register-activate': (GObject.SignalFlags.RUN_FIRST, None,
([])),
}
def __init__(self, size):
BuddyIcon.__init__(self, buddy=get_owner_instance(), pixel_size=size)
# This is a workaround to skip the callback for
# enter-notify-event in the parent class the first time.
def __enter_notify_event_cb(icon, event):
self.unset_state_flags(Gtk.StateFlags.PRELIGHT)
self.disconnect(self._enter_notify_hid)
self._enter_notify_hid = self.connect('enter-notify-event',
__enter_notify_event_cb)
def create_palette(self):
palette = BuddyMenu(get_owner_instance())
settings = Gio.Settings('org.sugarlabs')
if settings.get_boolean('show-register'):
backup_url = settings.get_string('backup-url')
if not backup_url:
text = _('Register')
else:
text = _('Register again')
register_menu = PaletteMenuItem(text, 'media-record')
register_menu.connect('activate', self.__register_activate_cb)
palette.menu_box.pack_end(register_menu, True, True, 0)
register_menu.show()
self.connect_to_palette_pop_events(palette)
return palette
def __register_activate_cb(self, menuitem):
self.emit('register-activate')
class FavoritesSetting(object):
_DESKTOP_DIR = 'org.sugarlabs.desktop'
_HOMEVIEWS_KEY = 'homeviews'
def __init__(self, favorite_view):
self._favorite_view = int(favorite_view)
settings = Gio.Settings(self._DESKTOP_DIR)
homeviews = settings.get_value(self._HOMEVIEWS_KEY).unpack()
self._layout = homeviews[self._favorite_view]['layout']
logging.debug('FavoritesSetting layout %r', self._layout)
self._mode = None
self.changed = dispatch.Signal()
def get_layout(self):
return self._layout
def set_layout(self, layout):
logging.debug('set_layout %r %r', layout, self._layout)
if layout != self._layout:
self._layout = layout
settings = Gio.Settings(self._DESKTOP_DIR)
homeviews = settings.get_value(self._HOMEVIEWS_KEY).unpack()
homeviews[self._favorite_view]['layout'] = layout
variant = GLib.Variant('aa{ss}', homeviews)
settings.set_value(self._HOMEVIEWS_KEY, variant)
self.changed.send(self)
layout = property(get_layout, set_layout)
def get_settings(favorite_view=0):
global _favorites_settings
number_of_views = desktop.get_number_of_views()
if _favorites_settings is None:
_favorites_settings = []
for i in range(number_of_views):
_favorites_settings.append(FavoritesSetting(i))
elif len(_favorites_settings) < number_of_views:
for i in range(number_of_views - len(_favorites_settings)):
_favorites_settings.append(
FavoritesSetting(len(_favorites_settings)))
    return _favorites_settings[favorite_view]
<|file_name|>param_utils.py<|end_file_name|>
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import json
import six
from jinja2 import Template, Environment, StrictUndefined, meta, exceptions
from st2common import log as logging
from st2common.constants.action import ACTION_KV_PREFIX
from st2common.constants.system import SYSTEM_KV_PREFIX
from st2common.exceptions import actionrunner
from st2common.services.keyvalues import KeyValueLookup
from st2common.util.casts import get_cast
from st2common.util.compat import to_unicode
LOG = logging.getLogger(__name__)
__all__ = [
'get_resolved_params',
'get_rendered_params',
'get_finalized_params',
]
def _split_params(runner_parameters, action_parameters, mixed_params):
def pf(params, skips):
result = {k: v for k, v in six.iteritems(mixed_params)
if k in params and k not in skips}
return result
return (pf(runner_parameters, {}), pf(action_parameters, runner_parameters))
def _get_resolved_runner_params(runner_parameters, action_parameters,
actionexec_runner_parameters):
# Runner parameters should use the defaults from the RunnerType object.
# The runner parameter defaults may be overridden by values provided in
# the Action and liveaction.
# Create runner parameter by merging default values with dynamic values
resolved_params = {k: v['default'] if 'default' in v else None
for k, v in six.iteritems(runner_parameters)}
# pick overrides from action_parameters & actionexec_runner_parameters
for param_name, param_value in six.iteritems(runner_parameters):
# No override if param is immutable
if param_value.get('immutable', False):
continue
# Check if param exists in action_parameters and if it has a default value then
# pickup the override.
if param_name in action_parameters:
action_param = action_parameters[param_name]
if 'default' in action_param:
resolved_params[param_name] = action_param['default']
# No further override (from liveaction) if param is immutable
if action_param.get('immutable', False):
continue
# Finally pick up override from actionexec_runner_parameters
if param_name in actionexec_runner_parameters:
resolved_params[param_name] = actionexec_runner_parameters[param_name]
return resolved_params
def _get_resolved_action_params(runner_parameters, action_parameters,
actionexec_action_parameters):
# Create action parameters by merging default values with dynamic values
resolved_params = {k: v['default'] if 'default' in v else None
for k, v in six.iteritems(action_parameters)
if k not in runner_parameters}
# pick overrides from actionexec_action_parameters
for param_name, param_value in six.iteritems(action_parameters):
# No override if param is immutable
if param_value.get('immutable', False):
continue
if param_name in actionexec_action_parameters and param_name not in runner_parameters:
resolved_params[param_name] = actionexec_action_parameters[param_name]
return resolved_params
def get_resolved_params(runnertype_parameter_info, action_parameter_info, actionexec_parameters):
'''
Looks at the parameter values from runner, action and action execution to fully resolve the
    values. Resolution is the process of determining the value of a parameter by taking into
consideration default, immutable and user supplied values.
'''
# Runner parameters should use the defaults from the RunnerType object.
# The runner parameter defaults may be overridden by values provided in
# the Action and liveaction.
actionexec_runner_parameters, actionexec_action_parameters = _split_params(
runnertype_parameter_info, action_parameter_info, actionexec_parameters)
runner_params = _get_resolved_runner_params(runnertype_parameter_info,
action_parameter_info,
actionexec_runner_parameters)
action_params = _get_resolved_action_params(runnertype_parameter_info,
action_parameter_info,
actionexec_action_parameters)
return runner_params, action_params
def _is_template(template_str):
template_str = to_unicode(template_str)
template = Template(template_str)
try:
return template_str != template.render({})
except exceptions.UndefinedError:
return True
def _renderable_context_param_split(action_parameters, runner_parameters, base_context=None):
# To render the params it is necessary to combine the params together so that cross
# parameter category references are resolved.
renderable_params = {}
# shallow copy since this will be updated
context_params = copy.copy(base_context) if base_context else {}
def do_render_context_split(source_params):
'''
Will split the supplied source_params into renderable_params and context_params. As part of
        the split it also makes sure that all params are essentially strings.
'''
for k, v in six.iteritems(source_params):
renderable_v = v
# dict and list to be converted to str
if isinstance(renderable_v, dict) or isinstance(renderable_v, list):
renderable_v = json.dumps(renderable_v)
# only str can contain templates
if (isinstance(renderable_v, str) or isinstance(renderable_v, unicode)) and \
_is_template(renderable_v):
renderable_params[k] = renderable_v
elif isinstance(v, dict) or isinstance(v, list):
# For context use the renderable value for dict and list params. The template
# rendering by jinja yields a non json.loads compatible value leading to issues
# while performing casts.
context_params[k] = renderable_v
else:
# For context use the original value.
context_params[k] = v
do_render_context_split(action_parameters)
do_render_context_split(runner_parameters)
return (renderable_params, context_params)
def _check_availability(param, param_dependencies, renderable_params, context):
for dependency in param_dependencies:
if dependency not in renderable_params and dependency not in context:
return False
return True
def _check_cyclic(dep_chain, dependencies):
last_idx = len(dep_chain) - 1
last_value = dep_chain[last_idx]
for dependency in dependencies.get(last_value, []):
if dependency in dep_chain:
dep_chain.append(dependency)
return False
dep_chain.append(dependency)
if not _check_cyclic(dep_chain, dependencies):
return False
dep_chain.pop()
return True
def _validate_dependencies(renderable_params, context):
'''
Validates dependencies between the parameters.
e.g.
{
'a': '{{b}}',
'b': '{{a}}'
}
    In this example 'a' requires 'b' for template rendering and vice-versa. There is no
    way for these templates to be rendered, so such cases are flagged with an
    ActionRunnerException.
'''
env = Environment(undefined=StrictUndefined)
dependencies = {}
for k, v in six.iteritems(renderable_params):
template_ast = env.parse(v)
dependencies[k] = meta.find_undeclared_variables(template_ast)
for k, v in six.iteritems(dependencies):
if not _check_availability(k, v, renderable_params, context):
            msg = 'Dependency unsatisfied - %s: %s.' % (k, v)
raise actionrunner.ActionRunnerException(msg)
dep_chain = []
dep_chain.append(k)
if not _check_cyclic(dep_chain, dependencies):
            msg = 'Cyclic dependency found - %s.' % dep_chain
raise actionrunner.ActionRunnerException(msg)
def _do_render_params(renderable_params, context):
'''
    Will render the params per the context and will return the best attempt to render. Render attempts
with missing params will leave blanks.
'''
if not renderable_params:
return renderable_params
_validate_dependencies(renderable_params, context)
env = Environment(undefined=StrictUndefined)
rendered_params = {}
rendered_params.update(context)
# Maps parameter key to render exception
# We save the exception so we can throw a more meaningful exception at the end if rendering of
# some parameter fails
parameter_render_exceptions = {}
num_parameters = len(renderable_params) + len(context)
# After how many attempts at failing to render parameter we should bail out
max_rendered_parameters_unchanged_count = num_parameters
rendered_params_unchanged_count = 0
while len(renderable_params) != 0:
renderable_params_pre_loop = renderable_params.copy()
for k, v in six.iteritems(renderable_params):
template = env.from_string(v)
try:
rendered = template.render(rendered_params)
rendered_params[k] = rendered
if k in parameter_render_exceptions:
del parameter_render_exceptions[k]
except Exception as e:
# Note: This sucks, but because we support multi level and out of order
# rendering, we can't throw an exception here yet since the parameter could get
# rendered in future iteration
LOG.debug('Failed to render %s: %s', k, v, exc_info=True)
parameter_render_exceptions[k] = e
for k in rendered_params:
if k in renderable_params:
del renderable_params[k]
if renderable_params_pre_loop == renderable_params:
rendered_params_unchanged_count += 1
# Make sure we terminate and don't end up in an infinite loop if we
# tried to render all the parameters but rendering of some parameters
# still fails
if rendered_params_unchanged_count >= max_rendered_parameters_unchanged_count:
k = parameter_render_exceptions.keys()[0]
e = parameter_render_exceptions[k]
msg = 'Failed to render parameter "%s": %s' % (k, str(e))
raise actionrunner.ActionRunnerException(msg)
return rendered_params
def _cast_params(rendered, parameter_schemas):
casted_params = {}
for k, v in six.iteritems(rendered):
# Add uncasted first and then override with casted param. Not all params will end up
# being cast.
casted_params[k] = v
# No casting if the value is None. It leads to weird cases like str(None) = 'None'
# leading to downstream failures as well as int(None) leading to TypeError.
if v is None:
continue
parameter_schema = parameter_schemas.get(k, None)
if not parameter_schema:
continue
parameter_type = parameter_schema.get('type', None)
if not parameter_type:
continue
cast = get_cast(cast_type=parameter_type)
if not cast:
continue
casted_params[k] = cast(v)
return casted_params
def get_rendered_params(runner_parameters, action_parameters, action_context,
runnertype_parameter_info, action_parameter_info):
'''
Renders the templates in runner_parameters and action_parameters. Using the type information
from *_parameter_info will appropriately cast the parameters.
'''
# To render the params it is necessary to combine the params together so that cross
# parameter category references are also rendered correctly. Particularly in the cases where
# a runner parameter is overridden in an action it is likely that a runner parameter could
# depend on an action parameter.
render_context = {SYSTEM_KV_PREFIX: KeyValueLookup()}
render_context[ACTION_KV_PREFIX] = action_context
renderable_params, context = _renderable_context_param_split(action_parameters,
runner_parameters,
render_context)
rendered_params = _do_render_params(renderable_params, context)
template_free_params = {}
template_free_params.update(rendered_params)
template_free_params.update(context)
r_runner_parameters, r_action_parameters = _split_params(runnertype_parameter_info,
action_parameter_info,
template_free_params)
return (_cast_params(r_runner_parameters, runnertype_parameter_info),
_cast_params(r_action_parameters, action_parameter_info))
def get_finalized_params(runnertype_parameter_info, action_parameter_info, liveaction_parameters,
action_context):
'''
Finalize the parameters for an action to execute by doing the following -
1. Split the parameters into those consumed by runner and action into separate dicts.
2. Render any templates in the parameters.
'''
    runner_params, action_params = get_resolved_params(runnertype_parameter_info,
                                                       action_parameter_info,
                                                       liveaction_parameters)
    runner_params, action_params = get_rendered_params(runner_params, action_params,
                                                       action_context,
                                                       runnertype_parameter_info,
                                                       action_parameter_info)
    return (runner_params, action_params)
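
# A toy, self-contained illustration (not st2's API) of the multi-pass,
# cross-parameter rendering idea behind _do_render_params() above: re-render
# for a bounded number of passes so that later passes can satisfy
# dependencies that failed earlier.
if __name__ == '__main__':
    env = Environment(undefined=StrictUndefined)
    pending = {'greeting': '{{salutation}} {{name}}!',
               'salutation': 'Hello',
               'name': 'World'}
    rendered = {}
    for _ in range(len(pending)):
        for key, tmpl in list(pending.items()):
            try:
                rendered[key] = env.from_string(tmpl).render(rendered)
                del pending[key]
            except Exception:
                pass  # dependency not rendered yet; retry on the next pass
    print rendered['greeting']  # Hello World!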
<|file_name|>apps.py<|end_file_name|>
__title__ = 'fobi.contrib.plugins.form_elements.fields.select_multiple.apps'
__author__ = 'Artur Barseghyan <[email protected]>'
__copyright__ = '2014-2017 Artur Barseghyan'
__license__ = 'GPL 2.0/LGPL 2.1'
__all__ = ('Config',)
try:
    from django.apps import AppConfig

    class Config(AppConfig):
        """Config."""

        name = 'fobi.contrib.plugins.form_elements.fields.select_multiple'
        label = 'fobi_contrib_plugins_form_elements_fields_select_multiple'
except ImportError:
    pass
<|file_name|>math.py<|end_file_name|>
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = [
'Neema Kotonya ([email protected])',
'Gun Pinyo ([email protected])'
]
import os
from xml.etree import cElementTree
import appengine_config
from common import schema_fields
from common import tags
from controllers import sites
from models import custom_modules
from models import services
from modules.math import messages
MATH_MODULE_URI = '/modules/math'
RESOURCES_URI = MATH_MODULE_URI + '/resources'
MATHJAX_URI = MATH_MODULE_URI + '/MathJax'
class MathTag(tags.ContextAwareTag):
"""Custom tag for mathematical notation using MathJax."""
binding_name = 'gcb-math'
@classmethod
def name(cls):
return 'Mathematical Formula'
@classmethod
def vendor(cls):
return 'gcb'
def render(self, node, context):
math_script = cElementTree.XML('<script/>')
# The formula is "text" type in the schema and so is presented in the
# tag's body.
math_script.text = node.text
input_type = node.attrib.get('input_type')
if input_type == 'MML':
math_script.set('type', 'math/mml')
else:
math_script.set('type', 'math/tex')
return math_script
def rollup_header_footer(self, context):
"""Include MathJax library only when a math tag is present."""
header = tags.html_string_to_element_tree("""
<script src="%s/MathJax.js?config=TeX-AMS-MML_HTMLorMML">
</script>""" % MATHJAX_URI)
footer = tags.html_string_to_element_tree('')
return (header, footer)
def get_icon_url(self):
return RESOURCES_URI + '/math.png'
    def get_schema(self, unused_handler):
        reg = schema_fields.FieldRegistry(MathTag.name())
reg.add_property(
schema_fields.SchemaField(
'input_type', 'Type', 'string', i18n=False,
optional=True,
select_data=[('TeX', 'TeX'), ('MML', 'MathML')],
extra_schema_dict_values={'value': 'TeX'},
description=services.help_urls.make_learn_more_message(
messages.RTE_MATH_TYPE, 'math:math:input_type')))
reg.add_property(
schema_fields.SchemaField(
'formula', 'Mathematical Formula', 'text',
optional=True,
description=messages.RTE_MATH_MATHEMATICAL_FORMULA))
return reg
custom_module = None
def register_module():
"""Registers this module for use."""
def on_module_disable():
tags.Registry.remove_tag_binding(MathTag.binding_name)
def on_module_enable():
tags.Registry.add_tag_binding(MathTag.binding_name, MathTag)
global_routes = [
(RESOURCES_URI + '/.*', tags.ResourcesHandler),
(MATHJAX_URI + '/(fonts/.*)', sites.make_zip_handler(os.path.join(
appengine_config.BUNDLE_ROOT, 'lib', 'mathjax-fonts-2.3.0.zip'))),
(MATHJAX_URI + '/(.*)', sites.make_zip_handler(os.path.join(
appengine_config.BUNDLE_ROOT, 'lib', 'mathjax-2.3.0.zip')))]
namespaced_routes = []
global custom_module # pylint: disable=global-statement
custom_module = custom_modules.Module(
'Mathematical Formula Display',
'Provides a custom tag to embed mathematical formulas using TeX or MML.'
, global_routes, namespaced_routes,
notify_module_disabled=on_module_disable,
notify_module_enabled=on_module_enable)
    return custom_module
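
# An illustrative, standalone check of the <script> node construction used in
# MathTag.render() above (assumes the module's own imports resolve when run
# directly):
if __name__ == '__main__':
    demo = cElementTree.XML('<script/>')
    demo.text = 'E = mc^2'
    demo.set('type', 'math/tex')
    print cElementTree.tostring(demo)
    # -> <script type="math/tex">E = mc^2</script>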
<|file_name|>issue-13872-1.rs<|end_file_name|>
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub enum A { B }
<|file_name|>hr.py<|end_file_name|>
# -*- coding: utf-8 -*-
#
#
# Copyright (C) 2013 Michael Telahun Makonnen <[email protected]>.
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
from datetime import datetime, date, timedelta
from dateutil.relativedelta import relativedelta
from openerp.osv import fields, osv
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT as OE_DATEFORMAT
from openerp.tools.translate import _
class hr_employee(osv.Model):
_inherit = 'hr.employee'
def _get_contracts_list(self, employee):
'''Return list of contracts in chronological order'''
contracts = []
for c in employee.contract_ids:
l = len(contracts)
if l == 0:
contracts.append(c)
else:
dCStart = datetime.strptime(c.date_start, OE_DATEFORMAT).date()
i = l - 1
while i >= 0:
dContractStart = datetime.strptime(
contracts[i].date_start, OE_DATEFORMAT).date()
if dContractStart < dCStart:
contracts = contracts[:i + 1] + [c] + contracts[i + 1:]
break
elif i == 0:
contracts = [c] + contracts
i -= 1
return contracts
def _get_days_in_month(self, d):
last_date = d - timedelta(days=(d.day - 1)) + relativedelta(
months= +1) + relativedelta(days= -1)
return last_date.day
def get_months_service_to_date(self, cr, uid, ids, dToday=None, context=None):
'''Returns a dictionary of floats. The key is the employee id, and the value is
number of months of employment.'''
res = dict.fromkeys(ids, 0)
if dToday == None:
dToday = date.today()
for ee in self.pool.get('hr.employee').browse(cr, uid, ids, context=context):
            delta = relativedelta(dToday, dToday)
            contracts = self._get_contracts_list(ee)
if len(contracts) == 0:
res[ee.id] = (0.0, False)
continue
dInitial = datetime.strptime(
contracts[0].date_start, OE_DATEFORMAT).date()
if ee.initial_employment_date:
dFirstContract = dInitial
dInitial = datetime.strptime(
ee.initial_employment_date, '%Y-%m-%d').date()
if dFirstContract < dInitial:
                    raise osv.except_osv(
                        _('Employment Date mismatch!'),
                        _('The initial employment date cannot be after the '
                          'first contract in the system.\nEmployee: %s') %
                        ee.name)
delta = relativedelta(dFirstContract, dInitial)
for c in contracts:
dStart = datetime.strptime(c.date_start, '%Y-%m-%d').date()
if dStart >= dToday:
continue
# If the contract doesn't have an end date, use today's date
# If the contract has finished consider the entire duration of
# the contract, otherwise consider only the months in the
# contract until today.
#
if c.date_end:
dEnd = datetime.strptime(c.date_end, '%Y-%m-%d').date()
else:
dEnd = dToday
if dEnd > dToday:
dEnd = dToday
delta += relativedelta(dEnd, dStart)
# Set the number of months the employee has worked
date_part = float(delta.days) / float(
self._get_days_in_month(dInitial))
res[ee.id] = (
float((delta.years * 12) + delta.months) + date_part, dInitial)
return res
def _get_employed_months(self, cr, uid, ids, field_name, arg, context=None):
res = dict.fromkeys(ids, 0.0)
_res = self.get_months_service_to_date(cr, uid, ids, context=context)
for k, v in _res.iteritems():
res[k] = v[0]
return res
def _search_amount(self, cr, uid, obj, name, args, context):
ids = set()
for cond in args:
amount = cond[2]
if isinstance(cond[2], (list, tuple)):
if cond[1] in ['in', 'not in']:
amount = tuple(cond[2])
else:
continue
else:
if cond[1] in ['=like', 'like', 'not like', 'ilike', 'not ilike', 'in', 'not in', 'child_of']:
continue
cr.execute("select id from hr_employee having %s %%s" %
(cond[1]), (amount,))
res_ids = set(id[0] for id in cr.fetchall())
ids = ids and (ids & res_ids) or res_ids
if ids:
return [('id', 'in', tuple(ids))]
return [('id', '=', '0')]
_columns = {
'initial_employment_date': fields.date('Initial Date of Employment', groups=False,
help='Date of first employment if it was before the start of the first contract in the system.'),
'length_of_service': fields.function(_get_employed_months, type='float', method=True,
groups=False,
                                             string='Length of Service'),
    }
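
# An illustrative standalone check (not part of the original module) of the
# month-length arithmetic used in _get_days_in_month() above; it only needs
# the datetime and dateutil imports at the top of this file, though running
# the module directly also requires the OpenERP imports to resolve.
if __name__ == '__main__':
    for d in (date(2013, 2, 10), date(2012, 2, 10), date(2013, 1, 31)):
        last = d - timedelta(days=(d.day - 1)) + \
            relativedelta(months=+1) + relativedelta(days=-1)
        print d, '->', last.day   # 28, 29 (leap year) and 31 respectively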
<|file_name|>StatsAccumulator.py<|end_file_name|>
#
# Copyright 2008-2015 Semantic Discovery, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
import math
from threading import Lock
class StatsAccumulator:
'''
A low-memory helper class to collect statistical samples and provide
summary statistics.
'''
def __init__(self, label='', other=None):
self._modlock = Lock()
self.clear(label)
if not other == None:
            # Keep an explicitly supplied label; otherwise inherit other's.
            if label == '':
                self._label = other._label
self._n = other._n
self._minimum = other._minimum
self._maximum = other._maximum
self._sum = other._sum
self._sos = other._sos
@property
def label(self):
return self._label
@label.setter
def label(self, val):
self._label = val
@property
def n(self):
return self._n
@property
def minimum(self):
return self._minimum
@property
def maximum(self):
return self._maximum
@property
def sum(self):
return self._sum
@property
def sumOfSquares(self):
return self._sos
@property
def mean(self):
return self.getMean()
@property
def standardDeviation(self):
        return self.getStandardDeviation()

    @property
def variance(self):
return self.getVariance()
def clear(self, label=''):
self._modlock.acquire()
try:
self._label = label
self._n = 0
self._minimum = 0.0
self._maximum = 0.0
self._sum = 0.0
self._sos = 0.0
finally:
self._modlock.release()
def initialize(self, label='', n=0, minimum=0, maximum=0, mean=0, stddev=0, summaryInfo=None):
'''
Initialize with the given values, preferring existing values from the dictionary.
'''
if summaryInfo is not None:
if 'label' in summaryInfo:
label = summaryInfo['label']
if 'n' in summaryInfo:
n = summaryInfo['n']
if 'minimum' in summaryInfo:
minimum = summaryInfo['minimum']
if 'maximum' in summaryInfo:
maximum = summaryInfo['maximum']
if 'mean' in summaryInfo:
mean = summaryInfo['mean']
if 'stddev' in summaryInfo:
stddev = summaryInfo['stddev']
self._modlock.acquire()
try:
self._label = label
self._n = n
self._minimum = minimum
self._maximum = maximum
self._sum = mean * n
self._sos = 0 if n == 0 else stddev * stddev * (n - 1.0) + self._sum * self._sum / n
finally:
self._modlock.release()
def summaryInfo(self):
'''
Get a dictionary containing a summary of this instance's information.
'''
result = {
'label': self.label,
'n': self.n,
'minimum': self.minimum,
'maximum': self.maximum,
'mean': self.mean,
'stddev': self.standardDeviation
}
return result
def __str__(self):
return json.dumps(self.summaryInfo(), sort_keys=True)
def add(self, *values):
for value in values:
self._doAdd(value)
def _doAdd(self, value):
self._modlock.acquire()
try:
if self._n == 0:
self._minimum = value
self._maximum = value
else:
if value < self._minimum:
self._minimum = value
if value > self._maximum:
self._maximum = value
self._n += 1
self._sos += (value * value)
self._sum += value
finally:
self._modlock.release()
def getMean(self):
return 0 if self._n == 0 else self._sum / self._n
def getStandardDeviation(self):
return 0 if self._n < 2 else math.sqrt(self.variance)
def getVariance(self):
return 0 if self._n < 2 else (1.0 / (self._n - 1.0)) * (self._sos - (1.0 / self._n) * self._sum * self._sum)
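    # Illustrative check of the running-sums formula: samples 2.0, 4.0, 6.0
    # give n=3, sum=12.0, sos=56.0, so variance = (1/2) * (56 - 12*12/3) = 4.0
    # and standard deviation = 2.0.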
@staticmethod
def combine(label, *statsAccumulators):
'''
Create a new statsAccumulator as if it had accumulated all data from
the given list of stats accumulators.
'''
result = StatsAccumulator(label)
for stats in statsAccumulators:
result.incorporate(stats)
return result
def incorporate(self, other):
'''
Incorporate the other statsAccumulator's data into this as if this had
accumulated the other's along with its own.
'''
if other is None:
return
self._modlock.acquire()
try:
if self._n == 0:
self._minimum = other._minimum
self._maximum = other._maximum
else:
if other._minimum < self._minimum:
self._minimum = other._minimum
if other._maximum > self._maximum:
self._maximum = other._maximum
self._n += other._n
self._sos += other._sos
self._sum += other._sum
finally:
self._modlock.release()<|fim▁end|> | |
<|file_name|>Installation.java<|end_file_name|><|fim▁begin|>package com.wabbit.libraries;
import android.content.Context;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.UUID;
public class Installation {
private static String sID = null;
private static final String INSTALLATION = "INSTALLATION";
public synchronized static String id(Context context) {
if (sID == null) {
File installation = new File(context.getFilesDir(), INSTALLATION);
try {
if (!installation.exists())
writeInstallationFile(installation);
sID = readInstallationFile(installation);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
return sID;
}<|fim▁hole|> f.readFully(bytes);
f.close();
return new String(bytes);
}
private static void writeInstallationFile(File installation) throws IOException {
FileOutputStream out = new FileOutputStream(installation);
String id = UUID.randomUUID().toString();
out.write(id.getBytes());
out.close();
}
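    // Illustrative usage (the `context` variable is hypothetical; any
    // android.content.Context works):
    //   String installId = Installation.id(context);  // stable per-install UUID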
}<|fim▁end|> |
private static String readInstallationFile(File installation) throws IOException {
RandomAccessFile f = new RandomAccessFile(installation, "r");
byte[] bytes = new byte[(int) f.length()]; |
<|file_name|>CastResolver.java<|end_file_name|><|fim▁begin|>/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.devtools.j2objc.translate;
import com.google.common.collect.Lists;
import com.google.devtools.j2objc.ast.CastExpression;
import com.google.devtools.j2objc.ast.Expression;
import com.google.devtools.j2objc.ast.ExpressionStatement;
import com.google.devtools.j2objc.ast.FieldAccess;
import com.google.devtools.j2objc.ast.FunctionInvocation;
import com.google.devtools.j2objc.ast.MethodDeclaration;
import com.google.devtools.j2objc.ast.MethodInvocation;
import com.google.devtools.j2objc.ast.ParenthesizedExpression;
import com.google.devtools.j2objc.ast.QualifiedName;
import com.google.devtools.j2objc.ast.ReturnStatement;
import com.google.devtools.j2objc.ast.SimpleName;
import com.google.devtools.j2objc.ast.SuperMethodInvocation;
import com.google.devtools.j2objc.ast.TreeUtil;
import com.google.devtools.j2objc.ast.TreeVisitor;
import com.google.devtools.j2objc.ast.VariableDeclarationFragment;
import com.google.devtools.j2objc.types.IOSMethodBinding;
import com.google.devtools.j2objc.types.Types;
import com.google.devtools.j2objc.util.BindingUtil;
import com.google.devtools.j2objc.util.NameTable;
import org.eclipse.jdt.core.dom.IMethodBinding;
import org.eclipse.jdt.core.dom.ITypeBinding;
import org.eclipse.jdt.core.dom.IVariableBinding;
import org.eclipse.jdt.core.dom.Modifier;
import java.util.Arrays;
import java.util.List;
/**
* Adds cast checks to existing java cast expressions.
* Adds casts as needed for Objective-C compilation. Usually this occurs when a
* method has a declared return type that is more generic than the resolved type
* of the expression.
*/
public class CastResolver extends TreeVisitor {
@Override
public void endVisit(CastExpression node) {
ITypeBinding type = node.getType().getTypeBinding();
Expression expr = node.getExpression();
ITypeBinding exprType = expr.getTypeBinding();
if (Types.isFloatingPointType(exprType)) {
if (Types.isLongType(type)) {
FunctionInvocation invocation = new FunctionInvocation("J2ObjCFpToLong", type, type, null);
invocation.getArguments().add(TreeUtil.remove(expr));
node.replaceWith(invocation);
return;
} else if (type.isEqualTo(Types.resolveJavaType("char"))) {
FunctionInvocation invocation =
new FunctionInvocation("J2ObjCFpToUnichar", type, type, null);
invocation.getArguments().add(TreeUtil.remove(expr));
node.replaceWith(invocation);
return;
} else if (Types.isIntegralType(type)) {
ITypeBinding intType = Types.resolveJavaType("int");
FunctionInvocation invocation =
new FunctionInvocation("J2ObjCFpToInt", intType, intType, null);
invocation.getArguments().add(TreeUtil.remove(expr));
Expression newExpr = invocation;
if (!type.isEqualTo(intType)) {
newExpr = new CastExpression(type, newExpr);
}
node.replaceWith(newExpr);
return;
}
// else fall-through.
}
// Lean on Java's type-checking.
if (!type.isPrimitive() && exprType.isAssignmentCompatible(type)) {
node.replaceWith(TreeUtil.remove(expr));
return;
}
FunctionInvocation castCheck = createCastCheck(type, expr);
if (castCheck != null) {
node.setExpression(castCheck);
}
}
private static FunctionInvocation createCastCheck(ITypeBinding type, Expression expr) {
// Find the first bound for a type variable.
while (type.isTypeVariable()) {
ITypeBinding[] bounds = type.getTypeBounds();
if (bounds.length == 0) {
break;
}
type = bounds[0];
}
ITypeBinding idType = Types.resolveIOSType("id");
FunctionInvocation invocation = null;
if (type.isInterface() && !type.isAnnotation()) {
invocation = new FunctionInvocation("check_protocol_cast", idType, idType, null);
invocation.getArguments().add(TreeUtil.remove(expr));
FunctionInvocation protocolLiteral =
new FunctionInvocation("@protocol", idType, idType, null);
protocolLiteral.getArguments().add(new SimpleName(type));
invocation.getArguments().add(protocolLiteral);
} else if (type.isClass() || type.isArray() || type.isAnnotation() || type.isEnum()) {
invocation = new FunctionInvocation("check_class_cast", idType, idType, null);
invocation.getArguments().add(TreeUtil.remove(expr));
IOSMethodBinding binding = IOSMethodBinding.newMethod("class", Modifier.STATIC, idType, type);
MethodInvocation classInvocation = new MethodInvocation(binding, new SimpleName(type));
invocation.getArguments().add(classInvocation);
}
return invocation;
}
private void addCast(Expression expr) {
ITypeBinding exprType = Types.mapType(expr.getTypeBinding().getTypeDeclaration());
CastExpression castExpr = new CastExpression(exprType, null);
expr.replaceWith(ParenthesizedExpression.parenthesize(castExpr));
castExpr.setExpression(expr);
}
private void maybeAddCast(Expression expr, boolean shouldCastFromId) {
if (needsCast(expr, shouldCastFromId)) {
addCast(expr);
}
}
private boolean needsCast(Expression expr, boolean shouldCastFromId) {
ITypeBinding declaredType = getDeclaredType(expr);
if (declaredType == null) {
return false;
}
ITypeBinding exprType = Types.mapType(expr.getTypeBinding().getTypeDeclaration());
declaredType = Types.mapType(declaredType.getTypeDeclaration());
if (declaredType.isAssignmentCompatible(exprType)) {
return false;
}
if (declaredType == Types.resolveIOSType("id") && !shouldCastFromId) {
return false;
}
if (exprType.isPrimitive() || Types.isVoidType(exprType)) {
return false;
}
String typeName = NameTable.getSpecificObjCType(exprType);
if (typeName.equals(NameTable.ID_TYPE)) {
return false;
}
return true;
}
private ITypeBinding getDeclaredType(Expression expr) {
IVariableBinding var = TreeUtil.getVariableBinding(expr);
if (var != null) {
return var.getVariableDeclaration().getType();
}
switch (expr.getKind()) {
case CLASS_INSTANCE_CREATION:
return Types.resolveIOSType("id");
case FUNCTION_INVOCATION:
return ((FunctionInvocation) expr).getDeclaredReturnType();
case METHOD_INVOCATION:
{
MethodInvocation invocation = (MethodInvocation) expr;
IMethodBinding method = invocation.getMethodBinding();
// Object receiving the message, or null if it's a method in this class.
Expression receiver = invocation.getExpression();
ITypeBinding receiverType = receiver != null ? receiver.getTypeBinding()
: method.getDeclaringClass();
return getDeclaredReturnType(method, receiverType);
}
case SUPER_METHOD_INVOCATION:
{
SuperMethodInvocation invocation = (SuperMethodInvocation) expr;
IMethodBinding method = invocation.getMethodBinding();
if (invocation.getQualifier() != null) {
// For a qualified super invocation, the statement generator will look
// up the IMP using instanceMethodForSelector.
if (!method.getReturnType().isPrimitive()) {
return Types.resolveIOSType("id");
} else {
return null;
}
}
return getDeclaredReturnType(
method, TreeUtil.getOwningType(invocation).getTypeBinding().getSuperclass());
}
default:
return null;
}
}
private static ITypeBinding getDeclaredReturnType(
IMethodBinding method, ITypeBinding receiverType) {
IMethodBinding actualDeclaration =
getFirstDeclaration(getObjCMethodSignature(method), receiverType);
if (actualDeclaration == null) {
actualDeclaration = method.getMethodDeclaration();
}
ITypeBinding returnType = actualDeclaration.getReturnType();
if (returnType.isTypeVariable()) {
return Types.resolveIOSType("id");
}
return returnType.getErasure();
}
/**
* Finds the declaration for a given method and receiver in the same way that
* the ObjC compiler will search for a declaration.
*/
private static IMethodBinding getFirstDeclaration(String methodSig, ITypeBinding type) {
if (type == null) {
return null;<|fim▁hole|> }
type = type.getTypeDeclaration();
for (IMethodBinding declaredMethod : type.getDeclaredMethods()) {
if (methodSig.equals(getObjCMethodSignature(declaredMethod))) {
return declaredMethod;
}
}
List<ITypeBinding> supertypes = Lists.newArrayList();
supertypes.addAll(Arrays.asList(type.getInterfaces()));
supertypes.add(type.isTypeVariable() ? 0 : supertypes.size(), type.getSuperclass());
for (ITypeBinding supertype : supertypes) {
IMethodBinding result = getFirstDeclaration(methodSig, supertype);
if (result != null) {
return result;
}
}
return null;
}
private static String getObjCMethodSignature(IMethodBinding method) {
StringBuilder sb = new StringBuilder(method.getName());
boolean first = true;
for (ITypeBinding paramType : method.getParameterTypes()) {
String keyword = NameTable.parameterKeyword(paramType);
if (first) {
first = false;
keyword = NameTable.capitalize(keyword);
}
sb.append(keyword + ":");
}
return sb.toString();
}
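  // Illustrative only (exact keywords come from NameTable.parameterKeyword):
  // a Java method foo(String, int) yields a selector-style signature along
  // the lines of "fooWithNSString:withInt:".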
// Some native objective-c methods are declared to return NSUInteger.
private boolean returnValueNeedsIntCast(Expression arg) {
IMethodBinding methodBinding = TreeUtil.getMethodBinding(arg);
assert methodBinding != null;
if (arg.getParent() instanceof ExpressionStatement) {
// Avoid "unused return value" warning.
return false;
}
String methodName = NameTable.getMethodSelector(methodBinding);
if (methodName.equals("hash")
&& methodBinding.getReturnType().isEqualTo(Types.resolveJavaType("int"))) {
return true;
}
if (Types.isStringType(methodBinding.getDeclaringClass()) && methodName.equals("length")) {
return true;
}
return false;
}
@Override
public void endVisit(FieldAccess node) {
maybeAddCast(node.getExpression(), true);
}
@Override
public void endVisit(MethodInvocation node) {
Expression receiver = node.getExpression();
if (receiver != null && !BindingUtil.isStatic(node.getMethodBinding())) {
maybeAddCast(receiver, true);
}
if (returnValueNeedsIntCast(node)) {
addCast(node);
}
}
@Override
public void endVisit(QualifiedName node) {
if (needsCast(node.getQualifier(), true)) {
maybeAddCast(TreeUtil.convertToFieldAccess(node).getExpression(), true);
}
}
@Override
public void endVisit(ReturnStatement node) {
Expression expr = node.getExpression();
if (expr != null) {
maybeAddCast(expr, false);
}
}
@Override
public void endVisit(SuperMethodInvocation node) {
if (returnValueNeedsIntCast(node)) {
addCast(node);
}
}
@Override
public void endVisit(VariableDeclarationFragment node) {
Expression initializer = node.getInitializer();
if (initializer != null) {
maybeAddCast(initializer, false);
}
}
/**
* Adds a cast check to compareTo methods. This helps Comparable types behave
* well in sorted collections which rely on Java's runtime type checking.
*/
@Override
public void endVisit(MethodDeclaration node) {
IMethodBinding binding = node.getMethodBinding();
if (!binding.getName().equals("compareTo") || node.getBody() == null) {
return;
}
ITypeBinding comparableType =
BindingUtil.findInterface(binding.getDeclaringClass(), "java.lang.Comparable");
if (comparableType == null) {
return;
}
ITypeBinding[] typeArguments = comparableType.getTypeArguments();
ITypeBinding[] parameterTypes = binding.getParameterTypes();
if (typeArguments.length != 1 || parameterTypes.length != 1
|| !typeArguments[0].isEqualTo(parameterTypes[0])) {
return;
}
IVariableBinding param = node.getParameters().get(0).getVariableBinding();
FunctionInvocation castCheck = createCastCheck(typeArguments[0], new SimpleName(param));
if (castCheck != null) {
node.getBody().getStatements().add(0, new ExpressionStatement(castCheck));
}
}
}<|fim▁end|> | |
<|file_name|>TestTemplates.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""Test template support in VTK-Python
VTK-python decides which template specializations
to wrap according to which ones are used in typedefs
and which ones appear as superclasses of other classes.
In addition, the wrappers are hard-coded to wrap the
vtkDenseArray and vtkSparseArray classes over a broad
range of types.
Created on May 29, 2011 by David Gobbi
"""
import sys
import exceptions
import vtk
from vtk.test import Testing
arrayTypes = ['char', 'int8', 'uint8', 'int16', 'uint16',
'int32', 'uint32', int, 'uint', 'int64', 'uint64',
'float32', float, str, 'unicode', vtk.vtkVariant]
arrayCodes = ['c', 'b', 'B', 'h', 'H',
'i', 'I', 'l', 'L', 'q', 'Q',
'f', 'd']
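# Illustrative: a template specialization is selected by indexing the class
# with one of the keys above, e.g. vtk.vtkDenseArray[float] or
# vtk.vtkDenseArray['uint8'].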
class TestTemplates(Testing.vtkTest):
def testDenseArray(self):
"""Test vtkDenseArray template"""
for t in (arrayTypes + arrayCodes):
a = vtk.vtkDenseArray[t]()
a.Resize(1)
i = vtk.vtkArrayCoordinates(0)
if t in ['bool', '?']:
value = 1
a.SetValue(i, value)
result = a.GetValue(i)
self.assertEqual(value, result)
elif t in ['float32', 'float64', 'float', 'f', 'd']:
value = 3.125
a.SetValue(i, value)
result = a.GetValue(i)
self.assertEqual(value, result)
elif t in ['char', 'c']:
value = 'c'
a.SetValue(i, value)
<|fim▁hole|> result = a.GetValue(i)
self.assertEqual(value, result)
elif t in [str, 'str', 'unicode']:
value = unicode("hello")
a.SetValue(i, value)
result = a.GetValue(i)
self.assertEqual(value, result)
elif t in ['vtkVariant', vtk.vtkVariant]:
value = vtk.vtkVariant("world")
a.SetValue(i, value)
result = a.GetValue(i)
self.assertEqual(value, result)
else:
value = 12
a.SetValue(i, value)
result = a.GetValue(i)
self.assertEqual(value, result)
def testSparseArray(self):
"""Test vtkSparseArray template"""
for t in (arrayTypes + arrayCodes):
a = vtk.vtkSparseArray[t]()
a.Resize(1)
i = vtk.vtkArrayCoordinates(0)
if t in ['bool', '?']:
value = 0
a.SetValue(i, value)
result = a.GetValue(i)
self.assertEqual(value, result)
elif t in ['float32', 'float64', 'float', 'f', 'd']:
value = 3.125
a.SetValue(i, value)
result = a.GetValue(i)
self.assertEqual(value, result)
elif t in ['char', 'c']:
value = 'c'
a.SetValue(i, value)
result = a.GetValue(i)
self.assertEqual(value, result)
elif t in [str, 'str', 'unicode']:
value = unicode("hello")
a.SetValue(i, value)
result = a.GetValue(i)
self.assertEqual(value, result)
elif t in ['vtkVariant', vtk.vtkVariant]:
value = vtk.vtkVariant("world")
a.SetValue(i, value)
result = a.GetValue(i)
self.assertEqual(value, result)
else:
value = 12
a.SetValue(i, value)
result = a.GetValue(i)
self.assertEqual(value, result)
def testArray(self):
"""Test array CreateArray"""
o = vtk.vtkArray.CreateArray(vtk.vtkArray.DENSE, vtk.VTK_DOUBLE)
self.assertEqual(o.__class__, vtk.vtkDenseArray[float])
def testVector(self):
"""Test vector templates"""
# make sure Rect inherits operators
r = vtk.vtkRectf(0, 0, 2, 2)
self.assertEqual(r[2], 2.0)
c = vtk.vtkColor4ub(0, 0, 0)
self.assertEqual(list(c), [0, 0, 0, 255])
e = vtk.vtkVector['float32', 3]([0.0, 1.0, 2.0])
self.assertEqual(list(e), [0.0, 1.0, 2.0])
i = vtk.vtkVector3['i'](0)
self.assertEqual(list(i), [0, 0, 0])
if __name__ == "__main__":
Testing.main([(TestTemplates, 'test')])<|fim▁end|> | |
<|file_name|>declaration_block.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A property declaration block.
#![deny(missing_docs)]
use context::QuirksMode;
use cssparser::{DeclarationListParser, parse_important, ParserInput, CompactCowStr};
use cssparser::{Parser, AtRuleParser, DeclarationParser, Delimiter};
use error_reporting::{ParseErrorReporter, ContextualParseError};
use parser::{ParserContext, log_css_error};
use properties::animated_properties::AnimationValue;
use selectors::parser::SelectorParseError;
use shared_lock::Locked;
use smallvec::SmallVec;
use std::fmt;
use std::slice::Iter;
use style_traits::{PARSING_MODE_DEFAULT, ToCss, ParseError, ParsingMode, StyleParseError};
use stylesheets::{CssRuleType, Origin, UrlExtraData};
use stylesheets::{MallocSizeOf, MallocSizeOfFn};
use super::*;
use values::computed::Context;
#[cfg(feature = "gecko")] use properties::animated_properties::AnimationValueMap;
/// The animation rules.
///
/// The first one is for Animation cascade level, and the second one is for
/// Transition cascade level.
pub struct AnimationRules<'a>(pub Option<&'a Arc<Locked<PropertyDeclarationBlock>>>,
pub Option<&'a Arc<Locked<PropertyDeclarationBlock>>>);
impl<'a> AnimationRules<'a> {
/// Returns whether these animation rules represents an actual rule or not.
pub fn is_empty(&self) -> bool {
self.0.is_none() && self.1.is_none()
}
}
/// A declaration [importance][importance].
///
/// [importance]: https://drafts.csswg.org/css-cascade/#importance
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum Importance {
/// Indicates a declaration without `!important`.
Normal,
/// Indicates a declaration with `!important`.
Important,
}
impl MallocSizeOf for Importance {
fn malloc_size_of_children(&self, _malloc_size_of: MallocSizeOfFn) -> usize {
0
}
}
impl Importance {
/// Return whether this is an important declaration.
pub fn important(self) -> bool {
match self {
Importance::Normal => false,
Importance::Important => true,
}
}
}
/// A block of property declarations. Overridden declarations are skipped.
#[derive(Clone)]
pub struct PropertyDeclarationBlock {
/// The group of declarations, along with their importance.
///
/// Only deduplicated declarations appear here.
declarations: Vec<(PropertyDeclaration, Importance)>,
/// The number of entries in `self.declaration` with `Importance::Important`
important_count: usize,
longhands: LonghandIdSet,
}
impl MallocSizeOf for PropertyDeclarationBlock {
fn malloc_size_of_children(&self, malloc_size_of: MallocSizeOfFn) -> usize {
self.declarations.malloc_size_of_children(malloc_size_of)
}
}
/// Iterator for PropertyDeclaration to be generated from PropertyDeclarationBlock.
#[derive(Clone)]
pub struct PropertyDeclarationIterator<'a> {
iter: Iter<'a, (PropertyDeclaration, Importance)>,
}
impl<'a> Iterator for PropertyDeclarationIterator<'a> {
type Item = &'a PropertyDeclaration;
#[inline]
fn next(&mut self) -> Option<&'a PropertyDeclaration> {
// we use this function because a closure won't be `Clone`
fn get_declaration(dec: &(PropertyDeclaration, Importance))
-> &PropertyDeclaration {
&dec.0
}
self.iter.next().map(get_declaration as fn(_) -> _)
}
}
/// Iterator for AnimationValue to be generated from PropertyDeclarationBlock.
pub struct AnimationValueIterator<'a, 'cx, 'cx_a:'cx> {
iter: Iter<'a, (PropertyDeclaration, Importance)>,
context: &'cx mut Context<'cx_a>,
default_values: &'a Arc<ComputedValues>,
}
impl<'a, 'cx, 'cx_a:'cx> AnimationValueIterator<'a, 'cx, 'cx_a> {
fn new(declarations: &'a PropertyDeclarationBlock,
context: &'cx mut Context<'cx_a>,
default_values: &'a Arc<ComputedValues>) -> AnimationValueIterator<'a, 'cx, 'cx_a> {
AnimationValueIterator {
iter: declarations.declarations().iter(),
context: context,
default_values: default_values,
}
}
}
impl<'a, 'cx, 'cx_a:'cx> Iterator for AnimationValueIterator<'a, 'cx, 'cx_a> {
type Item = (AnimatableLonghand, AnimationValue);
#[inline]
fn next(&mut self) -> Option<Self::Item> {
use properties::Importance;
loop {
let next = self.iter.next();
match next {
Some(&(ref decl, importance)) => {
if importance == Importance::Normal {
let property = AnimatableLonghand::from_declaration(decl);
let animation = AnimationValue::from_declaration(decl, &mut self.context,
self.default_values);
debug_assert!(property.is_none() == animation.is_none(),
"The failure condition of AnimatableLonghand::from_declaration \
and AnimationValue::from_declaration should be the same");
// Skip the property if either ::from_declaration fails.
match (property, animation) {
(Some(p), Some(a)) => return Some((p, a)),
(_, _) => {},
}
}
},
None => return None,
}
}
}
}
impl fmt::Debug for PropertyDeclarationBlock {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.declarations.fmt(f)
}
}
impl PropertyDeclarationBlock {
/// Returns the number of declarations in the block.
pub fn len(&self) -> usize {
self.declarations.len()
}
/// Create an empty block
pub fn new() -> Self {
PropertyDeclarationBlock {
declarations: Vec::new(),
important_count: 0,
longhands: LonghandIdSet::new(),
}
}
/// Create a block with a single declaration
pub fn with_one(declaration: PropertyDeclaration, importance: Importance) -> Self {
let mut longhands = LonghandIdSet::new();
if let PropertyDeclarationId::Longhand(id) = declaration.id() {
longhands.insert(id);
}
PropertyDeclarationBlock {
declarations: vec![(declaration, importance)],
important_count: if importance.important() { 1 } else { 0 },
longhands: longhands,
}
}
/// The declarations in this block
pub fn declarations(&self) -> &[(PropertyDeclaration, Importance)] {
&self.declarations
}
/// Iterate over only PropertyDeclaration.
pub fn declarations_iter(&self) -> PropertyDeclarationIterator {
PropertyDeclarationIterator {
iter: self.declarations.iter(),
}
}
/// Return an iterator of (AnimatableLonghand, AnimationValue).
pub fn to_animation_value_iter<'a, 'cx, 'cx_a:'cx>(&'a self,
context: &'cx mut Context<'cx_a>,
default_values: &'a Arc<ComputedValues>)
-> AnimationValueIterator<'a, 'cx, 'cx_a> {
AnimationValueIterator::new(self, context, default_values)
}
/// Returns whether this block contains any declaration with `!important`.
///
/// This is based on the `important_count` counter,
/// which should be maintained whenever `declarations` is changed.
// FIXME: make fields private and maintain it here in methods?
pub fn any_important(&self) -> bool {
self.important_count > 0
}
/// Returns whether this block contains any declaration without `!important`.
///
/// This is based on the `important_count` counter,
/// which should be maintained whenever `declarations` is changed.
// FIXME: make fields private and maintain it here in methods?
pub fn any_normal(&self) -> bool {
self.declarations.len() > self.important_count
}
/// Get a declaration for a given property.
///
/// NOTE: This is linear time.
    pub fn get(&self, property: PropertyDeclarationId) -> Option<&(PropertyDeclaration, Importance)> {
self.declarations.iter().find(|&&(ref decl, _)| decl.id() == property)
}
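    // Note (illustrative): this is a linear scan over the declarations; hot
    // callers can consult `self.longhands` first to cheaply reject longhands
    // that are definitely absent, as `remove_property` does below.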
/// Find the value of the given property in this block and serialize it
///
/// https://dev.w3.org/csswg/cssom/#dom-cssstyledeclaration-getpropertyvalue
pub fn property_value_to_css<W>(&self, property: &PropertyId, dest: &mut W) -> fmt::Result
where W: fmt::Write,
{
// Step 1.1: done when parsing a string to PropertyId
// Step 1.2
match property.as_shorthand() {
Ok(shorthand) => {
// Step 1.2.1
let mut list = Vec::new();
let mut important_count = 0;
// Step 1.2.2
for &longhand in shorthand.longhands() {
// Step 1.2.2.1
let declaration = self.get(PropertyDeclarationId::Longhand(longhand));
// Step 1.2.2.2 & 1.2.2.3
match declaration {
Some(&(ref declaration, importance)) => {
list.push(declaration);
if importance.important() {
important_count += 1;
}
},
None => return Ok(()),
}
}
// If there is one or more longhand with important, and one or more
// without important, we don't serialize it as a shorthand.
if important_count > 0 && important_count != list.len() {
return Ok(());
}
// Step 1.2.3
// We don't print !important when serializing individual properties,
// so we treat this as a normal-importance property
match shorthand.get_shorthand_appendable_value(list) {
Some(appendable_value) =>
append_declaration_value(dest, appendable_value),
None => return Ok(()),
}
}
Err(longhand_or_custom) => {
if let Some(&(ref value, _importance)) = self.get(longhand_or_custom) {
// Step 2
value.to_css(dest)
} else {
// Step 3
Ok(())
}
}
}
}
/// https://dev.w3.org/csswg/cssom/#dom-cssstyledeclaration-getpropertypriority
pub fn property_priority(&self, property: &PropertyId) -> Importance {
// Step 1: done when parsing a string to PropertyId
// Step 2
match property.as_shorthand() {
Ok(shorthand) => {
// Step 2.1 & 2.2 & 2.3
if shorthand.longhands().iter().all(|&l| {
self.get(PropertyDeclarationId::Longhand(l))
.map_or(false, |&(_, importance)| importance.important())
}) {
Importance::Important
} else {
Importance::Normal
}
}
Err(longhand_or_custom) => {
// Step 3
self.get(longhand_or_custom).map_or(Importance::Normal, |&(_, importance)| importance)
}
}
}
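    // Illustrative: for a shorthand such as `margin`, this returns Important
    // only when every one of its longhands is present in the block and is
    // itself marked `!important`.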
/// Adds or overrides the declaration for a given property in this block,
/// **except** if an existing declaration for the same property is more
/// important.
///
/// Always ensures that the property declaration is at the end.
pub fn extend(&mut self, drain: SourcePropertyDeclarationDrain, importance: Importance) {
self.extend_common(drain, importance, false);
}
/// Adds or overrides the declaration for a given property in this block,
/// **even** if an existing declaration for the same property is more
/// important, and reuses the same position in the block.
///
/// Returns whether anything changed.
pub fn extend_reset(&mut self, drain: SourcePropertyDeclarationDrain,
importance: Importance) -> bool {
self.extend_common(drain, importance, true)
}
fn extend_common(
&mut self,
mut drain: SourcePropertyDeclarationDrain,
importance: Importance,
overwrite_more_important_and_reuse_slot: bool,
) -> bool {
let all_shorthand_len = match drain.all_shorthand {
AllShorthand::NotSet => 0,
AllShorthand::CSSWideKeyword(_) |
AllShorthand::WithVariables(_) => ShorthandId::All.longhands().len()
};
let push_calls_count = drain.declarations.len() + all_shorthand_len;
// With deduplication the actual length increase may be less than this.
self.declarations.reserve(push_calls_count);
let mut changed = false;
for decl in &mut drain.declarations {
changed |= self.push_common(
decl,
importance,
overwrite_more_important_and_reuse_slot,
);
}
match drain.all_shorthand {
AllShorthand::NotSet => {}
AllShorthand::CSSWideKeyword(keyword) => {
for &id in ShorthandId::All.longhands() {
let decl = PropertyDeclaration::CSSWideKeyword(id, keyword);
changed |= self.push_common(
decl,
importance,
overwrite_more_important_and_reuse_slot,
);
}
}
AllShorthand::WithVariables(unparsed) => {
for &id in ShorthandId::All.longhands() {
let decl = PropertyDeclaration::WithVariables(id, unparsed.clone());
changed |= self.push_common(
decl,
importance,
overwrite_more_important_and_reuse_slot,
);
}
}
}
changed
}
/// Adds or overrides the declaration for a given property in this block,
/// **except** if an existing declaration for the same property is more
/// important.
///
/// Ensures that, if inserted, it's inserted at the end of the declaration
/// block.
pub fn push(&mut self, declaration: PropertyDeclaration, importance: Importance) {
self.push_common(declaration, importance, false);
}
fn push_common(
&mut self,
declaration: PropertyDeclaration,
importance: Importance,
overwrite_more_important_and_reuse_slot: bool
) -> bool {
let definitely_new = if let PropertyDeclarationId::Longhand(id) = declaration.id() {
!self.longhands.contains(id)
} else {
false // For custom properties, always scan
};
<|fim▁hole|> if slot.0.id() == declaration.id() {
match (slot.1, importance) {
(Importance::Normal, Importance::Important) => {
self.important_count += 1;
}
(Importance::Important, Importance::Normal) => {
if overwrite_more_important_and_reuse_slot {
self.important_count -= 1;
} else {
return false
}
}
_ => if slot.0 == declaration {
return false;
}
}
if overwrite_more_important_and_reuse_slot {
*slot = (declaration, importance);
return true;
}
// NOTE(emilio): We could avoid this and just override for
// properties not affected by logical props, but it's not
// clear it's worth it given the `definitely_new` check.
index_to_remove = Some(i);
break;
}
}
if let Some(index) = index_to_remove {
self.declarations.remove(index);
self.declarations.push((declaration, importance));
return true;
}
}
if let PropertyDeclarationId::Longhand(id) = declaration.id() {
self.longhands.insert(id);
}
self.declarations.push((declaration, importance));
if importance.important() {
self.important_count += 1;
}
true
}
/// Set the declaration importance for a given property, if found.
///
/// Returns whether any declaration was updated.
pub fn set_importance(&mut self, property: &PropertyId, new_importance: Importance) -> bool {
let mut updated_at_least_one = false;
for &mut (ref declaration, ref mut importance) in &mut self.declarations {
if declaration.id().is_or_is_longhand_of(property) {
match (*importance, new_importance) {
(Importance::Normal, Importance::Important) => {
self.important_count += 1;
}
(Importance::Important, Importance::Normal) => {
self.important_count -= 1;
}
_ => {
continue;
}
}
updated_at_least_one = true;
*importance = new_importance;
}
}
updated_at_least_one
}
/// https://dev.w3.org/csswg/cssom/#dom-cssstyledeclaration-removeproperty
///
/// Returns whether any declaration was actually removed.
pub fn remove_property(&mut self, property: &PropertyId) -> bool {
if let PropertyId::Longhand(id) = *property {
if !self.longhands.contains(id) {
return false
}
}
let important_count = &mut self.important_count;
let mut removed_at_least_one = false;
let longhands = &mut self.longhands;
self.declarations.retain(|&(ref declaration, importance)| {
let id = declaration.id();
let remove = id.is_or_is_longhand_of(property);
if remove {
removed_at_least_one = true;
if let PropertyDeclarationId::Longhand(id) = id {
longhands.remove(id)
}
if importance.important() {
*important_count -= 1
}
}
!remove
});
if let PropertyId::Longhand(_) = *property {
debug_assert!(removed_at_least_one);
}
removed_at_least_one
}
/// Take a declaration block known to contain a single property and serialize it.
pub fn single_value_to_css<W>(&self, property: &PropertyId, dest: &mut W) -> fmt::Result
where W: fmt::Write,
{
match property.as_shorthand() {
Err(_longhand_or_custom) => {
if self.declarations.len() == 1 {
self.declarations[0].0.to_css(dest)
} else {
Err(fmt::Error)
}
}
Ok(shorthand) => {
if !self.declarations.iter().all(|decl| decl.0.shorthands().contains(&shorthand)) {
return Err(fmt::Error)
}
let iter = self.declarations_iter();
match shorthand.get_shorthand_appendable_value(iter) {
Some(AppendableValue::Css { css, .. }) => {
dest.write_str(css)
},
Some(AppendableValue::DeclarationsForShorthand(_, decls)) => {
shorthand.longhands_to_css(decls, dest)
}
_ => Ok(())
}
}
}
}
/// Convert AnimationValueMap to PropertyDeclarationBlock.
#[cfg(feature = "gecko")]
pub fn from_animation_value_map(animation_value_map: &AnimationValueMap) -> Self {
let mut declarations = vec![];
let mut longhands = LonghandIdSet::new();
for (property, animation_value) in animation_value_map.iter() {
longhands.set_animatable_longhand_bit(property);
declarations.push((animation_value.uncompute(), Importance::Normal));
}
PropertyDeclarationBlock {
declarations: declarations,
important_count: 0,
longhands: longhands,
}
}
/// Returns true if the declaration block has a CSSWideKeyword for the given
/// property.
#[cfg(feature = "gecko")]
pub fn has_css_wide_keyword(&self, property: &PropertyId) -> bool {
if let PropertyId::Longhand(id) = *property {
if !self.longhands.contains(id) {
return false
}
}
self.declarations.iter().any(|&(ref decl, _)|
decl.id().is_or_is_longhand_of(property) &&
decl.get_css_wide_keyword().is_some()
)
}
}
impl ToCss for PropertyDeclarationBlock {
// https://drafts.csswg.org/cssom/#serialize-a-css-declaration-block
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where W: fmt::Write,
{
let mut is_first_serialization = true; // trailing serializations should have a prepended space
// Step 1 -> dest = result list
// Step 2
let mut already_serialized = PropertyDeclarationIdSet::new();
// Step 3
for &(ref declaration, importance) in &*self.declarations {
// Step 3.1
let property = declaration.id();
// Step 3.2
if already_serialized.contains(property) {
continue;
}
// Step 3.3
let shorthands = declaration.shorthands();
if !shorthands.is_empty() {
// Step 3.3.1 is done by checking already_serialized while
// iterating below.
// Step 3.3.2
for &shorthand in shorthands {
let properties = shorthand.longhands();
// Substep 2 & 3
let mut current_longhands = SmallVec::<[_; 10]>::new();
let mut important_count = 0;
let mut found_system = None;
let is_system_font =
shorthand == ShorthandId::Font &&
self.declarations.iter().any(|&(ref l, _)| {
!already_serialized.contains(l.id()) &&
l.get_system().is_some()
});
if is_system_font {
for &(ref longhand, longhand_importance) in &self.declarations {
if already_serialized.contains(longhand.id()) {
continue;
}
if longhand.get_system().is_some() || longhand.is_default_line_height() {
current_longhands.push(longhand);
if found_system.is_none() {
found_system = longhand.get_system();
}
if longhand_importance.important() {
important_count += 1;
}
}
}
} else {
for &(ref longhand, longhand_importance) in &self.declarations {
if already_serialized.contains(longhand.id()) {
continue;
}
if longhand.id().is_longhand_of(shorthand) {
current_longhands.push(longhand);
if longhand_importance.important() {
important_count += 1;
}
}
}
// Substep 1:
//
// Assuming that the PropertyDeclarationBlock contains no
// duplicate entries, if the current_longhands length is
// equal to the properties length, it means that the
// properties that map to shorthand are present in longhands
if current_longhands.len() != properties.len() {
continue;
}
}
// Substep 4
let is_important = important_count > 0;
if is_important && important_count != current_longhands.len() {
continue;
}
let importance = if is_important {
Importance::Important
} else {
Importance::Normal
};
// Substep 5 - Let value be the result of invoking serialize
// a CSS value of current longhands.
let appendable_value =
match shorthand.get_shorthand_appendable_value(current_longhands.iter().cloned()) {
None => continue,
Some(appendable_value) => appendable_value,
};
// We avoid re-serializing if we're already an
// AppendableValue::Css.
let mut v = String::new();
let value = match (appendable_value, found_system) {
(AppendableValue::Css { css, with_variables }, _) => {
debug_assert!(!css.is_empty());
AppendableValue::Css {
css: css,
with_variables: with_variables,
}
}
#[cfg(feature = "gecko")]
(_, Some(sys)) => {
sys.to_css(&mut v)?;
AppendableValue::Css {
css: &v,
with_variables: false,
}
}
(other, _) => {
append_declaration_value(&mut v, other)?;
// Substep 6
if v.is_empty() {
continue;
}
AppendableValue::Css {
css: &v,
with_variables: false,
}
}
};
// Substeps 7 and 8
// We need to check the shorthand whether it's an alias property or not.
// If it's an alias property, it should be serialized like its longhand.
if shorthand.flags().contains(SHORTHAND_ALIAS_PROPERTY) {
                        append_serialization::<_, Cloned<slice::Iter<_>>, _>(
dest,
&property,
value,
importance,
&mut is_first_serialization)?;
} else {
                        append_serialization::<_, Cloned<slice::Iter<_>>, _>(
dest,
&shorthand,
value,
importance,
&mut is_first_serialization)?;
}
for current_longhand in ¤t_longhands {
// Substep 9
already_serialized.insert(current_longhand.id());
}
}
}
// Step 3.3.4
if already_serialized.contains(property) {
continue;
}
use std::iter::Cloned;
use std::slice;
// Steps 3.3.5, 3.3.6 & 3.3.7
// Need to specify an iterator type here even though it’s unused to work around
// "error: unable to infer enough type information about `_`;
// type annotations or generic parameter binding required [E0282]"
// Use the same type as earlier call to reuse generated code.
append_serialization::<_, Cloned<slice::Iter<_>>, _>(
dest,
&property,
AppendableValue::Declaration(declaration),
importance,
&mut is_first_serialization)?;
// Step 3.3.8
already_serialized.insert(property);
}
// Step 4
Ok(())
}
}
/// A convenient enum to represent different kinds of stuff that can represent a
/// _value_ in the serialization of a property declaration.
pub enum AppendableValue<'a, I>
where I: Iterator<Item=&'a PropertyDeclaration>,
{
/// A given declaration, of which we'll serialize just the value.
Declaration(&'a PropertyDeclaration),
/// A set of declarations for a given shorthand.
///
/// FIXME: This needs more docs, where are the shorthands expanded? We print
/// the property name before-hand, don't we?
DeclarationsForShorthand(ShorthandId, I),
/// A raw CSS string, coming for example from a property with CSS variables,
/// or when storing a serialized shorthand value before appending directly.
Css {
/// The raw CSS string.
css: &'a str,
/// Whether the original serialization contained variables or not.
with_variables: bool,
}
}
/// Potentially appends whitespace after the first (property: value;) pair.
fn handle_first_serialization<W>(dest: &mut W,
is_first_serialization: &mut bool)
-> fmt::Result
where W: fmt::Write,
{
if !*is_first_serialization {
dest.write_str(" ")
} else {
*is_first_serialization = false;
Ok(())
}
}
/// Append a given kind of appendable value to a serialization.
pub fn append_declaration_value<'a, W, I>(dest: &mut W,
appendable_value: AppendableValue<'a, I>)
-> fmt::Result
where W: fmt::Write,
I: Iterator<Item=&'a PropertyDeclaration>,
{
match appendable_value {
AppendableValue::Css { css, .. } => {
dest.write_str(css)
},
AppendableValue::Declaration(decl) => {
decl.to_css(dest)
},
AppendableValue::DeclarationsForShorthand(shorthand, decls) => {
shorthand.longhands_to_css(decls, dest)
}
}
}
/// Append a given property and value pair to a serialization.
pub fn append_serialization<'a, W, I, N>(dest: &mut W,
property_name: &N,
appendable_value: AppendableValue<'a, I>,
importance: Importance,
is_first_serialization: &mut bool)
-> fmt::Result
where W: fmt::Write,
I: Iterator<Item=&'a PropertyDeclaration>,
N: ToCss,
{
handle_first_serialization(dest, is_first_serialization)?;
property_name.to_css(dest)?;
dest.write_char(':')?;
// for normal parsed values, add a space between key: and value
match appendable_value {
AppendableValue::Declaration(decl) => {
if !decl.value_is_unparsed() {
// For normal parsed values, add a space between key: and value.
dest.write_str(" ")?
}
},
AppendableValue::Css { with_variables, .. } => {
if !with_variables {
dest.write_str(" ")?
}
}
// Currently append_serialization is only called with a Css or
// a Declaration AppendableValue.
AppendableValue::DeclarationsForShorthand(..) => unreachable!(),
}
append_declaration_value(dest, appendable_value)?;
if importance.important() {
dest.write_str(" !important")?;
}
dest.write_char(';')
}
/// A helper to parse the style attribute of an element, in order for this to be
/// shared between Servo and Gecko.
pub fn parse_style_attribute(input: &str,
url_data: &UrlExtraData,
error_reporter: &ParseErrorReporter,
quirks_mode: QuirksMode)
-> PropertyDeclarationBlock {
let context = ParserContext::new(Origin::Author,
url_data,
error_reporter,
Some(CssRuleType::Style),
PARSING_MODE_DEFAULT,
quirks_mode);
let mut input = ParserInput::new(input);
parse_property_declaration_list(&context, &mut Parser::new(&mut input))
}
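// Illustrative only: an input such as "color: green; margin-top: 4px !important"
// yields a block for which both `any_normal()` and `any_important()` are true.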
/// Parse a given property declaration. Can result in multiple
/// `PropertyDeclaration`s when expanding a shorthand, for example.
///
/// This does not attempt to parse !important at all.
pub fn parse_one_declaration_into(declarations: &mut SourcePropertyDeclaration,
id: PropertyId,
input: &str,
url_data: &UrlExtraData,
error_reporter: &ParseErrorReporter,
parsing_mode: ParsingMode,
quirks_mode: QuirksMode)
-> Result<(), ()> {
let context = ParserContext::new(Origin::Author,
url_data,
error_reporter,
Some(CssRuleType::Style),
parsing_mode,
quirks_mode);
let mut input = ParserInput::new(input);
Parser::new(&mut input).parse_entirely(|parser| {
PropertyDeclaration::parse_into(declarations, id, &context, parser)
.map_err(|e| e.into())
}).map_err(|_| ())
}
/// A struct to parse property declarations.
struct PropertyDeclarationParser<'a, 'b: 'a> {
context: &'a ParserContext<'b>,
declarations: &'a mut SourcePropertyDeclaration,
}
/// Default methods reject all at rules.
impl<'a, 'b, 'i> AtRuleParser<'i> for PropertyDeclarationParser<'a, 'b> {
type Prelude = ();
type AtRule = Importance;
type Error = SelectorParseError<'i, StyleParseError<'i>>;
}
impl<'a, 'b, 'i> DeclarationParser<'i> for PropertyDeclarationParser<'a, 'b> {
type Declaration = Importance;
type Error = SelectorParseError<'i, StyleParseError<'i>>;
fn parse_value<'t>(&mut self, name: CompactCowStr<'i>, input: &mut Parser<'i, 't>)
-> Result<Importance, ParseError<'i>> {
let id = PropertyId::parse(name)?;
input.parse_until_before(Delimiter::Bang, |input| {
PropertyDeclaration::parse_into(self.declarations, id, self.context, input)
.map_err(|e| e.into())
})?;
let importance = match input.try(parse_important) {
Ok(()) => Importance::Important,
Err(_) => Importance::Normal,
};
// In case there is still unparsed text in the declaration, we should roll back.
input.expect_exhausted()?;
Ok(importance)
}
}
/// Parse a list of property declarations and return a property declaration
/// block.
pub fn parse_property_declaration_list(context: &ParserContext,
input: &mut Parser)
-> PropertyDeclarationBlock {
let mut declarations = SourcePropertyDeclaration::new();
let mut block = PropertyDeclarationBlock::new();
let parser = PropertyDeclarationParser {
context: context,
declarations: &mut declarations,
};
let mut iter = DeclarationListParser::new(input, parser);
while let Some(declaration) = iter.next() {
match declaration {
Ok(importance) => {
block.extend(iter.parser.declarations.drain(), importance);
}
Err(err) => {
iter.parser.declarations.clear();
let pos = err.span.start;
let error = ContextualParseError::UnsupportedPropertyDeclaration(
iter.input.slice(err.span), err.error);
log_css_error(iter.input, pos, error, &context);
}
}
}
block
}<|fim▁end|> | if !definitely_new {
let mut index_to_remove = None;
for (i, slot) in self.declarations.iter_mut().enumerate() { |
<|file_name|>defines_3.js<|end_file_name|><|fim▁begin|>var searchData=
[<|fim▁hole|>];<|fim▁end|> | ['get_5fmodel_5fhandle',['GET_MODEL_HANDLE',['../serializer_8h.html#a887590f4b40f78bf359c8e56f03c56fe',1,'serializer.h']]] |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#from .engine import CheckVersion, CreateVersion<|fim▁hole|><|fim▁end|> | from .versioner import DBVersioner, DBVersionCommander |
<|file_name|>Model.java<|end_file_name|><|fim▁begin|>package com.github.pineasaurusrex.inference_engine;
<|fim▁hole|>
/**
* Partially or fully assigned model
 * A model represents one possible assignment of truth values to the propositional symbols in the KB
*/
public class Model {
private HashMap<PropositionalSymbol, Boolean> symbolValues = new HashMap<>();
public boolean holdsTrue(Sentence sentence) {
if (sentence.isPropositionSymbol()) {
return symbolValues.get(sentence);
} else {
switch(sentence.getConnective()) {
case NOT:
return !holdsTrue(sentence.getOperand(0));
case AND:
return holdsTrue(sentence.getOperand(0)) && holdsTrue(sentence.getOperand(1));
case OR:
return holdsTrue(sentence.getOperand(0)) || holdsTrue(sentence.getOperand(1));
case IMPLICATION:
return !holdsTrue(sentence.getOperand(0)) || holdsTrue(sentence.getOperand(1));
case BICONDITIONAL:
return holdsTrue(sentence.getOperand(0)) == holdsTrue(sentence.getOperand(1));
}
}
return false;
}
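    // Illustrative: under the assignment {P=true, Q=false}, P IMPLICATION Q
    // evaluates to false, while NOT (P AND Q) evaluates to true.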
public boolean holdsTrue(KnowledgeBase kb) {
return kb.getSentences().parallelStream()
.map(this::holdsTrue)
.allMatch(Boolean::booleanValue);
}
/**
     * Returns a new model containing this model's assignments plus the given symbol assignment
* @param symbol the symbol to merge in
* @param b the value to set
* @return a new Model object
*/
public Model union(PropositionalSymbol symbol, boolean b) {
Model m = new Model();
m.symbolValues.putAll(this.symbolValues);
m.symbolValues.put(symbol, b);
return m;
}
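    // Illustrative usage (p, q are hypothetical PropositionalSymbol instances):
    //   Model m = new Model().union(p, true).union(q, false);
    //   boolean satisfiesKb = m.holdsTrue(kb);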
}<|fim▁end|> | import java.util.HashMap; |
<|file_name|>textwidget.py<|end_file_name|><|fim▁begin|>from roam.api import RoamEvents
from PyQt4.QtGui import QLineEdit, QPlainTextEdit
from PyQt4.QtCore import QEvent
from roam.editorwidgets.core import EditorWidget, registerwidgets
class TextWidget(EditorWidget):
widgettype = 'Text'
def __init__(self, *args):
super(TextWidget, self).__init__(*args)
def createWidget(self, parent):
return QLineEdit(parent)
def initWidget(self, widget):
widget.textChanged.connect(self.emitvaluechanged)
widget.installEventFilter(self)
def eventFilter(self, object, event):
        # Hack: I really don't like this, but there doesn't seem to be a
        # better way at the moment.
if event.type() == QEvent.FocusIn:
RoamEvents.openkeyboard.emit()
return False
def validate(self, *args):
        return bool(self.value())
def setvalue(self, value):
# Not the best way but should cover most use cases
# for now
value = value or ''
value = unicode(value)
try:
self.widget.setPlainText(value)
except AttributeError:
self.widget.setText(value)
def value(self):
try:
return self.widget.toPlainText()
except AttributeError:
return self.widget.text()
class TextBlockWidget(TextWidget):
widgettype = 'TextBlock'
def __init__(self, *args):<|fim▁hole|> super(TextBlockWidget, self).__init__(*args)
def createWidget(self, parent):
return QPlainTextEdit(parent)<|fim▁end|> | |
<|file_name|>server.go<|end_file_name|><|fim▁begin|>package main
import (
"log"
"net/http"
"github.com/gophergala/melted_brains/http_handler"
"golang.org/x/net/websocket"
)
func main() {
http.HandleFunc("/game/", http_handler.GameHandler)
http.Handle("/events/", websocket.Handler(http_handler.EventsHandler))
http.Handle("/waiting/", websocket.Handler(http_handler.EventsHandler))
http.Handle("/", http.FileServer(http.Dir("./static")))
err := http.ListenAndServe(":8000", nil)
if err != nil {
log.Fatal("ListenAndServe: ", err)
}<|fim▁hole|><|fim▁end|> | } |
<|file_name|>h2o_data.py<|end_file_name|><|fim▁begin|>from ..daltools.util.full import init
Z = [8., 1., 1.]
Rc = init([0.00000000, 0.00000000, 0.48860959])
Dtot = [0, 0, -0.76539388]
Daa = init([
[ 0.00000000, 0.00000000, -0.28357300],
[ 0.15342658, 0.00000000, 0.12734703],
[-0.15342658, 0.00000000, 0.12734703],
])
QUc = init([-7.31176220, 0., 0., -5.43243232, 0., -6.36258665])
QUN = init([4.38968295, 0., 0., 0., 0., 1.75400326])
QUaa = init([
[-3.29253618, 0.00000000, 0.00000000, -4.54316657, 0.00000000, -4.00465380],
[-0.13213704, 0.00000000, 0.24980518, -0.44463288, 0.00000000, -0.26059139],
[-0.13213704, 0.00000000,-0.24980518, -0.44463288, 0.00000000, -0.26059139]
])
Fab = init([
[-0.11E-03, 0.55E-04, 0.55E-04],
[ 0.55E-04, -0.55E-04, 0.16E-30],
[ 0.55E-04, 0.16E-30, -0.55E-04]
])
Lab = init([
[0.11E-03, 0.28E-03, 0.28E-03],
[0.28E-03, 0.17E-03, 0.22E-03],
[0.28E-03, 0.22E-03, 0.17E-03]
])
la = init([
[0.0392366,-27.2474016 , 27.2081650],
[0.0358964, 27.2214515 ,-27.2573479],
[0.01211180, -0.04775576, 0.03564396],<|fim▁hole|>[0.01210615, -0.00594030, -0.00616584],
[10.69975088, -5.34987556, -5.34987532],
[-10.6565582, 5.3282791 , 5.3282791]
])
O = [
0.76145382,
-0.00001648, 1.75278523,
-0.00007538, 0.00035773, 1.39756345
]
H1O = [
3.11619527,
0.00019911, 1.25132346,
2.11363325, 0.00111442, 2.12790474
]
H1 = [
0.57935224,
0.00018083, 0.43312326,
0.11495546, 0.00004222, 0.45770123
]
H2O = [
3.11568759,
0.00019821, 1.25132443,
-2.11327482, -0.00142746, 2.12790473
]
H2H1 = [
0.04078206,
-0.00008380, -0.01712262,
-0.00000098, 0.00000084, -0.00200285
]
H2 = [
0.57930522,
0.00018221, 0.43312149,
-0.11493635, -0.00016407, 0.45770123
]
Aab = init([O, H1O, H1, H2O, H2H1, H2])
Aa = init([
[ 3.87739525, 0.00018217, 3.00410918, 0.00010384, 0.00020122, 3.52546819 ],
[ 2.15784091, 0.00023848, 1.05022368, 1.17177159, 0.00059985, 1.52065218 ],
[ 2.15754005, 0.00023941, 1.05022240, -1.17157425, -0.00087738, 1.52065217 ]
])
ff = 0.001
rMP = init([
#O
[
[-8.70343886, 0.00000000, 0.00000000, -0.39827574, -3.68114747, 0.00000000, 0.00000000, -4.58632761, 0.00000000, -4.24741556],
[-8.70343235, 0.00076124, 0.00000000, -0.39827535, -3.68114147, 0.00000000, 0.00193493, -4.58631888, 0.00000000, -4.24741290],
[-8.70343291,-0.00076166, 0.00000000, -0.39827505, -3.68114128, 0.00000000, -0.00193603, -4.58631789, 0.00000000, -4.24741229],
[-8.70343685,-0.00000006, 0.00175241, -0.39827457, -3.68114516, 0.00000000, 0.00000161, -4.58632717, 0.00053363, -4.24741642],
[-8.70343685, 0.00000000, -0.00175316, -0.39827456, -3.68114514, 0.00000000, 0.00000000, -4.58632711, -0.00053592, -4.24741639],
[-8.70166502, 0.00000000, 0.00000144, -0.39688042, -3.67884999, 0.00000000, 0.00000000, -4.58395384, 0.00000080, -4.24349307],
[-8.70520554, 0.00000000, 0.00000000, -0.39967554, -3.68344246, 0.00000000, 0.00000000, -4.58868836, 0.00000000, -4.25134640],
],
#H1O
[
[ 0.00000000, 0.10023328, 0.00000000, 0.11470275, 0.53710687, 0.00000000, 0.43066796, 0.04316104, 0.00000000, 0.36285790],
[ 0.00150789, 0.10111974, 0.00000000, 0.11541803, 0.53753360, 0.00000000, 0.43120945, 0.04333774, 0.00000000, 0.36314215],
[-0.00150230, 0.09934695, 0.00000000, 0.11398581, 0.53667861, 0.00000000, 0.43012612, 0.04298361, 0.00000000, 0.36257249],
[ 0.00000331, 0.10023328, 0.00125017, 0.11470067, 0.53710812, -0.00006107, 0.43066944, 0.04316020, 0.00015952, 0.36285848],
[ 0.00000100, 0.10023249, -0.00125247, 0.11470042, 0.53710716, 0.00006135, 0.43066837, 0.04316018, -0.00015966, 0.36285788],
[ 0.00088692, 0.10059268, -0.00000064, 0.11590322, 0.53754715, -0.00000006, 0.43071206, 0.04334198, -0.00000015, 0.36330053],
[-0.00088334, 0.09987383, 0.00000000, 0.11350091, 0.53666602, 0.00000000, 0.43062352, 0.04297910, 0.00000000, 0.36241326],
],
#H1
[
[-0.64828057, 0.10330994, 0.00000000, 0.07188960, -0.47568174, 0.00000000, -0.03144252, -0.46920879, 0.00000000, -0.50818752],
[-0.64978846, 0.10389186, 0.00000000, 0.07204462, -0.47729337, 0.00000000, -0.03154159, -0.47074619, 0.00000000, -0.50963693],
[-0.64677827, 0.10273316, 0.00000000, 0.07173584, -0.47408263, 0.00000000, -0.03134407, -0.46768337, 0.00000000, -0.50674873],
[-0.64828388, 0.10331167, 0.00043314, 0.07189029, -0.47568875, -0.00023642, -0.03144270, -0.46921635, -0.00021728, -0.50819386],
[-0.64828157, 0.10331095, -0.00043311, 0.07188988, -0.47568608, 0.00023641, -0.03144256, -0.46921346, 0.00021729, -0.50819095],
[-0.64916749, 0.10338629, -0.00000024, 0.07234862, -0.47634698, 0.00000013, -0.03159569, -0.47003679, 0.00000011, -0.50936853],
[-0.64739723, 0.10323524, 0.00000000, 0.07143322, -0.47502412, 0.00000000, -0.03129003, -0.46838912, 0.00000000, -0.50701656],
],
#H2O
[
[ 0.00000000,-0.10023328, 0.00000000, 0.11470275, 0.53710687, 0.00000000, -0.43066796, 0.04316104, 0.00000000, 0.36285790],
[-0.00150139,-0.09934749, 0.00000000, 0.11398482, 0.53667874, 0.00000000, -0.43012670, 0.04298387, 0.00000000, 0.36257240],
[ 0.00150826,-0.10112008, 0.00000000, 0.11541676, 0.53753350, 0.00000000, -0.43120982, 0.04333795, 0.00000000, 0.36314186],
[-0.00000130,-0.10023170, 0.00125018, 0.11470018, 0.53710620, 0.00006107, -0.43066732, 0.04316017, 0.00015952, 0.36285728],
[ 0.00000101,-0.10023249, -0.00125247, 0.11470042, 0.53710716, -0.00006135, -0.43066838, 0.04316018, -0.00015966, 0.36285788],
[ 0.00088692,-0.10059268, -0.00000064, 0.11590322, 0.53754715, 0.00000006, -0.43071206, 0.04334198, -0.00000015, 0.36330053],
[-0.00088334,-0.09987383, 0.00000000, 0.11350091, 0.53666602, 0.00000000, -0.43062352, 0.04297910, 0.00000000, 0.36241326],
],
#H2H1
[
[ 0.00000000, 0.00000000, 0.00000000, -0.00378789, 0.00148694, 0.00000000, 0.00000000, 0.00599079, 0.00000000, 0.01223822],
[ 0.00000000, 0.00004089, 0.00000000, -0.00378786, 0.00148338, 0.00000000, -0.00004858, 0.00599281, 0.00000000, 0.01224094],
[ 0.00000000,-0.00004067, 0.00000000, -0.00378785, 0.00148341, 0.00000000, 0.00004861, 0.00599277, 0.00000000, 0.01224093],
[ 0.00000000,-0.00000033, -0.00001707, -0.00378763, 0.00149017, 0.00000000, 0.00000001, 0.00599114, -0.00001229, 0.01223979],
[ 0.00000000, 0.00000000, 0.00001717, -0.00378763, 0.00149019, 0.00000000, 0.00000000, 0.00599114, 0.00001242, 0.01223980],
[ 0.00000000, 0.00000000, 0.00000000, -0.00378978, 0.00141897, 0.00000000, 0.00000000, 0.00590445, 0.00000002, 0.01210376],
[ 0.00000000, 0.00000000, 0.00000000, -0.00378577, 0.00155694, 0.00000000, 0.00000000, 0.00607799, 0.00000000, 0.01237393],
],
#H2
[
[-0.64828057,-0.10330994, 0.00000000, 0.07188960, -0.47568174, 0.00000000, 0.03144252, -0.46920879, 0.00000000, -0.50818752],
[-0.64677918,-0.10273369, 0.00000000, 0.07173576, -0.47408411, 0.00000000, 0.03134408, -0.46768486, 0.00000000, -0.50674986],
[-0.64978883,-0.10389230, 0.00000000, 0.07204446, -0.47729439, 0.00000000, 0.03154159, -0.47074717, 0.00000000, -0.50963754],
[-0.64827927,-0.10331022, 0.00043313, 0.07188947, -0.47568340, 0.00023642, 0.03144242, -0.46921057, -0.00021727, -0.50818804],
[-0.64828158,-0.10331095, -0.00043311, 0.07188988, -0.47568609, -0.00023641, 0.03144256, -0.46921348, 0.00021729, -0.50819097],
[-0.64916749,-0.10338629, -0.00000024, 0.07234862, -0.47634698, -0.00000013, 0.03159569, -0.47003679, 0.00000011, -0.50936853],
[-0.64739723,-0.10323524, 0.00000000, 0.07143322, -0.47502412, 0.00000000, 0.03129003, -0.46838912, 0.00000000, -0.50701656]
]
])
Am = init([
[8.186766009140, 0., 0.],
[0., 5.102747935447, 0.],
[0., 0., 6.565131856389]
])
Amw = init([
[11.98694996213, 0., 0.],
[0., 4.403583657738, 0.],
[0., 0., 2.835142058626]
])
R = [
[ 0.00000, 0.00000, 0.69801],
[-1.48150, 0.00000, -0.34901],
[ 1.48150, 0.00000, -0.34901]
]
Qtot = -10.0
Q = rMP[0, 0, (0, 2, 5)]
D = rMP[1:4, 0, :]
QU = rMP[4:, 0, :]
dQa = rMP[0, :, (0,2,5)]
dQab = rMP[0, :, (1, 3, 4)]
# These are string data for testing the potential file format
PAn0 = """AU
3 -1 0 1
1 0.000 0.000 0.698
1 -1.481 0.000 -0.349
1 1.481 0.000 -0.349
"""
PA00 = """AU
3 0 0 1
1 0.000 0.000 0.698 -0.703
1 -1.481 0.000 -0.349 0.352
1 1.481 0.000 -0.349 0.352
"""
PA10 = """AU
3 1 0 1
1 0.000 0.000 0.698 -0.703 -0.000 0.000 -0.284
1 -1.481 0.000 -0.349 0.352 0.153 0.000 0.127
1 1.481 0.000 -0.349 0.352 -0.153 0.000 0.127
"""
PA20 = """AU
3 2 0 1
1 0.000 0.000 0.698 -0.703 -0.000 0.000 -0.284 -3.293 0.000 -0.000 -4.543 -0.000 -4.005
1 -1.481 0.000 -0.349 0.352 0.153 0.000 0.127 -0.132 0.000 0.250 -0.445 0.000 -0.261
1 1.481 0.000 -0.349 0.352 -0.153 0.000 0.127 -0.132 -0.000 -0.250 -0.445 0.000 -0.261
"""
PA21 = """AU
3 2 1 1
1 0.000 0.000 0.698 -0.703 -0.000 0.000 -0.284 -3.293 0.000 -0.000 -4.543 -0.000 -4.005 3.466
1 -1.481 0.000 -0.349 0.352 0.153 0.000 0.127 -0.132 0.000 0.250 -0.445 0.000 -0.261 1.576
1 1.481 0.000 -0.349 0.352 -0.153 0.000 0.127 -0.132 -0.000 -0.250 -0.445 0.000 -0.261 1.576
"""
PA22 = """AU
3 2 2 1
1 0.000 0.000 0.698 -0.703 -0.000 0.000 -0.284 -3.293 0.000 -0.000 -4.543 -0.000 -4.005 3.875 -0.000 -0.000 3.000 -0.000 3.524
1 -1.481 0.000 -0.349 0.352 0.153 0.000 0.127 -0.132 0.000 0.250 -0.445 0.000 -0.261 2.156 -0.000 1.106 1.051 -0.000 1.520
1 1.481 0.000 -0.349 0.352 -0.153 0.000 0.127 -0.132 -0.000 -0.250 -0.445 0.000 -0.261 2.156 -0.000 -1.106 1.051 -0.000 1.520
"""
OUTPUT_n0_1 = """\
---------------
Atomic domain 1
---------------
Domain center: 0.00000 0.00000 0.69801
"""
OUTPUT_00_1 = OUTPUT_n0_1 + """\
Nuclear charge: 8.00000
Electronic charge: -8.70344
Total charge: -0.70344
"""
OUTPUT_10_1 = OUTPUT_00_1 + """\
Electronic dipole -0.00000 0.00000 -0.28357
"""
OUTPUT_20_1 = OUTPUT_10_1 + """\
Electronic quadrupole -3.29254 0.00000 -0.00000 -4.54317 0.00000 -4.00466
"""
OUTPUT_01_1 = OUTPUT_00_1 + """\
Isotropic polarizablity (w=0) 3.46639
"""
OUTPUT_02_1 = OUTPUT_00_1 + """\
Electronic polarizability (w=0) 3.87468 -0.00000 3.00027 -0.00000 -0.00000 3.52422
"""<|fim▁end|> | |
<|file_name|>repos.rs<|end_file_name|><|fim▁begin|>use std::env;
use futures::{Future, Stream};
use tokio::runtime::Runtime;
use hubcaps::{Credentials, Github, Result};
fn main() -> Result<()> {<|fim▁hole|> match env::var("GITHUB_TOKEN").ok() {
Some(token) => {
let mut rt = Runtime::new()?;
let github = Github::new(
concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION")),
Credentials::Token(token),
)?;
let handle = rt.executor();
rt.block_on(
github
.user_repos("softprops")
.iter(&Default::default())
.for_each(move |repo| {
println!("{}", repo.name);
let f = repo.languages(github.clone()).map(|langs| {
for (language, bytes_of_code) in langs {
println!("{}: {} bytes", language, bytes_of_code)
}
});
handle.spawn(f.map_err(|_| ()));
Ok(())
}),
)?;
Ok(())
}
_ => Err("example missing GITHUB_TOKEN".into()),
}
}<|fim▁end|> | pretty_env_logger::init(); |
<|file_name|>csvsql.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding:utf-8 -*-
import os
import sys
from csvkit import sql
from csvkit import table
from csvkit import CSVKitWriter
from csvkit.cli import CSVKitUtility
class CSVSQL(CSVKitUtility):
description = 'Generate SQL statements for one or more CSV files, or execute those statements directly on a database, and execute one or more SQL queries.'
override_flags = ['l', 'f']
def add_arguments(self):
self.argparser.add_argument(metavar="FILE", nargs='*', dest='input_paths', default=['-'],
help='The CSV file(s) to operate on. If omitted, will accept input on STDIN.')
self.argparser.add_argument('-y', '--snifflimit', dest='snifflimit', type=int,
help='Limit CSV dialect sniffing to the specified number of bytes. Specify "0" to disable sniffing entirely.')
self.argparser.add_argument('-i', '--dialect', dest='dialect', choices=sql.DIALECTS,
help='Dialect of SQL to generate. Only valid when --db is not specified.')
self.argparser.add_argument('--db', dest='connection_string',
help='If present, a sqlalchemy connection string to use to directly execute generated SQL on a database.')
self.argparser.add_argument('--query', default=None,
help='Execute one or more SQL queries delimited by ";" and output the result of the last query as CSV.')
self.argparser.add_argument('--insert', dest='insert', action='store_true',
help='In addition to creating the table, also insert the data into the table. Only valid when --db is specified.')
self.argparser.add_argument('--tables', dest='table_names',
help='Specify one or more names for the tables to be created. If omitted, the filename (minus extension) or "stdin" will be used.')
self.argparser.add_argument('--no-constraints', dest='no_constraints', action='store_true',
help='Generate a schema without length limits or null checks. Useful when sampling big tables.')
self.argparser.add_argument('--no-create', dest='no_create', action='store_true',
help='Skip creating a table. Only valid when --insert is specified.')
self.argparser.add_argument('--blanks', dest='blanks', action='store_true',
help='Do not coerce empty strings to NULL values.')
self.argparser.add_argument('--no-inference', dest='no_inference', action='store_true',
help='Disable type inference when parsing the input.')
self.argparser.add_argument('--db-schema', dest='db_schema',
help='Optional name of database schema to create table(s) in.')
def main(self):
connection_string = self.args.connection_string
do_insert = self.args.insert
query = self.args.query
self.input_files = []
for path in self.args.input_paths:
self.input_files.append(self._open_input_file(path))
if self.args.table_names:
table_names = self.args.table_names.split(',')
else:
table_names = []
# If one or more filenames are specified, we need to add stdin ourselves (if available)
if sys.stdin not in self.input_files:
try:
if not sys.stdin.isatty():
self.input_files.insert(0, open("/dev/stdin", "r", encoding="utf-8"))
except:
pass
# Create an SQLite database in memory if no connection string is specified
if query and not connection_string:
connection_string = "sqlite:///:memory:"
do_insert = True
if self.args.dialect and connection_string:
self.argparser.error('The --dialect option is only valid when --db is not specified.')
if do_insert and not connection_string:
self.argparser.error('The --insert option is only valid when --db is also specified.')
if self.args.no_create and not do_insert:
self.argparser.error('The --no-create option is only valid when --insert is also specified.')
# Establish database validity before reading CSV files
if connection_string:
try:
engine, metadata = sql.get_connection(connection_string)
except ImportError:
raise ImportError('You don\'t appear to have the necessary database backend installed for the connection string you\'re trying to use. Available backends include:\n\nPostgresql:\tpip install psycopg2\nMySQL:\t\tpip install MySQL-python\n\nFor details on connection strings and other backends, please see the SQLAlchemy documentation on dialects at: \n\nhttp://www.sqlalchemy.org/docs/dialects/\n\n')
conn = engine.connect()
trans = conn.begin()
for f in self.input_files:
try:
# Try to use name specified via --table
table_name = table_names.pop(0)
except IndexError:
if f == sys.stdin:
table_name = "stdin"
else:
# Use filename as table name
table_name = os.path.splitext(os.path.split(f.name)[1])[0]
csv_table = table.Table.from_csv(
f,
name=table_name,
snifflimit=self.args.snifflimit,
blanks_as_nulls=(not self.args.blanks),
infer_types=(not self.args.no_inference),
no_header_row=self.args.no_header_row,
**self.reader_kwargs
)
f.close()
if connection_string:
sql_table = sql.make_table(
csv_table,
table_name,
self.args.no_constraints,
self.args.db_schema,
metadata
)
# Create table
if not self.args.no_create:
sql_table.create()
# Insert data
if do_insert and csv_table.count_rows() > 0:
insert = sql_table.insert()<|fim▁hole|> # Output SQL statements
else:
sql_table = sql.make_table(csv_table, table_name, self.args.no_constraints)
self.output_file.write('%s\n' % sql.make_create_table_statement(sql_table, dialect=self.args.dialect))
if connection_string:
if query:
# Execute specified SQL queries
queries = query.split(';')
rows = None
for q in queries:
if q:
rows = conn.execute(q)
# Output result of last query as CSV
try:
output = CSVKitWriter(self.output_file, **self.writer_kwargs)
if not self.args.no_header_row:
output.writerow(rows._metadata.keys)
for row in rows:
output.writerow(row)
except AttributeError:
pass
trans.commit()
conn.close()
def launch_new_instance():
utility = CSVSQL()
utility.main()
if __name__ == "__main__":
launch_new_instance()<|fim▁end|> | headers = csv_table.headers()
conn.execute(insert, [dict(zip(headers, row)) for row in csv_table.to_rows()])
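# Usage sketch (hypothetical invocations; the file and table names are
# placeholders, the flags are the ones defined above):
#
#   csvsql --dialect postgresql data.csv             # print a CREATE TABLE statement
#   csvsql --db sqlite:///data.db --insert data.csv  # create the table and load rows
#   csvsql --query "select count(*) from data" data.csv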
|
<|file_name|>module_disk_select_test.py<|end_file_name|><|fim▁begin|>#
# Copyright (C) 2019 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): Vendula Poncova <[email protected]><|fim▁hole|>from blivet.formats import get_format
from blivet.size import Size
from pyanaconda.modules.common.constants.objects import DISK_SELECTION
from pyanaconda.modules.common.errors.storage import UnavailableStorageError
from pyanaconda.modules.common.structures.validation import ValidationReport
from pyanaconda.modules.storage.disk_selection import DiskSelectionModule
from pyanaconda.modules.storage.disk_selection.selection_interface import DiskSelectionInterface
from pyanaconda.storage.initialization import create_storage
from tests.nosetests.pyanaconda_tests import check_dbus_property
class DiskSelectionInterfaceTestCase(unittest.TestCase):
"""Test DBus interface of the disk selection module."""
def setUp(self):
"""Set up the module."""
self.disk_selection_module = DiskSelectionModule()
self.disk_selection_interface = DiskSelectionInterface(self.disk_selection_module)
def _test_dbus_property(self, *args, **kwargs):
check_dbus_property(
self,
DISK_SELECTION,
self.disk_selection_interface,
*args, **kwargs
)
def selected_disks_property_test(self):
"""Test the selected disks property."""
self._test_dbus_property(
"SelectedDisks",
["sda", "sdb"]
)
def validate_selected_disks_test(self):
"""Test ValidateSelectedDisks."""
storage = create_storage()
self.disk_selection_module.on_storage_changed(storage)
dev1 = DiskDevice(
"dev1",
exists=False,
size=Size("15 GiB"),
fmt=get_format("disklabel")
)
dev2 = DiskDevice(
"dev2",
exists=False,
parents=[dev1],
size=Size("6 GiB"),
fmt=get_format("disklabel")
)
dev3 = DiskDevice(
"dev3",
exists=False,
parents=[dev2],
size=Size("6 GiB"),
fmt=get_format("disklabel")
)
storage.devicetree._add_device(dev1)
storage.devicetree._add_device(dev2)
storage.devicetree._add_device(dev3)
report = ValidationReport.from_structure(
self.disk_selection_interface.ValidateSelectedDisks([])
)
self.assertEqual(report.is_valid(), True)
report = ValidationReport.from_structure(
self.disk_selection_interface.ValidateSelectedDisks(["dev1"])
)
self.assertEqual(report.is_valid(), False)
self.assertEqual(report.error_messages, [
"You selected disk dev1, which contains devices that also use "
"unselected disks dev2, dev3. You must select or de-select "
"these disks as a set."
])
self.assertEqual(report.warning_messages, [])
report = ValidationReport.from_structure(
self.disk_selection_interface.ValidateSelectedDisks(["dev1", "dev2"])
)
self.assertEqual(report.is_valid(), False)
self.assertEqual(report.error_messages, [
"You selected disk dev1, which contains devices that also "
"use unselected disk dev3. You must select or de-select "
"these disks as a set.",
"You selected disk dev2, which contains devices that also "
"use unselected disk dev3. You must select or de-select "
"these disks as a set."
])
self.assertEqual(report.warning_messages, [])
report = ValidationReport.from_structure(
self.disk_selection_interface.ValidateSelectedDisks(["dev1", "dev2", "dev3"])
)
self.assertEqual(report.is_valid(), True)
def exclusive_disks_property_test(self):
"""Test the exclusive disks property."""
self._test_dbus_property(
"ExclusiveDisks",
["sda", "sdb"]
)
def ignored_disks_property_test(self):
"""Test the ignored disks property."""
self._test_dbus_property(
"IgnoredDisks",
["sda", "sdb"]
)
def protected_disks_property_test(self):
"""Test the protected disks property."""
self._test_dbus_property(
"ProtectedDevices",
["sda", "sdb"]
)
def disk_images_property_test(self):
"""Test the protected disks property."""
self._test_dbus_property(
"DiskImages",
{
"image_1": "/path/1",
"image_2": "/path/2"
}
)
def get_usable_disks_test(self):
"""Test the GetUsableDisks method."""
with self.assertRaises(UnavailableStorageError):
self.disk_selection_interface.GetUsableDisks()
self.disk_selection_module.on_storage_changed(create_storage())
self.assertEqual(self.disk_selection_interface.GetUsableDisks(), [])<|fim▁end|> | #
import unittest
from blivet.devices import DiskDevice |
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python<|fim▁hole|>import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "asteria.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)<|fim▁end|> | import os |
<|file_name|>brisketServerSpec.js<|end_file_name|><|fim▁begin|>"use strict";
describe("Brisket server", function() {
var Brisket = require("../../lib/brisket");
var MockBrisketApp = require("../mock/MockBrisketApp");
var nock = require("nock");
var supertest = require("supertest");
var brisketServer;
beforeEach(function() {
MockBrisketApp.initialize();
nock("http://api.example.com")
.get("/fetch200")
.reply(200, { ok: true })
.get("/fetch404")
.reply(404, { ok: false })
.get("/fetch500")
.reply(500, { ok: false })
.get("/fetch410")
.reply(410, { ok: false });
brisketServer = Brisket.createServer({
apis: {
"api": {
host: "http://api.example.com",
proxy: null
}
}
});
});
afterEach(function() {
MockBrisketApp.cleanup();
nock.cleanAll();
});
it("returns 200 when route is working", function(done) {
supertest(brisketServer)
.get("/working")
.expect(200, mocha(done));
});
it("returns 500 when route is failing", function(done) {
supertest(brisketServer)
.get("/failing")
.expect(500, mocha(done));
});
it("returns 302 when route is redirecting", function(done) {
supertest(brisketServer)
.get("/redirecting")
.expect(302, mocha(done));
});
it("returns status from route when route sets status", function(done) {
supertest(brisketServer)
.get("/setsStatus")
.expect(206, mocha(done));
});
it("returns 200 when data fetch is 200 AND route code doesn't error", function(done) {
supertest(brisketServer)
.get("/fetch200")
.expect(200, mocha(done));
});
it("returns 500 when data fetch is 200 BUT route code errors", function(done) {
supertest(brisketServer)
.get("/fetch200ButRouteFails")
.expect(500, mocha(done));
});
it("returns 302 when data fetch is 200 BUT route code redirects", function(done) {
supertest(brisketServer)
.get("/fetch200ButRouteRedirects")
.expect(302, mocha(done));
});
it("returns status from route when data fetch is 200 BUT route sets status", function(done) {
supertest(brisketServer)
.get("/fetch200ButRouteSetsStatus")
.expect(206, mocha(done));<|fim▁hole|>
it("returns 404 when route does NOT exist", function(done) {
supertest(brisketServer)
.get("/doesntexist")
.expect(404, mocha(done));
});
it("returns 404 when data fetch is 404", function(done) {
supertest(brisketServer)
.get("/fetch404")
.expect(404, mocha(done));
});
it("returns 500 when data fetch is 500", function(done) {
supertest(brisketServer)
.get("/fetch500")
.expect(500, mocha(done));
});
it("returns 500 when data fetch is unexpected error code", function(done) {
supertest(brisketServer)
.get("/fetch410")
.expect(500, mocha(done));
});
function mocha(done) {
return function(err) {
if (err) {
done.fail(err);
} else {
done();
}
};
}
});<|fim▁end|> | }); |
<|file_name|>soupparser.py<|end_file_name|><|fim▁begin|>__doc__ = """External interface to the BeautifulSoup HTML parser.
"""
__all__ = ["fromstring", "parse", "convert_tree"]
from lxml import etree, html
from calibre.ebooks.BeautifulSoup import \
BeautifulSoup, Tag, Comment, ProcessingInstruction, NavigableString
def fromstring(data, beautifulsoup=None, makeelement=None, **bsargs):
"""Parse a string of HTML data into an Element tree using the
BeautifulSoup parser.
Returns the root ``<html>`` Element of the tree.
You can pass a different BeautifulSoup parser through the
`beautifulsoup` keyword, and a different Element factory function
through the `makeelement` keyword. By default, the standard
``BeautifulSoup`` class and the default factory of `lxml.html` are
used.
"""
return _parse(data, beautifulsoup, makeelement, **bsargs)
def parse(file, beautifulsoup=None, makeelement=None, **bsargs):
"""Parse a file into an ElemenTree using the BeautifulSoup parser.
You can pass a different BeautifulSoup parser through the
`beautifulsoup` keyword, and a different Element factory function
through the `makeelement` keyword. By default, the standard
``BeautifulSoup`` class and the default factory of `lxml.html` are
used.
"""
if not hasattr(file, 'read'):
file = open(file)
root = _parse(file, beautifulsoup, makeelement, **bsargs)
return etree.ElementTree(root)
def convert_tree(beautiful_soup_tree, makeelement=None):
"""Convert a BeautifulSoup tree to a list of Element trees.
Returns a list instead of a single root Element to support
HTML-like soup with more than one root element.
You can pass a different Element factory through the `makeelement`
keyword.
"""<|fim▁hole|> root = _convert_tree(beautiful_soup_tree, makeelement)
children = root.getchildren()
for child in children:
root.remove(child)
return children
# helpers
def _parse(source, beautifulsoup, makeelement, **bsargs):
if beautifulsoup is None:
beautifulsoup = BeautifulSoup
if makeelement is None:
makeelement = html.html_parser.makeelement
if 'convertEntities' not in bsargs:
bsargs['convertEntities'] = 'xhtml' # Changed by Kovid, otherwise ' is mangled, see https://bugs.launchpad.net/calibre/+bug/1197585
tree = beautifulsoup(source, **bsargs)
root = _convert_tree(tree, makeelement)
# from ET: wrap the document in a html root element, if necessary
if len(root) == 1 and root[0].tag == "html":
return root[0]
root.tag = "html"
return root
def _convert_tree(beautiful_soup_tree, makeelement):
root = makeelement(beautiful_soup_tree.name,
attrib=dict(beautiful_soup_tree.attrs))
_convert_children(root, beautiful_soup_tree, makeelement)
return root
def _convert_children(parent, beautiful_soup_tree, makeelement):
SubElement = etree.SubElement
et_child = None
for child in beautiful_soup_tree:
if isinstance(child, Tag):
et_child = SubElement(parent, child.name, attrib=dict(
[(k, unescape(v)) for (k,v) in child.attrs]))
_convert_children(et_child, child, makeelement)
elif type(child) is NavigableString:
_append_text(parent, et_child, unescape(child))
else:
if isinstance(child, Comment):
parent.append(etree.Comment(child))
elif isinstance(child, ProcessingInstruction):
parent.append(etree.ProcessingInstruction(
*child.split(' ', 1)))
else: # CData
_append_text(parent, et_child, unescape(child))
def _append_text(parent, element, text):
if element is None:
parent.text = (parent.text or '') + text
else:
element.tail = (element.tail or '') + text
# copied from ET's ElementSoup
try:
from html.entities import name2codepoint # Python 3
name2codepoint
except ImportError:
from htmlentitydefs import name2codepoint
import re
handle_entities = re.compile(r"&(\w+);").sub
def unescape(string):
if not string:
return ''
# work around oddities in BeautifulSoup's entity handling
def unescape_entity(m):
try:
return unichr(name2codepoint[m.group(1)])
except KeyError:
return m.group(0) # use as is
return handle_entities(unescape_entity, string)<|fim▁end|> | if makeelement is None:
makeelement = html.html_parser.makeelement |
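# Minimal usage sketch (relies only on the functions defined above; the sample
# markup is a placeholder):
#
#   root = fromstring('<p>Hello &amp; goodbye</p>')
#   root.tag            # 'html' -- fragments are wrapped in an html root element
#   tree = parse('page.html')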
<|file_name|>tmpl_data_user.go<|end_file_name|><|fim▁begin|>// Copyright 2014 The goyy Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package main
var tmplDataUser = `
<|fim▁hole|>insert into {{case "SYS_USER (ID, NAME, CODE, PASSWD, GENRE, EMAIL, TEL, MOBILE, AREA_ID, ORG_ID, DIMISSION_TIME, FREEZE_TIME, LOGIN_NAME, MEMO, CREATES, CREATER, CREATED, MODIFIER, MODIFIED, VERSION, DELETION, ARTIFICAL, HISTORY)"}}
values ('admin', '{{message "tmpl.data.user.admin"}}', '', '92d55a4a6b07', '10', '[email protected]', '', '', 'root', 'root', -62135596800, -62135596800, 'admin', '', '', '', ` + now + `, '', ` + now + `, 0, 0, 0, 0){{seperator}}
`<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// The middleware module is for defining middleware you need in your server.<|fim▁hole|><|fim▁end|> | //
// Middleware lets you wrap your endpoint in arbitrary code that can manipulate
// an HTTP service. A middleware must implement the `Middleware` trait. |
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>#encoding: utf-8
from flask.ext.restful import Resource, reqparse
class Test(Resource):
def __init__(self):
self.parser = reqparse.RequestParser()
self.parser.add_argument('id', type=int)
super(Test, self).__init__()
def get(self):<|fim▁hole|> return {'id': id}
def post(self):
pass
def put(self):
pass
def delete(self):
pass<|fim▁end|> | |
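# Registration sketch (hypothetical wiring; the Api instance and route are
# assumptions, not part of this file):
#
#   from flask import Flask
#   from flask.ext.restful import Api
#   api = Api(Flask(__name__))
#   api.add_resource(Test, '/tests')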
<|file_name|>display.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from pylab import *
data = loadtxt('Data/dummy_data.dat')
posterior_sample = atleast_2d(loadtxt('posterior_sample.txt'))
ion()
for i in xrange(0, posterior_sample.shape[0]):
hold(False)
plot(data[:,0], data[:,1], 'bo')
hold(True)
plot(data[:,0], posterior_sample[i, -data.shape[0]:], 'r-')
ylim([0, 1.1*data[:,1].max()])
draw()
ioff()
show()
hist(posterior_sample[:,9], 20)
xlabel('Number of Bursts')
show()
pos = posterior_sample[:, 10:110]
pos = pos[pos != 0.]
hist(pos, 1000)
xlabel('Time')
title('Positions of Bursts')
show()<|fim▁end|> | |
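# Run sketch: expects Data/dummy_data.dat and posterior_sample.txt (e.g. produced
# by a DNest-style sampler) in the working directory, then:  python display.py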
<|file_name|>latency.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# this program is used to test latency
# don't test RTT bigger than 3 secs - it will break
# we make sure that nothing breaks if a packet goes missing,
# which can happen, though rarely
import select
import socket
import time
import sys
import struct
def pong():
# easy, receive and send back
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.bind(('0.0.0.0', 1234))
while True:
c, addr = s.recvfrom(1)
s.sendto(c, (addr[0], 1235))
if c == 'x':
break
print 'Finished'
return 0
def ping(addr, n):
# send and wait for it back
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.bind(('0.0.0.0', 1235))
succ = 0
errs = 0
while succ != n and errs < 3: # at most 3 lost packets
time.sleep(0.02) # wait a bit
start = time.time()
s.sendto('r', (addr, 1234))
h, _, _ = select.select([s], [], [], 3) # wait 3 seconds
end = time.time()
if h == []: # lost packet
# print '# lost packet'
errs += 1
continue
s.recv(1) # eat the response
succ += 1
print '%.8f' % (end - start)
for x in xrange(10):
# send many packets to be (almost) sure the other end is done
s.sendto('x', (addr, 1234))
return errs >= 3
if __name__ == '__main__':
if 'ping' in sys.argv:
ret = ping(sys.argv[2], int(sys.argv[3]))<|fim▁hole|> else:
print 'ping or pong?'
ret = 1
sys.exit(ret)<|fim▁end|> | elif 'pong' in sys.argv:
ret = pong() |
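# Run sketch (two hosts; the arguments are the ones parsed above):
#
#   receiver:  python latency.py pong
#   sender:    python latency.py ping <receiver-addr> 100
#
# The sender prints one RTT sample per line; redirect to a file for analysis.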
<|file_name|>sched.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use option::*;
use sys;
use cast::transmute;
use cell::Cell;
use super::work_queue::WorkQueue;
use super::stack::{StackPool, StackSegment};
use super::rtio::{EventLoop, EventLoopObject};
use super::context::Context;
use super::task::Task;
use rt::local_ptr;
use rt::local::Local;
/// The Scheduler is responsible for coordinating execution of Coroutines
/// on a single thread. When the scheduler is running it is owned by
/// thread local storage and the running task is owned by the
/// scheduler.
pub struct Scheduler {
priv work_queue: WorkQueue<~Coroutine>,
stack_pool: StackPool,
/// The event loop used to drive the scheduler and perform I/O
event_loop: ~EventLoopObject,
/// The scheduler's saved context.
/// Always valid when a task is executing, otherwise not
priv saved_context: Context,
/// The currently executing task
current_task: Option<~Coroutine>,
/// An action performed after a context switch on behalf of the
/// code running before the context switch
priv cleanup_job: Option<CleanupJob>
}
// XXX: Some hacks to put a &fn in Scheduler without borrowck
// complaining
type UnsafeTaskReceiver = sys::Closure;
trait ClosureConverter {
fn from_fn(&fn(~Coroutine)) -> Self;
fn to_fn(self) -> &fn(~Coroutine);
}
impl ClosureConverter for UnsafeTaskReceiver {
fn from_fn(f: &fn(~Coroutine)) -> UnsafeTaskReceiver { unsafe { transmute(f) } }
fn to_fn(self) -> &fn(~Coroutine) { unsafe { transmute(self) } }
}
enum CleanupJob {
DoNothing,
GiveTask(~Coroutine, UnsafeTaskReceiver)
}
impl Scheduler {
pub fn in_task_context(&self) -> bool { self.current_task.is_some() }
pub fn new(event_loop: ~EventLoopObject) -> Scheduler {
// Lazily initialize the runtime TLS key
local_ptr::init_tls_key();
Scheduler {
event_loop: event_loop,
work_queue: WorkQueue::new(),
stack_pool: StackPool::new(),
saved_context: Context::empty(),
current_task: None,
cleanup_job: None
}
}
// XXX: This may eventually need to be refactored so that
// the scheduler itself doesn't have to call event_loop.run.
// That will be important for embedding the runtime into external
// event loops.
pub fn run(~self) -> ~Scheduler {
assert!(!self.in_task_context());
let mut self_sched = self;
unsafe {
let event_loop: *mut ~EventLoopObject = {
let event_loop: *mut ~EventLoopObject = &mut self_sched.event_loop;
event_loop
};
// Give ownership of the scheduler (self) to the thread
Local::put(self_sched);
(*event_loop).run();
}
let sched = Local::take::<Scheduler>();
assert!(sched.work_queue.is_empty());
return sched;
}
/// Schedule a task to be executed later.
///
/// Pushes the task onto the work stealing queue and tells the event loop
/// to run it later. Always use this instead of pushing to the work queue
/// directly.
pub fn enqueue_task(&mut self, task: ~Coroutine) {
self.work_queue.push(task);
self.event_loop.callback(resume_task_from_queue);
fn resume_task_from_queue() {
let scheduler = Local::take::<Scheduler>();
scheduler.resume_task_from_queue();
}
}
// * Scheduler-context operations
pub fn resume_task_from_queue(~self) {
assert!(!self.in_task_context());
rtdebug!("looking in work queue for task to schedule");
let mut this = self;
match this.work_queue.pop() {
Some(task) => {
rtdebug!("resuming task from work queue");
this.resume_task_immediately(task);
}
None => {
rtdebug!("no tasks in queue");
Local::put(this);
}
}
}
// * Task-context operations
/// Called by a running task to end execution, after which it will
/// be recycled by the scheduler for reuse in a new task.
pub fn terminate_current_task(~self) {
assert!(self.in_task_context());
rtdebug!("ending running task");
do self.deschedule_running_task_and_then |dead_task| {
let dead_task = Cell::new(dead_task);
do Local::borrow::<Scheduler> |sched| {
dead_task.take().recycle(&mut sched.stack_pool);
}
}
abort!("control reached end of task");
}
pub fn schedule_new_task(~self, task: ~Coroutine) {
assert!(self.in_task_context());
do self.switch_running_tasks_and_then(task) |last_task| {<|fim▁hole|> }
}
}
pub fn schedule_task(~self, task: ~Coroutine) {
assert!(self.in_task_context());
do self.switch_running_tasks_and_then(task) |last_task| {
let last_task = Cell::new(last_task);
do Local::borrow::<Scheduler> |sched| {
sched.enqueue_task(last_task.take());
}
}
}
// Core scheduling ops
pub fn resume_task_immediately(~self, task: ~Coroutine) {
let mut this = self;
assert!(!this.in_task_context());
rtdebug!("scheduling a task");
// Store the task in the scheduler so it can be grabbed later
this.current_task = Some(task);
this.enqueue_cleanup_job(DoNothing);
Local::put(this);
// Take pointers to both the task and scheduler's saved registers.
unsafe {
let sched = Local::unsafe_borrow::<Scheduler>();
let (sched_context, _, next_task_context) = (*sched).get_contexts();
let next_task_context = next_task_context.unwrap();
// Context switch to the task, restoring it's registers
// and saving the scheduler's
Context::swap(sched_context, next_task_context);
let sched = Local::unsafe_borrow::<Scheduler>();
// The running task should have passed ownership elsewhere
assert!((*sched).current_task.is_none());
// Running tasks may have asked us to do some cleanup
(*sched).run_cleanup_job();
}
}
/// Block a running task, context switch to the scheduler, then pass the
/// blocked task to a closure.
///
/// # Safety note
///
/// The closure here is a *stack* closure that lives in the
/// running task. It gets transmuted to the scheduler's lifetime
/// and called while the task is blocked.
pub fn deschedule_running_task_and_then(~self, f: &fn(~Coroutine)) {
let mut this = self;
assert!(this.in_task_context());
rtdebug!("blocking task");
unsafe {
let blocked_task = this.current_task.swap_unwrap();
let f_fake_region = transmute::<&fn(~Coroutine), &fn(~Coroutine)>(f);
let f_opaque = ClosureConverter::from_fn(f_fake_region);
this.enqueue_cleanup_job(GiveTask(blocked_task, f_opaque));
}
Local::put(this);
unsafe {
let sched = Local::unsafe_borrow::<Scheduler>();
let (sched_context, last_task_context, _) = (*sched).get_contexts();
let last_task_context = last_task_context.unwrap();
Context::swap(last_task_context, sched_context);
// We could be executing in a different thread now
let sched = Local::unsafe_borrow::<Scheduler>();
(*sched).run_cleanup_job();
}
}
/// Switch directly to another task, without going through the scheduler.
/// You would want to think hard about doing this, e.g. if there are
/// pending I/O events it would be a bad idea.
pub fn switch_running_tasks_and_then(~self,
next_task: ~Coroutine,
f: &fn(~Coroutine)) {
let mut this = self;
assert!(this.in_task_context());
rtdebug!("switching tasks");
let old_running_task = this.current_task.swap_unwrap();
let f_fake_region = unsafe { transmute::<&fn(~Coroutine), &fn(~Coroutine)>(f) };
let f_opaque = ClosureConverter::from_fn(f_fake_region);
this.enqueue_cleanup_job(GiveTask(old_running_task, f_opaque));
this.current_task = Some(next_task);
Local::put(this);
unsafe {
let sched = Local::unsafe_borrow::<Scheduler>();
let (_, last_task_context, next_task_context) = (*sched).get_contexts();
let last_task_context = last_task_context.unwrap();
let next_task_context = next_task_context.unwrap();
Context::swap(last_task_context, next_task_context);
// We could be executing in a different thread now
let sched = Local::unsafe_borrow::<Scheduler>();
(*sched).run_cleanup_job();
}
}
// * Other stuff
pub fn enqueue_cleanup_job(&mut self, job: CleanupJob) {
assert!(self.cleanup_job.is_none());
self.cleanup_job = Some(job);
}
pub fn run_cleanup_job(&mut self) {
rtdebug!("running cleanup job");
assert!(self.cleanup_job.is_some());
let cleanup_job = self.cleanup_job.swap_unwrap();
match cleanup_job {
DoNothing => { }
GiveTask(task, f) => (f.to_fn())(task)
}
}
/// Get mutable references to all the contexts that may be involved in a
/// context switch.
///
/// Returns (the scheduler context, the optional context of the
/// task in the cleanup list, the optional context of the task in
/// the current task slot). When context switching to a task,
/// callers should first arrange for that task to be located in the
/// Scheduler's current_task slot and set up the
/// post-context-switch cleanup job.
pub fn get_contexts<'a>(&'a mut self) -> (&'a mut Context,
Option<&'a mut Context>,
Option<&'a mut Context>) {
let last_task = match self.cleanup_job {
Some(GiveTask(~ref task, _)) => {
Some(task)
}
Some(DoNothing) => {
None
}
None => fail!("all context switches should have a cleanup job")
};
// XXX: Pattern matching mutable pointers above doesn't work
// because borrowck thinks the three patterns are conflicting
// borrows
unsafe {
let last_task = transmute::<Option<&Coroutine>, Option<&mut Coroutine>>(last_task);
let last_task_context = match last_task {
Some(t) => Some(&mut t.saved_context), None => None
};
let next_task_context = match self.current_task {
Some(ref mut t) => Some(&mut t.saved_context), None => None
};
// XXX: These transmutes can be removed after snapshot
return (transmute(&mut self.saved_context),
last_task_context,
transmute(next_task_context));
}
}
}
static MIN_STACK_SIZE: uint = 10000000; // XXX: Too much stack
pub struct Coroutine {
/// The segment of stack on which the task is currently running or,
/// if the task is blocked, on which the task will resume execution
priv current_stack_segment: StackSegment,
/// These are always valid when the task is not running, unless
/// the task is dead
priv saved_context: Context,
/// The heap, GC, unwinding, local storage, logging
task: ~Task
}
impl Coroutine {
pub fn new(stack_pool: &mut StackPool, start: ~fn()) -> Coroutine {
Coroutine::with_task(stack_pool, ~Task::new(), start)
}
pub fn with_task(stack_pool: &mut StackPool,
task: ~Task,
start: ~fn()) -> Coroutine {
let start = Coroutine::build_start_wrapper(start);
let mut stack = stack_pool.take_segment(MIN_STACK_SIZE);
// NB: Context holds a pointer to that ~fn
let initial_context = Context::new(start, &mut stack);
return Coroutine {
current_stack_segment: stack,
saved_context: initial_context,
task: task
};
}
fn build_start_wrapper(start: ~fn()) -> ~fn() {
// XXX: The old code didn't have this extra allocation
let wrapper: ~fn() = || {
// This is the first code to execute after the initial
// context switch to the task. The previous context may
// have asked us to do some cleanup.
unsafe {
let sched = Local::unsafe_borrow::<Scheduler>();
(*sched).run_cleanup_job();
let sched = Local::unsafe_borrow::<Scheduler>();
let task = (*sched).current_task.get_mut_ref();
// FIXME #6141: shouldn't need to put `start()` in another closure
task.task.run(||start());
}
let sched = Local::take::<Scheduler>();
sched.terminate_current_task();
};
return wrapper;
}
/// Destroy the task and try to reuse its components
pub fn recycle(~self, stack_pool: &mut StackPool) {
match self {
~Coroutine {current_stack_segment, _} => {
stack_pool.give_segment(current_stack_segment);
}
}
}
}
#[cfg(test)]
mod test {
use int;
use cell::Cell;
use rt::uv::uvio::UvEventLoop;
use unstable::run_in_bare_thread;
use task::spawn;
use rt::local::Local;
use rt::test::*;
use super::*;
#[test]
fn test_simple_scheduling() {
do run_in_bare_thread {
let mut task_ran = false;
let task_ran_ptr: *mut bool = &mut task_ran;
let mut sched = ~UvEventLoop::new_scheduler();
let task = ~do Coroutine::new(&mut sched.stack_pool) {
unsafe { *task_ran_ptr = true; }
};
sched.enqueue_task(task);
sched.run();
assert!(task_ran);
}
}
#[test]
fn test_several_tasks() {
do run_in_bare_thread {
let total = 10;
let mut task_count = 0;
let task_count_ptr: *mut int = &mut task_count;
let mut sched = ~UvEventLoop::new_scheduler();
for int::range(0, total) |_| {
let task = ~do Coroutine::new(&mut sched.stack_pool) {
unsafe { *task_count_ptr = *task_count_ptr + 1; }
};
sched.enqueue_task(task);
}
sched.run();
assert_eq!(task_count, total);
}
}
#[test]
fn test_swap_tasks_then() {
do run_in_bare_thread {
let mut count = 0;
let count_ptr: *mut int = &mut count;
let mut sched = ~UvEventLoop::new_scheduler();
let task1 = ~do Coroutine::new(&mut sched.stack_pool) {
unsafe { *count_ptr = *count_ptr + 1; }
let mut sched = Local::take::<Scheduler>();
let task2 = ~do Coroutine::new(&mut sched.stack_pool) {
unsafe { *count_ptr = *count_ptr + 1; }
};
// Context switch directly to the new task
do sched.switch_running_tasks_and_then(task2) |task1| {
let task1 = Cell::new(task1);
do Local::borrow::<Scheduler> |sched| {
sched.enqueue_task(task1.take());
}
}
unsafe { *count_ptr = *count_ptr + 1; }
};
sched.enqueue_task(task1);
sched.run();
assert_eq!(count, 3);
}
}
#[bench] #[test] #[ignore(reason = "long test")]
fn test_run_a_lot_of_tasks_queued() {
do run_in_bare_thread {
static MAX: int = 1000000;
let mut count = 0;
let count_ptr: *mut int = &mut count;
let mut sched = ~UvEventLoop::new_scheduler();
let start_task = ~do Coroutine::new(&mut sched.stack_pool) {
run_task(count_ptr);
};
sched.enqueue_task(start_task);
sched.run();
assert_eq!(count, MAX);
fn run_task(count_ptr: *mut int) {
do Local::borrow::<Scheduler> |sched| {
let task = ~do Coroutine::new(&mut sched.stack_pool) {
unsafe {
*count_ptr = *count_ptr + 1;
if *count_ptr != MAX {
run_task(count_ptr);
}
}
};
sched.enqueue_task(task);
}
};
}
}
#[test]
fn test_block_task() {
do run_in_bare_thread {
let mut sched = ~UvEventLoop::new_scheduler();
let task = ~do Coroutine::new(&mut sched.stack_pool) {
let sched = Local::take::<Scheduler>();
assert!(sched.in_task_context());
do sched.deschedule_running_task_and_then() |task| {
let task = Cell::new(task);
do Local::borrow::<Scheduler> |sched| {
assert!(!sched.in_task_context());
sched.enqueue_task(task.take());
}
}
};
sched.enqueue_task(task);
sched.run();
}
}
#[test]
fn test_io_callback() {
// This is a regression test: when there are no schedulable tasks in the
// work queue but we are performing I/O, the scheduler must still pick up
// a task that is enqueued later and must not exit before emptying the
// work queue
do run_in_newsched_task {
do spawn {
let sched = Local::take::<Scheduler>();
do sched.deschedule_running_task_and_then |task| {
let mut sched = Local::take::<Scheduler>();
let task = Cell::new(task);
do sched.event_loop.callback_ms(10) {
rtdebug!("in callback");
let mut sched = Local::take::<Scheduler>();
sched.enqueue_task(task.take());
Local::put(sched);
}
Local::put(sched);
}
}
}
}
}<|fim▁end|> | let last_task = Cell::new(last_task);
do Local::borrow::<Scheduler> |sched| {
sched.enqueue_task(last_task.take()); |
<|file_name|>views.py<|end_file_name|><|fim▁begin|># Copyright 2012 Twitter
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import calendar<|fim▁hole|>from django.shortcuts import render_to_response, get_object_or_404
from django.http import HttpResponseRedirect
from django.template import RequestContext
from django.contrib.auth.models import User
from django.contrib.admin.views.decorators import staff_member_required
from accounts.views import _backend_hackend
from stats.interface import get_top_recent_apps
@staff_member_required
def metrics(request):
TMPL = """
SELECT
date_trunc('day', %(date)s),
COUNT(1)
FROM %(table)s
GROUP BY date_trunc('day', %(date)s)
ORDER BY date_trunc('day', %(date)s) ASC
"""
def convert(lst):
return [(calendar.timegm(i[0].timetuple()), i[1]) for i in lst]
from django.db import connection
def execute(sql):
cursor = connection.cursor()
cursor.execute(sql)
return cursor.fetchall()
users = convert(execute(TMPL % {
'table': 'auth_user',
'date': 'date_joined',
}))
apps = convert(execute(TMPL % {
'table': 'dashboard_app',
'date': 'date_created',
}))
ab_signups = convert(execute(TMPL % {
'table': 'accounts_abtestingregistration',
'date': 'date_created',
}))
top_apps = get_top_recent_apps()
context = {
'title': 'Metrics',
'users': users,
'apps': apps,
'top_apps': top_apps,
'ab_signups': ab_signups,
}
return render_to_response('admin/metrics.html', context,
context_instance=RequestContext(request))
@staff_member_required
def admin_login(request, username):
user = get_object_or_404(User, username__iexact=username)
_backend_hackend(request, user)
return HttpResponseRedirect('/')<|fim▁end|> | |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>// https://rust-lang-ja.github.io/the-rust-programming-language-ja/1.6/book/closures.html
#![allow(dead_code, unused_variables)]
fn main() {
////////////////////////////////////////////////////////////////////////////////
// Closures
////////////////////////////////////////////////////////////////////////////////
// Syntax
let plus_one = |x: i32| x + 1;
assert_eq!(2, plus_one(1));
let plus_two = |x| {
let mut result: i32 = x;
result += 1;
result += 1;
result
};
assert_eq!(4, plus_two(2));
let plus_one = |x: i32| -> i32 { x + 1 };
assert_eq!(2, plus_one(1));
fn plus_one_v1 (x: i32) -> i32 { x + 1 }
let plus_one_v2 = |x: i32| -> i32 { x + 1 };
let plus_one_v3 = |x: i32| x + 1 ;
////////////////////////////////////////////////////////////////////////////////
// Closures and their environment
let num = 5;
let plus_num = |x: i32| x + num;
assert_eq!(10, plus_num(5));
// let y = &mut num; // error: cannot borrow immutable local variable `num` as mutable
let mut num = 5;
{
let plus_num = |x: i32| x + num;
} // `plus_num` goes out of scope; borrow of `num` ends.
let y = &mut num;
let nums = vec![1, 2, 3];
let takes_num = || nums;
// println!("{:?}", nums); // error[E0382]: use of moved value: `nums`
////////////////////////////////////////////////////////////////////////////////
// move closures
let num = 5;
let owns_num = move |x: i32| x + num;
let mut num = 5;
{
let mut add_num = |x: i32| num += x;
add_num(5);
}
assert_eq!(10, num);
let mut num = 5;
{
let mut add_num = move |x: i32| num += x;
add_num(5);
}
assert_eq!(5, num);
////////////////////////////////////////////////////////////////////////////////
// Closure implementation
// pub trait Fn<Args> : FnMut<Args> {
// extern "rust-call" fn call(&self, args: Args) -> Self::Output;
// }
// pub trait FnMut<Args> : FnOnce<Args> {
// extern "rust-call" fn call_mut(&mut self, args: Args) -> Self::Output;
// }
// pub trait FnOnce<Args> {
// type Output;
// extern "rust-call" fn call_once(self, args: Args) -> Self::Output;
// }
////////////////////////////////////////////////////////////////////////////////
// Taking closures as arguments
fn call_with_one<F>(some_closure: F) -> i32
where F : Fn(i32) -> i32 {
some_closure(1)
}
let answer = call_with_one(|x| x + 2);
assert_eq!(3, answer);
//fn call_with_one(some_closure: &Fn(i32) -> i32) -> i32 {
// some_closure(1)
//}
//let answer = call_with_one(&|x| x + 2);<|fim▁hole|>
//fn call_with_one(some_closure: &Fn(i32) -> i32) -> i32 {
// some_closure(1)
//}
//fn add_one(i: i32) -> i32 {
// i + 1
//}
//let f = add_one;
//let answer = call_with_one(&f);
//asswert_eq!(2, answer);
////////////////////////////////////////////////////////////////////////////////
// Returning closures
// error[E0277]: the trait bound `std::ops::Fn(i32) -> i32: std::marker::Sized` is not satisfied
// fn factory() -> (Fn(i32) -> i32) {
// let num = 5;
// |x| x + num
// }
// let f = factory();
// let answer = f(1);
// assert_eq!(6, answer);
// error[E0106]: missing lifetime specifier
// fn factory() -> &(Fn(i32) -> i32) {
// let num = 5;
// |x| x + num
// }
// let f = factory();
// let answer = f(1);
// assert_eq!(6, answer);
// error[E0373]: closure may outlive the current function, but it borrows `num`, which is owned by the current functi
// fn factory() -> Box<Fn(i32) -> i32> {
// let num = 5;
// Box::new(|x| x + num)
// }
// let f = factory();
// let answer = f(1);
// assert_eq!(6, answer);
fn factory() -> Box<Fn(i32) -> i32> {
let num = 5;
Box::new(move |x| x + num)
}
let f = factory();
let answer = f(1);
assert_eq!(6, answer);
}<|fim▁end|> | //assert_eq!(3, answer);
////////////////////////////////////////////////////////////////////////////////
// Function pointers and closures
<|file_name|>data_forwarding.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from sentry import tsdb, ratelimits
from sentry.api.serializers import serialize
from sentry.plugins.base import Plugin
from sentry.plugins.base.configuration import react_plugin_config<|fim▁hole|>class DataForwardingPlugin(Plugin):
status = PluginStatus.BETA
def configure(self, project, request):
return react_plugin_config(self, project, request)
def has_project_conf(self):
return True
def get_rate_limit(self):
# number of requests, number of seconds (window)
return (50, 1)
def forward_event(self, event, payload):
"""
Forward the event and return a boolean if it was successful.
"""
raise NotImplementedError
def get_event_payload(self, event):
return serialize(event)
def get_plugin_type(self):
return "data-forwarding"
def post_process(self, event, **kwargs):
rl_key = u"{}:{}".format(self.conf_key, event.project.organization_id)
# limit segment to 50 requests/second
limit, window = self.get_rate_limit()
if limit and window and ratelimits.is_limited(rl_key, limit=limit, window=window):
return
payload = self.get_event_payload(event)
success = self.forward_event(event, payload)
if success is False:
# TODO(dcramer): record failure
pass
tsdb.incr(tsdb.models.project_total_forwarded, event.project.id, count=1)<|fim▁end|> | from sentry.plugins.status import PluginStatus
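# Subclass sketch (hypothetical forwarder; the endpoint and the `requests`
# usage are assumptions, not part of this module):
#
#   class WebhookForwardingPlugin(DataForwardingPlugin):
#       conf_key = 'webhook-forwarding'
#
#       def forward_event(self, event, payload):
#           import requests
#           resp = requests.post('https://example.invalid/ingest', json=payload)
#           return resp.status_code < 400  # True marks the forward as successful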
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>mod graph;
mod index_type;
use std::io::{BufReader, BufRead};
use std::fs::File;
use std::str::FromStr;
use std::fmt::Debug;
use std::result::Result;
use graph::{MDGraph, WeightType, Unweighted};
use index_type::{IndexType, NodeIndex, EdgeIndex, DefIndex};
fn read_sgf<NodeWt, EdgeWt, NodeIx, EdgeIx, R, I>(rd: &mut R)
-> MDGraph<NodeWt, EdgeWt, NodeIx, EdgeIx>
where NodeWt: WeightType + FromStr<Err = I>,
EdgeWt: WeightType + FromStr<Err = I>,
NodeIx: IndexType,
EdgeIx: IndexType,
I: Debug,
R: BufRead
{
let mut meta: Option<(bool, usize, usize)> = None;
let mut graph = MDGraph::new();
for line in rd.lines() {
let line = line.unwrap();
let line = line.trim();
if line.starts_with("#") {
// skip comment
continue;
}
if meta.is_none() {
// First line is meta
let mut m = line.split_whitespace();
let directed = match m.next() {
Some("d") => {
true
}
//
// Some("u") => {
// false
// }
//
_ => {
panic!("Invalid format");
}
};
let num_nodes: usize = match m.next() {
Some(ns) => {
ns.parse().unwrap()
}
_ => {
panic!("Invalid format");
}
};
let num_edges: usize = match m.next() {
Some(ns) => {
ns.parse().unwrap()
}
_ => {
panic!("Invalid format");
}
};
meta = Some((directed, num_nodes, num_edges));
graph.reserve_nodes(num_nodes);
graph.reserve_edges(num_edges);
let _ = graph.add_nodes(num_nodes);
} else {
let mut i = line.splitn(2, '|');
let (node_id, node_weight) = match i.next() {
Some(ns) => {
let mut it = ns.splitn(2, ":");
let node_id: usize = it.next().unwrap().parse().unwrap();
let node_weight: Option<NodeWt> = it.next().map(|s| s.parse().unwrap());
(node_id, node_weight)
}
_ => {
panic!("Invalid format");
}
};
if let Some(nw) = node_weight {
*graph.get_node_weight_mut(NodeIndex::new(node_id)) = nw;
}
let edge_s = i.next().unwrap();
for es in edge_s.split(',') {
let mut it = es.splitn(2, ":");
let target_id: usize = it.next()
.unwrap()
.parse()
.unwrap();
let edge_weight: Option<EdgeWt> = it.next()
.map(|s| s.parse::<EdgeWt>().unwrap());
match edge_weight {
Some(ew) => {
let _ = graph.add_edge_with_weight(NodeIndex::new(node_id),
NodeIndex::new(target_id),
ew);
}
None => {
let _ = graph.add_edge(NodeIndex::new(node_id), NodeIndex::new(target_id));
}
}
}
}
}
assert!(meta.unwrap().1 == graph.node_count());
assert!(meta.unwrap().2 == graph.edge_count());
return graph;
}
<|fim▁hole|> // let graph: MDGraph<f32, f32> = read_sgf(&mut f);
let graph: MDGraph = read_sgf(&mut f);
println!("{:?}", graph);
}<|fim▁end|> |
fn main() {
let f = File::open("er_100_0_1.sgf").unwrap();
let mut f = BufReader::new(f); |
<|file_name|>Keyboard.js<|end_file_name|><|fim▁begin|>/**
* Copyright 2019 Google LLC
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* version 3 as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*/
import WebMidi from 'webmidi'
import { EventEmitter } from 'events'
export class MidiKeyboard extends EventEmitter {
constructor(){
super()
this.connectedDevices = new Map()
if (WebMidi.supported){
this.ready = new Promise((done, error) => {<|fim▁hole|> if (e){
error(e)
}
WebMidi.inputs.forEach(i => this._addListeners(i))
WebMidi.addListener('connected', (e) => {
if (e.port.type === 'input'){
this._addListeners(e.port)
}
})
WebMidi.addListener('disconnected', (e) => {
this._removeListeners(e.port)
})
done()
})
})
} else {
this.ready = Promise.resolve()
}
}
_addListeners(device){
if (!this.connectedDevices.has(device.id)){
this.connectedDevices.set(device.id, device)
device.addListener('noteon', 'all', (event) => {
this.emit('keyDown', `${event.note.name}${event.note.octave}`, event.velocity)
})
device.addListener('noteoff', 'all', (event) => {
this.emit('keyUp', `${event.note.name}${event.note.octave}`, event.velocity)
})
device.addListener('controlchange', 'all', (event) => {
if (event.controller.name === 'holdpedal'){
this.emit(event.value ? 'pedalDown' : 'pedalUp')
}
})
}
}
_removeListeners(event){
if (this.connectedDevices.has(event.id)){
const device = this.connectedDevices.get(event.id)
this.connectedDevices.delete(event.id)
device.removeListener('noteon')
device.removeListener('noteoff')
device.removeListener('controlchange')
}
}
}<|fim▁end|> | WebMidi.enable((e) => { |
<|file_name|>ExtraSuccessHandler.hpp<|end_file_name|><|fim▁begin|>#ifndef SOBER_NETWORK_HTTP_EXTRA_SUCCESS_HANDLER_HPP_
#define SOBER_NETWORK_HTTP_EXTRA_SUCCESS_HANDLER_HPP_
// Copyright (c) 2014, Ruslan Baratov
// All rights reserved.
#include <functional> // std::function
namespace sober {<|fim▁hole|>namespace http {
using ExtraSuccessHandler = std::function<void()>;
} // namespace http
} // namespace network
} // namespace sober
#endif // SOBER_NETWORK_HTTP_EXTRA_SUCCESS_HANDLER_HPP_<|fim▁end|> | namespace network { |
<|file_name|>Test.java<|end_file_name|><|fim▁begin|>class Test {
public static int[] test() {<|fim▁hole|> System.out.println(arr);
}<|fim▁end|> | return null;
}
public static void main(String[] args) {
int [] arr = test(); |
<|file_name|>HeatmapsTile.tsx<|end_file_name|><|fim▁begin|>import React from 'react';
import Heatmap, { HeatmapProps, background } from '../../sandboxes/visx-heatmap/Example';
import GalleryTile from '../GalleryTile';
export { default as packageJson } from '../../sandboxes/visx-heatmap/package.json';
const tileStyles = { background };
const detailsStyles = { color: 'rgba(255,255,255,0.3)' };
export default function HeatmapsTile() {
return (
<GalleryTile<HeatmapProps>
title="Heatmaps"
description="<HeatmapCircle /> & <HeatmapRect />"
exampleRenderer={Heatmap}
exampleUrl="/heatmaps"<|fim▁hole|> detailsStyles={detailsStyles}
/>
);
}<|fim▁end|> | tileStyles={tileStyles} |
<|file_name|>dnsdist-rings.cc<|end_file_name|><|fim▁begin|>#include "dnsdist.hh"
#include "lock.hh"
unsigned int Rings::numDistinctRequestors()
{
std::set<ComboAddress, ComboAddress::addressOnlyLessThan> s;
WriteLock wl(&queryLock);
for(const auto& q : queryRing)
s.insert(q.requestor);
return s.size();
}
vector<pair<unsigned int,ComboAddress> > Rings::getTopBandwidth(unsigned int numentries)
{
map<ComboAddress, unsigned int, ComboAddress::addressOnlyLessThan> counts;
{
WriteLock wl(&queryLock);
for(const auto& q : queryRing)
counts[q.requestor]+=q.size;
}
{
std::lock_guard<std::mutex> lock(respMutex);
for(const auto& r : respRing)
counts[r.requestor]+=r.size;
}
typedef vector<pair<unsigned int, ComboAddress>> ret_t;
ret_t ret;
for(const auto& p : counts)
ret.push_back({p.second, p.first});<|fim▁hole|> return(b.second < a.second);
});
ret.resize(numentries);
return ret;
}<|fim▁end|> | numentries = ret.size() < numentries ? ret.size() : numentries;
partial_sort(ret.begin(), ret.begin()+numentries, ret.end(), [](const ret_t::value_type&a, const ret_t::value_type&b)
{ |
<|file_name|>InteriorPointArea.js<|end_file_name|><|fim▁begin|>/* Copyright (c) 2011 by The Authors.
* Published under the LGPL 2.1 license.
* See /license-notice.txt for the full text of the license notice.
* See /license.txt for the full text of the license.
*/
describe('jsts.algorithm.InteriorPointArea', function() {
var ipa;
it('can be constructed', function() {
var shell = new jsts.geom.LinearRing([
new jsts.geom.Coordinate(2, 2),
new jsts.geom.Coordinate(6, 2),
new jsts.geom.Coordinate(6, 6),
new jsts.geom.Coordinate(2, 6),
new jsts.geom.Coordinate(2, 2)
]);
var polygon = new jsts.geom.Polygon(shell);
ipa = new jsts.algorithm.InteriorPointArea(polygon);
expect(ipa).toBeDefined();
});
it('interior point of simple square', function() {
var validCoord = new jsts.geom.Coordinate(4, 4);
expect(ipa.getInteriorPoint().distance(validCoord)).toEqual(0);
});
it('interior point of square with hole', function() {
var shell = new jsts.geom.LinearRing([
new jsts.geom.Coordinate(2, 2),
new jsts.geom.Coordinate(8, 2),
new jsts.geom.Coordinate(8, 6),
new jsts.geom.Coordinate(2, 6),
new jsts.geom.Coordinate(2, 2)
]);
var hole = new jsts.geom.LinearRing([
new jsts.geom.Coordinate(3, 3),
new jsts.geom.Coordinate(3, 5),
new jsts.geom.Coordinate(6, 5),
new jsts.geom.Coordinate(6, 3),
new jsts.geom.Coordinate(3, 3)
]);
var polygon = new jsts.geom.Polygon(shell, [hole]);
ipa = new jsts.algorithm.InteriorPointArea(polygon);
var validCoord = new jsts.geom.Coordinate(7, 4);
expect(ipa.getInteriorPoint().distance(validCoord)).toEqual(0);<|fim▁hole|> });
it('interior point of the widest horizontal intersection in geometry collection', function() {
var shell1 = new jsts.geom.LinearRing([
new jsts.geom.Coordinate(2, 2),
new jsts.geom.Coordinate(4, 2),
new jsts.geom.Coordinate(4, 4),
new jsts.geom.Coordinate(2, 4),
new jsts.geom.Coordinate(2, 2)
]);
var polygon1 = new jsts.geom.Polygon(shell1);
var shell2 = new jsts.geom.LinearRing([
new jsts.geom.Coordinate(5, 5),
new jsts.geom.Coordinate(11, 5),
new jsts.geom.Coordinate(11, 7),
new jsts.geom.Coordinate(5, 7),
new jsts.geom.Coordinate(5, 5)
]);
var polygon2 = new jsts.geom.Polygon(shell2);
var shell3 = new jsts.geom.LinearRing([
new jsts.geom.Coordinate(12, 12),
new jsts.geom.Coordinate(15, 12),
new jsts.geom.Coordinate(15, 25),
new jsts.geom.Coordinate(12, 25),
new jsts.geom.Coordinate(12, 12)
]);
var polygon3 = new jsts.geom.Polygon(shell3);
var gc = new jsts.geom.GeometryCollection([polygon1, polygon2, polygon3]);
ipa = new jsts.algorithm.InteriorPointArea(gc);
var validCoord = new jsts.geom.Coordinate(8, 6);
expect(ipa.getInteriorPoint().distance(validCoord)).toEqual(0);
});
});<|fim▁end|> | |
<|file_name|>BeforeRecover.ts<|end_file_name|><|fim▁begin|>import {getMetadataArgsStorage} from "../../globals";
import {EventListenerTypes} from "../../metadata/types/EventListenerTypes";
import {EntityListenerMetadataArgs} from "../../metadata-args/EntityListenerMetadataArgs";
<|fim▁hole|> return function (object: Object, propertyName: string) {
getMetadataArgsStorage().entityListeners.push({
target: object.constructor,
propertyName: propertyName,
type: EventListenerTypes.BEFORE_RECOVER
} as EntityListenerMetadataArgs);
};
}<|fim▁end|> | /**
 * Calls a method on which this decorator is applied before this entity is recovered.
*/
export function BeforeRecover(): PropertyDecorator { |
<|file_name|>NewForm.js<|end_file_name|><|fim▁begin|>export class NewForm {
<|fim▁hole|> update() {
}
}<|fim▁end|> | |
<|file_name|>broker.py<|end_file_name|><|fim▁begin|>"""Defines the base broker class"""
from abc import ABCMeta
from collections import namedtuple
"""
The FileDownload tuple contains an additional partial flag that defines whether the file
may be accessed directly in place or must be copied into the running container. This is
currently only applicable to the S3Broker and requires that the host_path also be defined
on the input workspace.
"""
FileDownload = namedtuple('FileDownload', ['file', 'local_path', 'partial'])
FileMove = namedtuple('FileMove', ['file', 'new_path'])
FileUpload = namedtuple('FileUpload', ['file', 'local_path'])
FileDetails = namedtuple('FileDetails', ['file', 'size'])
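# A minimal sketch (hypothetical values) of how these tuples are built by the
# calling code; `scale_file` stands in for a storage.models.ScaleFile instance:
#
#   download = FileDownload(file=scale_file, local_path='/tmp/input.dat', partial=False)
#   upload = FileUpload(file=scale_file, local_path='/tmp/output.dat')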
class Broker(object):
"""Abstract class for a broker that can download and upload files for a given storage backend
"""
__metaclass__ = ABCMeta
def __init__(self, broker_type):
"""Constructor
:param broker_type: The type of this broker
:type broker_type: string
"""
self._broker_type = broker_type
self._volume = None
@property
def broker_type(self):
"""The type of this broker
:returns: The broker type
:rtype: string
"""
return self._broker_type
@property
def volume(self):
"""If this broker uses a container volume, this property returns the information needed to set up a volume that
can be mounted into the task container. If this broker does not use a container volume, this property should be
None.
:returns: The container volume information needed for this broker, possibly None
:rtype: :class:`storage.brokers.broker.BrokerVolume`
"""
return self._volume
def delete_files(self, volume_path, files, update_model=True):
"""Deletes the given files.
If this broker uses a container volume, volume_path will contain the absolute local container location where
that volume file system is mounted. This means that the path to where a ScaleFile currently exists is the result
of os.path.join(volume_path, files[i].file_path). If this broker does not use a container volume, None will be
given for volume_path.
The files list contains the ScaleFile models representing the files to be deleted. The broker should only delete
each file itself and not any parent directories. If the update model flag is set each file model should be
updated and saved when a delete is successful, including fields such as is_deleted and deleted.
:param volume_path: Absolute path to the local container location onto which the volume file system was mounted,
None if this broker does not use a container volume
:type volume_path: string
:param files: List of files to delete
:type files: [:class:`storage.models.ScaleFile`]
:param update_model: Flag to determine if ScaleFile model should be updated after delete
:type update_model: bool
"""
raise NotImplementedError
def download_files(self, volume_path, file_downloads):
"""Downloads the given files to the given local file system paths.
If this broker uses a container volume, volume_path will contain the absolute local container location where
that volume file system is mounted. This means that the path to a ScaleFile that is accessible to the container
is the result of os.path.join(volume_path, file_downloads[i].file.file_path). If this broker does not use a
container volume, None will be given for volume_path.
The file_downloads list contains named tuples that each contain a ScaleFile model to be downloaded and the
absolute local container path where the file should be downloaded. Typically, no changes are needed to file
models during a download, but any changes should be saved by the broker. Any directories in the absolute local
container paths should already exist.
If a file does not exist in its expected location, raise a MissingFile exception.
:param volume_path: Absolute path to the local container location onto which the volume file system was mounted,
None if this broker does not use a container volume
:type volume_path: string
:param file_downloads: List of files to download
:type file_downloads: [:class:`storage.brokers.broker.FileDownload`]
:raises :class:`storage.exceptions.MissingFile`: If a file to download does not exist at the expected path
"""
raise NotImplementedError
def get_file_system_paths(self, volume_path, files):
"""Returns the local file system paths for the given files, if supported by the broker.
If this broker uses a container volume, volume_path will contain the absolute local container location where
that volume file system is mounted. This means that the path to a ScaleFile that is accessible to the container
is the result of os.path.join(volume_path, scale_files[i].file_path). If this broker does not use a container
volume, None will be given for volume_path. If this method is not supported by the broker, None will be
returned.
:param volume_path: Absolute path to the local container location onto which the volume file system was mounted,
None if this broker does not use a container volume
:type volume_path: string
:param files: List of files
:type files: [:class:`storage.models.ScaleFile`]
:returns: The list of local file system paths if supported, None otherwise
:rtype: [string]
"""
return None
def list_files(self, volume_path, recursive):
"""List the files under the given file system paths.
If this broker uses a container volume, volume_path will contain the absolute local container location where
that volume file system is mounted. If this broker does not use a container volume, None will be given for
volume_path.
Retrieval of objects is provided by the boto3 paginator over
list_objects. This allows for simple paging support with unbounded
object counts. As a result of the time that may be required for the full
result set to be returned, the results are returned via a generator.
This generator will contain objects of type `storage.brokers.broker.FileDetails`.
:param volume_path: Absolute path to the local container location onto which the volume file system was mounted,
None if this broker does not use a container volume
:type volume_path: string
:param recursive: Flag to indicate whether file searching should be done recursively
:type recursive: boolean
:return: Generator of files matching given expression
:rtype: Generator[:class:`storage.brokers.broker.FileDetails`]
"""
raise NotImplementedError
def load_configuration(self, config):
"""Loads the given configuration
:param config: The configuration as a dictionary
:type config: dict
"""
raise NotImplementedError
def move_files(self, volume_path, file_moves):
"""Moves the given files to the new file system paths.
If this broker uses a container volume, volume_path will contain the absolute local container location where
that volume file system is mounted. This means that the path to where a ScaleFile currently exists is the result
of os.path.join(volume_path, files_moves[i].file.file_path) and the new path is given by
os.path.join(volume_path, files_moves[i].file.new_path). If this broker does not use a container volume, None
will be given for volume_path.
The file_moves list contains named tuples that each contain a ScaleFile model to be moved and the new relative
file_path field for the new location of the file. The broker is expected to set the file_path field of each
ScaleFile model to its new location (which the broker may alter) and is free to alter any additional fields as
        necessary. The broker is responsible for saving any changes to models when a move is successful. The directories
in the new file_path may not exist, so it is the responsibility of the broker to create them if necessary.
If a file does not exist in its expected location, raise a MissingFile exception.
:param volume_path: Absolute path to the local container location onto which the volume file system was mounted,
None if this broker does not use a container volume
:type volume_path: string
:param file_moves: List of files to move
:type file_moves: [:class:`storage.brokers.broker.FileMove`]
:raises :class:`storage.exceptions.MissingFile`: If a file to move does not exist at the expected path
"""
raise NotImplementedError
def upload_files(self, volume_path, file_uploads):
"""Uploads the given files from the given local file system paths.
If this broker uses a container volume, volume_path will contain the absolute local container location where
that volume file system is mounted. This means that the path to where a ScaleFile should be uploaded is the
result of os.path.join(volume_path, file_uploads[i].file.file_path). If this broker does not use a container
volume, None will be given for volume_path.
The file_uploads list contains named tuples that each contain a ScaleFile model to be uploaded and the absolute
local container path where the file currently exists. The broker is free to alter the ScaleFile fields of the
uploaded files, including the final file_path (the given file_path is a recommendation by Scale that guarantees
path uniqueness). The ScaleFile models may not have been saved to the database yet and so may not have their id
field populated. The broker should perform a model save/update to the database for any files that are
successfully uploaded. The directories in the remote file_path may not exist, so it is the responsibility of the
broker to create them if necessary.
:param volume_path: Absolute path to the local container location onto which the volume file system was mounted,
None if this broker does not use a container volume
:type volume_path: string
:param file_uploads: List of files to upload
:type file_uploads: [:class:`storage.brokers.broker.FileUpload`]
"""
raise NotImplementedError
def validate_configuration(self, config):
"""Validates the given configuration
:param config: The configuration as a dictionary
:type config: dict
:returns: A list of warnings discovered during validation.
:rtype: [:class:`storage.configuration.workspace_configuration.ValidationWarning`]<|fim▁hole|>
:raises :class:`storage.brokers.exceptions.InvalidBrokerConfiguration`: If the given configuration is invalid
"""
raise NotImplementedError
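# Illustrative only, not part of Scale: the smallest concrete broker shape,
# showing how a subclass plugs into the contract defined above:
#
#   class NullBroker(Broker):
#       def __init__(self):
#           super(NullBroker, self).__init__('null')
#
#       def load_configuration(self, config):
#           pass
#
#       def validate_configuration(self, config):
#           return []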
class BrokerVolume(object):
"""Represents the properties of a container volume that must be mounted into the container for a broker to work
"""
def __init__(self, driver, remote_path):
"""Constructor
:param driver: The driver used by the volume, None indicates that the default volume driver should be used
:type driver: string
:param remote_path: The remote path for the storage backend to which the container volume is connecting
:type remote_path: string
"""
self.driver = driver
self.remote_path = remote_path
# Special flag to indicate a host mount, which has different behavior
self.host = False<|fim▁end|> | |
<|file_name|>test_transactions.py<|end_file_name|><|fim▁begin|># Copyright 2018-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Execute Transactions Spec tests."""
import os
import sys
from io import BytesIO
sys.path[0:0] = [""]
from test import client_context, unittest
from test.utils import (
OvertCommandListener,
TestCreator,
rs_client,
single_client,
wait_until,
)
from test.utils_spec_runner import SpecRunner
from gridfs import GridFS, GridFSBucket
from pymongo import WriteConcern, client_session
from pymongo.client_session import TransactionOptions
from pymongo.errors import (
CollectionInvalid,
ConfigurationError,
ConnectionFailure,
InvalidOperation,
OperationFailure,
)
from pymongo.operations import IndexModel, InsertOne
from pymongo.read_concern import ReadConcern
from pymongo.read_preferences import ReadPreference
# Location of JSON test specifications.
TEST_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "transactions", "legacy")
_TXN_TESTS_DEBUG = os.environ.get("TRANSACTION_TESTS_DEBUG")
# Max number of operations to perform after a transaction to prove unpinning
# occurs. Chosen so that there's a low false positive rate. With 2 mongoses,
# 50 attempts yields roughly a one in a quadrillion chance of a false
# positive (a probability of 0.5**50, i.e. about 1 in 2**50).
UNPIN_TEST_MAX_ATTEMPTS = 50
class TransactionsBase(SpecRunner):
@classmethod
def setUpClass(cls):
super(TransactionsBase, cls).setUpClass()
if client_context.supports_transactions():
for address in client_context.mongoses:
cls.mongos_clients.append(single_client("%s:%s" % address))
@classmethod
def tearDownClass(cls):
for client in cls.mongos_clients:
client.close()
super(TransactionsBase, cls).tearDownClass()
def maybe_skip_scenario(self, test):
super(TransactionsBase, self).maybe_skip_scenario(test)
if (
"secondary" in self.id()
and not client_context.is_mongos
and not client_context.has_secondaries
):
raise unittest.SkipTest("No secondaries")
class TestTransactions(TransactionsBase):
RUN_ON_SERVERLESS = True
@client_context.require_transactions
def test_transaction_options_validation(self):
default_options = TransactionOptions()
self.assertIsNone(default_options.read_concern)
self.assertIsNone(default_options.write_concern)
self.assertIsNone(default_options.read_preference)
self.assertIsNone(default_options.max_commit_time_ms)
# No error when valid options are provided.
TransactionOptions(
read_concern=ReadConcern(),
write_concern=WriteConcern(),
read_preference=ReadPreference.PRIMARY,
max_commit_time_ms=10000,
)
with self.assertRaisesRegex(TypeError, "read_concern must be "):
TransactionOptions(read_concern={}) # type: ignore
with self.assertRaisesRegex(TypeError, "write_concern must be "):
TransactionOptions(write_concern={}) # type: ignore
with self.assertRaisesRegex(
ConfigurationError, "transactions do not support unacknowledged write concern"
):
TransactionOptions(write_concern=WriteConcern(w=0))
with self.assertRaisesRegex(TypeError, "is not valid for read_preference"):
TransactionOptions(read_preference={}) # type: ignore
with self.assertRaisesRegex(TypeError, "max_commit_time_ms must be an integer or None"):
TransactionOptions(max_commit_time_ms="10000") # type: ignore
@client_context.require_transactions
def test_transaction_write_concern_override(self):
"""Test txn overrides Client/Database/Collection write_concern."""
client = rs_client(w=0)
self.addCleanup(client.close)
db = client.test
coll = db.test
coll.insert_one({})
with client.start_session() as s:
with s.start_transaction(write_concern=WriteConcern(w=1)):
self.assertTrue(coll.insert_one({}, session=s).acknowledged)
self.assertTrue(coll.insert_many([{}, {}], session=s).acknowledged)
self.assertTrue(coll.bulk_write([InsertOne({})], session=s).acknowledged)
self.assertTrue(coll.replace_one({}, {}, session=s).acknowledged)
self.assertTrue(coll.update_one({}, {"$set": {"a": 1}}, session=s).acknowledged)
self.assertTrue(coll.update_many({}, {"$set": {"a": 1}}, session=s).acknowledged)
self.assertTrue(coll.delete_one({}, session=s).acknowledged)
self.assertTrue(coll.delete_many({}, session=s).acknowledged)
coll.find_one_and_delete({}, session=s)
coll.find_one_and_replace({}, {}, session=s)
coll.find_one_and_update({}, {"$set": {"a": 1}}, session=s)
unsupported_txn_writes: list = [
(client.drop_database, [db.name], {}),
(db.drop_collection, ["collection"], {}),
(coll.drop, [], {}),
(coll.rename, ["collection2"], {}),
# Drop collection2 between tests of "rename", above.
(coll.database.drop_collection, ["collection2"], {}),
(coll.create_indexes, [[IndexModel("a")]], {}),
(coll.create_index, ["a"], {}),
(coll.drop_index, ["a_1"], {}),
(coll.drop_indexes, [], {}),
(coll.aggregate, [[{"$out": "aggout"}]], {}),
]
# Creating a collection in a transaction requires MongoDB 4.4+.
if client_context.version < (4, 3, 4):
unsupported_txn_writes.extend(
[
(db.create_collection, ["collection"], {}),
]
)
for op in unsupported_txn_writes:
op, args, kwargs = op
with client.start_session() as s:
kwargs["session"] = s
s.start_transaction(write_concern=WriteConcern(w=1))
with self.assertRaises(OperationFailure):
op(*args, **kwargs)
s.abort_transaction()
@client_context.require_transactions
@client_context.require_multiple_mongoses
def test_unpin_for_next_transaction(self):
# Increase localThresholdMS and wait until both nodes are discovered
# to avoid false positives.
client = rs_client(client_context.mongos_seeds(), localThresholdMS=1000)
wait_until(lambda: len(client.nodes) > 1, "discover both mongoses")
coll = client.test.test
# Create the collection.
coll.insert_one({})
self.addCleanup(client.close)
with client.start_session() as s:
# Session is pinned to Mongos.
with s.start_transaction():
coll.insert_one({}, session=s)
addresses = set()
for _ in range(UNPIN_TEST_MAX_ATTEMPTS):
with s.start_transaction():
cursor = coll.find({}, session=s)
self.assertTrue(next(cursor))
addresses.add(cursor.address)
# Break early if we can.
if len(addresses) > 1:
break
self.assertGreater(len(addresses), 1)
@client_context.require_transactions
@client_context.require_multiple_mongoses
def test_unpin_for_non_transaction_operation(self):
# Increase localThresholdMS and wait until both nodes are discovered
# to avoid false positives.
client = rs_client(client_context.mongos_seeds(), localThresholdMS=1000)
wait_until(lambda: len(client.nodes) > 1, "discover both mongoses")
coll = client.test.test
# Create the collection.
coll.insert_one({})
self.addCleanup(client.close)
with client.start_session() as s:
# Session is pinned to Mongos.
with s.start_transaction():
coll.insert_one({}, session=s)
addresses = set()
for _ in range(UNPIN_TEST_MAX_ATTEMPTS):
cursor = coll.find({}, session=s)
self.assertTrue(next(cursor))
addresses.add(cursor.address)
# Break early if we can.
if len(addresses) > 1:
break
self.assertGreater(len(addresses), 1)
@client_context.require_transactions
@client_context.require_version_min(4, 3, 4)
def test_create_collection(self):
client = client_context.client
db = client.pymongo_test
coll = db.test_create_collection
self.addCleanup(coll.drop)
# Use with_transaction to avoid StaleConfig errors on sharded clusters.
def create_and_insert(session):
coll2 = db.create_collection(coll.name, session=session)
self.assertEqual(coll, coll2)
coll.insert_one({}, session=session)
with client.start_session() as s:
s.with_transaction(create_and_insert)
# Outside a transaction we raise CollectionInvalid on existing colls.
with self.assertRaises(CollectionInvalid):
db.create_collection(coll.name)
# Inside a transaction we raise the OperationFailure from create.
with client.start_session() as s:
s.start_transaction()
with self.assertRaises(OperationFailure) as ctx:
db.create_collection(coll.name, session=s)
self.assertEqual(ctx.exception.code, 48) # NamespaceExists
@client_context.require_transactions
def test_gridfs_does_not_support_transactions(self):
client = client_context.client
db = client.pymongo_test
gfs = GridFS(db)
bucket = GridFSBucket(db)
def gridfs_find(*args, **kwargs):
return gfs.find(*args, **kwargs).next()
def gridfs_open_upload_stream(*args, **kwargs):
bucket.open_upload_stream(*args, **kwargs).write(b"1")
gridfs_ops = [
(gfs.put, (b"123",)),
(gfs.get, (1,)),
(gfs.get_version, ("name",)),
(gfs.get_last_version, ("name",)),
(gfs.delete, (1,)),
(gfs.list, ()),
(gfs.find_one, ()),
(gridfs_find, ()),
(gfs.exists, ()),
(gridfs_open_upload_stream, ("name",)),
(
bucket.upload_from_stream,
(
"name",
b"data",
),
),
(
bucket.download_to_stream,
(
1,
BytesIO(),
),
),
(
bucket.download_to_stream_by_name,
(
"name",
BytesIO(),
),
),
(bucket.delete, (1,)),
(bucket.find, ()),
(bucket.open_download_stream, (1,)),
(bucket.open_download_stream_by_name, ("name",)),
(
bucket.rename,
(
1,
"new-name",
),
),
]
with client.start_session() as s, s.start_transaction():
for op, args in gridfs_ops:
with self.assertRaisesRegex(
InvalidOperation,
"GridFS does not support multi-document transactions",
):
op(*args, session=s) # type: ignore
# Require 4.2+ for large (16MB+) transactions.
@client_context.require_version_min(4, 2)
@client_context.require_transactions
@unittest.skipIf(sys.platform == "win32", "Our Windows machines are too slow to pass this test")
def test_transaction_starts_with_batched_write(self):
if "PyPy" in sys.version and client_context.tls:
self.skipTest(
"PYTHON-2937 PyPy is so slow sending large "
"messages over TLS that this test fails"
)
# Start a transaction with a batch of operations that needs to be
# split.
listener = OvertCommandListener()
client = rs_client(event_listeners=[listener])
coll = client[self.db.name].test
coll.delete_many({})
listener.reset()
self.addCleanup(client.close)
self.addCleanup(coll.drop)
large_str = "\0" * (10 * 1024 * 1024)
ops = [InsertOne({"a": large_str}) for _ in range(10)]
with client.start_session() as session:
with session.start_transaction():
coll.bulk_write(ops, session=session)
# Assert commands were constructed properly.
self.assertEqual(
["insert", "insert", "insert", "commitTransaction"], listener.started_command_names()
)
first_cmd = listener.results["started"][0].command
self.assertTrue(first_cmd["startTransaction"])
lsid = first_cmd["lsid"]
txn_number = first_cmd["txnNumber"]
for event in listener.results["started"][1:]:
self.assertNotIn("startTransaction", event.command)
self.assertEqual(lsid, event.command["lsid"])
self.assertEqual(txn_number, event.command["txnNumber"])
self.assertEqual(10, coll.count_documents({}))
class PatchSessionTimeout(object):
"""Patches the client_session's with_transaction timeout for testing."""
def __init__(self, mock_timeout):
self.real_timeout = client_session._WITH_TRANSACTION_RETRY_TIME_LIMIT
self.mock_timeout = mock_timeout
def __enter__(self):
client_session._WITH_TRANSACTION_RETRY_TIME_LIMIT = self.mock_timeout
return self
def __exit__(self, exc_type, exc_val, exc_tb):
client_session._WITH_TRANSACTION_RETRY_TIME_LIMIT = self.real_timeout
class TestTransactionsConvenientAPI(TransactionsBase):
TEST_PATH = os.path.join(
os.path.dirname(os.path.realpath(__file__)), "transactions-convenient-api"
)
@client_context.require_transactions
def test_callback_raises_custom_error(self):
class _MyException(Exception):
pass
def raise_error(_):
raise _MyException()
with self.client.start_session() as s:
with self.assertRaises(_MyException):
s.with_transaction(raise_error)
@client_context.require_transactions
def test_callback_returns_value(self):
def callback(_):
return "Foo"
with self.client.start_session() as s:
self.assertEqual(s.with_transaction(callback), "Foo")
self.db.test.insert_one({})
def callback2(session):
self.db.test.insert_one({}, session=session)
return "Foo"
with self.client.start_session() as s:
self.assertEqual(s.with_transaction(callback2), "Foo")
@client_context.require_transactions
def test_callback_not_retried_after_timeout(self):
listener = OvertCommandListener()
client = rs_client(event_listeners=[listener])
self.addCleanup(client.close)
coll = client[self.db.name].test<|fim▁hole|> def callback(session):
coll.insert_one({}, session=session)
err: dict = {
"ok": 0,
"errmsg": "Transaction 7819 has been aborted.",
"code": 251,
"codeName": "NoSuchTransaction",
"errorLabels": ["TransientTransactionError"],
}
raise OperationFailure(err["errmsg"], err["code"], err)
# Create the collection.
coll.insert_one({})
listener.results.clear()
with client.start_session() as s:
with PatchSessionTimeout(0):
with self.assertRaises(OperationFailure):
s.with_transaction(callback)
self.assertEqual(listener.started_command_names(), ["insert", "abortTransaction"])
@client_context.require_test_commands
@client_context.require_transactions
def test_callback_not_retried_after_commit_timeout(self):
listener = OvertCommandListener()
client = rs_client(event_listeners=[listener])
self.addCleanup(client.close)
coll = client[self.db.name].test
def callback(session):
coll.insert_one({}, session=session)
# Create the collection.
coll.insert_one({})
self.set_fail_point(
{
"configureFailPoint": "failCommand",
"mode": {"times": 1},
"data": {
"failCommands": ["commitTransaction"],
"errorCode": 251, # NoSuchTransaction
},
}
)
self.addCleanup(self.set_fail_point, {"configureFailPoint": "failCommand", "mode": "off"})
listener.results.clear()
with client.start_session() as s:
with PatchSessionTimeout(0):
with self.assertRaises(OperationFailure):
s.with_transaction(callback)
self.assertEqual(listener.started_command_names(), ["insert", "commitTransaction"])
@client_context.require_test_commands
@client_context.require_transactions
def test_commit_not_retried_after_timeout(self):
listener = OvertCommandListener()
client = rs_client(event_listeners=[listener])
self.addCleanup(client.close)
coll = client[self.db.name].test
def callback(session):
coll.insert_one({}, session=session)
# Create the collection.
coll.insert_one({})
self.set_fail_point(
{
"configureFailPoint": "failCommand",
"mode": {"times": 2},
"data": {"failCommands": ["commitTransaction"], "closeConnection": True},
}
)
self.addCleanup(self.set_fail_point, {"configureFailPoint": "failCommand", "mode": "off"})
listener.results.clear()
with client.start_session() as s:
with PatchSessionTimeout(0):
with self.assertRaises(ConnectionFailure):
s.with_transaction(callback)
# One insert for the callback and two commits (includes the automatic
# retry).
self.assertEqual(
listener.started_command_names(), ["insert", "commitTransaction", "commitTransaction"]
)
# Tested here because this supports Motor's convenient transactions API.
@client_context.require_transactions
def test_in_transaction_property(self):
client = client_context.client
coll = client.test.testcollection
coll.insert_one({})
self.addCleanup(coll.drop)
with client.start_session() as s:
self.assertFalse(s.in_transaction)
s.start_transaction()
self.assertTrue(s.in_transaction)
coll.insert_one({}, session=s)
self.assertTrue(s.in_transaction)
s.commit_transaction()
self.assertFalse(s.in_transaction)
with client.start_session() as s:
s.start_transaction()
# commit empty transaction
s.commit_transaction()
self.assertFalse(s.in_transaction)
with client.start_session() as s:
s.start_transaction()
s.abort_transaction()
self.assertFalse(s.in_transaction)
# Using a callback
def callback(session):
self.assertTrue(session.in_transaction)
with client.start_session() as s:
self.assertFalse(s.in_transaction)
s.with_transaction(callback)
self.assertFalse(s.in_transaction)
def create_test(scenario_def, test, name):
@client_context.require_test_commands
@client_context.require_transactions
def run_scenario(self):
self.run_scenario(scenario_def, test)
return run_scenario
test_creator = TestCreator(create_test, TestTransactions, TEST_PATH)
test_creator.create_tests()
TestCreator(
create_test, TestTransactionsConvenientAPI, TestTransactionsConvenientAPI.TEST_PATH
).create_tests()
if __name__ == "__main__":
unittest.main()<|fim▁end|> | |
<|file_name|>user.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Copyright 2010 cloudControl UG (haftungsbeschraenkt)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import sys
import json
from pycclib.cclib import GoneError, NotImplementedError, ForbiddenError
from pycclib.cclib import ConflictDuplicateError
from pycclib import cclib
from cctrl.error import PasswordsDontMatchException, InputErrorException, \
messages
from cctrl.auth import get_credentials, set_user_config, get_user_config
from cctrl.output import print_keys
from cctrl.common import get_email_and_password
from output import print_key
from oshelpers import readContentOf
from keyhelpers import is_key_valid, ask_user_to_use_default_ssh_public_key, \
create_new_default_ssh_keys, get_default_ssh_key_path
class UserController(object):
"""
This controller handles all user related actions.
"""
api = None
def __init__(self, api, settings):
self.api = api
self.settings = settings
def checktoken(self, args):
try:
self.api.read_users()<|fim▁hole|> except cclib.TokenRequiredError:
sys.exit(1)
sys.exit(0)
def create(self, args):
"""
Create a new user.
"""
if not self.settings.user_registration_enabled:
print messages['RegisterDisabled'].format(self.settings.user_registration_url)
return
self.api.set_token(None)
if args.name and args.email and args.password:
name = args.name[0]
email = args.email[0]
password = args.password[0]
else:
name = raw_input('Username: ')
try:
email, password = get_credentials(self.settings, create=True)
except PasswordsDontMatchException:
return
try:
self.api.create_user(name, email, password)
except NotImplementedError:
raise InputErrorException('CommandNotImplemented')
print messages['UserCreatedNowCheckEmail']
def activate(self, args):
"""
Activate a new user using the information from the
activation email.
"""
self.api.set_token(None)
try:
self.api.update_user(
args.user_name[0],
activation_code=args.activation_code[0])
except GoneError:
raise InputErrorException('WrongUsername')
except NotImplementedError:
raise InputErrorException('CommandNotImplemented')
def delete(self, args):
"""
Delete your user account.
"""
users = self.api.read_users()
if not args.force_delete:
question = raw_input('Do you really want to delete your user? ' +
'Type "Yes" without the quotes to delete: ')
else:
question = 'Yes'
if question.lower() == 'yes':
try:
self.api.delete_user(users[0]['username'])
except NotImplementedError:
raise InputErrorException('CommandNotImplemented')
except ForbiddenError:
raise InputErrorException('DeleteAppsBeforeUser')
# After we have deleted our user we should also delete
# the token_file to avoid confusion
self.api.set_token(None)
else:
raise InputErrorException('SecurityQuestionDenied')
def addKey(self, args):
"""
Add a given public key to cloudControl user account.
"""
default_key_path = get_default_ssh_key_path()
# Possibility #1: User is providing a non-default SSH key
key_to_read = args.public_key
if not is_key_valid(key_to_read):
# Possibility #2: Try the default RSA public key
            print >> sys.stderr, "Key '{0}' does not seem to be an RSA public key, or it was not found!".format(key_to_read)
ask_user_to_use_default_ssh_public_key()
# Possibility #3: All failed! Let's just create new keys for user!
if not is_key_valid(default_key_path):
if key_to_read != default_key_path:
                    print >> sys.stderr, "Default key '{0}' does not seem to be an RSA public key, or it was not found!".format(default_key_path)
create_new_default_ssh_keys()
# We've filtered all cases: the key must be the default one!
key_to_read = default_key_path
# Good, we have the key! Now, read the content of the key!
public_rsa_key_content = readContentOf(key_to_read)
# Add public RSA-key to cloudControl user account
try:
users = self.api.read_users()
self.api.create_user_key(
users[0]['username'],
public_rsa_key_content)
except ConflictDuplicateError:
raise InputErrorException('KeyDuplicate')
def listKeys(self, args):
"""
List your public keys.
"""
users = self.api.read_users()
if args.id:
key = self.api.read_user_key(users[0]['username'], args.id)
print_key(key)
else:
keys = self.api.read_user_keys(users[0]['username'])
print_keys(keys)
def removeKey(self, args):
"""
Remove one of your public keys specified by key_id.
listKeys() shows the key_ids.
"""
users = self.api.read_users()
if not args.force_delete:
question = raw_input('Do you really want to remove your key? ' +
'Type "Yes" without the quotes to remove: ')
else:
question = 'Yes'
if question.lower() == 'yes':
self.api.delete_user_key(users[0]['username'], args.id[0])
else:
raise InputErrorException('SecurityQuestionDenied')
def logout(self, args):
"""
Logout a user by deleting the token.json file.
"""
self.api.set_token(None)
def registerAddon(self, args):
file_content = readContentOf(args.manifest)
email, password = get_email_and_password(self.settings)
try:
self.api.register_addon(email, password, json.loads(file_content))
except cclib.UnauthorizedError:
sys.exit(messages['NotAuthorized'])
except cclib.ForbiddenError, e:
sys.exit(messages['NotAllowed'])
except cclib.ConnectionException:
sys.exit(messages['APIUnreachable'])
except Exception as e:
sys.exit(e)
def setup(self, args):
user_config = get_user_config(self.settings)
ssh_key_path = self._get_setup_ssh_key_path(user_config, args)
if not is_key_valid(ssh_key_path):
# If given key path is not default and does not exist
# we raise an error
if ssh_key_path != get_default_ssh_key_path():
raise InputErrorException('WrongPublicKey')
# If given key path was the default one, we create the key
# pair for the user
            print >> sys.stderr, "Key '{0}' does not seem to be an RSA public key, or it was not found!".format(ssh_key_path)
create_new_default_ssh_keys()
ssh_key_content = readContentOf(ssh_key_path)
ssh_auth = self._get_setup_ssh_auth(self.settings, user_config, args)
if args.email:
set_user_config(self.settings, email=args.email)
try:
users = self.api.read_users()
self.api.create_user_key(
users[0]['username'],
ssh_key_content)
except ConflictDuplicateError:
# Key already added, nothing to do.
pass
set_user_config(self.settings,
ssh_auth=ssh_auth,
ssh_path=ssh_key_path)
def _get_setup_ssh_key_path(self, user_config, args):
if args.ssh_key_path:
return os.path.abspath(args.ssh_key_path)
if user_config.get('ssh_path'):
return user_config.get('ssh_path')
return get_default_ssh_key_path()
def _get_setup_ssh_auth(self, settings, user_config, args):
if not settings.ssh_auth:
return False
if args.ssh_auth:
return args.ssh_auth == 'yes'
return user_config.get('ssh_auth', True)<|fim▁end|> | |
<|file_name|>rtf.py<|end_file_name|><|fim▁begin|>__license__ = 'GPL v3'
__copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net>'
"""
Edit metadata in RTF files.
"""
import re, cStringIO, codecs
from calibre import force_unicode
from calibre.ebooks.metadata import MetaInformation, string_to_authors
title_pat = re.compile(r'\{\\info.*?\{\\title(.*?)(?<!\\)\}', re.DOTALL)
author_pat = re.compile(r'\{\\info.*?\{\\author(.*?)(?<!\\)\}', re.DOTALL)
comment_pat = re.compile(r'\{\\info.*?\{\\subject(.*?)(?<!\\)\}', re.DOTALL)
tags_pat = re.compile(r'\{\\info.*?\{\\category(.*?)(?<!\\)\}', re.DOTALL)
publisher_pat = re.compile(r'\{\\info.*?\{\\manager(.*?)(?<!\\)\}', re.DOTALL)
def get_document_info(stream):
"""
Extract the \info block from an RTF file.
Return the info block as a string and the position in the file at which it
starts.
@param stream: File like object pointing to the RTF file.
"""
block_size = 4096
stream.seek(0)
found, block = False, ""
while not found:
prefix = block[-6:]
block = prefix + stream.read(block_size)
actual_block_size = len(block) - len(prefix)
if len(block) == len(prefix):
break
idx = block.find(r'{\info')
if idx >= 0:
found = True
pos = stream.tell() - actual_block_size + idx - len(prefix)
stream.seek(pos)
else:
if block.find(r'\sect') > -1:
break
if not found:
return None, 0
    data, count = cStringIO.StringIO(), 0
pos = stream.tell()
while True:
ch = stream.read(1)
if ch == '\\':
data.write(ch + stream.read(1))
continue
if ch == '{':
count += 1
elif ch == '}':
count -= 1
data.write(ch)
if count == 0:
break
return data.getvalue(), pos
def detect_codepage(stream):
pat = re.compile(r'\\ansicpg(\d+)')
match = pat.search(stream.read(512))
if match is not None:
num = match.group(1)
if num == '0':
num = '1252'
codec = 'cp'+num
try:
codecs.lookup(codec)
return codec
except:
pass
def encode(unistr):
if not isinstance(unistr, unicode):
unistr = force_unicode(unistr)
return ''.join([str(c) if ord(c) < 128 else '\\u' + str(ord(c)) + '?' for c in unistr])
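# For example, RTF escapes non-ASCII code points as \uNNN? sequences:
#   encode(u'caf\xe9') == 'caf\\u233?'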
def decode(raw, codec):
if codec is not None:
def codepage(match):
return chr(int(match.group(1), 16))
raw = re.sub(r"\\'([a-fA-F0-9]{2})", codepage, raw)
raw = raw.decode(codec)
def uni(match):
return unichr(int(match.group(1)))
raw = re.sub(r'\\u([0-9]{3,4}).', uni, raw)
return raw
def get_metadata(stream):
"""
Return metadata as a L{MetaInfo} object
"""
stream.seek(0)
if stream.read(5) != r'{\rtf':
return MetaInformation(_('Unknown'))
block = get_document_info(stream)[0]
if not block:
return MetaInformation(_('Unknown'))
stream.seek(0)
cpg = detect_codepage(stream)
stream.seek(0)
title_match = title_pat.search(block)
if title_match is not None:
title = decode(title_match.group(1).strip(), cpg)
else:
title = _('Unknown')
author_match = author_pat.search(block)
if author_match is not None:
author = decode(author_match.group(1).strip(), cpg)
else:
author = None
mi = MetaInformation(title)
if author:
mi.authors = string_to_authors(author)
comment_match = comment_pat.search(block)
if comment_match is not None:
comment = decode(comment_match.group(1).strip(), cpg)
mi.comments = comment
tags_match = tags_pat.search(block)
if tags_match is not None:
tags = decode(tags_match.group(1).strip(), cpg)
mi.tags = list(filter(None, (x.strip() for x in tags.split(','))))
publisher_match = publisher_pat.search(block)
if publisher_match is not None:
publisher = decode(publisher_match.group(1).strip(), cpg)
mi.publisher = publisher
return mi
def create_metadata(stream, options):
md = [r'{\info']
if options.title:
title = encode(options.title)
md.append(r'{\title %s}'%(title,))
if options.authors:
au = options.authors
if not isinstance(au, basestring):
au = u', '.join(au)
author = encode(au)
md.append(r'{\author %s}'%(author,))
comp = options.comment if hasattr(options, 'comment') else options.comments
if comp:
comment = encode(comp)
md.append(r'{\subject %s}'%(comment,))
if options.publisher:
publisher = encode(options.publisher)
md.append(r'{\manager %s}'%(publisher,))
if options.tags:
tags = u', '.join(options.tags)
tags = encode(tags)
md.append(r'{\category %s}'%(tags,))
if len(md) > 1:
md.append('}')
stream.seek(0)
src = stream.read()
ans = src[:6] + u''.join(md) + src[6:]
stream.seek(0)
stream.write(ans)
def set_metadata(stream, options):
'''
Modify/add RTF metadata in stream
@param options: Object with metadata attributes title, author, comment, category
'''
def add_metadata_item(src, name, val):
index = src.rindex('}')
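        # r'{\ '[:-1] spells the two characters '{' and '\': a raw string
        # literal cannot end in a single backslash, hence the slice trick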
return src[:index] + r'{\ '[:-1] + name + ' ' + val + '}}'
src, pos = get_document_info(stream)
if src is None:
create_metadata(stream, options)
else:
olen = len(src)
base_pat = r'\{\\name(.*?)(?<!\\)\}'
title = options.title
if title is not None:
title = encode(title)
pat = re.compile(base_pat.replace('name', 'title'), re.DOTALL)
if pat.search(src):
src = pat.sub(r'{\\title ' + title + r'}', src)
else:
src = add_metadata_item(src, 'title', title)<|fim▁hole|> comment = options.comments
if comment is not None:
comment = encode(comment)
pat = re.compile(base_pat.replace('name', 'subject'), re.DOTALL)
if pat.search(src):
src = pat.sub(r'{\\subject ' + comment + r'}', src)
else:
src = add_metadata_item(src, 'subject', comment)
author = options.authors
if author is not None:
author = ', '.join(author)
author = encode(author)
pat = re.compile(base_pat.replace('name', 'author'), re.DOTALL)
if pat.search(src):
src = pat.sub(r'{\\author ' + author + r'}', src)
else:
src = add_metadata_item(src, 'author', author)
tags = options.tags
if tags is not None:
tags = ', '.join(tags)
tags = encode(tags)
pat = re.compile(base_pat.replace('name', 'category'), re.DOTALL)
if pat.search(src):
src = pat.sub(r'{\\category ' + tags + r'}', src)
else:
src = add_metadata_item(src, 'category', tags)
publisher = options.publisher
if publisher is not None:
publisher = encode(publisher)
pat = re.compile(base_pat.replace('name', 'manager'), re.DOTALL)
if pat.search(src):
src = pat.sub(r'{\\manager ' + publisher + r'}', src)
else:
src = add_metadata_item(src, 'manager', publisher)
stream.seek(pos + olen)
after = stream.read()
stream.seek(pos)
stream.truncate()
stream.write(src)
stream.write(after)<|fim▁end|> | |
<|file_name|>state.rs<|end_file_name|><|fim▁begin|>use Handle;
use gl;
use std::default::Default;
/// Represents the current OpenGL state.
///
/// The current state is passed to each function and can be freely updated.
pub struct GLState {
/// Whether GL_BLEND is enabled
pub enabled_blend: bool,
/// Whether GL_CULL_FACE is enabled
pub enabled_cull_face: bool,
/// Whether GL_DEBUG_OUTPUT is enabled. None means "unknown".
pub enabled_debug_output: Option<bool>,
/// Whether GL_DEBUG_OUTPUT_SYNCHRONOUS is enabled
pub enabled_debug_output_synchronous: bool,
/// Whether GL_DEPTH_TEST is enabled
pub enabled_depth_test: bool,
/// Whether GL_DITHER is enabled
pub enabled_dither: bool,
/// Whether GL_MULTISAMPLE is enabled
pub enabled_multisample: bool,
/// Whether GL_POLYGON_OFFSET_FILL is enabled
pub enabled_polygon_offset_fill: bool,
/// Whether GL_RASTERIZER_DISCARD is enabled
pub enabled_rasterizer_discard: bool,
/// Whether GL_SAMPLE_ALPHA_TO_COVERAGE is enabled
pub enabled_sample_alpha_to_coverage: bool,
/// Whether GL_SAMPLE_COVERAGE is enabled
pub enabled_sample_coverage: bool,
/// Whether GL_SCISSOR_TEST is enabled
pub enabled_scissor_test: bool,
/// Whether GL_STENCIL_TEST is enabled
pub enabled_stencil_test: bool,
// The latest value passed to `glUseProgram`.
pub program: Handle,
// The latest value passed to `glBindVertexArray`.
pub vertex_array: gl::types::GLuint,
// The latest value passed to `glClearColor`.
pub clear_color: (gl::types::GLclampf, gl::types::GLclampf,
gl::types::GLclampf, gl::types::GLclampf),
// The latest value passed to `glClearDepthf`.
pub clear_depth: gl::types::GLclampf,
// The latest value passed to `glClearStencil`.
pub clear_stencil: gl::types::GLint,
/// The latest buffer bound to `GL_ARRAY_BUFFER`.
pub array_buffer_binding: gl::types::GLuint,
/// The latest buffer bound to `GL_PIXEL_PACK_BUFFER`.
pub pixel_pack_buffer_binding: gl::types::GLuint,
/// The latest buffer bound to `GL_PIXEL_UNPACK_BUFFER`.
pub pixel_unpack_buffer_binding: gl::types::GLuint,
/// The latest buffer bound to `GL_UNIFORM_BUFFER`.
pub uniform_buffer_binding: gl::types::GLuint,
/// The latest buffer bound to `GL_READ_FRAMEBUFFER`.
pub read_framebuffer: gl::types::GLuint,
/// The latest buffer bound to `GL_DRAW_FRAMEBUFFER`.
pub draw_framebuffer: gl::types::GLuint,
/// The latest values passed to `glReadBuffer` with the default framebuffer.
/// `None` means "unknown".
pub default_framebuffer_read: Option<gl::types::GLenum>,
/// The latest render buffer bound with `glBindRenderbuffer`.
pub renderbuffer: gl::types::GLuint,
/// The latest values passed to `glBlendEquation`.
pub blend_equation: gl::types::GLenum,
/// The latest values passed to `glBlendFunc`.
pub blend_func: (gl::types::GLenum, gl::types::GLenum),
/// The latest value passed to `glDepthFunc`.
pub depth_func: gl::types::GLenum,
/// The latest value passed to `glDepthMask`.
pub depth_mask: bool,
/// The latest values passed to `glDepthRange`.
pub depth_range: (f32, f32),
/// The latest values passed to `glViewport`. `None` means unknown.
pub viewport: Option<(gl::types::GLint, gl::types::GLint,
gl::types::GLsizei, gl::types::GLsizei)>,
/// The latest values passed to `glScissor`. `None` means unknown.
pub scissor: Option<(gl::types::GLint, gl::types::GLint,
gl::types::GLsizei, gl::types::GLsizei)>,
/// The latest value passed to `glLineWidth`.
pub line_width: gl::types::GLfloat,
/// The latest value passed to `glCullFace`.
pub cull_face: gl::types::GLenum,
/// The latest value passed to `glPolygonMode`.
pub polygon_mode: gl::types::GLenum,
<|fim▁hole|>
/// The latest value passed to `glPixelStore` with `GL_PACK_ALIGNMENT`.
pub pixel_store_pack_alignment: gl::types::GLint,
/// The latest value passed to `glPatchParameter` with `GL_PATCH_VERTICES`.
pub patch_patch_vertices: gl::types::GLint,
/// The latest value passed to `glActiveTexture`.
pub active_texture: gl::types::GLenum,
}
impl Default for GLState {
fn default() -> GLState {
GLState {
enabled_blend: false,
enabled_cull_face: false,
enabled_debug_output: None,
enabled_debug_output_synchronous: false,
enabled_depth_test: false,
enabled_dither: false,
enabled_multisample: true,
enabled_polygon_offset_fill: false,
enabled_rasterizer_discard: false,
enabled_sample_alpha_to_coverage: false,
enabled_sample_coverage: false,
enabled_scissor_test: false,
enabled_stencil_test: false,
program: Handle::Id(0),
vertex_array: 0,
clear_color: (0.0, 0.0, 0.0, 0.0),
clear_depth: 1.0,
clear_stencil: 0,
array_buffer_binding: 0,
pixel_pack_buffer_binding: 0,
pixel_unpack_buffer_binding: 0,
uniform_buffer_binding: 0,
read_framebuffer: 0,
draw_framebuffer: 0,
default_framebuffer_read: None,
renderbuffer: 0,
depth_func: gl::LESS,
depth_mask: true,
depth_range: (0.0, 1.0),
blend_equation: gl::FUNC_ADD,
blend_func: (gl::ONE, gl::ZERO),
viewport: None,
scissor: None,
line_width: 1.0,
cull_face: gl::BACK,
polygon_mode: gl::FILL,
pixel_store_unpack_alignment: 4,
pixel_store_pack_alignment: 4,
patch_patch_vertices: 3,
active_texture: gl::TEXTURE0,
}
}
}<|fim▁end|> | /// The latest value passed to `glPixelStore` with `GL_UNPACK_ALIGNMENT`.
pub pixel_store_unpack_alignment: gl::types::GLint, |
<|file_name|>block3rdPartyContent.js<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
function blockReferer () {
if (document.referrer) {
// Blocks cross-origin referer
var parser = document.createElement('a')
parser.href = document.referrer
if (parser.origin !== document.location.origin) {
window.Document.prototype.__defineGetter__('referrer', () => { return document.location.origin })
}
}
}
<|fim▁hole|>function getBlockRefererScript () {
return '(' + Function.prototype.toString.call(blockReferer) + '());'
}
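// executeScript is assumed to be provided by another content script in this
// bundle; it injects the stringified IIFE above into the page context.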
if (chrome.contentSettings.referer != 'allow' &&
document.location.origin && document.location.origin !== 'https://youtube.googleapis.com') {
executeScript(getBlockRefererScript())
}<|fim▁end|> | |
<|file_name|>element.cpp<|end_file_name|><|fim▁begin|>/* -*- mode: C++; c-basic-offset: 2; indent-tabs-mode: nil -*- */
/*
* Main authors:
* Christian Schulte <[email protected]>
*
* Copyright:
* Christian Schulte, 2004
*
* Last modified:
* $Date: 2010-03-04 03:40:32 +1100 (Thu, 04 Mar 2010) $ by $Author: schulte $
* $Revision: 10365 $
*
* This file is part of Gecode, the generic constraint
* development environment:
* http://www.gecode.org
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
*/
#include <gecode/int/element.hh>
namespace Gecode {
using namespace Int;
void
element(Home home, IntSharedArray c, IntVar x0, IntVar x1,
IntConLevel) {
if (c.size() == 0)
throw TooFewArguments("Int::element");
if (home.failed()) return;
for (int i = c.size(); i--; )
Limits::check(c[i],"Int::element");
GECODE_ES_FAIL((Element::post_int<IntView,IntView>(home,c,x0,x1)));
}
void
element(Home home, IntSharedArray c, IntVar x0, BoolVar x1,
IntConLevel) {
if (c.size() == 0)
throw TooFewArguments("Int::element");
if (home.failed()) return;
for (int i = c.size(); i--; )
Limits::check(c[i],"Int::element");
GECODE_ES_FAIL((Element::post_int<IntView,BoolView>(home,c,x0,x1)));
}
void
element(Home home, IntSharedArray c, IntVar x0, int x1,
IntConLevel) {
if (c.size() == 0)
throw TooFewArguments("Int::element");
Limits::check(x1,"Int::element");
if (home.failed()) return;
for (int i = c.size(); i--; )
Limits::check(c[i],"Int::element");
ConstIntView cx1(x1);
GECODE_ES_FAIL(
(Element::post_int<IntView,ConstIntView>(home,c,x0,cx1)));
}
void
element(Home home, const IntVarArgs& c, IntVar x0, IntVar x1,
IntConLevel icl) {
if (c.size() == 0)
throw TooFewArguments("Int::element");
if (home.failed()) return;
Element::IdxViewArray<IntView> iv(home,c);
if ((icl == ICL_DOM) || (icl == ICL_DEF)) {
GECODE_ES_FAIL((Element::ViewDom<IntView,IntView,IntView>
::post(home,iv,x0,x1)));
} else {
GECODE_ES_FAIL((Element::ViewBnd<IntView,IntView,IntView>
::post(home,iv,x0,x1)));
}
}
void
element(Home home, const IntVarArgs& c, IntVar x0, int x1,
IntConLevel icl) {
if (c.size() == 0)
throw TooFewArguments("Int::element");
Limits::check(x1,"Int::element");
if (home.failed()) return;
Element::IdxViewArray<IntView> iv(home,c);
ConstIntView v1(x1);
if ((icl == ICL_DOM) || (icl == ICL_DEF)) {
GECODE_ES_FAIL((Element::ViewDom<IntView,IntView,ConstIntView>
::post(home,iv,x0,v1)));
} else {
GECODE_ES_FAIL((Element::ViewBnd<IntView,IntView,ConstIntView>
::post(home,iv,x0,v1)));
}
}
void
element(Home home, const BoolVarArgs& c, IntVar x0, BoolVar x1,
IntConLevel) {
if (c.size() == 0)
throw TooFewArguments("Int::element");
if (home.failed()) return;
Element::IdxViewArray<BoolView> iv(home,c);
GECODE_ES_FAIL((Element::ViewBnd<BoolView,IntView,BoolView>
::post(home,iv,x0,x1)));
}
void
element(Home home, const BoolVarArgs& c, IntVar x0, int x1,
IntConLevel) {
if (c.size() == 0)
throw TooFewArguments("Int::element");
Limits::check(x1,"Int::element");
if (home.failed()) return;
Element::IdxViewArray<BoolView> iv(home,c);
ConstIntView v1(x1);
GECODE_ES_FAIL((Element::ViewBnd<BoolView,IntView,ConstIntView>
::post(home,iv,x0,v1)));
}
namespace {
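    // Channels the pair (x,y) into a single index xy in [0,w*h-1]
    // (conceptually xy = x + w*y), so that the two-dimensional element
    // posts below can reuse the one-dimensional propagators above.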
IntVar
pair(Home home, IntVar x, int w, IntVar y, int h) {
IntVar xy(home,0,w*h-1);
if (Element::Pair::post(home,x,y,xy,w,h) != ES_OK)
home.fail();
return xy;
}
}
void
element(Home home, IntSharedArray a,
IntVar x, int w, IntVar y, int h, IntVar z,
IntConLevel icl) {
if (a.size() != w*h)
throw Int::ArgumentSizeMismatch("Int::element");
if (home.failed()) return;
element(home, a, pair(home,x,w,y,h), z, icl);<|fim▁hole|> }
void
element(Home home, IntSharedArray a,
IntVar x, int w, IntVar y, int h, BoolVar z,
IntConLevel icl) {
if (a.size() != w*h)
throw Int::ArgumentSizeMismatch("Int::element");
if (home.failed()) return;
element(home, a, pair(home,x,w,y,h), z, icl);
}
void
element(Home home, const IntVarArgs& a,
IntVar x, int w, IntVar y, int h, IntVar z,
IntConLevel icl) {
if (a.size() != w*h)
throw Int::ArgumentSizeMismatch("Int::element");
if (home.failed()) return;
element(home, a, pair(home,x,w,y,h), z, icl);
}
void
element(Home home, const BoolVarArgs& a,
IntVar x, int w, IntVar y, int h, BoolVar z,
IntConLevel icl) {
if (a.size() != w*h)
throw Int::ArgumentSizeMismatch("Int::element");
if (home.failed()) return;
element(home, a, pair(home,x,w,y,h), z, icl);
}
}
// STATISTICS: int-post<|fim▁end|> | |
<|file_name|>place-caret-at-end-spec.js<|end_file_name|><|fim▁begin|>/*global document, describe, it, expect, require, window, afterEach */
const jQuery = require('jquery');
require('../../src/browser/place-caret-at-end');
describe('placeCaretAtEnd', () => {
'use strict';
let underTest;
afterEach(() => {
underTest.remove();
});
  it('works on inline text elements', () => {
underTest = jQuery('<span>').html('some text').appendTo('body');
underTest.placeCaretAtEnd();
const selection = window.getSelection(),
range = selection.getRangeAt(0);
expect(selection.type).toEqual('Caret');
expect(selection.rangeCount).toEqual(1);
range.surroundContents(document.createElement('i'));<|fim▁hole|> expect(underTest.html()).toEqual('some text<i></i>');
});
});<|fim▁end|> | |
<|file_name|>CountNegativesMatrix.go<|end_file_name|><|fim▁begin|>/*
Given a m * n matrix grid which is sorted in non-increasing order both row-wise and column-wise.
Return the number of negative numbers in grid.
Example 1:
Input: grid = [[4,3,2,-1],[3,2,1,-1],[1,1,-1,-2],[-1,-1,-2,-3]]
Output: 8
Explanation: There are 8 negatives number in the matrix.
Example 2:
Input: grid = [[3,2],[1,0]]
Output: 0
Example 3:
Input: grid = [[1,-1],[-1,-1]]
Output: 3
Example 4:
Input: grid = [[-1]]
Output: 1
Constraints:
m == grid.length
n == grid[i].length
1 <= m, n <= 100
-100 <= grid[i][j] <= 100
*/
package main
<|fim▁hole|>func main() {
tests := [][][]int{{{4, 3, 2, -1}, {3, 2, 1, -1}, {1, 1, -1, -2}, {-1, -1, -2, -3}}, {{3, 2}, {1, 0}}, {{1, -1}, {-1, -1}}, {{-1}}}
for _, test := range tests {
log.Printf("countNegatives(%v) = %d\n", test, countNegatives(test))
}
}
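// Not part of the original solution: a sketch of the O(m+n) staircase walk
// that the sorted rows and columns make possible. Starting at the bottom-left
// corner, a negative cell implies every cell to its right is negative too.
func countNegativesStaircase(grid [][]int) int {
	res := 0
	row, col := len(grid)-1, 0
	for row >= 0 && col < len(grid[0]) {
		if grid[row][col] < 0 {
			res += len(grid[0]) - col // this cell and all cells to its right
			row--
		} else {
			col++
		}
	}
	return res
}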
func countNegatives(grid [][]int) int {
res := 0
for row := 0; row < len(grid); row++ {
for col := 0; col < len(grid[row]); col++ {
if grid[row][col] < 0 {
res++
}
}
}
return res
}<|fim▁end|> | import (
"log"
)
|
<|file_name|>session.rs<|end_file_name|><|fim▁begin|>use alloc::boxed::Box;<|fim▁hole|>use collections::string::{String, ToString};
use collections::vec::Vec;
use scheduler;
use schemes::KScheme;
use schemes::{Resource, Url, VecResource};
/// A session
pub struct Session {
/// The scheme items
pub items: Vec<Box<KScheme>>,
}
impl Session {
/// Create new session
pub fn new() -> Box<Self> {
box Session {
items: Vec::new(),
}
}
pub unsafe fn on_irq(&mut self, irq: u8) {
let reenable = scheduler::start_no_ints();
for mut item in self.items.iter_mut() {
item.on_irq(irq);
}
scheduler::end_no_ints(reenable);
}
pub unsafe fn on_poll(&mut self) {
let reenable = scheduler::start_no_ints();
for mut item in self.items.iter_mut() {
item.on_poll();
}
scheduler::end_no_ints(reenable);
}
/// Open a new resource
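    ///
    /// When the url has an empty scheme this returns a read-only listing of
    /// all registered scheme names, one per line; otherwise the call is
    /// dispatched to the first item whose scheme matches.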
pub fn open(&mut self, url: &Url, flags: usize) -> Option<Box<Resource>> {
if url.scheme().len() == 0 {
let mut list = String::new();
for item in self.items.iter() {
let scheme = item.scheme();
if !scheme.is_empty() {
if !list.is_empty() {
list = list + "\n" + scheme;
} else {
list = scheme.to_string();
}
}
}
Some(box VecResource::new(Url::new(), list.into_bytes()))
} else {
for mut item in self.items.iter_mut() {
if item.scheme() == url.scheme() {
return item.open(url, flags);
}
}
None
}
}
}<|fim▁end|> | |
<|file_name|>de-ch.js<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2003-2017, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
CKEDITOR.plugins.setLang( 'div', 'de-ch', {
IdInputLabel: 'Kennung',
advisoryTitleInputLabel: 'Tooltip',
cssClassInputLabel: 'Formatvorlagenklasse',
edit: 'Div bearbeiten',
inlineStyleInputLabel: 'Inline Stil',
	langDirLTRLabel: 'Links nach Rechts (LTR)',
langDirLabel: 'Sprachrichtung',<|fim▁hole|> title: 'Div Container erzeugen',
toolbar: 'Div Container erzeugen'
} );<|fim▁end|>	langDirRTLLabel: 'Rechts nach Links (RTL)',
languageCodeInputLabel: 'Sprachcode',
remove: 'Div entfernen',
styleSelectLabel: 'Stil', |
<|file_name|>TreeTraversal.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
class Graph:
def __init__(self, number_of_vertices):
self.number_of_vertices = number_of_vertices
self.vertex_details = {}
self.visited = {}
    def add_edge(self, vertex_label, edge):
        # register both endpoints so traversal never hits a missing key for a
        # vertex that only ever appears as an edge target
        self.vertex_details.setdefault(vertex_label, []).append(edge)
        self.vertex_details.setdefault(edge, [])
        self.visited.setdefault(vertex_label, 0)
        self.visited.setdefault(edge, 0)
def bfs(self, starting_vertex):
print "Starting breath first search from vertex: ", starting_vertex
bfs_queue = Queue.Queue()
bfs_trace = []
bfs_queue.put(starting_vertex)
self.visited[starting_vertex] = 1
        while not bfs_queue.empty():
current_vertex = bfs_queue.get()
bfs_trace.append(current_vertex)
adjacent_vertices = self.vertex_details[current_vertex]
for adjacent_vertex in adjacent_vertices:
if self.visited[adjacent_vertex] == 0:
bfs_queue.put(adjacent_vertex)
self.visited[adjacent_vertex] = 1
return bfs_trace
def dfs(self, vertex):
self.visited[vertex] = 1
print vertex," ",
adjacent_vertices = self.vertex_details[vertex]
for adjacent_vertex in adjacent_vertices:
if self.visited[adjacent_vertex] == 0:
self.dfs(adjacent_vertex)
def print_bfs(self, bfs_trace):
print bfs_trace
def main():
g = Graph(4)
    g.add_edge(0, 1)
    g.add_edge(0, 2)
    g.add_edge(1, 2)
    g.add_edge(2, 0)
    g.add_edge(2, 3)
    g.add_edge(3, 3)
# bfs_trace = g.bfs(2)
# g.print_bfs(bfs_trace)
g.dfs(2)
if __name__ == '__main__':
main()<|fim▁end|> | import Queue |
<|file_name|>collection.js<|end_file_name|><|fim▁begin|>/*
* collection
* A collection of posts
*
* If fetch items are specified, then only gets those posts.
* Otherwise, gets the posts specified from a configuration endpoint.
*/
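// Illustrative usage (hypothetical ids): fetch only two specific posts by
// passing fetch items to the constructor; without items, the collection
// falls back to the configured endpoint.
//
//   var posts = new PostCollection([], {
//     items: [{ object_id: 12 }, { object_id: 34 }]
//   });
//   posts.fetch();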
define([
"lodash",
"backbone",
"helpers/urls",
"helpers/types",
"helpers/params",
"components/content/entities/parser",
"module"
], function(_, Backbone, urls, types, params, parser, module) {
// Definition of a post collection
var PostCollection = Backbone.Collection.extend({
urlRoot: module.config().urlRoot,
initialize: function(models, options) {
options = options || {};
// preserve any options specified to constructor
this.options = _.extend(this.options || {}, options);
},
// remove fetch items as they become models
_maintainItems: function(model) {
this.options.items = _.reject(this.options.items, function(item) {
return item.object_id == model[types.objectIdType(item.object_id)];
});
if (this.options.items.length === 0) {
this.off("add", this.maintainItems, this);
}
},
// merge in additional fetch items after initialize
mergeItems: function(items) {
// remove any new fetch items that are already fetched
items = _.reject(items, function(item) {
return this.get(item.object_id);
}, this);<|fim▁hole|> url: function() {
var fetchItems = this.options.items;
// if fetch items are specified, get the specific items
// object_ids all have to be homogeneous (same types)
if (fetchItems && fetchItems.length > 0) {
var method = types.objectIdType(fetchItems[0].object_id);
var posts = params.collection[method](_.pluck(fetchItems, "object_id"));
// maintain the fetchItems as they are added
this.on("add", this._maintainItems, this);
return urls.normalizeUrlRoot(this.urlRoot) +
"?post_type=any" +
"&"+posts +
"&"+params.meta.custom_fields;
} else {
return module.config().endpoint;
}
},
parse: function(data) {
return parser(data);
}
});
return PostCollection;
});<|fim▁end|> | // create a union of previous fetch items and the new fetch items
this.options.items = _.union(this.options.items, items);
},
|
<|file_name|>iterables.py<|end_file_name|><|fim▁begin|>"""
Functions and decorators for making sure the parameters they work on are of
iterable types.
Copyright 2014-2015, Outernet Inc.
Some rights reserved.
This software is free software licensed under the terms of GPLv3. See COPYING
file that comes with the source code, or http://www.gnu.org/licenses/gpl.txt.
"""
import functools
import numbers
def is_integral(obj):
"""
Determine whether the passed in object is a number of integral type.
"""
return isinstance(obj, numbers.Integral)
def is_string(obj):
"""
Determine if the passed in object is a string.
"""
try:
return isinstance(obj, basestring)
except NameError:
return isinstance(obj, str)
def is_iterable(obj):
"""
Determine if the passed in object is an iterable, but not a string or dict.
"""
return (hasattr(obj, '__iter__') and
not isinstance(obj, dict) and
not is_string(obj))
def as_iterable(params=None):
"""
Make sure the marked parameters are iterable. In case a single-unwrapped
parameter is found among them (e.g. an int, string, ...), wrap it in a<|fim▁hole|> list and forward like that to the wrapped function. The marked parameters,
if not explicitly specified, defaults to the 1st argument (``args[1]``).
"""
# set up default converter and separate positional from keyword arguments
params = params or [1]
indexes = [i for i in params if is_integral(i)]
keys = [k for k in params if is_string(k)]
def decorator(fn):
@functools.wraps(fn)
def wrapper(*args, **kwargs):
# patch positional arguments, if needed
if indexes:
# copy `args` into a new list and wrap its elements in a list
# on the specified indexes, which are not iterables themselves
args = [[x] if i in indexes and not is_iterable(x) else x
for (i, x) in enumerate(args)]
# patch keyword arguments, if needed
if keys:
for key in keys:
if not is_iterable(kwargs[key]):
kwargs[key] = [kwargs[key]]
# invoke ``fn`` with patched parameters
return fn(*args, **kwargs)
return wrapper
return decorator<|fim▁end|> | |
<|file_name|>forestFindThr.cpp<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Piotr's Image&Video Toolbox Version 3.01
* Copyright 2012 Piotr Dollar. [pdollar-at-caltech.edu]
* Please email me if you find bugs, or have suggestions or questions!
* Licensed under the Simplified BSD License [see external/bsd.txt]
*******************************************************************************/
#include <string.h>
#include <mex.h>
typedef unsigned int uint32;
void rtreeFindThr( int H, int N, int F, const float *data, const uint32 *hs,
const float *ws, const uint32 *order, uint32 &fid, float &thr, double &gini )<|fim▁hole|> double gini1, s=0, sl, sr, g=0, gl, gr;
L=new double[H]; R=new double[H]; T=new double[H];
for( i=0; i<H; i++ ) T[i] = 0;
for( j=0; j<N; j++ ) { s+=ws[j]; T[hs[j]-1]+=ws[j]; }
for( i=0; i<H; i++ ) g+=T[i]*T[i];
fid = 1; thr = 0; gini = 1e5;
for( i=0; i<F; i++ ) {
order1=(uint32*) order+i*N; data1=(float*) data+i*N;
for( j=0; j<H; j++ ) { L[j]=0; R[j]=T[j]; } gl=sl=0; gr=g;
for( j=0; j<N-1; j++ ) {
int j1=order1[j], j2=order1[j+1]; h=hs[j1]-1; sl+=ws[j1]; sr=s-sl;
gl-=L[h]*L[h]; L[h]+=ws[j1]; gl+=L[h]*L[h];
gr-=R[h]*R[h]; R[h]-=ws[j1]; gr+=R[h]*R[h];
if( data1[j2]-data1[j1]<1e-6f ) continue;
gini1 = (sl-gl/sl)/s + (sr-gr/sr)/s; // + (sl*sl+sr*sr)/(s*s*100);
if(gini1<gini) { gini=gini1; fid=i+1; thr=0.5f*(data1[j1]+data1[j2]); }
}
}
delete [] L; delete [] R; delete [] T;
}
// [fid,thr,gini] = rtreeFindThr(data,hs,ws,order,H);
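// Illustrative call from MATLAB (assumed types: data is NxF single, hs holds
// uint32 class labels in 1..H, ws single weights, order uint32 per-feature
// 0-based sort indices):
//   [fid,thr,gini] = forestFindThr(data, hs, ws, order, H);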
void mexFunction(int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[]) {
int H, N, F; float *data, *ws, thr; double gini; uint32 *hs, *order, fid;
data = (float*) mxGetData(prhs[0]);
hs = (uint32*) mxGetData(prhs[1]);
ws = (float*) mxGetData(prhs[2]);
order = (uint32*) mxGetData(prhs[3]);
H = (int) mxGetScalar(prhs[4]);
N = (int) mxGetM(prhs[0]);
F = (int) mxGetN(prhs[0]);
rtreeFindThr(H,N,F,data,hs,ws,order,fid,thr,gini);
plhs[0] = mxCreateDoubleScalar(fid);
plhs[1] = mxCreateDoubleScalar(thr);
plhs[2] = mxCreateDoubleScalar(gini);
}<|fim▁end|> | {
int i, j, h; double *L, *R, *T; float *data1; uint32 *order1; |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! A library for using the Branch by Abstraction pattern to test for performance and correctness.
//!
//! # Examples
//! ```
//! use dexter::*;
//! struct ExamplePublisher;
//!
//! impl Publisher<Vec<char>, String, String> for ExamplePublisher {
//! fn publish(&mut self, result: ExperimentResult<String, String>) {
//! println!("{:#?}", result);
//! }
//!
//! fn compare(&mut self, current_result: &String, new_result: &String) -> bool {
//! current_result == new_result
//! }
//! }
//!
//! fn main() {
//! let chars = vec!['a', 'b', 'c'];
//! let mut p = ExamplePublisher;
//! let result = Experiment::new("experiment",
//! |a: &Vec<char>| {
//! a.clone().into_iter().collect()
//! },
//! |a: &Vec<char>| {
//! a.clone().into_iter().collect()
//! })
//! .run_if(|p| { p.len() == 3 })
//! .carry_out(chars.clone(), &mut p);
//! println!("{}", result);
//! }
//! ```
#![warn(missing_docs, missing_debug_implementations,
missing_copy_implementations, trivial_casts,
trivial_numeric_casts, unsafe_code,
unstable_features, unused_extern_crates,
unused_import_braces, unused_qualifications,
unused_results, variant_size_differences)]
extern crate rand;
extern crate time;
use std::fmt;
use rand::{thread_rng, Rng};
use time::precise_time_ns;
/// Result for a subject in an experiment.
#[derive(Debug)]
pub struct SubjectResult<T: Clone> {
/// Time spent running this subject's code
pub duration: f64,
/// The value produced by this subject's code
pub result: T
}
impl<T: Clone> SubjectResult<T> {
fn new(duration: f64, result: &T) -> Self {
SubjectResult {
duration: duration,
result: result.clone()
}
}
}
/// Result of an experiment.
#[derive(Debug)]
pub struct ExperimentResult<Cr: Clone, Nr: Clone> {
/// Name of the experiment
pub name: &'static str,
/// Result for the current subject
pub current: SubjectResult<Cr>,
/// Result for the new subject
pub new: SubjectResult<Nr>,
/// The match type for this experiment
pub match_type: MatchType
}
/// Matching type for experiments.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum MatchType {
/// The two subjects had matching results
Match,
/// The two subjects did not have matching results
NoMatch,
/// The matching state was ignored
Ignored
}
impl<Cr: Clone, Nr: Clone> ExperimentResult<Cr, Nr> {
fn new(name: &'static str, current: SubjectResult<Cr>, new: SubjectResult<Nr>, match_type: MatchType) -> Self {
ExperimentResult {
name: name,
current: current,
new: new,
match_type: match_type
}
}
}
/// A struct for building Dexter experiments.
#[must_use]
pub struct Experiment<'a, P, Cr: Clone, Nr: Clone> {
name: &'static str,
current: Box<FnMut(&P) -> Cr + 'a>,
new: Box<FnMut(&P) -> Nr + 'a>,
setup: Option<Box<FnMut(P) -> P + 'a>>,
run_if: Option<Box<FnMut(&P) -> bool + 'a>>,
ignore_if: Option<Box<FnMut(&P) -> bool + 'a>>
}
impl<'a, P, Cr: Clone, Nr: Clone> Experiment<'a, P, Cr, Nr> {
/// Constructs a new `Experiment` with a current and a new subject.
pub fn new<C, N>(name: &'static str, current_subject: C, new_subject: N) -> Self
where C: FnMut(&P) -> Cr + 'a,
N: FnMut(&P) -> Nr + 'a {
Experiment {
name: name,
current: Box::new(current_subject),
new: Box::new(new_subject),
setup: None,
run_if: None,
ignore_if: None
}<|fim▁hole|> }
/// Adds a setup step to the experiment.
///
/// The setup function can alter the parameter that's passed into the experiment before it is
/// passed to the `run_if` closure, the `ignore_if` closure, and the two subject closures.
pub fn setup<S>(mut self, setup: S) -> Self
where S: FnMut(P) -> P + 'a {
self.setup = Some(Box::new(setup));
self
}
/// Adds a check step that will disable the experiment in certain cases.
///
/// If the passed closure returns false when passed the experiment's parameter, then the
/// experiment will return the current subject's result without publishing.
pub fn run_if<R>(mut self, run_if: R) -> Self
where R: FnMut(&P) -> bool + 'a {
self.run_if = Some(Box::new(run_if));
self
}
/// Adds a check step that will ignore mismatches in certain cases.
///
/// If the passed closure returns true when passed the experiment's parameter, then the
/// result will have a `MatchType` of `Ignored`.
pub fn ignore_if<I>(mut self, ignore_if: I) -> Self
where I: FnMut(&P) -> bool + 'a {
self.ignore_if = Some(Box::new(ignore_if));
self
}
/// Carry out the experiment given a parameter and a publisher.
///
/// Returns the result of the current subject closure.
pub fn carry_out<Pub: Publisher<P, Cr, Nr>>(mut self, mut param: P, publisher: &mut Pub) -> Cr {
if !publisher.enabled() {
return (self.current)(¶m);
}
if let Some(mut setup) = self.setup {
param = setup(param);
}
if let Some(mut run_if) = self.run_if {
if !run_if(¶m) {
return (self.current)(¶m);
}
}
let mut rng = thread_rng();
let mut current_val = None;
let mut new_val = None;
let mut current_duration = 0;
let mut new_duration = 0;
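        // The two subjects run in a random order on each call so that cache
        // warm-up or other ordering effects do not systematically favour one.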
let mut order = [0, 1];
rng.shuffle(&mut order);
for i in &order {
match *i {
0 => {
let start = precise_time_ns();
current_val = Some((self.current)(¶m));
current_duration = precise_time_ns() - start;
}
_ => {
let start = precise_time_ns();
new_val = Some((self.new)(¶m));
new_duration = precise_time_ns() - start;
}
}
}
let ignore = if let Some(mut ignore_if) = self.ignore_if {
ignore_if(¶m)
} else {
false
};
let comparison = publisher.compare(¤t_val.as_ref().unwrap(), &new_val.as_ref().unwrap());
publisher.publish(ExperimentResult::new(
self.name,
SubjectResult::new(current_duration as f64 * 1e-9, ¤t_val.as_ref().unwrap()),
SubjectResult::new(new_duration as f64 * 1e-9, &new_val.as_ref().unwrap()),
if ignore {
MatchType::Ignored
} else if comparison {
MatchType::Match
} else {
MatchType::NoMatch
}
));
current_val.unwrap()
}
}
impl<'a, P, Cr: Clone, Nr: Clone> fmt::Debug for Experiment<'a, P, Cr, Nr> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let fn_str = "Fn(...)";
let some_fn_str = "Some(Fn(...))";
let none_str = "None";
f.debug_struct("Experiment")
.field("name", &self.name)
.field("current", &fn_str)
.field("new", &fn_str)
.field("setup", if self.setup.is_some() {
&some_fn_str
} else {
&none_str
})
.field("run_if", if self.run_if.is_some() {
&some_fn_str
} else {
&none_str
})
.field("ignore_if", if self.ignore_if.is_some() {
&some_fn_str
} else {
&none_str
})
.finish()
}
}
/// Trait for publishers, which are used by Dexter to store results of experiments.
pub trait Publisher<P, Cr: Clone, Nr: Clone> {
/// Publish the result of an experiment.
fn publish(&mut self, result: ExperimentResult<Cr, Nr>);
/// Comparison function for the results of the subjects.
///
/// This function should return `true` if the results "match," and `false` if they do not.
fn compare(&mut self, current_result: &Cr, new_result: &Nr) -> bool;
/// Only run the experiment in some cases.
///
/// If `enabled` returns false, then the result of the current subject is used and no results
/// are published. This is meant to be an inexpensive function that gets called when every
/// experiment runs.
fn enabled(&mut self) -> bool {
true
}
}<|fim▁end|> | |
<|file_name|>docextract_webinterface.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##<|fim▁hole|>## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""DocExtract REST and Web API
Exposes document extraction facilities to the world
"""
from tempfile import NamedTemporaryFile
from invenio.webinterface_handler import WebInterfaceDirectory
from invenio.webuser import collect_user_info
from invenio.webpage import page
from invenio.config import CFG_TMPSHAREDDIR, CFG_ETCDIR
from invenio.refextract_api import extract_references_from_file_xml, \
extract_references_from_url_xml, \
extract_references_from_string_xml
from invenio.bibformat_engine import format_record
def check_login(req):
"""Check that the user is logged in"""
user_info = collect_user_info(req)
if user_info['email'] == 'guest':
# 1. User is guest: must login prior to upload
# return 'Please login before uploading file.'
pass
def check_url(url):
"""Check that the url we received is not gibberish"""
return url.startswith('http://') or \
url.startswith('https://') or \
url.startswith('ftp://')
def extract_from_pdf_string(pdf):
"""Extract references from a pdf stored in a string
Given a string representing a pdf, this function writes the string to
disk and passes it to refextract.
We need to create a temporary file because we need to run pdf2text on it"""
# Save new record to file
tf = NamedTemporaryFile(prefix='docextract-pdf',
dir=CFG_TMPSHAREDDIR)
try:
tf.write(pdf)
tf.flush()
refs = extract_references_from_file_xml(tf.name)
finally:
# Also deletes the file
tf.close()
return refs
def make_arxiv_url(arxiv_id):
"""Make a url we can use to download a pdf from arxiv
Arguments:
arxiv_id -- the arxiv id of the record to link to
"""
return "http://arxiv.org/pdf/%s.pdf" % arxiv_id
class WebInterfaceAPIDocExtract(WebInterfaceDirectory):
"""DocExtract REST API"""
_exports = [
('extract-references-pdf', 'extract_references_pdf'),
('extract-references-pdf-url', 'extract_references_pdf_url'),
('extract-references-txt', 'extract_references_txt'),
]
def extract_references_pdf(self, req, form):
"""Extract references from uploaded pdf"""
check_login(req)
if 'pdf' not in form:
return 'No PDF file uploaded'
return extract_from_pdf_string(form['pdf'].stream.read())
def extract_references_pdf_url(self, req, form):
"""Extract references from the pdf pointed by the passed url"""
check_login(req)
if 'url' not in form:
return 'No URL specified'
url = form['url']
if not check_url(url):
return 'Invalid URL specified'
return extract_references_from_url_xml(url)
def extract_references_txt(self, req, form):
"""Extract references from plain text"""
check_login(req)
if 'txt' not in form:
return 'No text specified'
txt = form['txt'].stream.read()
return extract_references_from_string_xml(txt)
class WebInterfaceDocExtract(WebInterfaceDirectory):
"""DocExtract API"""
_exports = ['api',
('extract-references', 'extract_references'),
('example.pdf', 'example_pdf'),
]
api = WebInterfaceAPIDocExtract()
def example_pdf(self, req, _form):
"""Serve a test pdf for tests"""
f = open("%s/docextract/example.pdf" % CFG_ETCDIR, 'rb')
try:
req.write(f.read())
finally:
f.close()
def extract_references_template(self):
"""Template for reference extraction page"""
return """Please specify a pdf or a url or some references to parse
<form action="extract-references" method="post"
enctype="multipart/form-data">
<p>PDF: <input type="file" name="pdf" /></p>
<p>arXiv: <input type="text" name="arxiv" /></p>
<p>URL: <input type="text" name="url" style="width: 600px;"/></p>
<textarea name="txt" style="width: 500px; height: 500px;"></textarea>
<p><input type="submit" /></p>
</form>
"""
def extract_references(self, req, form):
"""Refrences extraction page
This page can be used for authors to test their pdfs against our
refrences extraction process"""
user_info = collect_user_info(req)
# Handle the 3 POST parameters
if 'pdf' in form and form['pdf']:
pdf = form['pdf']
references_xml = extract_from_pdf_string(pdf)
elif 'arxiv' in form and form['arxiv']:
url = make_arxiv_url(arxiv_id=form['arxiv'])
references_xml = extract_references_from_url_xml(url)
elif 'url' in form and form['url']:
url = form['url']
references_xml = extract_references_from_url_xml(url)
elif 'txt' in form and form['txt']:
txt = form['txt']
references_xml = extract_references_from_string_xml(txt)
else:
references_xml = None
# If we have not uploaded anything yet
# Display the form that allows us to do so
if not references_xml:
out = self.extract_references_template()
else:
out = """
<style type="text/css">
#referenceinp_link { display: none; }
</style>
"""
out += format_record(0,
'hdref',
xml_record=references_xml.encode('utf-8'),
user_info=user_info)
# Render the page (including header, footer)
return page(title='References Extractor',
body=out,
uid=user_info['uid'],
req=req)<|fim▁end|> | ## This file is part of Invenio.
## Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011 CERN.
## |
<|file_name|>Redstone.java<|end_file_name|><|fim▁begin|>package cn.nukkit.redstone;
import cn.nukkit.block.Block;
import cn.nukkit.block.BlockRedstoneWire;
import cn.nukkit.block.BlockSolid;
import cn.nukkit.math.Vector3;
import java.util.*;
/**
* author: Angelic47
* Nukkit Project
*/
public class Redstone {
public static final int POWER_NONE = 0;
public static final int POWER_WEAKEST = 1;
public static final int POWER_STRONGEST = 16;
//NOTICE: Here POWER_STRONGEST is 16, not 15.
//I set it to 16 in order to calculate the energy in blocks, such as the redstone torch under the cobblestone.
//At that time, the cobblestone's energy is 16, not 15. If you put a redstone wire next to it, the redstone wire will get 15 energy.
//So, POWER_WEAKEST also means the energy in blocks, not the redstone wire itself. So set it to 1.
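    //Example: a redstone torch under a cobblestone gives that block power level
    //16, so a redstone wire placed next to the block reads 16 - 1 = 15.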
private static final Comparator<UpdateObject> orderIsdn = new Comparator<UpdateObject>() {
@Override
public int compare(UpdateObject o1, UpdateObject o2) {
if (o1.getPopulation() > o2.getPopulation()) {
return -1;
} else if (o1.getPopulation() < o2.getPopulation()) {
return 1;
} else {
return 0;
}
}
};
public static void active(Block source) {
Queue<UpdateObject> updateQueue = new PriorityQueue<>(1, orderIsdn);
int currentLevel = source.getPowerLevel() - 1;
if (currentLevel <= 0) {
return;
}
addToQueue(updateQueue, source);
while (!updateQueue.isEmpty()) {
UpdateObject updatingObj = updateQueue.poll();
Block updating = updatingObj.getLocation();
currentLevel = updatingObj.getPopulation();
if (currentLevel > updating.getPowerLevel()) {
updating.setPowerLevel(currentLevel);
updating.getLevel().setBlock(updating, updating, true, true);
addToQueue(updateQueue, updating);
}
}
}
public static void active(Block source, Map<String, Block> allBlocks) {
Queue<UpdateObject> updateQueue = new PriorityQueue<>(1, orderIsdn);
int currentLevel = source.getPowerLevel() - 1;
if (currentLevel <= 0) {
return;
}
addToQueue(updateQueue, source);
while (!updateQueue.isEmpty()) {
UpdateObject updatingObj = updateQueue.poll();
Block updating = updatingObj.getLocation();
currentLevel = updatingObj.getPopulation();
if (currentLevel > updating.getPowerLevel()) {
updating.setPowerLevel(currentLevel);
updating.getLevel().setBlock(updating, updating, true, true);
if (allBlocks.containsKey(updating.getLocationHash())) {
allBlocks.remove(updating.getLocationHash());
}
addToQueue(updateQueue, updating);
}
}
}
public static void deactive(Block source, int updateLevel) {
//Step 1: find blocks which need to update
Queue<UpdateObject> updateQueue = new PriorityQueue<>(1, orderIsdn);
Queue<UpdateObject> sourceList = new PriorityQueue<>(1, orderIsdn);
Map<String, Block> updateMap = new HashMap<>();
Map<String, Block> closedMap = new HashMap<>();
int currentLevel = updateLevel;
if (currentLevel <= 0) {
return;
}
addToDeactiveQueue(updateQueue, source, closedMap, sourceList, currentLevel);
while (!updateQueue.isEmpty()) {
UpdateObject updateObject = updateQueue.poll();
Block updating = updateObject.getLocation();
currentLevel = updateObject.getPopulation();
if (currentLevel >= updating.getPowerLevel()) {
updating.setPowerLevel(0);
updateMap.put(updating.getLocationHash(), updating);
addToDeactiveQueue(updateQueue, updating, closedMap, sourceList, currentLevel);
} else {
sourceList.add(new UpdateObject(updating.getPowerLevel(), updating));
}
}
//Step 2: recalculate redstone power
while (!sourceList.isEmpty()) {
active(sourceList.poll().getLocation(), updateMap);
}
for (Block block : updateMap.values()) {
block.setPowerLevel(0);
block.getLevel().setBlock(block, block, true, true);
}
}
private static void addToQueue(Queue<UpdateObject> updateQueue, Block location) {
if (location.getPowerLevel() <= 0) {
return;
}
for (int side : new int[]{Vector3.SIDE_NORTH, Vector3.SIDE_SOUTH, Vector3.SIDE_EAST, Vector3.SIDE_WEST, Vector3.SIDE_UP, Vector3.SIDE_DOWN}) {
if (location.getSide(side) instanceof BlockRedstoneWire) {
updateQueue.add(new UpdateObject(location.getPowerLevel() - 1, location.getSide(side)));
}
}
if (location instanceof BlockRedstoneWire) {
Block block = location.getSide(Vector3.SIDE_UP);
if (!(block instanceof BlockSolid)) {
for (int side : new int[]{Vector3.SIDE_NORTH, Vector3.SIDE_SOUTH, Vector3.SIDE_EAST, Vector3.SIDE_WEST}) {
if (block.getSide(side) instanceof BlockRedstoneWire) {
updateQueue.add(new UpdateObject(location.getPowerLevel() - 1, block.getSide(side)));
}
}
}
for (int side : new int[]{Vector3.SIDE_NORTH, Vector3.SIDE_WEST, Vector3.SIDE_EAST, Vector3.SIDE_SOUTH}) {
block = location.getSide(side);
if (!(block instanceof BlockSolid)) {
Block blockDown;
blockDown = block.getSide(Vector3.SIDE_DOWN);
if (blockDown instanceof BlockRedstoneWire) {
updateQueue.add(new UpdateObject(location.getPowerLevel() - 1, blockDown));
}
}
}
}
}
private static void addToDeactiveQueue(Queue<UpdateObject> updateQueue, Block location, Map<String, Block> closedMap, Queue<UpdateObject> sourceList, int updateLevel) {
if (updateLevel < 0) {
return;
}
for (int side : new int[]{Vector3.SIDE_NORTH, Vector3.SIDE_SOUTH, Vector3.SIDE_EAST, Vector3.SIDE_WEST, Vector3.SIDE_UP, Vector3.SIDE_DOWN}) {
if (location.getSide(side).isPowerSource() || (updateLevel == 0 && location.getSide(side).isPowered())) {
sourceList.add(new UpdateObject(location.getPowerLevel(side), location.getSide(side)));
} else if (location.getSide(side) instanceof BlockRedstoneWire) {
if (!closedMap.containsKey(location.getSide(side).getLocationHash())) {
closedMap.put(location.getSide(side).getLocationHash(), location.getSide(side));
updateQueue.add(new UpdateObject(updateLevel - 1, location.getSide(side)));
}
}
}
if (location instanceof BlockRedstoneWire) {
Block block = location.getSide(Vector3.SIDE_UP);
for (int side : new int[]{Vector3.SIDE_NORTH, Vector3.SIDE_SOUTH, Vector3.SIDE_EAST, Vector3.SIDE_WEST}) {
if (block.getSide(side) instanceof BlockRedstoneWire) {<|fim▁hole|> }
}
Block blockDown;
for (int side : new int[]{Vector3.SIDE_NORTH, Vector3.SIDE_SOUTH, Vector3.SIDE_EAST, Vector3.SIDE_WEST}) {
block = location.getSide(side);
blockDown = block.getSide(Vector3.SIDE_DOWN);
if (blockDown instanceof BlockRedstoneWire) {
if (!closedMap.containsKey(blockDown.getLocationHash())) {
closedMap.put(blockDown.getLocationHash(), blockDown);
updateQueue.add(new UpdateObject(updateLevel - 1, blockDown));
}
}
}
}
}
}<|fim▁end|> | if (!closedMap.containsKey(block.getSide(side).getLocationHash())) {
closedMap.put(block.getSide(side).getLocationHash(), block.getSide(side));
updateQueue.add(new UpdateObject(updateLevel - 1, block.getSide(side)));
} |
<|file_name|>bootstrap.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
import predict
import sklearn.metrics
import argparse, sys
import os
import numpy as np
import glob
import re
import matplotlib.pyplot as plt
def calc_auc(predictions):
y_true =[]
y_score=[]
for line in predictions:
values= line.split(" ")
y_true.append(float(values[1]))
y_score.append(float(values[0]))
auc = sklearn.metrics.roc_auc_score(y_true,y_score)
return auc
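# Bootstrap idea: resample the prediction lines with replacement many times and
# recompute the AUC on each resample to estimate the spread of the AUC estimate.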
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='bootstrap(sampling with replacement) test')
parser.add_argument('-m','--model',type=str,required=True,help="Model template. Must use TESTFILE with unshuffled, unbalanced input")
parser.add_argument('-w','--weights',type=str,required=True,help="Model weights (.caffemodel)")
parser.add_argument('-i','--input',type=str,required=True,help="Input .types file to predict")
parser.add_argument('-g','--gpu',type=int,help='Specify GPU to run on',default=-1)
parser.add_argument('-o','--output',type=str,default='',help='Output file name,default= predict_[model]_[input]')
parser.add_argument('--iterations',type=int,default=1000,help="number of times to bootstrap")
parser.add_argument('-k','--keep',action='store_true',default=False,help="Don't delete prototxt files")
parser.add_argument('-n', '--number',action='store_true',default=False,help="if true uses caffemodel/input as is. if false uses all folds")
parser.add_argument('--max_score',action='store_true',default=False,help="take max score per ligand as its score")
parser.add_argument('--notcalc_predictions', type=str, default='',help='file of predictions')
args = parser.parse_args()
if args.output == '':
output = 'bootstrap_%s_%s'%(args.model, args.input)
else:
output = args.output
outname=output
predictions=[]
if args.notcalc_predictions=='':
cm = args.weights
ts = args.input
if not args.number:
foldnum = re.search('.[0-9]_iter',cm).group()
cm=cm.replace(foldnum, '.[0-9]_iter')
foldnum = re.search('[0-9].types',ts).group()
ts=ts.replace(foldnum, '[NUMBER].types')
for caffemodel in glob.glob(cm):
testset = ts
if not args.number:
num = re.search('.[0-9]_iter',caffemodel).group()
num=re.search(r'\d+', num).group()
testset = ts.replace('[NUMBER]',num)
args.input = testset
args.weights = caffemodel
predictions.extend(predict.predict_lines(args))
elif args.notcalc_predictions != '':
for line in open(args.notcalc_predictions).readlines():
predictions.append(line)
all_aucs=[]
for _ in range(args.iterations):
sample = np.random.choice(predictions,len(predictions), replace=True)
all_aucs.append(calc_auc(sample))
mean=np.mean(all_aucs)
std_dev = np.std(all_aucs)
txt = 'mean: %.2f standard deviation: %.2f'%(mean,std_dev)
print(txt)
output = open(output, 'w')
output.writelines('%.2f\n' %auc for auc in all_aucs)
output.write(txt)
output.close()<|fim▁hole|> plt.title('%s AUCs'%args.output, fontsize=22)
plt.xlabel('AUC(%s)'%txt, fontsize=18)
plt.savefig('%s_plot.pdf'%outname,bbox_inches='tight')<|fim▁end|> |
plt.figure()
plt.boxplot(all_aucs,0,'rs',0) |
<|file_name|>pointer.go<|end_file_name|><|fim▁begin|>// Copyright 2011 The "GoScript" Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package gojs
import (
"fmt"
"regexp"
"strings"
)
<|fim▁hole|>
To identify variables that could be addressed later on, the following map is used:
{number of function: {number of block: {variable name: is pointer?} }}
In the generated code, a tag is added before and after each newly declared
variable that is not a pointer. The tag uses the schema `<<side:funcId:blockId:varName>>`
*side:* *L* or *R*, depending on whether the tag is on the left or on the right of the variable.
A leading *z* indicates that the variable is initialized to the zero value of its type.
*funcId:* identifier of the function. '0' is used for global declarations
*blockId:* number of the block inside that function. It starts at '1'
*varName:* the variable's name
The tag `<<P:funcId:blockId:varName>>` is also added after each variable
name, and `<<&>>` after it when the assigned value is an address.
*/
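// Illustrative tags (hypothetical variable "x" in function 1, block 2):
//   declaration:        <<L:1:2:x>> value <<R:1:2:x>>
//   access:             x<<P:1:2:x>>
//   zero-value variant: <<zL:1:2:x>> ... <<zR:1:2:x>>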
// To remove tags related to pointers
var reTagPointer = regexp.MustCompile(`<<z?[LRP]:\d+:\d+:[^>]+>>`)
// Returns a tag to identify pointers.
// The argument field indicates if the variable is zero.
func tagPointer(zero bool, typ rune, funcId, blockId int, name string) string {
/*if typ != 'L' && typ != 'R' && typ != 'P' {
panic("invalid identifier for pointer: " + string(typ))
}*/
zeroStr := ""
if zero {
zeroStr = "z"
}
return fmt.Sprintf("<<%s:%d:%d:%s>>", zeroStr + string(typ), funcId, blockId, name)
}
// Searches for the point where the variable was declared in order to tag it as a pointer.
func (tr *transform) addPointer(name string) {
// In the actual function
if tr.funcId != 0 {
for block := tr.blockId; block >= 1; block-- {
if _, ok := tr.vars[tr.funcId][block][name]; ok {
tr.vars[tr.funcId][block][name] = true
return
}
}
}
// Finally, search in the global variables (funcId = 0).
for block := tr.blockId; block >= 0; block-- { // block until 0
if _, ok := tr.vars[0][block][name]; ok {
tr.vars[0][block][name] = true
return
}
}
//fmt.Printf("Function %d, block %d, name %s\n", tr.funcId, tr.blockId, name)
panic("addPointer: unreachable")
}
// Replaces tags related to variables addressed.
func (tr *transform) replacePointers(str *string) {
// Replaces tags in variables that access pointers.
replaceLocal := func(funcId, startBlock, endBlock int, varName string) {
for block := startBlock; block <= endBlock; block++ {
// Check if there is a variable named like the pointer in another block.
if block != startBlock {
if _, ok := tr.vars[funcId][block][varName]; ok {
break
}
}
pointer := tagPointer(false, 'P', funcId, block, varName)
// Comparing pointers with value nil
reNil := regexp.MustCompile(pointer + NIL)
*str = reNil.ReplaceAllString(*str, ".p")
if tr.addr[funcId][block][varName] {
*str = strings.Replace(*str, pointer+ADDR, "", -1)
} else {
*str = strings.Replace(*str, pointer, ".p", -1)
}
}
}
// In each function
for funcId, blocks := range tr.vars {
for blockId := 0; blockId <= len(blocks); blockId++ {
for name, isPointer := range tr.vars[funcId][blockId] {
if isPointer {
replaceLocal(funcId, blockId, len(blocks), name)
// Replace brackets around variables addressed.
lBrack := tagPointer(false, 'L', funcId, blockId, name)
rBrack := tagPointer(false, 'R', funcId, blockId, name)
*str = strings.Replace(*str, lBrack, "{p:", 1)
*str = strings.Replace(*str, rBrack, "}", 1)
// The declaration of pointers without initial value
// have type "nil" in Go
iLBrack := tagPointer(true, 'L', funcId, blockId, name)
iRBrack := tagPointer(true, 'R', funcId, blockId, name)
re := regexp.MustCompile(iLBrack + `[^<]+` + iRBrack)
*str = re.ReplaceAllString(*str, "{p:undefined}")
}
}
}
}
// === Global pointers
globalScope := 0
replaceGlobal := func(globVarName string) {
for funcId, blocks := range tr.vars {
if funcId == globalScope {
continue
}
for blockId := 1; blockId <= len(blocks); blockId++ {
if _, ok := tr.vars[funcId][blockId][globVarName]; ok {
continue
}
pointer := tagPointer(false, 'P', funcId, blockId, globVarName)
reNil := regexp.MustCompile(pointer + NIL)
*str = reNil.ReplaceAllString(*str, ".p")
if tr.addr[funcId][blockId][globVarName] {
*str = strings.Replace(*str, pointer+ADDR, "", -1)
} else {
*str = strings.Replace(*str, pointer, ".p", -1)
}
}
}
}
for globBlockId := 0; globBlockId <= len(tr.vars[globalScope]); globBlockId++ {
for globName, isPointer := range tr.vars[globalScope][globBlockId] {
if isPointer {
replaceGlobal(globName)
}
}
}
// * * *
// Remove the tags.
*str = reTagPointer.ReplaceAllString(*str, "")
*str = strings.Replace(*str, NIL, "", -1)
}<|fim▁end|> | /*
## Pointers |
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>import datetime
from django.utils import timezone
from django.test import TestCase
from django.urls import reverse
from .models import Question
class QuestionMethodTests(TestCase):
def test_was_published_recently_with_future_question(self):
"""
was_published_recently() should return False for questions whose
pub_date is in the future.
"""
time = timezone.now() + datetime.timedelta(days=30)
future_question = Question(pub_date=time)
self.assertIs(future_question.was_published_recently(), False)
def test_was_published_recently_with_old_question(self):
"""
was_published_recently() should return False for questions whose
pub_date is older than 1 day.
"""
time = timezone.now() - datetime.timedelta(days=30)
old_question = Question(pub_date=time)
self.assertIs(old_question.was_published_recently(), False)
def test_was_published_recently_with_recent_question(self):
"""
was_published_recently() should return True for questions whose
pub_date is within the last day.
"""<|fim▁hole|> self.assertIs(recent_question.was_published_recently(), True)
def create_question(question_text, days):
"""
Creates a question with the given `question_text` and published the
given number of `days` offset to now (negative for questions published
in the past, positive for questions that have yet to be published).
"""
time = timezone.now() + datetime.timedelta(days=days)
return Question.objects.create(question_text=question_text, pub_date=time)
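# e.g. create_question("Past question.", days=-30) publishes 30 days in the past.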
class QuestionViewTests(TestCase):
def test_index_view_with_no_questions(self):
"""
If no questions exist, an appropriate message should be displayed.
"""
response = self.client.get(reverse('polls:index'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "No polls are available.")
self.assertQuerysetEqual(response.context['latest_question_list'], [])
def test_index_view_with_a_past_question(self):
"""
Questions with a pub_date in the past should be displayed on the
index page.
"""
create_question(question_text="Past question.", days=-30)
response = self.client.get(reverse('polls:index'))
self.assertQuerysetEqual(
response.context['latest_question_list'],
['<Question: Past question.>']
)
def test_index_view_with_a_future_question(self):
"""
Questions with a pub_date in the future should not be displayed on
the index page.
"""
create_question(question_text="Future question.", days=30)
response = self.client.get(reverse('polls:index'))
self.assertContains(response, "No polls are available.")
self.assertQuerysetEqual(response.context['latest_question_list'], [])
def test_index_view_with_future_question_and_past_question(self):
"""
Even if both past and future questions exist, only past questions
should be displayed.
"""
create_question(question_text="Past question.", days=-30)
create_question(question_text="Future question.", days=30)
response = self.client.get(reverse('polls:index'))
self.assertQuerysetEqual(
response.context['latest_question_list'],
['<Question: Past question.>']
)
def test_index_view_with_two_past_questions(self):
"""
The questions index page may display multiple questions.
"""
create_question(question_text="Past question 1.", days=-30)
create_question(question_text="Past question 2.", days=-5)
response = self.client.get(reverse('polls:index'))
self.assertQuerysetEqual(
response.context['latest_question_list'],
['<Question: Past question 2.>', '<Question: Past question 1.>']
)
class QuestionIndexDetailTests(TestCase):
def test_detail_view_with_a_future_question(self):
"""
The detail view of a question with a pub_date in the future should
return a 404 not found.
"""
future_question = create_question(question_text='Future question.', days=5)
url = reverse('polls:detail', args=(future_question.id,))
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
def test_detail_view_with_a_past_question(self):
"""
The detail view of a question with a pub_date in the past should
display the question's text.
"""
past_question = create_question(question_text='Past Question.', days=-5)
url = reverse('polls:detail', args=(past_question.id,))
response = self.client.get(url)
self.assertContains(response, past_question.question_text)<|fim▁end|> | time = timezone.now() - datetime.timedelta(hours=1)
recent_question = Question(pub_date=time) |
<|file_name|>watch.js<|end_file_name|><|fim▁begin|>module.exports = function(grunt) {<|fim▁hole|> tasks: "js"
},
pluginCss: {
files: ["src/css/flags.scss", "src/css/intlTelInput.scss"],
tasks: "sass:main"
},
demoCss: {
files: "src/css/demo.scss",
tasks: "sass:demo"
}
};
};<|fim▁end|> | return {
js: {
files: "src/js/**/*.js", |
<|file_name|>outputbox.js<|end_file_name|><|fim▁begin|>/**
* Created with JetBrains WebStorm.
* User: k-nkgwj
* Date: 13/01/21
* Time: 3:29
* To change this template use File | Settings | File Templates.
*/
var OutputBox = (function () {
function OutputBox(selector) {
this.jqueryObject = $(selector);
this.enabled = true;
}
<|fim▁hole|> if (!this.enabled) {
return;
}
$(this.jqueryObject).prepend($('<p>').addClass('message').append(
$('<span>').addClass('message-subject').html(subject)
).append(
$('<span>').addClass('message-body').html(body)
));
};
OutputBox.prototype.log = function (msg) {
if (!this.enabled) {
return;
}
$(this.jqueryObject).prepend($('<p>').addClass('system-log').html(Array.apply(null, arguments).join('')));
};
return OutputBox;
})();<|fim▁end|> |
OutputBox.prototype.message = function (subject, body) {
|
<|file_name|>wordgame.ts<|end_file_name|><|fim▁begin|><!DOCTYPE TS><TS>
<context>
<name>Board</name>
<message>
<source>Blanks: </source>
<translation>Blanco:</translation>
</message>
<message>
<source>OK</source>
<translation>OK</translation>
</message>
<message>
<source>Cancel</source>
<translation>Annuleer</translation>
</message>
<message>
<source>Unknown word</source>
<translation>Onbekend woord</translation>
</message>
<message>
<source><p>The word "%1" is not in the dictionary.</source>
<translation><p>Het woord "%1" staat niet in het woordenboek.</translation>
</message>
<message>
<source>Add</source>
<translation>Voeg toe</translation>
</message>
<message>
<source>Ignore</source>
<translation>Negeer</translation>
</message>
</context>
<context>
<name>NewGameBase</name>
<message>
<source>Players</source>
<translation>Spelers</translation>
</message>
<message>
<source>AI3: Smart AI player</source>
<translation>AI3: Slimme AI speler</translation>
</message>
<message>
<source>Rules</source>
<translation>Regels</translation>
</message>
<message>
<source>&Start</source>
<translation>&Start</translation>
</message>
</context>
<context>
<name>RulesBase</name>
<message>
<source>Game Rules</source>
<translation>Spelregels</translation>
</message>
<message>
<source>Name:</source>
<translation>Naam:</translation>
</message>
<message>
<source>Board</source>
<translation>Bord</translation>
</message>
<message>
<source>Size:</source>
<translation>Grootte:</translation>
</message><|fim▁hole|> </message>
<message>
<source>Delete</source>
<translation>Verwijder</translation>
</message>
<message>
<source>&OK</source>
<translation>&OK</translation>
</message>
<message>
<source>&Cancel</source>
<translation>&Annuleer</translation>
</message>
</context>
<context>
<name>ScoreInfo</name>
<message>
<source><P>Invalid move</source>
<translation><p>Foute zet</translation>
</message>
<message>
<source><P>Score: </source>
<translation><p>Score:</translation>
</message>
</context>
<context>
<name>WordGame</name>
<message>
<source>Word Game</source>
<translation>Woordspel</translation>
</message>
<message>
<source>Back</source>
<translation>Terug</translation>
</message>
<message>
<source>Done</source>
<translation>Klaar</translation>
</message>
<message>
<source>Close</source>
<translation>Sluit</translation>
</message>
<message>
<source>End game</source>
<translation>Einde spel</translation>
</message>
<message>
<source>Do you want to end the game early?</source>
<translation>Wil je tijdens het spel stoppen?</translation>
</message>
<message>
<source>Yes</source>
<translation>Ja</translation>
</message>
<message>
<source>No</source>
<translation>Nee</translation>
</message>
</context>
</TS><|fim▁end|> | <message>
<source>Edit...</source>
<translation>Wijzig...</translation> |
<|file_name|>settings.rs<|end_file_name|><|fim▁begin|>//! WARNING: This file is generated, derived from table bazaar.settings, DO NOT EDIT
use chrono::datetime::DateTime;
use chrono::offset::utc::UTC;
use gen::column;
use gen::schema;
use gen::table;
use rustc_serialize::json::Json;
use rustc_serialize::json::ToJson;
use rustorm::dao::Dao;
use rustorm::dao::IsDao;
use rustorm::dao::Type;
use rustorm::dao::Value;
use rustorm::query::Operand;
use rustorm::table::Column;
use rustorm::table::Foreign;
use rustorm::table::IsTable;
use rustorm::table::Table;
use uuid::Uuid;
use gen::bazaar::Users;
#[derive(RustcEncodable)]
#[derive(Debug, Clone)]
pub struct Settings {
/// primary
/// default: 'uuid_generate_v4()'
/// not nullable
/// db data type: uuid
pub settings_id: Uuid,
/// Use metric system as unit, if false, use english system
/// default: 'true'
/// db data type: boolean
pub use_metric: Option<bool>,
/// db data type: uuid
pub user_id: Option<Uuid>,
/// db data type: json
pub value: Option<Json>,
/// default: 'true'
/// not nullable
/// --inherited--
/// db data type: boolean
pub active: bool,
/// --inherited--
/// db data type: uuid
pub client_id: Option<Uuid>,
/// default: 'now()'
/// not nullable
/// --inherited--
/// db data type: timestamp with time zone
pub created: DateTime<UTC>,
/// --inherited--
/// db data type: uuid
pub created_by: Option<Uuid>,
/// --inherited--
/// db data type: character varying
pub description: Option<String>,
/// --inherited--
/// db data type: text
pub help: Option<String>,
/// --inherited--
/// db data type: character varying
pub name: Option<String>,
/// --inherited--
/// db data type: uuid
pub organization_id: Option<Uuid>,
/// --inherited--
/// db data type: double precision
pub priority: Option<f64>,
/// default: 'now()'
/// not nullable
/// --inherited--
/// db data type: timestamp with time zone
pub updated: DateTime<UTC>,
/// --inherited--
/// db data type: uuid
pub updated_by: Option<Uuid>,
/// has one
pub user: Option<Users>,
}
impl IsDao for Settings {
fn from_dao(dao: &Dao) -> Self {
Settings {
organization_id: dao.get_opt(column::organization_id),
client_id: dao.get_opt(column::client_id),
created: dao.get(column::created),
created_by: dao.get_opt(column::created_by),
updated: dao.get(column::updated),
updated_by: dao.get_opt(column::updated_by),
priority: dao.get_opt(column::priority),
name: dao.get_opt(column::name),
description: dao.get_opt(column::description),
help: dao.get_opt(column::help),
active: dao.get(column::active),
user_id: dao.get_opt(column::user_id),
value: dao.get_opt(column::value),
settings_id: dao.get(column::settings_id),
use_metric: dao.get_opt(column::use_metric),
user: None,
}
}
fn to_dao(&self) -> Dao {
let mut dao = Dao::new();
match self.organization_id {
Some(ref _value) => dao.set(column::organization_id, _value),
None => dao.set_null(column::organization_id)
}
match self.client_id {
Some(ref _value) => dao.set(column::client_id, _value),
None => dao.set_null(column::client_id)
}
dao.set(column::created, &self.created);
match self.created_by {
Some(ref _value) => dao.set(column::created_by, _value),
None => dao.set_null(column::created_by)
}
dao.set(column::updated, &self.updated);
match self.updated_by {
Some(ref _value) => dao.set(column::updated_by, _value),
None => dao.set_null(column::updated_by)
}
match self.priority {
Some(ref _value) => dao.set(column::priority, _value),
None => dao.set_null(column::priority)
}
match self.name {
Some(ref _value) => dao.set(column::name, _value),
None => dao.set_null(column::name)
}
match self.description {
Some(ref _value) => dao.set(column::description, _value),
None => dao.set_null(column::description)
}
match self.help {
Some(ref _value) => dao.set(column::help, _value),
None => dao.set_null(column::help)
}
dao.set(column::active, &self.active);
match self.user_id {
Some(ref _value) => dao.set(column::user_id, _value),
None => dao.set_null(column::user_id)
}
match self.value {
Some(ref _value) => dao.set(column::value, _value),
None => dao.set_null(column::value)
}
dao.set(column::settings_id, &self.settings_id);
match self.use_metric {
Some(ref _value) => dao.set(column::use_metric, _value),
None => dao.set_null(column::use_metric)
}
dao
}
}
impl ToJson for Settings {
fn to_json(&self) -> Json {
self.to_dao().to_json()
}
}
impl Default for Settings {
fn default() -> Self {
Settings{
organization_id: Default::default(),
client_id: Default::default(),
created: UTC::now(),
created_by: Default::default(),
updated: UTC::now(),
updated_by: Default::default(),
priority: Default::default(),
name: Default::default(),
description: Default::default(),
help: Default::default(),
active: Default::default(),
user_id: Default::default(),
value: Default::default(),
settings_id: Default::default(),
use_metric: Default::default(),
user: Default::default(),
}
}
}
impl IsTable for Settings {
fn table() -> Table {
Table {
schema: Some(schema::bazaar.to_owned()),
name: table::settings.to_owned(),
parent_table: Some(table::record.to_owned()),
sub_table: vec![],
comment: None,
columns: vec![
organization_id(),
client_id(),
created(),
created_by(),
updated(),
updated_by(),
priority(),
name(),
description(),
help(),
active(),
user_id(),
value(),
settings_id(),
use_metric(),
],
is_view: false,
}
}
}
// Generated columns for easier development of dynamic queries without sacrificing wrong spelling of column names
#[allow(dead_code)]
pub fn organization_id()->Column{
Column {
table: Some("settings".to_owned()),
name: column::organization_id.to_owned(),
data_type: Type::Uuid,
db_data_type: "uuid".to_owned(),
is_primary: false, is_unique: false, not_null: false, is_inherited: true,
default: None,
comment: None,
foreign: None,
}}
#[allow(dead_code)]
pub fn client_id()->Column{
Column {
table: Some("settings".to_owned()),
name: column::client_id.to_owned(),
data_type: Type::Uuid,
db_data_type: "uuid".to_owned(),
is_primary: false, is_unique: false, not_null: false, is_inherited: true,
default: None,
comment: None,
foreign: None,
}}
#[allow(dead_code)]
pub fn created()->Column{
Column {
table: Some("settings".to_owned()),
name: column::created.to_owned(),
data_type: Type::DateTime,
db_data_type: "timestamp with time zone".to_owned(),
is_primary: false, is_unique: false, not_null: true, is_inherited: true,
default: Some(Operand::Value(Value::String("'now()'".to_owned()))),
comment: None,
foreign: None,
}}
#[allow(dead_code)]
pub fn created_by()->Column{
Column {
table: Some("settings".to_owned()),
name: column::created_by.to_owned(),
data_type: Type::Uuid,
db_data_type: "uuid".to_owned(),
is_primary: false, is_unique: false, not_null: false, is_inherited: true,
default: None,
comment: None,
foreign: None,
}}
#[allow(dead_code)]
pub fn updated()->Column{
Column {
table: Some("settings".to_owned()),
name: column::updated.to_owned(),
data_type: Type::DateTime,
db_data_type: "timestamp with time zone".to_owned(),
is_primary: false, is_unique: false, not_null: true, is_inherited: true,
default: Some(Operand::Value(Value::String("'now()'".to_owned()))),
comment: None,
foreign: None,
}}
#[allow(dead_code)]
pub fn updated_by()->Column{
Column {
table: Some("settings".to_owned()),
name: column::updated_by.to_owned(),
data_type: Type::Uuid,<|fim▁hole|> default: None,
comment: None,
foreign: None,
}}
#[allow(dead_code)]
pub fn priority()->Column{
Column {
table: Some("settings".to_owned()),
name: column::priority.to_owned(),
data_type: Type::F64,
db_data_type: "double precision".to_owned(),
is_primary: false, is_unique: false, not_null: false, is_inherited: true,
default: None,
comment: None,
foreign: None,
}}
#[allow(dead_code)]
pub fn name()->Column{
Column {
table: Some("settings".to_owned()),
name: column::name.to_owned(),
data_type: Type::String,
db_data_type: "character varying".to_owned(),
is_primary: false, is_unique: false, not_null: false, is_inherited: true,
default: None,
comment: None,
foreign: None,
}}
#[allow(dead_code)]
pub fn description()->Column{
Column {
table: Some("settings".to_owned()),
name: column::description.to_owned(),
data_type: Type::String,
db_data_type: "character varying".to_owned(),
is_primary: false, is_unique: false, not_null: false, is_inherited: true,
default: None,
comment: None,
foreign: None,
}}
#[allow(dead_code)]
pub fn help()->Column{
Column {
table: Some("settings".to_owned()),
name: column::help.to_owned(),
data_type: Type::String,
db_data_type: "text".to_owned(),
is_primary: false, is_unique: false, not_null: false, is_inherited: true,
default: None,
comment: None,
foreign: None,
}}
#[allow(dead_code)]
pub fn active()->Column{
Column {
table: Some("settings".to_owned()),
name: column::active.to_owned(),
data_type: Type::Bool,
db_data_type: "boolean".to_owned(),
is_primary: false, is_unique: false, not_null: true, is_inherited: true,
default: Some(Operand::Value(Value::String("'true'".to_owned()))),
comment: None,
foreign: None,
}}
#[allow(dead_code)]
pub fn user_id()->Column{
Column {
table: Some("settings".to_owned()),
name: column::user_id.to_owned(),
data_type: Type::Uuid,
db_data_type: "uuid".to_owned(),
is_primary: false, is_unique: false, not_null: false, is_inherited: false,
default: None,
comment: None,
foreign: Some(
Foreign {
schema: Some("bazaar".to_owned()),
table: "users".to_owned(),
column: "user_id".to_owned(),
}),
}}
#[allow(dead_code)]
pub fn value()->Column{
Column {
table: Some("settings".to_owned()),
name: column::value.to_owned(),
data_type: Type::Json,
db_data_type: "json".to_owned(),
is_primary: false, is_unique: false, not_null: false, is_inherited: false,
default: None,
comment: None,
foreign: None,
}}
#[allow(dead_code)]
pub fn settings_id()->Column{
Column {
table: Some("settings".to_owned()),
name: column::settings_id.to_owned(),
data_type: Type::Uuid,
db_data_type: "uuid".to_owned(),
is_primary: true, is_unique: false, not_null: true, is_inherited: false,
default: Some(Operand::Value(Value::String("'uuid_generate_v4()'".to_owned()))),
comment: None,
foreign: None,
}}
#[allow(dead_code)]
pub fn use_metric()->Column{
Column {
table: Some("settings".to_owned()),
name: column::use_metric.to_owned(),
data_type: Type::Bool,
db_data_type: "boolean".to_owned(),
is_primary: false, is_unique: false, not_null: false, is_inherited: false,
default: Some(Operand::Value(Value::String("'true'".to_owned()))),
comment: Some(r#"Use metric system as unit, if false, use english system"#.to_owned()),
foreign: None,
}}<|fim▁end|> | db_data_type: "uuid".to_owned(),
is_primary: false, is_unique: false, not_null: false, is_inherited: true, |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>__version__ = '0.1.0'<|fim▁hole|><|fim▁end|> |
from .reports import Report |
<|file_name|>valid-object.js<|end_file_name|><|fim▁begin|>'use strict';
module.exports = function (t, a) {<|fim▁hole|> var x;
a.throws(function () { t(0); }, TypeError, "0");
a.throws(function () { t(false); }, TypeError, "false");
a.throws(function () { t(''); }, TypeError, "''");
a(t(x = {}), x, "Object");
a(t(x = function () {}), x, "Function");
a(t(x = new String('raz')), x, "String object"); //jslint: ignore
a(t(x = new Date()), x, "Date");
a.throws(function () { t(); }, TypeError, "Undefined");
a.throws(function () { t(null); }, TypeError, "null");
};<|fim▁end|> | |
<|file_name|>EhCacheServiceFactory.java<|end_file_name|><|fim▁begin|>/**
* Copyright (C) 2013-2016 The Rythm Engine project
* for LICENSE and other details see:
* https://github.com/rythmengine/rythmengine
*/
package org.rythmengine.cache;
/*-
* #%L
* Rythm Template Engine
* %%<|fim▁hole|> * You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.rythmengine.extension.ICacheService;
import org.rythmengine.extension.ICacheServiceFactory;
/**
* Created with IntelliJ IDEA.
* User: luog
* Date: 2/12/13
* Time: 8:45 AM
* To change this template use File | Settings | File Templates.
*/
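// The factory hands out one shared EhCacheService instance (a plain singleton),
// so every engine in the process uses the same underlying cache.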
class EhCacheServiceFactory implements ICacheServiceFactory {
@Override
public ICacheService get() {
return EhCacheService.INSTANCE;
}
}<|fim▁end|> | * Copyright (C) 2017 - 2021 OSGL (Open Source General Library)
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. |
<|file_name|>test_form.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
class BtcPriceTestCase(unittest.TestCase):
def test_checking_of_input_in_form(self):
input = 46
answer = is_not_number(input) # The bitcoin returned changes over time!
self.assertEqual(answer, False)<|fim▁end|> | import unittest
from .Weather_analyzer import is_not_number
|
<|file_name|>TwouyoMildr~ed_Trouillot.js<|end_file_name|><|fim▁begin|>showWord(["np. ","Avoka, politisyen. Madanm prezidan Jean Bertrand Aristide."<|fim▁hole|><|fim▁end|> | ]) |
<|file_name|>moment.module.ts<|end_file_name|><|fim▁begin|>module rl.utilities.services.momentWrapper {
export var moduleName: string = 'rl.utilities.services.momentWrapper';
export var serviceName: string = 'momentWrapper';
export function momentWrapper(): void {
'use strict';
// Using `any` instead of MomentStatic because
// createFromInputFallback doesn't appear to be
// defined in MomentStatic... :-(
var momentWrapper: any = moment; // moment must already be loaded
// Set default method for handling non-ISO date conversions.
// See 4/28 comment in https://github.com/moment/moment/issues/1407
// This also prevents the deprecation warning message to the console.
momentWrapper.createFromInputFallback = (config: any): void => {
config._d = new Date(config._i);<|fim▁hole|> }
angular.module(moduleName, [])
.factory(serviceName, momentWrapper);
}<|fim▁end|> | };
return momentWrapper; |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""Support for OVO Energy."""
from __future__ import annotations
from datetime import datetime, timedelta
import logging
import aiohttp
import async_timeout
from ovoenergy import OVODailyUsage
from ovoenergy.ovoenergy import OVOEnergy
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.device_registry import DeviceEntryType
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.typing import ConfigType
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
UpdateFailed,
)
from .const import DATA_CLIENT, DATA_COORDINATOR, DOMAIN
_LOGGER = logging.getLogger(__name__)
PLATFORMS = ["sensor"]
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up OVO Energy from a config entry."""
client = OVOEnergy()
try:
authenticated = await client.authenticate(
entry.data[CONF_USERNAME], entry.data[CONF_PASSWORD]
)
except aiohttp.ClientError as exception:
_LOGGER.warning(exception)
raise ConfigEntryNotReady from exception
if not authenticated:
raise ConfigEntryAuthFailed
async def async_update_data() -> OVODailyUsage:
"""Fetch data from OVO Energy."""
async with async_timeout.timeout(10):
try:
authenticated = await client.authenticate(
entry.data[CONF_USERNAME], entry.data[CONF_PASSWORD]
)
except aiohttp.ClientError as exception:
raise UpdateFailed(exception) from exception
if not authenticated:
raise ConfigEntryAuthFailed("Not authenticated with OVO Energy")
return await client.get_daily_usage(datetime.utcnow().strftime("%Y-%m"))
coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
# Name of the data. For logging purposes.
name="sensor",
update_method=async_update_data,
# Polling interval. Will only be polled if there are subscribers.
update_interval=timedelta(seconds=3600),
)
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN][entry.entry_id] = {
DATA_CLIENT: client,<|fim▁hole|>
# Fetch initial data so we have data when entities subscribe
await coordinator.async_config_entry_first_refresh()
# Setup components
hass.config_entries.async_setup_platforms(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload OVO Energy config entry."""
# Unload sensors
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
del hass.data[DOMAIN][entry.entry_id]
return unload_ok
class OVOEnergyEntity(CoordinatorEntity):
"""Defines a base OVO Energy entity."""
def __init__(
self,
coordinator: DataUpdateCoordinator,
client: OVOEnergy,
) -> None:
"""Initialize the OVO Energy entity."""
super().__init__(coordinator)
self._client = client
class OVOEnergyDeviceEntity(OVOEnergyEntity):
"""Defines a OVO Energy device entity."""
@property
def device_info(self) -> DeviceInfo:
"""Return device information about this OVO Energy instance."""
return DeviceInfo(
entry_type=DeviceEntryType.SERVICE,
identifiers={(DOMAIN, self._client.account_id)},
manufacturer="OVO Energy",
name=self._client.username,
)<|fim▁end|> | DATA_COORDINATOR: coordinator,
} |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub mod android;
pub mod caca;
pub mod cocoa;
pub mod dlopen;
pub mod egl;
pub mod emscripten;
pub mod glx;
pub mod osmesa;<|fim▁hole|><|fim▁end|> | pub mod wayland;
pub mod wgl;
pub mod win32;
pub mod x11; |
<|file_name|>apps.py<|end_file_name|><|fim▁begin|>from django.apps import AppConfig
class ExcelUploadConfig(AppConfig):<|fim▁hole|> name = 'excel_upload'<|fim▁end|> | |
<|file_name|>periph_uart_if.py<|end_file_name|><|fim▁begin|># Copyright (c) 2018 Kevin Weiss, for HAW Hamburg <[email protected]>
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
"""@package PyToAPI
This module handles parsing of information from RIOT periph_uart test.
"""
try:
from riot_pal import DutShell
except ImportError:
raise ImportError('Cannot find riot_pal, try "pip install riot_pal"')
<|fim▁hole|>
def uart_init(self, dev, baud):
"""Initialize DUT's UART."""
return self.send_cmd("init {} {}".format(dev, baud))
def uart_mode(self, dev, data_bits, parity, stop_bits):
"""Setup databits, parity and stopbits."""
return self.send_cmd(
"mode {} {} {} {}".format(dev, data_bits, parity, stop_bits))
def uart_send_string(self, dev, test_string):
"""Send data via DUT's UART."""
return self.send_cmd("send {} {}".format(dev, test_string))<|fim▁end|> |
class PeriphUartIf(DutShell):
"""Interface to the node with periph_uart firmware.""" |
<|file_name|>0014_auto_20160210_0406.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('questions', '0013_auto_20160210_0400'),
]
<|fim▁hole|> migrations.AlterField(
model_name='category',
name='order',
field=models.PositiveIntegerField(default=1),
),
migrations.AlterField(
model_name='question',
name='order',
field=models.PositiveIntegerField(default=1),
),
]<|fim▁end|> | operations = [ |
<|file_name|>test_processing.py<|end_file_name|><|fim▁begin|>"""Tests for functions and classes in data/processing.py."""
import glob
import os
from absl.testing import absltest
import heatnet.data.processing as hdp
import heatnet.file_util as file_util
import heatnet.test.test_util as test_util
import numpy as np
import xarray as xr
class CDSPreprocessorTest(absltest.TestCase):
"""Tests for CDSPreprocesor."""
def test_init(self):
"""Tests CDSPreprocessor initialization."""
with file_util.mkdtemp() as tmp_dir:
data_paths = [
os.path.join(tmp_dir, 'temp_data.nc'),
os.path.join(tmp_dir, 'temp_data_2.nc')
]
proc_path = os.path.join(tmp_dir, 'temp_proc_data.nc')
variables = ['swvl1', 't2m']
for path, var in zip(data_paths, variables):
test_util.write_dummy_dataset(path, var)
pp = hdp.CDSPreprocessor(data_paths, base_out_path=proc_path, mode='ext')
self.assertEqual(pp.raw_files, data_paths)
self.assertEqual(pp.base_out_path, proc_path)
self.assertEqual(pp.lead_times, [1])
self.assertEqual(pp.past_times, [0])
pp.close()
pp = hdp.CDSPreprocessor(
data_paths[0], base_out_path=proc_path, mode='ext')
self.assertEqual(pp.raw_files, data_paths[0])
self.assertEqual(pp.base_out_path, proc_path)
self.assertEqual(pp.lead_times, [1])
self.assertEqual(pp.past_times, [0])
pp.close()
for path in data_paths:
os.remove(path)
def test_raw_to_batched_samples(self):
"""Tests default raw_to_batched_samples call."""
tol = 1.0e-4
with file_util.mkdtemp() as tmp_dir:
path = os.path.join(tmp_dir, 'temp_data.nc')
proc_path = os.path.join(tmp_dir, 'temp_proc_data.nc')
proc_path1 = os.path.join(tmp_dir, 'temp_proc_data.000000.nc')
test_util.write_dummy_dataset(path, 'swvl1')
pp = hdp.CDSPreprocessor(path, base_out_path=proc_path, mode='ext')
pp.raw_to_batched_samples()
self.assertEqual(pp.pred_varlev_time, ['swvl1/0'])
self.assertEqual(pp.tgt_varlev_time, ['swvl1/0/+1D'])
with xr.open_dataset(path) as ds, xr.open_dataset(proc_path1) as proc_ds:
self.assertTrue(
np.allclose(
ds.isel(time=0).swvl1.values,
proc_ds.isel(sample=0).sel(
pred_varlev='swvl1/0').predictors.values,
rtol=tol,
atol=tol))
os.remove(path)
for f in glob.glob(os.path.join(tmp_dir, 'temp_proc*')):
os.remove(f)
pp.close()
def test_offsets(self):
"""Tests correctness of time offsets from raw to processed data."""
tol = 1.0e-4
with file_util.mkdtemp() as tmp_dir:
data_paths = [
os.path.join(tmp_dir, 'temp_data.nc'),
os.path.join(tmp_dir, 'temp_data_3.nc'),
os.path.join(tmp_dir, 'temp_data_2.nc'),
]
variables = ['t2m', 'swvl1', 't2m_anom']
proc_path_1 = os.path.join(tmp_dir, 'temp_proc_data.000000.nc')
for path, var in zip(data_paths, variables):
test_util.write_dummy_dataset(path, var)
pp = hdp.CDSPreprocessor(
data_paths,
past_times=[1, 2],
lead_times=[1, 2],
base_out_path=os.path.join(tmp_dir, 'temp_proc_data.nc'),
mode='ext')
pp.raw_to_batched_samples()
with xr.open_dataset(proc_path_1) as proc_ds:
with xr.open_dataset(data_paths[0]) as ds:
# First possible target with lead time = 2
raw_data_slice = (ds.isel(time=4).t2m.values)
tgt_data_slice = (
proc_ds.sel(tgt_varlev='t2m/0/+1D').isel(sample=1).targets.values)
tgt2_data_slice = (
proc_ds.sel(tgt_varlev='t2m/0/+2D').isel(sample=0).targets.values)
pred0_data_slice = (
proc_ds.sel(pred_varlev='t2m/0').isel(sample=2).predictors.values)
pred1_data_slice = (
proc_ds.sel(pred_varlev='t2m/0/-1D').isel(
sample=3).predictors.values)
pred2_data_slice = (
proc_ds.sel(pred_varlev='t2m/0/-2D').isel(
sample=4).predictors.values)
self.assertTrue(
np.allclose(raw_data_slice, tgt_data_slice, rtol=tol, atol=tol))
self.assertTrue(
np.allclose(raw_data_slice, tgt2_data_slice, rtol=tol, atol=tol))
self.assertTrue(
np.allclose(raw_data_slice, pred0_data_slice, rtol=tol, atol=tol))
self.assertTrue(
np.allclose(raw_data_slice, pred1_data_slice, rtol=tol, atol=tol))
self.assertTrue(
np.allclose(raw_data_slice, pred2_data_slice, rtol=tol, atol=tol))
self.assertEqual(ds.time.values[2], proc_ds.sample.values[0])
with xr.open_dataset(data_paths[2]) as ds:
# First possible target with lead time = 2
raw_data_slice = (ds.isel(time=4).t2m_anom.values)
tgt_data_slice = (
proc_ds.sel(tgt_varlev='t2m_anom/0/+1D').isel(
sample=1).targets.values)
tgt2_data_slice = (
proc_ds.sel(tgt_varlev='t2m_anom/0/+2D').isel(
sample=0).targets.values)
pred0_data_slice = (
proc_ds.sel(pred_varlev='t2m_anom/0').isel(
sample=2).predictors.values)
pred1_data_slice = (
proc_ds.sel(pred_varlev='t2m_anom/0/-1D').isel(
sample=3).predictors.values)
pred2_data_slice = (
proc_ds.sel(pred_varlev='t2m_anom/0/-2D').isel(
sample=4).predictors.values)
self.assertTrue(
np.allclose(raw_data_slice, tgt_data_slice, rtol=tol, atol=tol))
self.assertTrue(
np.allclose(raw_data_slice, tgt2_data_slice, rtol=tol, atol=tol))
self.assertTrue(
np.allclose(raw_data_slice, pred0_data_slice, rtol=tol, atol=tol))
self.assertTrue(
np.allclose(raw_data_slice, pred1_data_slice, rtol=tol, atol=tol))
self.assertTrue(
np.allclose(raw_data_slice, pred2_data_slice, rtol=tol, atol=tol))
pp.close()
for path in data_paths:<|fim▁hole|> def test_mean_std_recovery(self):
"""Tests recovery of dimensional data from processed normalized data."""
tol = 1.0e-4
with file_util.mkdtemp() as tmp_dir:
data_paths = [
os.path.join(tmp_dir, 'temp_data.nc'),
os.path.join(tmp_dir, 'temp_data_3.nc'),
os.path.join(tmp_dir, 'temp_data_2.nc'),
]
variables = ['t2m', 'swvl1', 't2m_anom']
proc_path_1 = os.path.join(tmp_dir, 'temp_proc_data.000000.nc')
for path, var in zip(data_paths, variables):
test_util.write_dummy_dataset(path, var)
pp = hdp.CDSPreprocessor(
data_paths,
base_out_path=os.path.join(tmp_dir, 'temp_proc_data.nc'),
past_times=[1, 2],
lead_times=[1, 2],
mode='ext')
pp.raw_to_batched_samples(scale_variables=True)
with xr.open_dataset(proc_path_1) as proc_ds:
with xr.open_dataset(os.path.join(
tmp_dir, 'temp_proc_data.scales.nc')) as scale_ds:
with xr.open_dataset(data_paths[1]) as ds:
raw_values = ds.isel(time=4).swvl1.values
proc_values = proc_ds.isel(sample=2).sel(
pred_varlev='swvl1/0').predictors.values
proc_scaled_values = np.add(
np.multiply(
proc_values,
scale_ds.sel(pred_varlev='swvl1/0').pred_std.values),
scale_ds.sel(pred_varlev='swvl1/0').pred_mean.values)
self.assertTrue(
np.allclose(raw_values, proc_scaled_values, rtol=tol, atol=tol))
proc_values = proc_ds.isel(sample=4).sel(
pred_varlev='swvl1/0/-2D').predictors.values
proc_scaled_values = np.add(
np.multiply(
proc_values,
scale_ds.sel(pred_varlev='swvl1/0').pred_std.values),
scale_ds.sel(pred_varlev='swvl1/0').pred_mean.values)
self.assertTrue(
np.allclose(raw_values, proc_scaled_values, rtol=tol, atol=tol))
with xr.open_dataset(data_paths[2]) as ds:
raw_values = ds.isel(time=4).t2m_anom.values
proc_values = proc_ds.isel(sample=2).sel(
pred_varlev='t2m_anom/0').predictors.values
proc_scaled_values = np.add(
np.multiply(
proc_values,
scale_ds.sel(pred_varlev='t2m_anom/0').pred_std.values),
scale_ds.sel(pred_varlev='t2m_anom/0').pred_mean.values)
self.assertTrue(
np.allclose(raw_values, proc_scaled_values, rtol=tol, atol=tol))
proc_values = proc_ds.isel(sample=3).sel(
pred_varlev='t2m_anom/0/-1D').predictors.values
proc_scaled_values = np.add(
np.multiply(
proc_values,
scale_ds.sel(pred_varlev='t2m_anom/0').pred_std.values),
scale_ds.sel(pred_varlev='t2m_anom/0').pred_mean.values)
self.assertTrue(
np.allclose(raw_values, proc_scaled_values, rtol=tol, atol=tol))
pp.close()
for path in data_paths:
os.remove(path)
for f in glob.glob(os.path.join(tmp_dir, 'temp_proc*')):
os.remove(f)
if __name__ == '__main__':
absltest.main()<|fim▁end|> | os.remove(path)
for f in glob.glob(os.path.join(tmp_dir, 'temp_proc*')):
os.remove(f)
|
<|file_name|>testing_main.cc<|end_file_name|><|fim▁begin|>#include "base/testing.h"
#include <iostream><|fim▁hole|> InitTest(&argc, &argv);
return RUN_ALL_TESTS();
}<|fim▁end|> |
GTEST_API_ int main(int argc, char** argv) {
std::cout << "Running main() from testing_main.cc\n"; |
<|file_name|>conf.go<|end_file_name|><|fim▁begin|><|fim▁hole|> debug string
	// commitHash is assigned from main.go
commitHash string
)
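// These values are typically injected at build time via linker flags; a
// minimal sketch (the exact command is an assumption, not from this repo):
//
//	go build -ldflags "-X main.debug=false -X main.commitHash=$(git rev-parse HEAD)"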
// The easiest way to configure
// an application is to define the config as
// a YAML string and then parse it into
// a map.
// How it works see here:
// https://github.com/olebedev/config
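// A minimal sketch of how the string below is typically consumed with the
// olebedev/config package linked above (an assumption, not part of this file):
//
//	conf, err := config.ParseYaml(confString)
//	if err != nil {
//		panic(err)
//	}
//	title := conf.UString("title") // "Vio"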
var confString = `
debug: false
commit: 0
port: 5000
title: Vio
api:
prefix: /api
duktape:
path: static/build/bundle.js
`<|fim▁end|> | package main
var (
// Debug var to switch mode from outside |
<|file_name|>check_CoC.py<|end_file_name|><|fim▁begin|>################################################
#
# file moved to own repository:
# https://github.com/mozilla/Mozilla-GitHub-Standards
#<|fim▁hole|><|fim▁end|> | ################################################ |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>import numpy as np
def index2onehot(n_labels, index):
return np.eye(n_labels)[index]
# From https://github.com/lisa-lab/DeepLearningTutorials/blob/master/code/utils.py
def scale_to_unit_interval(ndar, eps=1e-8):
""" Scales all values in the ndarray ndar to be between 0 and 1 """
ndar = ndar.copy()
ndar -= ndar.min()
ndar *= 1.0 / (ndar.max() + eps)
return ndar
# From https://github.com/lisa-lab/DeepLearningTutorials/blob/master/code/utils.py
def tile_raster_images(X, img_shape, tile_shape, tile_spacing=(0, 0),
scale_rows_to_unit_interval=True,
output_pixel_vals=True):
"""
Transform an array with one flattened image per row, into an array in
    which images are reshaped and laid out like tiles on a floor.
This function is useful for visualizing datasets whose rows are images,
and also columns of matrices for transforming those rows
(such as the first layer of a neural net).
:type X: a 2-D ndarray or a tuple of 4 channels, elements of which can
be 2-D ndarrays or None;
:param X: a 2-D array in which every row is a flattened image.
:type img_shape: tuple; (height, width)
:param img_shape: the original shape of each image
:type tile_shape: tuple; (rows, cols)
:param tile_shape: the number of images to tile (rows, cols)
:param output_pixel_vals: if output should be pixel values (i.e. int8
values) or floats
:param scale_rows_to_unit_interval: if the values need to be scaled before
being plotted to [0,1] or not
:returns: array suitable for viewing as an image.
(See:`Image.fromarray`.)
:rtype: a 2-d array with same dtype as X.
"""
assert len(img_shape) == 2
assert len(tile_shape) == 2
assert len(tile_spacing) == 2
# The expression below can be re-written in a more C style as
# follows :
#
# out_shape = [0,0]
# out_shape[0] = (img_shape[0]+tile_spacing[0])*tile_shape[0] -
# tile_spacing[0]
# out_shape[1] = (img_shape[1]+tile_spacing[1])*tile_shape[1] -
# tile_spacing[1]
out_shape = [
(ishp + tsp) * tshp - tsp
for ishp, tshp, tsp in zip(img_shape, tile_shape, tile_spacing)
]
if isinstance(X, tuple):
assert len(X) == 4
# Create an output numpy ndarray to store the image
if output_pixel_vals:
out_array = np.zeros((out_shape[0], out_shape[1], 4),
dtype='uint8')
else:
out_array = np.zeros((out_shape[0], out_shape[1], 4),
dtype=X.dtype)
#colors default to 0, alpha defaults to 1 (opaque)
if output_pixel_vals:
channel_defaults = [0, 0, 0, 255]
else:
channel_defaults = [0., 0., 0., 1.]
        for i in range(4):
if X[i] is None:
# if channel is None, fill it with zeros of the correct
# dtype
dt = out_array.dtype
if output_pixel_vals:
dt = 'uint8'
out_array[:, :, i] = np.zeros(
out_shape,
dtype=dt
) + channel_defaults[i]
else:
# use a recurrent call to compute the channel and store it
# in the output
out_array[:, :, i] = tile_raster_images(
X[i], img_shape, tile_shape, tile_spacing,
scale_rows_to_unit_interval, output_pixel_vals)
return out_array
else:
# if we are dealing with only one channel
H, W = img_shape
Hs, Ws = tile_spacing
# generate a matrix to store the output
dt = X.dtype
if output_pixel_vals:
dt = 'uint8'
out_array = np.zeros(out_shape, dtype=dt)
        for tile_row in range(tile_shape[0]):
            for tile_col in range(tile_shape[1]):
if tile_row * tile_shape[1] + tile_col < X.shape[0]:
this_x = X[tile_row * tile_shape[1] + tile_col]
if scale_rows_to_unit_interval:
# if we should scale values to be between 0 and 1
# do this by calling the `scale_to_unit_interval`
# function
this_img = scale_to_unit_interval(
this_x.reshape(img_shape))
else:
this_img = this_x.reshape(img_shape)<|fim▁hole|> if output_pixel_vals:
c = 255
out_array[
tile_row * (H + Hs): tile_row * (H + Hs) + H,
tile_col * (W + Ws): tile_col * (W + Ws) + W
] = this_img * c
return out_array<|fim▁end|> | # add the slice to the corresponding position in the
# output array
c = 1 |
<|file_name|>introspection.py<|end_file_name|><|fim▁begin|>import re
from collections import namedtuple
import sqlparse
from django.db.backends.base.introspection import (
BaseDatabaseIntrospection, FieldInfo as BaseFieldInfo, TableInfo,
)
from django.db.models import Index
from django.utils.regex_helper import _lazy_re_compile
<|fim▁hole|>
def get_field_size(name):
""" Extract the size number from a "varchar(11)" type name """
m = field_size_re.search(name)
return int(m.group(1)) if m else None
# This light wrapper "fakes" a dictionary interface, because some SQLite data
# types include variables in them -- e.g. "varchar(30)" -- and can't be matched
# as a simple dictionary lookup.
class FlexibleFieldLookupDict:
# Maps SQL types to Django Field types. Some of the SQL types have multiple
# entries here because SQLite allows for anything and doesn't normalize the
# field type; it uses whatever was given.
base_data_types_reverse = {
'bool': 'BooleanField',
'boolean': 'BooleanField',
'smallint': 'SmallIntegerField',
'smallint unsigned': 'PositiveSmallIntegerField',
'smallinteger': 'SmallIntegerField',
'int': 'IntegerField',
'integer': 'IntegerField',
'bigint': 'BigIntegerField',
'integer unsigned': 'PositiveIntegerField',
'bigint unsigned': 'PositiveBigIntegerField',
'decimal': 'DecimalField',
'real': 'FloatField',
'text': 'TextField',
'char': 'CharField',
'varchar': 'CharField',
'blob': 'BinaryField',
'date': 'DateField',
'datetime': 'DateTimeField',
'time': 'TimeField',
}
def __getitem__(self, key):
key = key.lower().split('(', 1)[0].strip()
return self.base_data_types_reverse[key]
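    # e.g. (illustrative): FlexibleFieldLookupDict()['varchar(30)'] -> 'CharField'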
class DatabaseIntrospection(BaseDatabaseIntrospection):
data_types_reverse = FlexibleFieldLookupDict()
def get_field_type(self, data_type, description):
field_type = super().get_field_type(data_type, description)
if description.pk and field_type in {'BigIntegerField', 'IntegerField', 'SmallIntegerField'}:
# No support for BigAutoField or SmallAutoField as SQLite treats
# all integer primary keys as signed 64-bit integers.
return 'AutoField'
return field_type
def get_table_list(self, cursor):
"""Return a list of table and view names in the current database."""
# Skip the sqlite_sequence system table used for autoincrement key
# generation.
cursor.execute("""
SELECT name, type FROM sqlite_master
WHERE type in ('table', 'view') AND NOT name='sqlite_sequence'
ORDER BY name""")
return [TableInfo(row[0], row[1][0]) for row in cursor.fetchall()]
def get_table_description(self, cursor, table_name):
"""
Return a description of the table with the DB-API cursor.description
interface.
"""
cursor.execute('PRAGMA table_info(%s)' % self.connection.ops.quote_name(table_name))
return [
FieldInfo(
name, data_type, None, get_field_size(data_type), None, None,
not notnull, default, pk == 1,
)
for cid, name, data_type, notnull, default, pk in cursor.fetchall()
]
def get_sequences(self, cursor, table_name, table_fields=()):
pk_col = self.get_primary_key_column(cursor, table_name)
return [{'table': table_name, 'column': pk_col}]
def get_relations(self, cursor, table_name):
"""
Return a dictionary of {field_name: (field_name_other_table, other_table)}
representing all relationships to the given table.
"""
# Dictionary of relations to return
relations = {}
# Schema for this table
cursor.execute(
"SELECT sql, type FROM sqlite_master "
"WHERE tbl_name = %s AND type IN ('table', 'view')",
[table_name]
)
create_sql, table_type = cursor.fetchone()
if table_type == 'view':
# It might be a view, then no results will be returned
return relations
results = create_sql[create_sql.index('(') + 1:create_sql.rindex(')')]
# Walk through and look for references to other tables. SQLite doesn't
# really have enforced references, but since it echoes out the SQL used
# to create the table we can look for REFERENCES statements used there.
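        # e.g. a column definition such as (an illustrative sample):
        #   "author_id" integer REFERENCES "author" ("id")
        # yields table='author', column='id' via the regex below.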
for field_desc in results.split(','):
field_desc = field_desc.strip()
if field_desc.startswith("UNIQUE"):
continue
m = re.search(r'references (\S*) ?\(["|]?(.*)["|]?\)', field_desc, re.I)
if not m:
continue
table, column = [s.strip('"') for s in m.groups()]
if field_desc.startswith("FOREIGN KEY"):
# Find name of the target FK field
m = re.match(r'FOREIGN KEY\s*\(([^\)]*)\).*', field_desc, re.I)
field_name = m.groups()[0].strip('"')
else:
field_name = field_desc.split()[0].strip('"')
cursor.execute("SELECT sql FROM sqlite_master WHERE tbl_name = %s", [table])
result = cursor.fetchall()[0]
other_table_results = result[0].strip()
li, ri = other_table_results.index('('), other_table_results.rindex(')')
other_table_results = other_table_results[li + 1:ri]
for other_desc in other_table_results.split(','):
other_desc = other_desc.strip()
if other_desc.startswith('UNIQUE'):
continue
other_name = other_desc.split(' ', 1)[0].strip('"')
if other_name == column:
relations[field_name] = (other_name, table)
break
return relations
def get_key_columns(self, cursor, table_name):
"""
Return a list of (column_name, referenced_table_name, referenced_column_name)
for all key columns in given table.
"""
key_columns = []
# Schema for this table
cursor.execute("SELECT sql FROM sqlite_master WHERE tbl_name = %s AND type = %s", [table_name, "table"])
results = cursor.fetchone()[0].strip()
results = results[results.index('(') + 1:results.rindex(')')]
# Walk through and look for references to other tables. SQLite doesn't
# really have enforced references, but since it echoes out the SQL used
# to create the table we can look for REFERENCES statements used there.
for field_index, field_desc in enumerate(results.split(',')):
field_desc = field_desc.strip()
if field_desc.startswith("UNIQUE"):
continue
m = re.search(r'"(.*)".*references (.*) \(["|](.*)["|]\)', field_desc, re.I)
if not m:
continue
# This will append (column_name, referenced_table_name, referenced_column_name) to key_columns
key_columns.append(tuple(s.strip('"') for s in m.groups()))
return key_columns
def get_primary_key_column(self, cursor, table_name):
"""Return the column name of the primary key for the given table."""
# Don't use PRAGMA because that causes issues with some transactions
cursor.execute(
"SELECT sql, type FROM sqlite_master "
"WHERE tbl_name = %s AND type IN ('table', 'view')",
[table_name]
)
row = cursor.fetchone()
if row is None:
raise ValueError("Table %s does not exist" % table_name)
create_sql, table_type = row
if table_type == 'view':
# Views don't have a primary key.
return None
fields_sql = create_sql[create_sql.index('(') + 1:create_sql.rindex(')')]
for field_desc in fields_sql.split(','):
field_desc = field_desc.strip()
m = re.match(r'(?:(?:["`\[])(.*)(?:["`\]])|(\w+)).*PRIMARY KEY.*', field_desc)
if m:
return m.group(1) if m.group(1) else m.group(2)
return None
def _get_foreign_key_constraints(self, cursor, table_name):
constraints = {}
cursor.execute('PRAGMA foreign_key_list(%s)' % self.connection.ops.quote_name(table_name))
for row in cursor.fetchall():
# Remaining on_update/on_delete/match values are of no interest.
id_, _, table, from_, to = row[:5]
constraints['fk_%d' % id_] = {
'columns': [from_],
'primary_key': False,
'unique': False,
'foreign_key': (table, to),
'check': False,
'index': False,
}
return constraints
def _parse_column_or_constraint_definition(self, tokens, columns):
token = None
is_constraint_definition = None
field_name = None
constraint_name = None
unique = False
unique_columns = []
check = False
check_columns = []
braces_deep = 0
for token in tokens:
if token.match(sqlparse.tokens.Punctuation, '('):
braces_deep += 1
elif token.match(sqlparse.tokens.Punctuation, ')'):
braces_deep -= 1
if braces_deep < 0:
# End of columns and constraints for table definition.
break
elif braces_deep == 0 and token.match(sqlparse.tokens.Punctuation, ','):
# End of current column or constraint definition.
break
# Detect column or constraint definition by first token.
if is_constraint_definition is None:
is_constraint_definition = token.match(sqlparse.tokens.Keyword, 'CONSTRAINT')
if is_constraint_definition:
continue
if is_constraint_definition:
# Detect constraint name by second token.
if constraint_name is None:
if token.ttype in (sqlparse.tokens.Name, sqlparse.tokens.Keyword):
constraint_name = token.value
elif token.ttype == sqlparse.tokens.Literal.String.Symbol:
constraint_name = token.value[1:-1]
# Start constraint columns parsing after UNIQUE keyword.
if token.match(sqlparse.tokens.Keyword, 'UNIQUE'):
unique = True
unique_braces_deep = braces_deep
elif unique:
if unique_braces_deep == braces_deep:
if unique_columns:
# Stop constraint parsing.
unique = False
continue
if token.ttype in (sqlparse.tokens.Name, sqlparse.tokens.Keyword):
unique_columns.append(token.value)
elif token.ttype == sqlparse.tokens.Literal.String.Symbol:
unique_columns.append(token.value[1:-1])
else:
# Detect field name by first token.
if field_name is None:
if token.ttype in (sqlparse.tokens.Name, sqlparse.tokens.Keyword):
field_name = token.value
elif token.ttype == sqlparse.tokens.Literal.String.Symbol:
field_name = token.value[1:-1]
if token.match(sqlparse.tokens.Keyword, 'UNIQUE'):
unique_columns = [field_name]
# Start constraint columns parsing after CHECK keyword.
if token.match(sqlparse.tokens.Keyword, 'CHECK'):
check = True
check_braces_deep = braces_deep
elif check:
if check_braces_deep == braces_deep:
if check_columns:
# Stop constraint parsing.
check = False
continue
if token.ttype in (sqlparse.tokens.Name, sqlparse.tokens.Keyword):
if token.value in columns:
check_columns.append(token.value)
elif token.ttype == sqlparse.tokens.Literal.String.Symbol:
if token.value[1:-1] in columns:
check_columns.append(token.value[1:-1])
unique_constraint = {
'unique': True,
'columns': unique_columns,
'primary_key': False,
'foreign_key': None,
'check': False,
'index': False,
} if unique_columns else None
check_constraint = {
'check': True,
'columns': check_columns,
'primary_key': False,
'unique': False,
'foreign_key': None,
'index': False,
} if check_columns else None
return constraint_name, unique_constraint, check_constraint, token
def _parse_table_constraints(self, sql, columns):
        # Check constraint parsing is based on the SQLite syntax diagram.
# https://www.sqlite.org/syntaxdiagrams.html#table-constraint
statement = sqlparse.parse(sql)[0]
constraints = {}
        unnamed_constraints_index = 0
tokens = (token for token in statement.flatten() if not token.is_whitespace)
# Go to columns and constraint definition
for token in tokens:
if token.match(sqlparse.tokens.Punctuation, '('):
break
# Parse columns and constraint definition
while True:
constraint_name, unique, check, end_token = self._parse_column_or_constraint_definition(tokens, columns)
if unique:
if constraint_name:
constraints[constraint_name] = unique
else:
                    unnamed_constraints_index += 1
                    constraints['__unnamed_constraint_%s__' % unnamed_constraints_index] = unique
if check:
if constraint_name:
constraints[constraint_name] = check
else:
                    unnamed_constraints_index += 1
                    constraints['__unnamed_constraint_%s__' % unnamed_constraints_index] = check
if end_token.match(sqlparse.tokens.Punctuation, ')'):
break
return constraints
def get_constraints(self, cursor, table_name):
"""
Retrieve any constraints or keys (unique, pk, fk, check, index) across
one or more columns.
"""
constraints = {}
# Find inline check constraints.
try:
table_schema = cursor.execute(
"SELECT sql FROM sqlite_master WHERE type='table' and name=%s" % (
self.connection.ops.quote_name(table_name),
)
).fetchone()[0]
except TypeError:
# table_name is a view.
pass
else:
columns = {info.name for info in self.get_table_description(cursor, table_name)}
constraints.update(self._parse_table_constraints(table_schema, columns))
# Get the index info
cursor.execute("PRAGMA index_list(%s)" % self.connection.ops.quote_name(table_name))
for row in cursor.fetchall():
# SQLite 3.8.9+ has 5 columns, however older versions only give 3
# columns. Discard last 2 columns if there.
number, index, unique = row[:3]
cursor.execute(
"SELECT sql FROM sqlite_master "
"WHERE type='index' AND name=%s" % self.connection.ops.quote_name(index)
)
# There's at most one row.
sql, = cursor.fetchone() or (None,)
# Inline constraints are already detected in
# _parse_table_constraints(). The reasons to avoid fetching inline
# constraints from `PRAGMA index_list` are:
# - Inline constraints can have a different name and information
# than what `PRAGMA index_list` gives.
# - Not all inline constraints may appear in `PRAGMA index_list`.
if not sql:
# An inline constraint
continue
# Get the index info for that index
cursor.execute('PRAGMA index_info(%s)' % self.connection.ops.quote_name(index))
for index_rank, column_rank, column in cursor.fetchall():
if index not in constraints:
constraints[index] = {
"columns": [],
"primary_key": False,
"unique": bool(unique),
"foreign_key": None,
"check": False,
"index": True,
}
constraints[index]['columns'].append(column)
# Add type and column orders for indexes
if constraints[index]['index'] and not constraints[index]['unique']:
# SQLite doesn't support any index type other than b-tree
constraints[index]['type'] = Index.suffix
order_info = sql.split('(')[-1].split(')')[0].split(',')
orders = ['DESC' if info.endswith('DESC') else 'ASC' for info in order_info]
constraints[index]['orders'] = orders
# Get the PK
pk_column = self.get_primary_key_column(cursor, table_name)
if pk_column:
# SQLite doesn't actually give a name to the PK constraint,
# so we invent one. This is fine, as the SQLite backend never
# deletes PK constraints by name, as you can't delete constraints
# in SQLite; we remake the table with a new PK instead.
constraints["__primary__"] = {
"columns": [pk_column],
"primary_key": True,
"unique": False, # It's not actually a unique constraint.
"foreign_key": None,
"check": False,
"index": False,
}
constraints.update(self._get_foreign_key_constraints(cursor, table_name))
return constraints<|fim▁end|> | FieldInfo = namedtuple('FieldInfo', BaseFieldInfo._fields + ('pk',))
field_size_re = _lazy_re_compile(r'^\s*(?:var)?char\s*\(\s*(\d+)\s*\)\s*$')
|
<|file_name|>person.py<|end_file_name|><|fim▁begin|>import numpy as np
from src.base.animal import Animal
class Stats(object):
# TODO: Re-implement this at a higher level
# Ugh. I feel dirty.
# name, type, min, max ... distro?
DATA = {
'air': ('i2', 0, 1000),
'hunger': ('i2', 0, 1000),<|fim▁hole|> 'warmth': ('i2', 0, 1000) }
    # dtype built from just the name and type columns of DATA
VECTOR_TEMPLATE = np.zeros(1, dtype=[(k, v[0]) for k,v in DATA.items()])
def __init__(self):
        # use .copy(): slicing a NumPy array with [:] returns a view, which
        # would make every instance share the same underlying stats buffer
        self.data = Stats.VECTOR_TEMPLATE.copy()
def __getitem__(self, name):
return self.data[name]
def __setitem__(self, name, value):
        # clamp the value to the [min, max] range declared in DATA
        if value < Stats.DATA[name][1]:
            value = Stats.DATA[name][1]
        elif value > Stats.DATA[name][2]:
            value = Stats.DATA[name][2]
self.data[name] = value
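    # e.g. (illustrative): s = Stats(); s['hunger'] = 1500 stores 1000 (the max),
    # while s['hunger'] = -5 stores 0 (the min)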
class Person(Animal):
"""
Follows the hierarchy of needs:
Air, water, food, shelter, clothing
Health, safety, future security
Friendship, intimacy, family
Esteem, self-image
Goals/Dreams
"""
def __init__(self, world, name="Bob"):
super(Person, self).__init__(world)
self.name = name if name else "Noname"
self.inventory = []<|fim▁end|> | 'thirst': ('i2', 0, 1000),
'stamina': ('i2', 0, 1000), |
<|file_name|>CrossOriginHandlerTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2014, The Sporting Exchange Limited
* Copyright 2014, Simon Matić Langford
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.co.exemel.disco.transport.jetty;
import uk.co.exemel.disco.DiscoVersion;
import org.eclipse.jetty.servlets.CrossOriginFilter;
import org.junit.Test;
<|fim▁hole|>import static org.mockito.Mockito.*;
/**
* Unit tests for {@link uk.co.exemel.disco.transport.jetty.CrossOriginHandler}
*/
public class CrossOriginHandlerTest {
@Test
public void testHandlerSetsServerHeaderInTheResponse() throws Exception {
final CrossOriginHandler victim = new CrossOriginHandler("betfair.com", "GET,POST,HEAD", "X-Requested-With,Content-Type,Accept,Origin", "1800", "true", "");
final MockJettyRequest req = mock(MockJettyRequest.class);
final MockJettyResponse res = mock(MockJettyResponse.class);
victim.handle("/", req, req, res);
verify(res, times(1)).setHeader(eq("Server"), eq("Disco 2 - " + DiscoVersion.getVersion()));
}
@Test
public void testHandlerMarksRequestAsHandledByDefault() throws Exception {
final CrossOriginHandler victim = new CrossOriginHandler("betfair.com", "GET,POST,HEAD", "X-Requested-With,Content-Type,Accept,Origin", "1800", "true", "");
final MockJettyRequest req = mock(MockJettyRequest.class);
final MockJettyResponse res = mock(MockJettyResponse.class);
victim.handle("/", req, req, res);
verify(req, times(1)).setHandled(eq(true));
verify(req, times(1)).setHandled(eq(false));
}
@Test
public void testHandlerUnmarksRequestAsHandledIfFilterContinuesTheChainExplicitDomain() throws Exception {
testHandlesCrossOriginRequest("betfair.com", true);
}
@Test
public void testHandlerUnmarksRequestAsHandledIfFilterContinuesTheChainAllDomains() throws Exception {
testHandlesCrossOriginRequest("*", true);
}
@Test
public void testHandlerUnmarksRequestAsHandledIfFilterContinuesTheChainNoDomains() throws Exception {
testHandlesCrossOriginRequest("", false);
}
private void testHandlesCrossOriginRequest(String domains, boolean wantHandled) throws Exception {
final CrossOriginHandler victim = new CrossOriginHandler(domains, "GET,POST,HEAD", "X-Requested-With,Content-Type,Accept,Origin", "1800", "true", "");
final MockJettyRequest req = mock(MockJettyRequest.class);
final MockJettyResponse res = mock(MockJettyResponse.class);
when(req.getMethod()).thenReturn("OPTIONS");
when(req.getHeader("Origin")).thenReturn("betfair.com");
when(req.getHeader(CrossOriginFilter.ACCESS_CONTROL_REQUEST_METHOD_HEADER)).thenReturn("PUT");
when(req.getHeaders("Connection")).thenReturn(Collections.<String>emptyEnumeration());
victim.handle("/", req, req, res);
// this is always called
verify(req, times(1)).setHandled(eq(true));
if (wantHandled) {
verify(req, never()).setHandled(eq(false));
}
else {
verify(req, times(1)).setHandled(eq(false));
}
}
}<|fim▁end|> | import java.util.Collections;
import static org.mockito.Matchers.eq; |
<|file_name|>Mmhc.java<|end_file_name|><|fim▁begin|>///////////////////////////////////////////////////////////////////////////////
// For information as to what this class does, see the Javadoc, below. //
// Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, //
// 2007, 2008, 2009, 2010, 2014, 2015 by Peter Spirtes, Richard Scheines, Joseph //
// Ramsey, and Clark Glymour. //<|fim▁hole|>// (at your option) any later version. //
// //
// This program is distributed in the hope that it will be useful, //
// but WITHOUT ANY WARRANTY; without even the implied warranty of //
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the //
// GNU General Public License for more details. //
// //
// You should have received a copy of the GNU General Public License //
// along with this program; if not, write to the Free Software //
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA //
///////////////////////////////////////////////////////////////////////////////
package edu.cmu.tetrad.search.mb;
import edu.cmu.tetrad.data.DataSet;
import edu.cmu.tetrad.data.IKnowledge;
import edu.cmu.tetrad.data.Knowledge2;
import edu.cmu.tetrad.graph.EdgeListGraph;
import edu.cmu.tetrad.graph.Graph;
import edu.cmu.tetrad.graph.Node;
import edu.cmu.tetrad.search.FgesOrienter;
import edu.cmu.tetrad.search.GraphSearch;
import edu.cmu.tetrad.search.IndependenceTest;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Implements the MMHC algorithm.
*
* @author Joseph Ramsey (this version).
*/
public class Mmhc implements GraphSearch {
/**
* The independence test used for the PC search.
*/
private IndependenceTest independenceTest;
/**
* The maximum number of nodes conditioned on in the search.
*/
private int depth = Integer.MAX_VALUE;
private DataSet data;
private IKnowledge knowledge = new Knowledge2();
//=============================CONSTRUCTORS==========================//
public Mmhc(IndependenceTest test, DataSet dataSet) {
this.depth = -1;
this.independenceTest = test;
this.data = dataSet;
}
//==============================PUBLIC METHODS========================//
public IndependenceTest getIndependenceTest() {
return independenceTest;
}
public int getDepth() {
return depth;
}
public long getElapsedTime() {
return 0;
}
/**
* Runs PC starting with a fully connected graph over all of the variables in the domain of the independence test.
*/
public Graph search() {
List<Node> variables = independenceTest.getVariables();
Mmmb mmmb = new Mmmb(independenceTest, getDepth(), true);
Map<Node, List<Node>> pc = new HashMap<>();
for (Node x : variables) {
pc.put(x, mmmb.getPc(x));
}
Graph graph = new EdgeListGraph();
for (Node x : variables) {
graph.addNode(x);
}
for (Node x : variables) {
for (Node y : pc.get(x)) {
if (!graph.isAdjacentTo(x, y)) {
graph.addUndirectedEdge(x, y);
}
}
}
FgesOrienter orienter = new FgesOrienter(data);
orienter.orient(graph);
return graph;
}
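    // Illustrative usage (IndTestFisherZ and its arguments are assumptions
    // about the surrounding Tetrad API, not taken from this file):
    //   IndependenceTest test = new IndTestFisherZ(dataSet, 0.05);
    //   Graph pattern = new Mmhc(test, dataSet).search();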
public IKnowledge getKnowledge() {
return knowledge;
}
public void setKnowledge(IKnowledge knowledge) {
if (knowledge == null) {
throw new NullPointerException();
}
this.knowledge = knowledge;
}
public void setDepth(int depth) {
this.depth = depth;
}
}<|fim▁end|> | // //
// This program is free software; you can redistribute it and/or modify //
// it under the terms of the GNU General Public License as published by //
// the Free Software Foundation; either version 2 of the License, or // |
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin
from django.db import models
from pagedown.widgets import AdminPagedownWidget
from .models import Faq, Category
class FaqAdmin(admin.ModelAdmin):
formfield_overrides = {
models.TextField: {'widget': AdminPagedownWidget},
}
fieldsets = [
('Faq', {'fields': ['question', 'answer', 'category']})
]
list_display = ('question', 'created', 'modified')
list_filter = ['created', 'modified']
search_fields = ['question', 'answer']
ordering = ['-created']<|fim▁hole|>
def save_model(self, request, obj, form, change):
obj.author = request.user
obj.save()
class CategoryAdmin(admin.ModelAdmin):
fieldsets = [
('Category', {'fields': ['title']})
]
list_display = ('title', 'slug')
search_fields = ['title']
admin.site.register(Faq, FaqAdmin)
admin.site.register(Category, CategoryAdmin)<|fim▁end|> | |
<|file_name|>validator.go<|end_file_name|><|fim▁begin|>// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package validate
import (
"fmt"
"reflect"
"github.com/go-openapi/errors"
"github.com/go-openapi/spec"
"github.com/go-openapi/strfmt"
)
// An EntityValidator is an interface for things that can validate entities
type EntityValidator interface {
Validate(interface{}) *Result
}
type valueValidator interface {
SetPath(path string)
Applies(interface{}, reflect.Kind) bool
Validate(interface{}) *Result
}
type itemsValidator struct {
items *spec.Items
root interface{}
path string
in string
validators []valueValidator
KnownFormats strfmt.Registry
}
func newItemsValidator(path, in string, items *spec.Items, root interface{}, formats strfmt.Registry) *itemsValidator {
iv := &itemsValidator{path: path, in: in, items: items, root: root, KnownFormats: formats}
iv.validators = []valueValidator{
&typeValidator{
Type: spec.StringOrArray([]string{items.Type}),
Nullable: items.Nullable,
Format: items.Format,
In: in,
Path: path,
},
iv.stringValidator(),
iv.formatValidator(),
iv.numberValidator(),
iv.sliceValidator(),
iv.commonValidator(),
}
return iv
}
func (i *itemsValidator) Validate(index int, data interface{}) *Result {
tpe := reflect.TypeOf(data)
kind := tpe.Kind()
mainResult := new(Result)
path := fmt.Sprintf("%s.%d", i.path, index)
for _, validator := range i.validators {
validator.SetPath(path)
if validator.Applies(i.root, kind) {
result := validator.Validate(data)
mainResult.Merge(result)
mainResult.Inc()
if result != nil && result.HasErrors() {
return mainResult
}
}
}
return mainResult
}
func (i *itemsValidator) commonValidator() valueValidator {
return &basicCommonValidator{
In: i.in,
Default: i.items.Default,
Enum: i.items.Enum,
}
}
func (i *itemsValidator) sliceValidator() valueValidator {
return &basicSliceValidator{
In: i.in,
Default: i.items.Default,
MaxItems: i.items.MaxItems,
MinItems: i.items.MinItems,
UniqueItems: i.items.UniqueItems,
Source: i.root,
Items: i.items.Items,
KnownFormats: i.KnownFormats,
}
}
func (i *itemsValidator) numberValidator() valueValidator {
return &numberValidator{
In: i.in,
Default: i.items.Default,
MultipleOf: i.items.MultipleOf,
Maximum: i.items.Maximum,
ExclusiveMaximum: i.items.ExclusiveMaximum,
Minimum: i.items.Minimum,
ExclusiveMinimum: i.items.ExclusiveMinimum,
Type: i.items.Type,
Format: i.items.Format,
}
}
func (i *itemsValidator) stringValidator() valueValidator {
return &stringValidator{
In: i.in,
Default: i.items.Default,
MaxLength: i.items.MaxLength,
MinLength: i.items.MinLength,
Pattern: i.items.Pattern,
AllowEmptyValue: false,
}
}
func (i *itemsValidator) formatValidator() valueValidator {
return &formatValidator{
In: i.in,
//Default: i.items.Default,
Format: i.items.Format,
KnownFormats: i.KnownFormats,
}
}
type basicCommonValidator struct {
Path string
In string
Default interface{}
Enum []interface{}
}
func (b *basicCommonValidator) SetPath(path string) {
b.Path = path
}
func (b *basicCommonValidator) Applies(source interface{}, kind reflect.Kind) bool {
switch source.(type) {
case *spec.Parameter, *spec.Schema, *spec.Header:
return true
}
return false
}
func (b *basicCommonValidator) Validate(data interface{}) (res *Result) {
if len(b.Enum) > 0 {
for _, enumValue := range b.Enum {
actualType := reflect.TypeOf(enumValue)
if actualType != nil { // Safeguard
expectedValue := reflect.ValueOf(data)
if expectedValue.IsValid() && expectedValue.Type().ConvertibleTo(actualType) {
if reflect.DeepEqual(expectedValue.Convert(actualType).Interface(), enumValue) {
return nil
}
}
}
}
return errorHelp.sErr(errors.EnumFail(b.Path, b.In, data, b.Enum))
}
return nil
}
// A HeaderValidator has very limited subset of validations to apply
type HeaderValidator struct {
name string
header *spec.Header
validators []valueValidator
KnownFormats strfmt.Registry
}
// NewHeaderValidator creates a new header validator object
func NewHeaderValidator(name string, header *spec.Header, formats strfmt.Registry) *HeaderValidator {
p := &HeaderValidator{name: name, header: header, KnownFormats: formats}
p.validators = []valueValidator{
&typeValidator{
Type: spec.StringOrArray([]string{header.Type}),
Nullable: header.Nullable,
Format: header.Format,
In: "header",
Path: name,
},
p.stringValidator(),
p.formatValidator(),
p.numberValidator(),
p.sliceValidator(),
p.commonValidator(),
}
return p
}
// Validate the value of the header against its schema
func (p *HeaderValidator) Validate(data interface{}) *Result {
result := new(Result)
tpe := reflect.TypeOf(data)
kind := tpe.Kind()
for _, validator := range p.validators {
if validator.Applies(p.header, kind) {
if err := validator.Validate(data); err != nil {
result.Merge(err)
if err.HasErrors() {
return result
}
}
}
}
return nil
}
func (p *HeaderValidator) commonValidator() valueValidator {
return &basicCommonValidator{
Path: p.name,
In: "response",
Default: p.header.Default,
Enum: p.header.Enum,
}
}
func (p *HeaderValidator) sliceValidator() valueValidator {
return &basicSliceValidator{
Path: p.name,
In: "response",
Default: p.header.Default,
MaxItems: p.header.MaxItems,
MinItems: p.header.MinItems,
UniqueItems: p.header.UniqueItems,
Items: p.header.Items,
Source: p.header,
KnownFormats: p.KnownFormats,
}
}
func (p *HeaderValidator) numberValidator() valueValidator {
return &numberValidator{
Path: p.name,
In: "response",
Default: p.header.Default,
MultipleOf: p.header.MultipleOf,
Maximum: p.header.Maximum,
ExclusiveMaximum: p.header.ExclusiveMaximum,
Minimum: p.header.Minimum,
ExclusiveMinimum: p.header.ExclusiveMinimum,
Type: p.header.Type,
Format: p.header.Format,
}
}
func (p *HeaderValidator) stringValidator() valueValidator {
return &stringValidator{
Path: p.name,
In: "response",
Default: p.header.Default,
Required: true,
MaxLength: p.header.MaxLength,
MinLength: p.header.MinLength,
Pattern: p.header.Pattern,
AllowEmptyValue: false,
}
}
func (p *HeaderValidator) formatValidator() valueValidator {
return &formatValidator{
Path: p.name,
In: "response",
//Default: p.header.Default,
Format: p.header.Format,
KnownFormats: p.KnownFormats,
}
}
// A ParamValidator has very limited subset of validations to apply
type ParamValidator struct {
param *spec.Parameter
validators []valueValidator
KnownFormats strfmt.Registry
}
// NewParamValidator creates a new param validator object
func NewParamValidator(param *spec.Parameter, formats strfmt.Registry) *ParamValidator {
p := &ParamValidator{param: param, KnownFormats: formats}
p.validators = []valueValidator{
&typeValidator{
Type: spec.StringOrArray([]string{param.Type}),
Nullable: param.Nullable,
Format: param.Format,
In: param.In,
Path: param.Name,
},
p.stringValidator(),
p.formatValidator(),
p.numberValidator(),
p.sliceValidator(),
p.commonValidator(),
}
return p
}
// Validate the data against the description of the parameter
func (p *ParamValidator) Validate(data interface{}) *Result {
result := new(Result)
tpe := reflect.TypeOf(data)
kind := tpe.Kind()
// TODO: validate type
for _, validator := range p.validators {
if validator.Applies(p.param, kind) {
if err := validator.Validate(data); err != nil {
result.Merge(err)
if err.HasErrors() {
return result
}
}
}
}
return nil
}
func (p *ParamValidator) commonValidator() valueValidator {
return &basicCommonValidator{
Path: p.param.Name,
In: p.param.In,
Default: p.param.Default,
Enum: p.param.Enum,
}
}
func (p *ParamValidator) sliceValidator() valueValidator {
return &basicSliceValidator{
Path: p.param.Name,
In: p.param.In,
Default: p.param.Default,
MaxItems: p.param.MaxItems,
MinItems: p.param.MinItems,
UniqueItems: p.param.UniqueItems,
Items: p.param.Items,
Source: p.param,
KnownFormats: p.KnownFormats,
}
}
func (p *ParamValidator) numberValidator() valueValidator {
return &numberValidator{
Path: p.param.Name,
In: p.param.In,
Default: p.param.Default,
MultipleOf: p.param.MultipleOf,
Maximum: p.param.Maximum,
ExclusiveMaximum: p.param.ExclusiveMaximum,
Minimum: p.param.Minimum,
ExclusiveMinimum: p.param.ExclusiveMinimum,
Type: p.param.Type,
Format: p.param.Format,
}
}
func (p *ParamValidator) stringValidator() valueValidator {
return &stringValidator{
Path: p.param.Name,
In: p.param.In,
Default: p.param.Default,
AllowEmptyValue: p.param.AllowEmptyValue,
Required: p.param.Required,
MaxLength: p.param.MaxLength,
MinLength: p.param.MinLength,
Pattern: p.param.Pattern,
}
}
func (p *ParamValidator) formatValidator() valueValidator {
return &formatValidator{
Path: p.param.Name,
In: p.param.In,
//Default: p.param.Default,
Format: p.param.Format,
KnownFormats: p.KnownFormats,
}
}
type basicSliceValidator struct {
Path string
In string
Default interface{}
MaxItems *int64
MinItems *int64
UniqueItems bool
Items *spec.Items
Source interface{}
itemsValidator *itemsValidator
KnownFormats strfmt.Registry
}
func (s *basicSliceValidator) SetPath(path string) {
s.Path = path
}
func (s *basicSliceValidator) Applies(source interface{}, kind reflect.Kind) bool {
switch source.(type) {
case *spec.Parameter, *spec.Items, *spec.Header:
return kind == reflect.Slice
}
return false
}
func (s *basicSliceValidator) Validate(data interface{}) *Result {
val := reflect.ValueOf(data)
size := int64(val.Len())
if s.MinItems != nil {
if err := MinItems(s.Path, s.In, size, *s.MinItems); err != nil {
return errorHelp.sErr(err)
}
}
if s.MaxItems != nil {
if err := MaxItems(s.Path, s.In, size, *s.MaxItems); err != nil {
return errorHelp.sErr(err)
}
}
if s.UniqueItems {
if err := UniqueItems(s.Path, s.In, data); err != nil {
return errorHelp.sErr(err)
}
}
if s.itemsValidator == nil && s.Items != nil {
s.itemsValidator = newItemsValidator(s.Path, s.In, s.Items, s.Source, s.KnownFormats)
}
if s.itemsValidator != nil {
for i := 0; i < int(size); i++ {
ele := val.Index(i)
if err := s.itemsValidator.Validate(i, ele.Interface()); err != nil && err.HasErrors() {
return err
}
}
}
return nil
}
/* unused
func (s *basicSliceValidator) hasDuplicates(value reflect.Value, size int) bool {
dict := make(map[interface{}]struct{})
for i := 0; i < size; i++ {
ele := value.Index(i)
if _, ok := dict[ele.Interface()]; ok {
return true
}
dict[ele.Interface()] = struct{}{}
}
return false
}
*/
type numberValidator struct {
Path string
In string
Default interface{}
MultipleOf *float64
Maximum *float64
ExclusiveMaximum bool
Minimum *float64
ExclusiveMinimum bool
// Allows for more accurate behavior regarding integers
Type string
Format string
}
func (n *numberValidator) SetPath(path string) {
n.Path = path
}
func (n *numberValidator) Applies(source interface{}, kind reflect.Kind) bool {
switch source.(type) {
case *spec.Parameter, *spec.Schema, *spec.Items, *spec.Header:
isInt := kind >= reflect.Int && kind <= reflect.Uint64
isFloat := kind == reflect.Float32 || kind == reflect.Float64
r := isInt || isFloat
debugLog("schema props validator for %q applies %t for %T (kind: %v) isInt=%t, isFloat=%t\n", n.Path, r, source, kind, isInt, isFloat)
return r
}
debugLog("schema props validator for %q applies %t for %T (kind: %v)\n", n.Path, false, source, kind)
return false
}
// Validate provides a validator for generic JSON numbers,
//
// By default, numbers are internally represented as float64.
// Formats float, or float32 may alter this behavior by mapping to float32.
// A special validation process is followed for integers, with optional "format":
// this is an attempt to provide a validation with native types.
//
// NOTE: since the constraint specified (boundary, multipleOf) is unmarshalled
// as float64, loss of information remains possible (e.g. on very large integers).
//
// Since this value directly comes from the unmarshalling, it is not possible
// at this stage of processing to check further and guarantee the correctness of such values.
//
// Normally, the JSON Number.MAX_SAFE_INTEGER (resp. Number.MIN_SAFE_INTEGER)
// would check we do not get such a loss.
//
// If this is the case, replace AddErrors() by AddWarnings() and IsValid() by !HasWarnings().
//
// TODO: consider replacing boundary check errors by simple warnings.
//
// TODO: default boundaries with MAX_SAFE_INTEGER are not checked (specific to json.Number?)
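//
// A minimal illustration (the values are assumptions, not from the test suite):
//
//	max := 150.0
//	v := numberValidator{Path: "age", In: "query", Type: "integer", Format: "int32", Maximum: &max}
//	res := v.Validate(42) // valid: 42 fits int32 and 42 <= 150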
func (n *numberValidator) Validate(val interface{}) *Result {
res := new(Result)
resMultiple := new(Result)
resMinimum := new(Result)
resMaximum := new(Result)
// Used only to attempt to validate constraint on value,
// even though value or constraint specified do not match type and format
data := valueHelp.asFloat64(val)
// Is the provided value within the range of the specified numeric type and format?
res.AddErrors(IsValueValidAgainstRange(val, n.Type, n.Format, "Checked", n.Path))
// nolint: dupl
if n.MultipleOf != nil {
// Is the constraint specifier within the range of the specific numeric type and format?
resMultiple.AddErrors(IsValueValidAgainstRange(*n.MultipleOf, n.Type, n.Format, "MultipleOf", n.Path))
if resMultiple.IsValid() {
// Constraint validated with compatible types
if err := MultipleOfNativeType(n.Path, n.In, val, *n.MultipleOf); err != nil {
resMultiple.Merge(errorHelp.sErr(err))
}
} else {
// Constraint nevertheless validated, converted as general number
if err := MultipleOf(n.Path, n.In, data, *n.MultipleOf); err != nil {
resMultiple.Merge(errorHelp.sErr(err))
}
}
}
// nolint: dupl
if n.Maximum != nil {
// Is the constraint specifier within the range of the specific numeric type and format?
resMaximum.AddErrors(IsValueValidAgainstRange(*n.Maximum, n.Type, n.Format, "Maximum boundary", n.Path))
if resMaximum.IsValid() {
// Constraint validated with compatible types
if err := MaximumNativeType(n.Path, n.In, val, *n.Maximum, n.ExclusiveMaximum); err != nil {
resMaximum.Merge(errorHelp.sErr(err))
}
} else {
// Constraint nevertheless validated, converted as general number
if err := Maximum(n.Path, n.In, data, *n.Maximum, n.ExclusiveMaximum); err != nil {
resMaximum.Merge(errorHelp.sErr(err))
}
}
}
// nolint: dupl
if n.Minimum != nil {
// Is the constraint specifier within the range of the specific numeric type and format?
resMinimum.AddErrors(IsValueValidAgainstRange(*n.Minimum, n.Type, n.Format, "Minimum boundary", n.Path))
if resMinimum.IsValid() {
// Constraint validated with compatible types
if err := MinimumNativeType(n.Path, n.In, val, *n.Minimum, n.ExclusiveMinimum); err != nil {
resMinimum.Merge(errorHelp.sErr(err))
}
} else {
// Constraint nevertheless validated, converted as general number
if err := Minimum(n.Path, n.In, data, *n.Minimum, n.ExclusiveMinimum); err != nil {
resMinimum.Merge(errorHelp.sErr(err))
}
}<|fim▁hole|>}
type stringValidator struct {
Default interface{}
Required bool
AllowEmptyValue bool
MaxLength *int64
MinLength *int64
Pattern string
Path string
In string
}
func (s *stringValidator) SetPath(path string) {
s.Path = path
}
func (s *stringValidator) Applies(source interface{}, kind reflect.Kind) bool {
switch source.(type) {
case *spec.Parameter, *spec.Schema, *spec.Items, *spec.Header:
r := kind == reflect.String
debugLog("string validator for %q applies %t for %T (kind: %v)\n", s.Path, r, source, kind)
return r
}
debugLog("string validator for %q applies %t for %T (kind: %v)\n", s.Path, false, source, kind)
return false
}
func (s *stringValidator) Validate(val interface{}) *Result {
data, ok := val.(string)
if !ok {
return errorHelp.sErr(errors.InvalidType(s.Path, s.In, stringType, val))
}
if s.Required && !s.AllowEmptyValue && (s.Default == nil || s.Default == "") {
if err := RequiredString(s.Path, s.In, data); err != nil {
return errorHelp.sErr(err)
}
}
if s.MaxLength != nil {
if err := MaxLength(s.Path, s.In, data, *s.MaxLength); err != nil {
return errorHelp.sErr(err)
}
}
if s.MinLength != nil {
if err := MinLength(s.Path, s.In, data, *s.MinLength); err != nil {
return errorHelp.sErr(err)
}
}
if s.Pattern != "" {
if err := Pattern(s.Path, s.In, data, s.Pattern); err != nil {
return errorHelp.sErr(err)
}
}
return nil
}<|fim▁end|> | }
res.Merge(resMultiple, resMinimum, resMaximum)
res.Inc()
return res |
<|file_name|>EqualTreePartition.py<|end_file_name|><|fim▁begin|>__source__ = 'https://leetcode.com/problems/equal-tree-partition/discuss/'
# Time: O()
# Space: O()
#
# Description: Leetcode # 663. Equal Tree Partition
#
# Given a binary tree with n nodes,
# your task is to check if it's possible to partition the tree into two trees
# which have equal sums of values after removing exactly one edge from the original tree.
#
# Example 1:
# Input:
# 5
# / \
# 10 10
# / \
# 2 3
#
# Output: True
# Explanation:
# 5
# /
# 10
#
# Sum: 15
#
# 10
# / \
# 2 3
#
# Sum: 15
# Example 2:
# Input:
# 1
# / \
# 2 10
# / \
# 2 20
#
# Output: False
# Explanation: You can't split the tree into two trees with equal sum after removing exactly one edge on the tree.
# Note:
# The range of tree node value is in the range of [-100000, 100000].
# 1 <= n <= 10000
#
# Companies
# Amazon
# Related Topics
# Tree
#
import unittest
class Solution(object):
pass # your function here
class TestMethods(unittest.TestCase):
def test_Local(self):
self.assertEqual(1, 1)
if __name__ == '__main__':
unittest.main()
Java = '''
# Thought: https://leetcode.com/problems/equal-tree-partition/solution/
/**
* Definition for a binary tree node.
* public class TreeNode {
* int val;
* TreeNode left;
* TreeNode right;
* TreeNode(int x) { val = x; }
* }
*/
# 7ms 98.08%
class Solution {
int judge=0;
public boolean checkEqualTree(TreeNode root) {
if(root==null)
return false;
int s=sum(root);
if(s%2!=0)
return false;
check(root.left,s/2);
check(root.right,s/2);
if(judge==1)
return true;
return false;
}
private int sum(TreeNode root){
if(root==null)
return 0;
return root.val+sum(root.left)+sum(root.right);
}
private int check(TreeNode root,int half){
if(root==null)
return 0;
int s=root.val+check(root.left,half)+check(root.right,half);
if(s==half)
judge=1;
return s;
}
}
# hash map
# 14ms 31.49%
class Solution {
public boolean checkEqualTree(TreeNode root) {
Map<Integer, Integer> map = new HashMap<Integer, Integer>();
int sum = getsum(root, map);
if(sum == 0)return map.getOrDefault(sum, 0) > 1;
return sum%2 == 0 && map.containsKey(sum/2);
}
public int getsum(TreeNode root, Map<Integer, Integer> map ){
if(root == null)return 0;
int cur = root.val + getsum(root.left, map) + getsum(root.right, map);
map.put(cur, map.getOrDefault(cur,0) + 1);
return cur;
}
}
'''
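
# A minimal Python sketch of the subtree-sum approach used by the Java
# solutions above; this TreeNode stand-in and the helper below are additions
# for illustration, not part of the LeetCode-provided definitions.
class TreeNode(object):
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None


def check_equal_tree(root):
    sums = []

    def subtree(node):
        if not node:
            return 0
        s = node.val + subtree(node.left) + subtree(node.right)
        sums.append(s)
        return s

    total = subtree(root)
    sums.pop()  # drop the whole-tree sum: one edge must actually be removed
    return total % 2 == 0 and total // 2 in sums


# Example 1 from the description: cutting the root's right edge yields 15/15.
_root = TreeNode(5)
_root.left, _root.right = TreeNode(10), TreeNode(10)
_root.right.left, _root.right.right = TreeNode(2), TreeNode(3)
assert check_equal_tree(_root)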
<|file_name|>data.rs<|end_file_name|>
use std::net::TcpStream;

pub trait ServerData
{
    fn process_request_data(&mut self, request: TcpStream);
}
<|file_name|>datastructs.py<|end_file_name|>
"""
.. module:: mlpy.auxiliary.datastructs
:platform: Unix, Windows
:synopsis: Provides data structure implementations.
.. moduleauthor:: Astrid Jackson <[email protected]>
"""
from __future__ import division, print_function, absolute_import
import heapq
import numpy as np
from abc import ABCMeta, abstractmethod
class Array(object):
"""The managed array class.
The managed array class pre-allocates memory to the given size
automatically resizing as needed.
Parameters
----------
size : int
The size of the array.
Examples
--------
>>> a = Array(5)
>>> a[0] = 3
>>> a[1] = 6
    Retrieving an element:
    >>> a[0]
    3.0
    >>> a[2]
    0.0
Finding the length of the array:
>>> len(a)
2
"""
def __init__(self, size):
self._data = np.zeros((size,))
self._capacity = size
self._size = 0
def __setitem__(self, index, value):
"""Set the the array at the index to the given value.
Parameters
----------
index : int
The index into the array.
value :
The value to set the array to.
"""
if index >= self._size:
if self._size == self._capacity:
self._capacity *= 2
new_data = np.zeros((self._capacity,))
new_data[:self._size] = self._data
self._data = new_data
self._size += 1
self._data[index] = value
def __getitem__(self, index):
"""Get the value at the given index.
Parameters
----------
index : int
The index into the array.
"""
return self._data[index]
def __len__(self):
"""The length of the array.
Returns
        -------
        int :
            The size of the array
        """
return self._size
class Point2D(object):
"""The 2d-point class.
The 2d-point class is a container for positions
in a 2d-coordinate system.
Parameters
----------
x : float, optional
The x-position in a 2d-coordinate system. Default is 0.0.
y : float, optional
The y-position in a 2d-coordinate system. Default is 0.0.
Attributes
----------
x : float
The x-position in a 2d-coordinate system.
y : float
The y-position in a 2d-coordinate system.
"""
__slots__ = ['x', 'y']
def __init__(self, x=0.0, y=0.0):
self.x = x
self.y = y
class Point3D(object):
"""
The 3d-point class.
The 3d-point class is a container for positions
in a 3d-coordinate system.
Parameters
----------
x : float, optional
        The x-position in a 3d-coordinate system. Default is 0.0.
    y : float, optional
        The y-position in a 3d-coordinate system. Default is 0.0.
z : float, optional
The z-position in a 3d-coordinate system. Default is 0.0.
Attributes
----------
x : float
        The x-position in a 3d-coordinate system.
    y : float
        The y-position in a 3d-coordinate system.
z : float
The z-position in a 3d-coordinate system.
"""
__slots__ = ['x', 'y', 'z']
def __init__(self, x=0.0, y=0.0, z=0.0):
self.x = x
self.y = y
self.z = z
class Vector3D(Point3D):
"""The 3d-vector class.
.. todo::
Implement vector functionality.
Parameters
----------
x : float, optional
        The x-position in a 3d-coordinate system. Default is 0.0.
    y : float, optional
        The y-position in a 3d-coordinate system. Default is 0.0.
z : float, optional
The z-position in a 3d-coordinate system. Default is 0.0.
Attributes
----------
x : float
        The x-position in a 3d-coordinate system.
    y : float
        The y-position in a 3d-coordinate system.
z : float
The z-position in a 3d-coordinate system.
"""
def __init__(self, x=0.0, y=0.0, z=0.0):
super(Vector3D, self).__init__(x, y, z)
class Queue(object):
"""The abstract queue base class.
The queue class handles core functionality common for
any type of queue. All queues inherit from the queue
base class.
See Also
--------
:class:`FIFOQueue`, :class:`PriorityQueue`
"""
__metaclass__ = ABCMeta
def __init__(self):
self._queue = []
def __len__(self):
return len(self._queue)
def __contains__(self, item):
try:
self._queue.index(item)
return True
except Exception:
return False
def __iter__(self):
return iter(self._queue)
def __str__(self):
return '[' + ', '.join('{}'.format(el) for el in self._queue) + ']'
def __repr__(self):
return ', '.join('{}'.format(el) for el in self._queue)
@abstractmethod
def push(self, item):
"""Push a new element on the queue
Parameters
----------
item :
The element to push on the queue
"""
raise NotImplementedError
@abstractmethod
def pop(self):
"""Pop an element from the queue."""
raise NotImplementedError
def empty(self):
"""Check if the queue is empty.
Returns
-------
bool :
Whether the queue is empty.
"""
return len(self._queue) <= 0
def extend(self, items):
"""Extend the queue by a number of elements.
Parameters
----------
items : list
A list of items.
"""
for item in items:
self.push(item)
def get(self, item):
"""Return the element in the queue identical to `item`.
Parameters
----------
item :
The element to search for.
Returns
-------
The element in the queue identical to `item`. If the element
was not found, None is returned.
"""
try:
index = self._queue.index(item)
return self._queue[index]
except Exception:
return None
def remove(self, item):
"""Remove an element from the queue.
Parameters
----------
item :
The element to remove.
"""
self._queue.remove(item)
class FIFOQueue(Queue):
"""The first-in-first-out (FIFO) queue.
In a FIFO queue the first element added to the queue
is the first element to be removed.
Examples
--------
>>> q = FIFOQueue()
>>> q.push(5)
>>> q.extend([1, 3, 7])
>>> print q
[5, 1, 3, 7]
Retrieving an element:
>>> q.pop()
5
Removing an element:
>>> q.remove(3)
>>> print q
[1, 7]
Get the element in the queue identical to the given item:
>>> q.get(7)
7
Check if the queue is empty:
>>> q.empty()
False
Loop over the elements in the queue:
>>> for x in q:
>>> print x
1
7
Check if an element is in the queue:
>>> if 7 in q:
>>> print "yes"
yes
See Also
--------
:class:`PriorityQueue`
"""
def __init__(self):
super(FIFOQueue, self).__init__()
def push(self, item):
"""Push an element to the end of the queue.
Parameters
----------
item :
The element to append.
"""
self._queue.append(item)
def pop(self):
"""Return the element at the front of the queue.
Returns
-------
The first element in the queue.
"""
return self._queue.pop(0)
def extend(self, items):
"""Append a list of elements at the end of the queue.
Parameters
----------
items : list
List of elements.
"""
self._queue.extend(items)
class PriorityQueue(Queue):
"""
The priority queue.
In a priority queue each element has a priority associated with it. An element
with high priority (i.e., smallest value) is served before an element with low priority
(i.e., largest value). The priority queue is implemented with a heap.
Parameters
----------
func : callable
A callback function handling the priority. By default the priority
is the value of the element.
Examples
--------
>>> q = PriorityQueue()
>>> q.push(5)
>>> q.extend([1, 3, 7])
>>> print q
[(1,1), (5,5), (3,3), (7,7)]
Retrieving the element with highest priority:
>>> q.pop()
1
Removing an element:
>>> q.remove((3, 3))
>>> print q
[(5,5), (7,7)]
Get the element in the queue identical to the given item:
>>> q.get(7)
7
Check if the queue is empty:
>>> q.empty()
False
Loop over the elements in the queue:
>>> for x in q:
>>> print x
(5, 5)
(7, 7)
Check if an element is in the queue:
>>> if 7 in q:
>>> print "yes"
yes
See Also
--------
:class:`FIFOQueue`
"""
def __init__(self, func=lambda x: x):
super(PriorityQueue, self).__init__()
self.func = func
def __contains__(self, item):
for _, element in self._queue:
if item == element:
return True
return False
def __str__(self):
return '[' + ', '.join('({},{})'.format(*el) for el in self._queue) + ']'
def push(self, item):
"""Push an element on the priority queue.
The element is pushed on the priority queue according
to its priority.
Parameters
----------
item :
The element to push on the queue.
"""
heapq.heappush(self._queue, (self.func(item), item))
def pop(self):
"""Get the element with the highest priority.
Get the element with the highest priority (i.e., smallest value).
Returns
-------
The element with the highest priority.
"""
return heapq.heappop(self._queue)[1]
def get(self, item):
"""Return the element in the queue identical to `item`.
Parameters
----------
item :
The element to search for.
Returns
-------
The element in the queue identical to `item`. If the element
was not found, None is returned.
"""
for _, element in self._queue:
if item == element:
return element
return None
def remove(self, item):
"""Remove an element from the queue.
Parameters
----------
item :
The element to remove.
"""
super(PriorityQueue, self).remove(item)
        heapq.heapify(self._queue)
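if __name__ == '__main__':
    # Usage sketch: the `func` parameter customizes priority; negating the
    # value turns the min-heap into a max-queue.
    q = PriorityQueue(func=lambda x: -x)
    q.extend([5, 1, 3, 7])
    assert q.pop() == 7 and q.pop() == 5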
<|file_name|>usuarios.js<|end_file_name|>
module.exports = function(app){
var validacao = require('../validacoes/usuarios');
var Usuario = app.models.usuarios;
var UsuarioController = {
index: function(req,res){
Usuario.find(function(err,dados){
if(err){
req.flash('erro', 'Erro ao buscar usuários: '+err);
res.redirect('/usuarios');
}else{
res.render('usuarios/index', {lista: dados});
}
});
},
		create: function(req,res){
			res.render('usuarios/create', {user: new Usuario()});
},
post: function(req,res){
if(validacao(req,res)){
var model = new Usuario();
model.nome = req.body.nome;
model.email = req.body.email;
model.telefone = req.body.telefone;
model.password = model.generateHash(req.body.password);
Usuario.findOne({'email': model.email}, function(err,data){
if(data){
req.flash('erro', 'E-mail encontra-se cadastrado, tente outro.');
res.render('usuarios/create', {user: model});
}else{
model.save(function(err){
if(err){
req.flash('erro', 'Erro ao cadastrar: '+err);
res.render('usuarios/create', {user: req.body});
}else{
req.flash('info', 'Registro cadastrado com sucesso!');
res.redirect('/usuarios');
}
});
}
});
}else{
res.render('usuarios/create', {user: req.body});
}
},
show: function(req,res){
Usuario.findById(req.params.id, function(err,dados){
if(err){
req.flash('erro', 'Erro ao visualizar usuário: '+err);
res.redirect('/usuarios');
}else{
res.render('usuarios/show', {dados: dados});
}
});
},
delete: function(req,res){
Usuario.remove({_id: req.params.id}, function(err){
if(err){
req.flash('erro', 'Erro ao excluir usuário: '+err);
res.redirect('/usuarios');
}else{
req.flash('info', 'Registro excluído com sucesso!');
res.redirect('/usuarios');
}
});
},
edit: function(req,res){
Usuario.findById(req.params.id, function(err,data){
if(err){
req.flash('erro', 'Erro ao editar: '+err);
res.redirect('/usuarios');
}else{
res.render('usuarios/edit', {dados: data});
}
});
},
update: function(req,res){
if(validacao(req,res)){
Usuario.findById(req.params.id, function(err,data){
var model = data;
model.nome = req.body.nome;
model.telefone = req.body.telefone;
model.save(function(err){
if(err){
req.flash('erro', 'Erro ao editar: '+err);
res.render('usuarios/edit', {dados: model});
}else{
req.flash('info', 'Registro atualizado com sucesso!');
res.redirect('/usuarios');
}
});
});
}else{
res.render('usuarios/edit', {user: req.body});
}
}
}
return UsuarioController;
}
<|file_name|>_application_gateways_operations.py<|end_file_name|>
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models

if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class ApplicationGatewaysOperations(object):
"""ApplicationGatewaysOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_08_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _delete_initial(
self,
resource_group_name, # type: str
application_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
application_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified application gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param application_gateway_name: The name of the application gateway.
:type application_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
application_gateway_name=application_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore
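    # Usage sketch (hypothetical wiring; assumes an authenticated
    # NetworkManagementClient whose `application_gateways` attribute is an
    # instance of this class):
    #
    #   poller = client.application_gateways.begin_delete(
    #       resource_group_name='my-rg',
    #       application_gateway_name='my-agw')
    #   poller.result()  # blocks until the long-running delete completes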
def get(
self,
resource_group_name, # type: str
application_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.ApplicationGateway"
"""Gets the specified application gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param application_gateway_name: The name of the application gateway.
:type application_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplicationGateway, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_08_01.models.ApplicationGateway
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ApplicationGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name, # type: str
application_gateway_name, # type: str
parameters, # type: "_models.ApplicationGateway"
**kwargs # type: Any
):
# type: (...) -> "_models.ApplicationGateway"
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'ApplicationGateway')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ApplicationGateway', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ApplicationGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
application_gateway_name, # type: str
parameters, # type: "_models.ApplicationGateway"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.ApplicationGateway"]
"""Creates or updates the specified application gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param application_gateway_name: The name of the application gateway.
:type application_gateway_name: str
:param parameters: Parameters supplied to the create or update application gateway operation.
:type parameters: ~azure.mgmt.network.v2019_08_01.models.ApplicationGateway
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ApplicationGateway or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_08_01.models.ApplicationGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
application_gateway_name=application_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ApplicationGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore
def _update_tags_initial(
self,
resource_group_name, # type: str
application_gateway_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> "_models.ApplicationGateway"
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._update_tags_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ApplicationGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore
def begin_update_tags(
self,
resource_group_name, # type: str
application_gateway_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.ApplicationGateway"]
"""Updates the specified application gateway tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param application_gateway_name: The name of the application gateway.
:type application_gateway_name: str
:param parameters: Parameters supplied to update application gateway tags.
:type parameters: ~azure.mgmt.network.v2019_08_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ApplicationGateway or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_08_01.models.ApplicationGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._update_tags_initial(
resource_group_name=resource_group_name,
application_gateway_name=application_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ApplicationGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore
def list(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.ApplicationGatewayListResult"]
"""Lists all application gateways in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ApplicationGatewayListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_08_01.models.ApplicationGatewayListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGatewayListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ApplicationGatewayListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways'} # type: ignore
def list_all(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.ApplicationGatewayListResult"]
"""Gets all the application gateways in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ApplicationGatewayListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_08_01.models.ApplicationGatewayListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGatewayListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ApplicationGatewayListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGateways'} # type: ignore
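    # Usage sketch (same hypothetical `client` as above): ItemPaged follows
    # the service's nextLink transparently, so callers simply iterate.
    #
    #   for gateway in client.application_gateways.list_all():
    #       print(gateway.name)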
def _start_initial(
self,
resource_group_name, # type: str
application_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
# Construct URL
url = self._start_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/start'} # type: ignore
def begin_start(
self,
resource_group_name, # type: str
application_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Starts the specified application gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param application_gateway_name: The name of the application gateway.
:type application_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._start_initial(
resource_group_name=resource_group_name,
application_gateway_name=application_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/start'} # type: ignore
def _stop_initial(
self,
resource_group_name, # type: str
application_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
# Construct URL
url = self._stop_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/stop'} # type: ignore
def begin_stop(
self,
resource_group_name, # type: str
application_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Stops the specified application gateway in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param application_gateway_name: The name of the application gateway.
:type application_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._stop_initial(
resource_group_name=resource_group_name,
application_gateway_name=application_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/stop'} # type: ignore
def _backend_health_initial(
self,
resource_group_name, # type: str
application_gateway_name, # type: str
expand=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> Optional["_models.ApplicationGatewayBackendHealth"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ApplicationGatewayBackendHealth"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
accept = "application/json"
# Construct URL
url = self._backend_health_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ApplicationGatewayBackendHealth', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_backend_health_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/backendhealth'} # type: ignore
def begin_backend_health(
self,
resource_group_name, # type: str
application_gateway_name, # type: str
expand=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.ApplicationGatewayBackendHealth"]
"""Gets the backend health of the specified application gateway in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param application_gateway_name: The name of the application gateway.
:type application_gateway_name: str
:param expand: Expands BackendAddressPool and BackendHttpSettings referenced in backend health.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ApplicationGatewayBackendHealth or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_08_01.models.ApplicationGatewayBackendHealth]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGatewayBackendHealth"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._backend_health_initial(
resource_group_name=resource_group_name,
application_gateway_name=application_gateway_name,
expand=expand,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ApplicationGatewayBackendHealth', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_backend_health.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/backendhealth'} # type: ignore
def _backend_health_on_demand_initial(
self,
resource_group_name, # type: str
application_gateway_name, # type: str
probe_request, # type: "_models.ApplicationGatewayOnDemandProbe"
expand=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> Optional["_models.ApplicationGatewayBackendHealthOnDemand"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ApplicationGatewayBackendHealthOnDemand"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._backend_health_on_demand_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(probe_request, 'ApplicationGatewayOnDemandProbe')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ApplicationGatewayBackendHealthOnDemand', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_backend_health_on_demand_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/getBackendHealthOnDemand'} # type: ignore
def begin_backend_health_on_demand(
self,
resource_group_name, # type: str
application_gateway_name, # type: str
probe_request, # type: "_models.ApplicationGatewayOnDemandProbe"
expand=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.ApplicationGatewayBackendHealthOnDemand"]
"""Gets the backend health for given combination of backend pool and http setting of the specified
application gateway in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param application_gateway_name: The name of the application gateway.
:type application_gateway_name: str
:param probe_request: Request body for on-demand test probe operation.
:type probe_request: ~azure.mgmt.network.v2019_08_01.models.ApplicationGatewayOnDemandProbe
:param expand: Expands BackendAddressPool and BackendHttpSettings referenced in backend health.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ApplicationGatewayBackendHealthOnDemand or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_08_01.models.ApplicationGatewayBackendHealthOnDemand]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGatewayBackendHealthOnDemand"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._backend_health_on_demand_initial(
resource_group_name=resource_group_name,
application_gateway_name=application_gateway_name,
probe_request=probe_request,
expand=expand,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ApplicationGatewayBackendHealthOnDemand', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_backend_health_on_demand.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/getBackendHealthOnDemand'} # type: ignore
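# Usage sketch for the long-running operation above. Not part of the generated
# client: it assumes a NetworkManagementClient instance named "client" and
# hypothetical resource names; the probe fields shown are illustrative only.
#
#     probe = _models.ApplicationGatewayOnDemandProbe(
#         protocol="Http",  # assumed value
#         path="/",
#         timeout=30,
#     )
#     poller = client.application_gateways.begin_backend_health_on_demand(
#         resource_group_name="my-rg",          # hypothetical
#         application_gateway_name="my-appgw",  # hypothetical
#         probe_request=probe,
#     )
#     health = poller.result()  # blocks until the LRO completes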
def list_available_server_variables(
self,
**kwargs # type: Any
):
# type: (...) -> List[str]
"""Lists all available server variables.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of str, or the result of cls(response)
:rtype: list[str]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
accept = "application/json"
# Construct URL
url = self.list_available_server_variables.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('[str]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_available_server_variables.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableServerVariables'} # type: ignore
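# Usage sketch (not generated code): the same pattern applies to
# list_available_request_headers and list_available_response_headers below,
# which differ only in the URL they hit. Assumes the same hypothetical
# "client" as above; the call returns a plain list of strings.
#
#     for name in client.application_gateways.list_available_server_variables():
#         print(name)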
def list_available_request_headers(
self,
**kwargs # type: Any
):
# type: (...) -> List[str]
"""Lists all available request headers.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of str, or the result of cls(response)
:rtype: list[str]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
accept = "application/json"
# Construct URL
url = self.list_available_request_headers.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('[str]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_available_request_headers.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableRequestHeaders'} # type: ignore
def list_available_response_headers(
self,
**kwargs # type: Any
):
# type: (...) -> List[str]
"""Lists all available response headers.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of str, or the result of cls(response)
:rtype: list[str]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
accept = "application/json"
# Construct URL
url = self.list_available_response_headers.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('[str]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_available_response_headers.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableResponseHeaders'} # type: ignore
def list_available_waf_rule_sets(
self,
**kwargs # type: Any
):
# type: (...) -> "_models.ApplicationGatewayAvailableWafRuleSetsResult"
"""Lists all available web application firewall rule sets.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplicationGatewayAvailableWafRuleSetsResult, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_08_01.models.ApplicationGatewayAvailableWafRuleSetsResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGatewayAvailableWafRuleSetsResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
accept = "application/json"
# Construct URL
url = self.list_available_waf_rule_sets.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ApplicationGatewayAvailableWafRuleSetsResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_available_waf_rule_sets.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableWafRuleSets'} # type: ignore
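# Usage sketch (same hypothetical "client"; attribute names follow the
# ApplicationGatewayAvailableWafRuleSetsResult model, which exposes the rule
# sets under .value):
#
#     result = client.application_gateways.list_available_waf_rule_sets()
#     for rule_set in result.value or []:
#         print(rule_set.name, rule_set.rule_set_type, rule_set.rule_set_version)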
def list_available_ssl_options(
self,
**kwargs # type: Any
):
# type: (...) -> "_models.ApplicationGatewayAvailableSslOptions"
"""Lists available Ssl options for configuring Ssl policy.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplicationGatewayAvailableSslOptions, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_08_01.models.ApplicationGatewayAvailableSslOptions
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGatewayAvailableSslOptions"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
accept = "application/json"
# Construct URL
url = self.list_available_ssl_options.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ApplicationGatewayAvailableSslOptions', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_available_ssl_options.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableSslOptions/default'} # type: ignore
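# Usage sketch (same hypothetical "client"); the returned
# ApplicationGatewayAvailableSslOptions groups the default policy with the
# protocols and cipher suites that can be configured:
#
#     ssl_options = client.application_gateways.list_available_ssl_options()
#     print(ssl_options.default_policy)
#     print(ssl_options.available_protocols)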
def list_available_ssl_predefined_policies(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.ApplicationGatewayAvailableSslPredefinedPolicies"]
"""Lists all SSL predefined policies for configuring Ssl policy.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ApplicationGatewayAvailableSslPredefinedPolicies or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_08_01.models.ApplicationGatewayAvailableSslPredefinedPolicies]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGatewayAvailableSslPredefinedPolicies"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_available_ssl_predefined_policies.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ApplicationGatewayAvailableSslPredefinedPolicies', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_available_ssl_predefined_policies.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableSslOptions/default/predefinedPolicies'} # type: ignore
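# Usage sketch: unlike the plain GET helpers above, this call returns an
# ItemPaged iterator that follows next_link transparently, so it can simply be
# iterated (same hypothetical "client" as above):
#
#     for policy in client.application_gateways.list_available_ssl_predefined_policies():
#         print(policy.name)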
def get_ssl_predefined_policy(
self,
predefined_policy_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.ApplicationGatewaySslPredefinedPolicy"
"""Gets Ssl predefined policy with the specified policy name.
:param predefined_policy_name: Name of Ssl predefined policy.
:type predefined_policy_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplicationGatewaySslPredefinedPolicy, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_08_01.models.ApplicationGatewaySslPredefinedPolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGatewaySslPredefinedPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
accept = "application/json"
# Construct URL
url = self.get_ssl_predefined_policy.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'predefinedPolicyName': self._serialize.url("predefined_policy_name", predefined_policy_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ApplicationGatewaySslPredefinedPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_ssl_predefined_policy.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableSslOptions/default/predefinedPolicies/{predefinedPolicyName}'} # type: ignore<|fim▁end|> | from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports |
<|file_name|>test_persons.py<|end_file_name|><|fim▁begin|>from django.test import TestCase
from journal.tests.factories import StudentFactory
class StudentTestCase(TestCase):
"""Tests for the Student models"""
<|fim▁hole|> def test_student(self):
"""Test to ensure that Students can be created properly"""
student = StudentFactory.build()
self.assertEqual(student.personal_code, '123456')<|fim▁end|> | |
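# A minimal sketch of the StudentFactory this test relies on, assuming
# factory_boy and a Student model with a personal_code field; the real factory
# in journal/tests/factories.py may differ:
#
#     import factory
#
#     class StudentFactory(factory.django.DjangoModelFactory):
#         class Meta:
#             model = 'journal.Student'  # assumed app label / model name
#
#         personal_code = '123456'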
<|file_name|>pokemon-details.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit } from '@angular/core';
import { RouteSegment } from '@angular/router';
import { SharedService } from '../../app/shared';
import { PokemonDetailsService } from './service/pokemon-details.service';
import { KeyValues } from '../../app/shared';
@Component({
moduleId: module.id,
selector: 'app-pokemon-details',
templateUrl: 'pokemon-details.component.html',<|fim▁hole|> pipes: [KeyValues]
})
export class PokemonDetailsComponent implements OnInit {
pokemonDetails = {};
private _routeSegmentParam;
constructor(private _segment: RouteSegment,
private _pokemonDetailsService: PokemonDetailsService,
public _sharedService: SharedService) {}
ngOnInit() {
this._routeSegmentParam = this._segment.parameters;
this._pokemonDetailsService.getPokemonDetails(this._routeSegmentParam.id)
.subscribe((pokemonDetails: any) => {
this.pokemonDetails = pokemonDetails;
this._sharedService.addPokemonImage(this.pokemonDetails, pokemonDetails.id - 1);
console.log(this.pokemonDetails);
});
}
}<|fim▁end|> | styleUrls: ['pokemon-details.component.css'],
providers: [PokemonDetailsService], |
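// A minimal sketch of the injected PokemonDetailsService, assuming the
// Angular 2 RC-era Http API; the endpoint URL is hypothetical and the real
// service in ./service/pokemon-details.service.ts may differ:
//
//     import { Injectable } from '@angular/core';
//     import { Http } from '@angular/http';
//     import 'rxjs/add/operator/map';
//
//     @Injectable()
//     export class PokemonDetailsService {
//       constructor(private _http: Http) {}
//
//       getPokemonDetails(id: string) {
//         // assumed endpoint; returns an Observable the component subscribes to
//         return this._http.get(`https://pokeapi.co/api/v2/pokemon/${id}/`)
//           .map(res => res.json());
//       }
//     }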
<|file_name|>urls.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
from .views import (
semseterResultxlsx,
)
urlpatterns = [
url(r'^semester-xlsx/(?P<collegeCode>\d+)/(?P<branchCode>\d+)/(?P<yearOfJoining>\d+)/(?P<semester>\d+)/$', semseterResultxlsx, name='semseterResultxlsx')
]<|fim▁end|> | from django.conf.urls import url |
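# Usage sketch (not part of the file): reversing the named route above with
# hypothetical values. Note that "semseter" is misspelled, but the name must
# stay as-is because it matches the view imported from .views.
#
#     from django.core.urlresolvers import reverse  # pre-Django-2.0 import path
#
#     path = reverse('semseterResultxlsx', kwargs={
#         'collegeCode': 101, 'branchCode': 5,
#         'yearOfJoining': 2015, 'semester': 3,
#     })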
<|file_name|>checks.go<|end_file_name|><|fim▁begin|>package core
import (
"fmt"
"gopkg.in/Knetic/govaluate.v2"
)
func (o *Eye) registerValidateChecks() {
for _, item := range o.config.Validate {<|fim▁hole|> serviceName, item.Request)
}
} else if len(item.Services) > 0 {
o.registerValidateCheck(item.Name,
item.Services[0], item.Request)
} else {
Log.Info("No service defined for the check %v", item.Name)
}
}
}
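// Note: when a Validate entry lists more than one service, a separate check is
// registered per service under the composite key "<service>-<check name>";
// with exactly one service the plain check name is kept.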
func (o *Eye) registerValidateCheck(checkName string, serviceName string, request *ValidationRequest) {
check, err := o.buildCheck(serviceName, request)
if err == nil {
o.checks[checkName] = check
} else {
Log.Info("Can't build check '%v' because of '%v'", checkName, err)
}
}
func (o *Eye) registerMultiPing() {
for _, item := range o.config.PingAll {
o.checks[item.Name] = &MultiPing{check: item, validator: o.PingAll}
}
for _, item := range o.config.PingAny {
o.checks[item.Name] = &MultiPing{check: item, validator: o.PingAny}
}
}
func (o *Eye) registerMultiValidates() {
for _, item := range o.config.ValidateAll {
o.checks[item.Name] = &MultiValidate{check: item, validator: o.ValidateAll}
}
for _, item := range o.config.ValidateAny {
o.checks[item.Name] = &MultiValidate{check: item, validator: o.ValidateAny}
}
for _, item := range o.config.ValidateRunning {
o.checks[item.Name] = &MultiValidate{check: item, validator: o.ValidateRunning}
}
}
func (o *Eye) registerCompares() {
for _, item := range o.config.CompareAll {
if check, err := o.buildCompareCheck(item.Name, item.Services, false, item.Request); err == nil {
o.checks[item.Name] = check
} else {
item.logBuildCheckNotPossible(err)
}
}
for _, item := range o.config.CompareRunning {
if check, err := o.buildCompareCheck(item.Name, item.Services, true, item.Request); err == nil {
o.checks[item.Name] = check
} else {
item.logBuildCheckNotPossible(err)
}
}
}
func (o *Eye) getOrBuildCompareCheck(checkKey string, serviceNames []string, onlyRunning bool, req *ValidationRequest) (check Check, err error) {
var value interface{}
if value, err = o.liveChecks.GetOrBuild(checkKey, func() (interface{}, error) {
return o.buildCompareCheck(checkKey, serviceNames, onlyRunning, req)
}); err == nil {
check = value.(Check)
}
return
}
func (o *Eye) buildCompareCheck(checkKey string, serviceNames []string, onlyRunning bool, req *ValidationRequest) (ret Check, err error) {
var eval *govaluate.EvaluableExpression
if eval, err = compileEval(req.EvalExpr); err != nil {
return
}
queries := make([]Query, len(serviceNames))
var serviceQuery Check
for i, serviceName := range serviceNames {
serviceQuery, err = o.buildCheck(serviceName, req)
if err != nil {
break
}
queries[i] = serviceQuery
}
if err == nil {
ret = &MultiCheck{info: checkKey, queries: queries, eval: eval, onlyRunning: onlyRunning}
}
return
}
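// compileEval is defined elsewhere in this package. A minimal sketch of the
// assumed behaviour, based on the govaluate API imported above (hypothetical):
//
//	func compileEval(expr string) (*govaluate.EvaluableExpression, error) {
//		return govaluate.NewEvaluableExpression(expr)
//	}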
func (o *Eye) query(serviceName string, req *ValidationRequest) (data QueryResults, err error) {
var buildCheck Check
buildCheck, err = o.buildCheck(serviceName, req)
if err == nil {
data, err = buildCheck.Query()
}
return
}
func (o *Eye) buildCheck(serviceName string, req *ValidationRequest) (ret Check, err error) {
var value interface{}
value, err = o.liveChecks.GetOrBuild(req.CheckKey(serviceName), func() (interface{}, error) {
service, err := o.serviceFactory.Find(serviceName)
if err == nil {
return service.NewСheck(req) // NB: "NewСheck" is spelled with a Cyrillic "С" (U+0421) upstream; kept as-is so the call matches the interface definition
} else {
return nil, err
}
})
if err == nil {
ret = value.(Check)
}
return
}<|fim▁end|> | if len(item.Services) > 1 {
for _, serviceName := range item.Services {
o.registerValidateCheck(fmt.Sprintf("%v-%v", serviceName, item.Name), |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.