| prompt<br>large_string · lengths 70–991k | completion<br>large_string · lengths 0–1.02k |
|---|---|
<|file_name|>defaults.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The GLFW-RS Developers. For a full listing of the authors,
// refer to the AUTHORS file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate native;
extern crate glfw;
use glfw::Context;
#[start]
fn start(argc: int, argv: **u8) -> int {
native::start(argc, argv, main)
}
fn main() {
let glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();
glfw.window_hint(glfw::Visible(true));
let (window, _) = glfw.create_window(640, 480, "Defaults", glfw::Windowed)
.expect("Failed to create GLFW window.");
window.make_current();
let (width, height) = window.get_size();
println!("window size: ({}, {})", width, height);
println!("Context version: {:s}", window.get_context_version().to_str());
println!("OpenGL forward compatible: {}", window.is_opengl_forward_compat());
println!("OpenGL debug context: {}", window.is_opengl_debug_context());
println!("OpenGL profile: {}", window.get_opengl_profile());
let gl_params = [
(gl::RED_BITS, None, "red bits" ),
(gl::GREEN_BITS, None, "green bits" ),
(gl::BLUE_BITS, None, "blue bits" ),
(gl::ALPHA_BITS, None, "alpha bits" ),
(gl::DEPTH_BITS, None, "depth bits" ),
(gl::STENCIL_BITS, None, "stencil bits" ),
(gl::ACCUM_RED_BITS, None, "accum red bits" ),
(gl::ACCUM_GREEN_BITS, None, "accum green bits" ),
(gl::ACCUM_BLUE_BITS, None, "accum blue bits" ),
(gl::ACCUM_ALPHA_BITS, None, "accum alpha bits" ),
(gl::STEREO, None, "stereo" ),
(gl::SAMPLES_ARB, Some("GL_ARB_multisample"), "FSAA samples" ),
];
for &(param, ext, name) in gl_params.iter() {
if ext.map_or(true, |s| {
glfw.extension_supported(s)
}) {
let mut value: gl::GLint = 0;
unsafe { gl::GetIntegerv(param, &mut value) };
println!("OpenGL {:s}: {}", name, value);
};
}<|fim▁hole|>mod gl {
extern crate libc;
#[cfg(target_os = "macos")]
#[link(name="OpenGL", kind="framework")]
extern { }
#[cfg(target_os = "linux")]
#[link(name="GL")]
extern { }
pub type GLenum = libc::c_uint;
pub type GLint = libc::c_int;
pub static RED_BITS : GLenum = 0x0D52;
pub static GREEN_BITS : GLenum = 0x0D53;
pub static BLUE_BITS : GLenum = 0x0D54;
pub static ALPHA_BITS : GLenum = 0x0D55;
pub static DEPTH_BITS : GLenum = 0x0D56;
pub static STENCIL_BITS : GLenum = 0x0D57;
pub static ACCUM_RED_BITS : GLenum = 0x0D58;
pub static ACCUM_GREEN_BITS : GLenum = 0x0D59;
pub static ACCUM_BLUE_BITS : GLenum = 0x0D5A;
pub static ACCUM_ALPHA_BITS : GLenum = 0x0D5B;
pub static STEREO : GLenum = 0x0C33;
pub static SAMPLES_ARB : GLenum = 0x80A9;
#[inline(never)]
pub unsafe fn GetIntegerv(pname: GLenum, params: *mut GLint) {
glGetIntegerv(pname, params)
}
extern "C" {
fn glGetIntegerv(pname: GLenum, params: *mut GLint);
}
}<|fim▁end|> | }
|
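The Rust sample above hand-writes an FFI shim just to call `glGetIntegerv`. For comparison, a minimal sketch of the same query in Python via `ctypes`; the library lookup and the need for an already-current GL context are assumptions on my part, not something the sample guarantees:

```python
import ctypes
import ctypes.util

# Assumption: a desktop OpenGL library is installed ("GL" on Linux,
# the "OpenGL" framework on macOS).
libname = ctypes.util.find_library("GL") or ctypes.util.find_library("OpenGL")
gl = ctypes.CDLL(libname)

GL_RED_BITS = 0x0D52  # same enum value the Rust sample declares

def get_integer(pname):
    # Only meaningful while a GL context is current on this thread.
    out = ctypes.c_int(0)
    gl.glGetIntegerv(ctypes.c_uint(pname), ctypes.byref(out))
    return out.value
```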
<|file_name|>shortcuts.py<|end_file_name|><|fim▁begin|># Copyright (c) 2008 Mikeal Rogers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0<|fim▁hole|># distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from urlparse import urljoin
from django.http import HttpResponse
from django.template import Context
from edxmako import lookup_template
from edxmako.request_context import get_template_request_context
from django.conf import settings
from django.core.urlresolvers import reverse
from openedx.core.djangoapps.theming.helpers import get_template_path, is_request_in_themed_site
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
log = logging.getLogger(__name__)
def marketing_link(name):
"""Returns the correct URL for a link to the marketing site
depending on if the marketing site is enabled
Since the marketing site is enabled by a setting, we have two
possible URLs for certain links. This function is to decides
which URL should be provided.
"""
# link_map maps URLs from the marketing site to the old equivalent on
# the Django site
link_map = settings.MKTG_URL_LINK_MAP
enable_mktg_site = configuration_helpers.get_value(
'ENABLE_MKTG_SITE',
settings.FEATURES.get('ENABLE_MKTG_SITE', False)
)
marketing_urls = configuration_helpers.get_value(
'MKTG_URLS',
settings.MKTG_URLS
)
if enable_mktg_site and name in marketing_urls:
# special case for when we only want the root marketing URL
if name == 'ROOT':
return marketing_urls.get('ROOT')
# Using urljoin here allows us to enable a marketing site and set
# a site ROOT, but still specify absolute URLs for other marketing
# URLs in the MKTG_URLS setting
# e.g. urljoin('http://marketing.com', 'http://open-edx.org/about') >>> 'http://open-edx.org/about'
return urljoin(marketing_urls.get('ROOT'), marketing_urls.get(name))
# only link to the old pages when the marketing site isn't on
elif not enable_mktg_site and name in link_map:
# don't try to reverse disabled marketing links
if link_map[name] is not None:
return reverse(link_map[name])
else:
log.debug("Cannot find corresponding link for name: %s", name)
return '#'
def is_any_marketing_link_set(names):
"""
Returns a boolean if any given named marketing links are configured.
"""
return any(is_marketing_link_set(name) for name in names)
def is_marketing_link_set(name):
"""
Returns a boolean if a given named marketing link is configured.
"""
enable_mktg_site = configuration_helpers.get_value(
'ENABLE_MKTG_SITE',
settings.FEATURES.get('ENABLE_MKTG_SITE', False)
)
marketing_urls = configuration_helpers.get_value(
'MKTG_URLS',
settings.MKTG_URLS
)
if enable_mktg_site:
return name in marketing_urls
else:
return name in settings.MKTG_URL_LINK_MAP
def marketing_link_context_processor(request):
"""
A django context processor to give templates access to marketing URLs
Returns a dict whose keys are the marketing link names usable with the
marketing_link method (e.g. 'ROOT', 'CONTACT', etc.) prefixed with
'MKTG_URL_' and whose values are the corresponding URLs as computed by the
marketing_link method.
"""
marketing_urls = configuration_helpers.get_value(
'MKTG_URLS',
settings.MKTG_URLS
)
return dict(
[
("MKTG_URL_" + k, marketing_link(k))
for k in (
settings.MKTG_URL_LINK_MAP.viewkeys() |
marketing_urls.viewkeys()
)
]
)
def footer_context_processor(request): # pylint: disable=unused-argument
"""
Checks the site name to determine whether to use the edX.org footer or the Open Source Footer.
"""
return dict(
[
("IS_REQUEST_IN_MICROSITE", is_request_in_themed_site())
]
)
def render_to_string(template_name, dictionary, context=None, namespace='main', request=None):
"""
Render a Mako template to a string.
The following values are available to all templates:
settings: the django settings object
EDX_ROOT_URL: settings.EDX_ROOT_URL
marketing_link: The :func:`marketing_link` function
is_any_marketing_link_set: The :func:`is_any_marketing_link_set` function
is_marketing_link_set: The :func:`is_marketing_link_set` function
Arguments:
template_name: The name of the template to render. Will be loaded
from the template paths specified in configuration.
dictionary: A dictionary of variables to insert into the template during
rendering.
context: A :class:`~django.template.Context` with values to make
available to the template.
namespace: The Mako namespace to find the named template in.
request: The request to use to construct the RequestContext for rendering
this template. If not supplied, the current request will be used.
"""
template_name = get_template_path(template_name)
context_instance = Context(dictionary)
# add dictionary to context_instance
context_instance.update(dictionary or {})
# collapse context_instance to a single dictionary for mako
context_dictionary = {}
context_instance['settings'] = settings
context_instance['EDX_ROOT_URL'] = settings.EDX_ROOT_URL
context_instance['marketing_link'] = marketing_link
context_instance['is_any_marketing_link_set'] = is_any_marketing_link_set
context_instance['is_marketing_link_set'] = is_marketing_link_set
# In various testing contexts, there might not be a current request context.
request_context = get_template_request_context(request)
if request_context:
for item in request_context:
context_dictionary.update(item)
for item in context_instance:
context_dictionary.update(item)
if context:
context_dictionary.update(context)
# "Fix" CSRF token by evaluating the lazy object
KEY_CSRF_TOKENS = ('csrf_token', 'csrf')
for key in KEY_CSRF_TOKENS:
if key in context_dictionary:
context_dictionary[key] = unicode(context_dictionary[key])
# fetch and render template
template = lookup_template(namespace, template_name)
return template.render_unicode(**context_dictionary)
def render_to_response(template_name, dictionary=None, context_instance=None, namespace='main', request=None, **kwargs):
"""
Returns an HttpResponse whose content is filled with the result of calling
lookup.get_template(args[0]).render with the passed arguments.
"""
dictionary = dictionary or {}
return HttpResponse(render_to_string(template_name, dictionary, context_instance, namespace, request), **kwargs)<|fim▁end|> | #
# Unless required by applicable law or agreed to in writing, software |
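A detail worth isolating from `marketing_link` above: `urljoin` returns the second argument unchanged when it is already absolute, which is what lets `MKTG_URLS` mix a site `ROOT` with fully-qualified per-page URLs. A self-contained check (shown with the Python 3 import path; the module itself targets Python 2's `urlparse`):

```python
from urllib.parse import urljoin  # Python 2: from urlparse import urljoin

root = 'http://marketing.com'
assert urljoin(root, 'about') == 'http://marketing.com/about'
# An absolute URL wins outright, as the inline comment in marketing_link notes:
assert urljoin(root, 'http://open-edx.org/about') == 'http://open-edx.org/about'
```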
<|file_name|>ewk_history.cpp<|end_file_name|><|fim▁begin|>/*
Copyright (C) 2009-2010 ProFUSION embedded systems
Copyright (C) 2009-2010 Samsung Electronics
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Library General Public
License as published by the Free Software Foundation; either
version 2 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Library General Public License for more details.
You should have received a copy of the GNU Library General Public License
along with this library; see the file COPYING.LIB. If not, write to
the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.
*/
#include "config.h"
#include "ewk_history.h"
#include "BackForwardList.h"
#include "CairoUtilitiesEfl.h"
#include "HistoryItem.h"
#include "IconDatabaseBase.h"
#include "Image.h"
#include "IntSize.h"
#include "Page.h"
#include "PageGroup.h"
#include "ewk_history_private.h"
#include "ewk_private.h"
#include <Eina.h>
#include <eina_safety_checks.h>
#include <wtf/text/CString.h>
struct _Ewk_History {
WebCore::BackForwardList* core;
};
#define EWK_HISTORY_CORE_GET_OR_RETURN(history, core_, ...) \
if (!(history)) { \
CRITICAL("history is NULL."); \
return __VA_ARGS__; \
} \
if (!(history)->core) { \
CRITICAL("history->core is NULL."); \
return __VA_ARGS__; \
} \
if (!(history)->core->enabled()) { \
ERR("history->core is disabled!."); \
return __VA_ARGS__; \
} \
WebCore::BackForwardList* core_ = (history)->core
struct _Ewk_History_Item {
WebCore::HistoryItem* core;
const char* title;
const char* alternateTitle;
const char* uri;
const char* originalUri;
};
#define EWK_HISTORY_ITEM_CORE_GET_OR_RETURN(item, core_, ...) \
if (!(item)) { \
CRITICAL("item is NULL."); \
return __VA_ARGS__; \
} \
if (!(item)->core) { \
CRITICAL("item->core is NULL."); \
return __VA_ARGS__; \
} \
WebCore::HistoryItem* core_ = (item)->core
static inline Eina_List* _ewk_history_item_list_get(const WebCore::HistoryItemVector& coreItems)
{<|fim▁hole|> for (unsigned int i = 0; i < size; i++) {
Ewk_History_Item* item = ewk_history_item_new_from_core(coreItems[i].get());
if (item)
result = eina_list_append(result, item);
}
return result;
}
Eina_Bool ewk_history_clear(Ewk_History* history)
{
EWK_HISTORY_CORE_GET_OR_RETURN(history, core, false);
WebCore::Page* page = core->page();
if (page && page->groupPtr())
page->groupPtr()->removeVisitedLinks();
const int limit = ewk_history_limit_get(history);
ewk_history_limit_set(history, 0);
ewk_history_limit_set(history, limit);
return true;
}
Eina_Bool ewk_history_forward(Ewk_History* history)
{
EWK_HISTORY_CORE_GET_OR_RETURN(history, core, false);
if (core->forwardListCount() < 1)
return false;
core->goForward();
return true;
}
Eina_Bool ewk_history_back(Ewk_History* history)
{
EWK_HISTORY_CORE_GET_OR_RETURN(history, core, false);
if (core->backListCount() < 1)
return false;
core->goBack();
return true;
}
Eina_Bool ewk_history_history_item_add(Ewk_History* history, const Ewk_History_Item* item)
{
EWK_HISTORY_CORE_GET_OR_RETURN(history, history_core, false);
EWK_HISTORY_ITEM_CORE_GET_OR_RETURN(item, item_core, false);
history_core->addItem(item_core);
return true;
}
Eina_Bool ewk_history_history_item_set(Ewk_History* history, const Ewk_History_Item* item)
{
EWK_HISTORY_CORE_GET_OR_RETURN(history, history_core, false);
EWK_HISTORY_ITEM_CORE_GET_OR_RETURN(item, item_core, false);
history_core->goToItem(item_core);
return true;
}
Ewk_History_Item* ewk_history_history_item_back_get(const Ewk_History* history)
{
EWK_HISTORY_CORE_GET_OR_RETURN(history, core, 0);
return ewk_history_item_new_from_core(core->backItem());
}
Ewk_History_Item* ewk_history_history_item_current_get(const Ewk_History* history)
{
EWK_HISTORY_CORE_GET_OR_RETURN(history, core, 0);
WebCore::HistoryItem* currentItem = core->currentItem();
if (currentItem)
return ewk_history_item_new_from_core(currentItem);
return 0;
}
Ewk_History_Item* ewk_history_history_item_forward_get(const Ewk_History* history)
{
EWK_HISTORY_CORE_GET_OR_RETURN(history, core, 0);
return ewk_history_item_new_from_core(core->forwardItem());
}
Ewk_History_Item* ewk_history_history_item_nth_get(const Ewk_History* history, int index)
{
EWK_HISTORY_CORE_GET_OR_RETURN(history, core, 0);
return ewk_history_item_new_from_core(core->itemAtIndex(index));
}
Eina_Bool ewk_history_history_item_contains(const Ewk_History* history, const Ewk_History_Item* item)
{
EWK_HISTORY_CORE_GET_OR_RETURN(history, history_core, false);
EWK_HISTORY_ITEM_CORE_GET_OR_RETURN(item, item_core, false);
return history_core->containsItem(item_core);
}
Eina_List* ewk_history_forward_list_get(const Ewk_History* history)
{
EWK_HISTORY_CORE_GET_OR_RETURN(history, core, 0);
WebCore::HistoryItemVector items;
int limit = core->forwardListCount();
core->forwardListWithLimit(limit, items);
return _ewk_history_item_list_get(items);
}
Eina_List* ewk_history_forward_list_get_with_limit(const Ewk_History* history, int limit)
{
EWK_HISTORY_CORE_GET_OR_RETURN(history, core, 0);
WebCore::HistoryItemVector items;
core->forwardListWithLimit(limit, items);
return _ewk_history_item_list_get(items);
}
int ewk_history_forward_list_length(const Ewk_History* history)
{
EWK_HISTORY_CORE_GET_OR_RETURN(history, core, 0);
return core->forwardListCount();
}
Eina_List* ewk_history_back_list_get(const Ewk_History* history)
{
EWK_HISTORY_CORE_GET_OR_RETURN(history, core, 0);
WebCore::HistoryItemVector items;
int limit = core->backListCount();
core->backListWithLimit(limit, items);
return _ewk_history_item_list_get(items);
}
Eina_List* ewk_history_back_list_get_with_limit(const Ewk_History* history, int limit)
{
EWK_HISTORY_CORE_GET_OR_RETURN(history, core, 0);
WebCore::HistoryItemVector items;
core->backListWithLimit(limit, items);
return _ewk_history_item_list_get(items);
}
int ewk_history_back_list_length(const Ewk_History* history)
{
EWK_HISTORY_CORE_GET_OR_RETURN(history, core, 0);
return core->backListCount();
}
int ewk_history_limit_get(Ewk_History* history)
{
EWK_HISTORY_CORE_GET_OR_RETURN(history, core, 0);
return core->capacity();
}
Eina_Bool ewk_history_limit_set(const Ewk_History* history, int limit)
{
EWK_HISTORY_CORE_GET_OR_RETURN(history, core, false);
core->setCapacity(limit);
return true;
}
Ewk_History_Item* ewk_history_item_new_from_core(WebCore::HistoryItem* core)
{
Ewk_History_Item* item;
if (!core) {
ERR("WebCore::HistoryItem is NULL.");
return 0;
}
core->ref();
item = new Ewk_History_Item;
memset(item, 0, sizeof(*item));
item->core = core;
return item;
}
Ewk_History_Item* ewk_history_item_new(const char* uri, const char* title)
{
WTF::String historyUri = WTF::String::fromUTF8(uri);
WTF::String historyTitle = WTF::String::fromUTF8(title);
WTF::RefPtr<WebCore::HistoryItem> core = WebCore::HistoryItem::create(historyUri, historyTitle, 0);
Ewk_History_Item* item = ewk_history_item_new_from_core(core.release().leakRef());
return item;
}
static inline void _ewk_history_item_free(Ewk_History_Item* item, WebCore::HistoryItem* core)
{
core->deref();
delete item;
}
void ewk_history_item_free(Ewk_History_Item* item)
{
EWK_HISTORY_ITEM_CORE_GET_OR_RETURN(item, core);
_ewk_history_item_free(item, core);
}
void ewk_history_item_list_free(Eina_List* history_items)
{
void* deleteItem;
EINA_LIST_FREE(history_items, deleteItem) {
Ewk_History_Item* item = (Ewk_History_Item*)deleteItem;
_ewk_history_item_free(item, item->core);
}
}
const char* ewk_history_item_title_get(const Ewk_History_Item* item)
{
EWK_HISTORY_ITEM_CORE_GET_OR_RETURN(item, core, 0);
// hide the following optimization from outside
Ewk_History_Item* historyItem = const_cast<Ewk_History_Item*>(item);
eina_stringshare_replace(&historyItem->title, core->title().utf8().data());
return historyItem->title;
}
const char* ewk_history_item_title_alternate_get(const Ewk_History_Item* item)
{
EWK_HISTORY_ITEM_CORE_GET_OR_RETURN(item, core, 0);
// hide the following optimization from outside
Ewk_History_Item* historyItem = const_cast<Ewk_History_Item*>(item);
eina_stringshare_replace(&historyItem->alternateTitle,
core->alternateTitle().utf8().data());
return historyItem->alternateTitle;
}
void ewk_history_item_title_alternate_set(Ewk_History_Item* item, const char* title)
{
EWK_HISTORY_ITEM_CORE_GET_OR_RETURN(item, core);
if (!eina_stringshare_replace(&item->alternateTitle, title))
return;
core->setAlternateTitle(WTF::String::fromUTF8(title));
}
const char* ewk_history_item_uri_get(const Ewk_History_Item* item)
{
EWK_HISTORY_ITEM_CORE_GET_OR_RETURN(item, core, 0);
// hide the following optimization from outside
Ewk_History_Item* historyItem = const_cast<Ewk_History_Item*>(item);
eina_stringshare_replace(&historyItem->uri, core->urlString().utf8().data());
return historyItem->uri;
}
const char* ewk_history_item_uri_original_get(const Ewk_History_Item* item)
{
EWK_HISTORY_ITEM_CORE_GET_OR_RETURN(item, core, 0);
// hide the following optimization from outside
Ewk_History_Item* historyItem = const_cast<Ewk_History_Item*>(item);
eina_stringshare_replace(&historyItem->originalUri,
core->originalURLString().utf8().data());
return historyItem->originalUri;
}
double ewk_history_item_time_last_visited_get(const Ewk_History_Item* item)
{
EWK_HISTORY_ITEM_CORE_GET_OR_RETURN(item, core, 0.0);
return core->lastVisitedTime();
}
cairo_surface_t* ewk_history_item_icon_surface_get(const Ewk_History_Item* item)
{
EWK_HISTORY_ITEM_CORE_GET_OR_RETURN(item, core, 0);
RefPtr<cairo_surface_t> icon = WebCore::iconDatabase().synchronousNativeIconForPageURL(core->url(), WebCore::IntSize(16, 16));
if (!icon)
ERR("icon is NULL.");
return icon.get();
}
Evas_Object* ewk_history_item_icon_object_add(const Ewk_History_Item* item, Evas* canvas)
{
EWK_HISTORY_ITEM_CORE_GET_OR_RETURN(item, core, 0);
EINA_SAFETY_ON_NULL_RETURN_VAL(canvas, 0);
RefPtr<cairo_surface_t> surface = WebCore::iconDatabase().synchronousNativeIconForPageURL(core->url(), WebCore::IntSize(16, 16));
if (!surface) {
ERR("icon is NULL.");
return 0;
}
return WebCore::evasObjectFromCairoImageSurface(canvas, surface.get()).leakRef();
}
Eina_Bool ewk_history_item_page_cache_exists(const Ewk_History_Item* item)
{
EWK_HISTORY_ITEM_CORE_GET_OR_RETURN(item, core, false);
return core->isInPageCache();
}
int ewk_history_item_visit_count(const Ewk_History_Item* item)
{
EWK_HISTORY_ITEM_CORE_GET_OR_RETURN(item, core, 0);
return core->visitCount();
}
Eina_Bool ewk_history_item_visit_last_failed(const Ewk_History_Item* item)
{
EWK_HISTORY_ITEM_CORE_GET_OR_RETURN(item, core, true);
return core->lastVisitWasFailure();
}
/* internal methods ****************************************************/
/**
* @internal
*
* Creates history for given view. Called internally by ewk_view and
* should never be called from outside.
*
* @param core WebCore::BackForwardList instance to use internally.
*
* @return newly allocated history instance or @c NULL on errors.
*/
Ewk_History* ewk_history_new(WebCore::BackForwardList* core)
{
Ewk_History* history;
EINA_SAFETY_ON_NULL_RETURN_VAL(core, 0);
DBG("core=%p", core);
history = new Ewk_History;
history->core = core;
core->ref();
return history;
}
/**
* @internal
*
* Destroys previously allocated history instance. This is called
* automatically by ewk_view and should never be called from outside.
*
* @param history instance to free
*/
void ewk_history_free(Ewk_History* history)
{
DBG("history=%p", history);
history->core->deref();
delete history;
}
namespace EWKPrivate {
WebCore::HistoryItem* coreHistoryItem(const Ewk_History_Item* ewkHistoryItem)
{
EWK_HISTORY_ITEM_CORE_GET_OR_RETURN(ewkHistoryItem, core, 0);
return core;
}
} // namespace EWKPrivate<|fim▁end|> | Eina_List* result = 0;
unsigned int size;
size = coreItems.size(); |
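The `Ewk_History` calls above all delegate to WebKit's capacity-bounded back/forward list. As a language-neutral illustration of those semantics (go back, capacity, new navigation discarding the forward list), here is a toy Python model; it is not WebKit code:

```python
class ToyBackForwardList:
    """Toy model of a capacity-bounded back/forward history."""

    def __init__(self, capacity=100):
        self.capacity = capacity
        self.items = []      # ordered oldest -> newest
        self.current = -1    # index of the current item, -1 when empty

    def add(self, item):
        # Like a browser history, adding a new item discards the forward list.
        del self.items[self.current + 1:]
        self.items.append(item)
        if len(self.items) > self.capacity:
            self.items.pop(0)
        self.current = len(self.items) - 1

    def back_count(self):
        return max(self.current, 0)

    def go_back(self):
        # Same guard as ewk_history_back: refuse when the back count is < 1.
        if self.back_count() < 1:
            return False
        self.current -= 1
        return True
```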
<|file_name|>redis_storage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/thumbor/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com [email protected]
import logging<|fim▁hole|>from redis import Redis, RedisError
from thumbor.storages import BaseStorage
from thumbor.utils import on_exception
from tornado.concurrent import return_future
logger = logging.getLogger('thumbor')
class Storage(BaseStorage):
storage = None
def __init__(self, context, shared_client=True):
'''Initialize the RedisStorage
:param thumbor.context.Context context: Current context
:param boolean shared_client: When set to True a singleton client will
be used.
'''
BaseStorage.__init__(self, context)
self.shared_client = shared_client
self.storage = self.reconnect_redis()
def get_storage(self):
'''Get the storage instance.
:return Redis: Redis instance
'''
if self.storage:
return self.storage
self.storage = self.reconnect_redis()
return self.storage
def reconnect_redis(self):
if self.shared_client and Storage.storage:
return Storage.storage
storage = Redis(
port=self.context.config.REDIS_STORAGE_SERVER_PORT,
host=self.context.config.REDIS_STORAGE_SERVER_HOST,
db=self.context.config.REDIS_STORAGE_SERVER_DB,
password=self.context.config.REDIS_STORAGE_SERVER_PASSWORD
)
if self.shared_client:
Storage.storage = storage
return storage
def on_redis_error(self, fname, exc_type, exc_value):
'''Callback executed when there is a redis error.
:param string fname: Function name that was being called.
:param type exc_type: Exception type
:param Exception exc_value: The current exception
:returns: Default value or raise the current exception
'''
if self.shared_client:
Storage.storage = None
else:
self.storage = None
if self.context.config.REDIS_STORAGE_IGNORE_ERRORS is True:
logger.error("[REDIS_STORAGE] %s" % exc_value)
if fname == '_exists':
return False
return None
else:
raise exc_value
def __key_for(self, url):
return 'thumbor-crypto-%s' % url
def __detector_key_for(self, url):
return 'thumbor-detector-%s' % url
@on_exception(on_redis_error, RedisError)
def put(self, path, bytes):
storage = self.get_storage()
storage.set(path, bytes)
storage.expireat(
path, datetime.now() + timedelta(
seconds=self.context.config.STORAGE_EXPIRATION_SECONDS
)
)
@on_exception(on_redis_error, RedisError)
def put_crypto(self, path):
if not self.context.config.STORES_CRYPTO_KEY_FOR_EACH_IMAGE:
return
if not self.context.server.security_key:
raise RuntimeError(
"STORES_CRYPTO_KEY_FOR_EACH_IMAGE can't be True if no "
"SECURITY_KEY specified"
)
key = self.__key_for(path)
self.get_storage().set(key, self.context.server.security_key)
@on_exception(on_redis_error, RedisError)
def put_detector_data(self, path, data):
key = self.__detector_key_for(path)
self.get_storage().set(key, dumps(data))
@return_future
def get_crypto(self, path, callback):
callback(self._get_crypto(path))
@on_exception(on_redis_error, RedisError)
def _get_crypto(self, path):
if not self.context.config.STORES_CRYPTO_KEY_FOR_EACH_IMAGE:
return None
crypto = self.get_storage().get(self.__key_for(path))
if not crypto:
return None
return crypto
@return_future
def get_detector_data(self, path, callback):
callback(self._get_detector_data(path))
@on_exception(on_redis_error, RedisError)
def _get_detector_data(self, path):
data = self.get_storage().get(self.__detector_key_for(path))
if not data:
return None
return loads(data)
@return_future
def exists(self, path, callback):
callback(self._exists(path))
@on_exception(on_redis_error, RedisError)
def _exists(self, path):
return self.get_storage().exists(path)
@on_exception(on_redis_error, RedisError)
def remove(self, path):
self.get_storage().delete(path)
@return_future
def get(self, path, callback):
@on_exception(self.on_redis_error, RedisError)
def wrap():
return self.get_storage().get(path)
callback(wrap())<|fim▁end|> |
from json import loads, dumps
from datetime import datetime, timedelta
|
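The `@on_exception(on_redis_error, RedisError)` decorator used throughout the storage above routes Redis failures into an instance-level handler instead of letting them propagate. Thumbor's real implementation lives in `thumbor.utils` and may differ; a minimal sketch of the pattern as it is used here:

```python
import functools

def on_exception(handler, exception_class=Exception):
    """Route `exception_class` raised by the wrapped method to `handler`,
    returning whatever the handler returns (see on_redis_error above)."""
    def decorator(method):
        @functools.wraps(method)
        def wrapper(self, *args, **kwargs):
            try:
                return method(self, *args, **kwargs)
            except exception_class as exc:
                if handler is None:
                    raise
                return handler(self, method.__name__, type(exc), exc)
        return wrapper
    return decorator
```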
<|file_name|>DefaultPage.tsx<|end_file_name|><|fim▁begin|>import { useState } from "react";
import logo from "../logo.svg";
import { css, keyframes } from "@emotion/react";
import { Tooltip } from "../components/Tooltip";
import { Barrier } from "../infrastructures/Barrier";
import { Button } from "../components/Button";
export const DefaultPage = () => {
const [count, setCount] = useState(0);
const [showButton, setShowButton] = useState(true);
return (
<div
css={css`
text-align: center;
`}
>
<header
css={css`
background-color: #282c34;
min-height: 100vh;
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
font-size: calc(10px + 2vmin);
color: white;
`}
>
<img
src={logo}
css={css`
height: 40vmin;
pointer-events: none;
@media (prefers-reduced-motion: no-preference) {
& {
animation: ${appLogoSpin} infinite 20s linear;
}
}
`}
alt="logo"
/>
<p>Hello Vite + React!</p>
<p>
<label>
Show Button
<input
type="checkbox"
checked={showButton}
onChange={() => setShowButton((x) => !x)}
/>
</label>
</p>
<p>
{showButton ? (
/** In-page UI system */
<Tooltip message={`count is: ${count}`}>
<button
type="button"
css={css`
font-size: calc(10px + 2vmin);
`}
onClick={() => setCount((count) => count + 1)}
>
count is: {count}
</button>
</Tooltip>
) : null}
{showButton ? (
/** Area Level Coexistence */
<Barrier>
<Tooltip message={`count is: ${count}`}>
<button
type="button"
css={css`
font-size: calc(10px + 2vmin);<|fim▁hole|> onClick={() => setCount((count) => count + 1)}
>
count is: {count}
</button>
</Tooltip>
</Barrier>
) : null}
{showButton ? (
<Tooltip message={`count is: ${count}`}>
{/** Component Level Coexistence */}
<Button onClick={() => setCount((count) => count + 1)}>
count is: {count}
</Button>
</Tooltip>
) : null}
</p>
<p>
Edit <code>App.tsx</code> and save to test HMR updates.
</p>
<p>
<a
css={appLink}
href="https://reactjs.org"
target="_blank"
rel="noopener noreferrer"
>
Learn React
</a>
{" | "}
<a
css={appLink}
href="https://vitejs.dev/guide/features.html"
target="_blank"
rel="noopener noreferrer"
>
Vite Docs
</a>
</p>
</header>
</div>
);
};
const appLogoSpin = keyframes`
from {
transform: rotate(0deg);
}
to {
transform: rotate(360deg);
}
`;
const appLink = css`
color: #61dafb;
`;<|fim▁end|> | `} |
<|file_name|>AssemblyVisitorAdapter.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2009, Rickard Öberg. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.qi4j.bootstrap;
/**
* Base class for assembly visitors. Subclass and override
* the particular methods you are interested in.
*/
public class AssemblyVisitorAdapter<ThrowableType extends Throwable>
implements AssemblyVisitor<ThrowableType>
{
public void visitApplication( ApplicationAssembly assembly )
throws ThrowableType
{
}
public void visitLayer( LayerAssembly assembly )
throws ThrowableType
{<|fim▁hole|>
public void visitModule( ModuleAssembly assembly )
throws ThrowableType
{
}
public void visitComposite( TransientDeclaration declaration )
throws ThrowableType
{
}
public void visitEntity( EntityDeclaration declaration )
throws ThrowableType
{
}
public void visitService( ServiceDeclaration declaration )
throws ThrowableType
{
}
public void visitImportedService( ImportedServiceDeclaration declaration )
throws ThrowableType
{
}
public void visitValue( ValueDeclaration declaration )
throws ThrowableType
{
}
public void visitObject( ObjectDeclaration declaration )
throws ThrowableType
{
}
}<|fim▁end|> | } |
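The Java adapter above exists purely to give every visitor hook a no-op default, so subclasses override only what they need. The same shape in Python, as a quick illustration (not part of Qi4j):

```python
class AssemblyVisitorAdapter:
    """No-op defaults: subclass and override only the hooks you care about."""
    def visit_application(self, assembly): pass
    def visit_layer(self, assembly): pass
    def visit_module(self, assembly): pass

class LayerPrinter(AssemblyVisitorAdapter):
    def visit_layer(self, assembly):          # the single hook we care about
        print("visited layer:", assembly)
```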
<|file_name|>index.js<|end_file_name|><|fim▁begin|>// Xiongxiong
// Bearer token codec
// AGPLv3 or later
// Copyright (c) 2014, 2015 Genome Research Limited
var crypto = require('crypto');
module.exports = function(/* privateKey, lifetime, algorithm OR hash */) {
var privateKey, lifetime, algorithm,
xiongxiong;
// Parse arguments
if (arguments.length) {
// Try to get options from hash first, then fallback to positional
// and finally, where appropriate, to defaults
privateKey = arguments[0].privateKey || arguments[0];
lifetime = parseInt(arguments[0].lifetime || arguments[1], 10) || 3600;
algorithm = arguments[0].algorithm || arguments[2] || 'sha1';
// Private key must be a string or a buffer
if (!(typeof privateKey == 'string' || privateKey instanceof Buffer)) {
throw new TypeError('Invalid arguments: Private key must be a string or buffer');
}
} else {
// Need at least a private key
throw new Error('No private key specified');
}
var getHMAC = (function() {
// Check algorithm is supported
if (crypto.getHashes().indexOf(algorithm) < 0) {
throw new Error('Unsupported hash algorithm \'' + algorithm + '\'');
}
return function(message) {
var hmac = crypto.createHmac(algorithm, privateKey);
hmac.setEncoding('base64');
hmac.end(message);
return hmac.read();
};
})();
// Return value
xiongxiong = {
encode: function(data, callback) {
// Flatten array
if (Array.isArray(data)) { data = data.join(':'); }
if (typeof data != 'string') {
callback(new TypeError('Invalid arguments: Seed data must be a string or array of strings'), null);
} else {
// Create a 48-bit salt
crypto.randomBytes(6, function(err, salt) {
if (err) {
callback(err, null);
} else {
var expiration = Math.floor(Date.now() / 1000) + lifetime,
message = [data, expiration, salt.toString('base64')].join(':'),
// Generate HMAC of data:expiration:salt
password = getHMAC(message);
// Return token and basic authentication pair
callback(null, Object.freeze({
expiration: expiration, // Unix epoch
accessToken: (new Buffer([message, password].join(':'))).toString('base64'),
basicLogin: (new Buffer(message)).toString('base64'),
basicPassword: password
}));
}
});
}
},
decode: function(/* bearer/basic auth data */) {
var output = {};
switch (arguments.length) {
case 1:
// Split bearer token and decode as basic auth
var accessToken = (new Buffer(arguments[0], 'base64')).toString().split(':');
var basicPassword = accessToken.pop(),
basicLogin = (new Buffer(accessToken.join(':'))).toString('base64');
output = this.decode(basicLogin, basicPassword);
break;
case 2:
// Basic authentication data
var basicLogin = (new Buffer(arguments[0], 'base64')).toString(),
extracted = basicLogin.split(':'),
basicPassword = arguments[1];
// Pass the salt
extracted.pop();
// Expiration is penultimate element
// n.b., JavaScript Date in ms, hence x1000 on Unix epoch
Object.defineProperty(output, 'expiration', {
configurable: false,
writable: false,
enumerable: true,
value: new Date(parseInt(extracted.pop(), 10) * 1000)
});
// Convert to string if we only have one element remaining
Object.defineProperty(output, 'data', {
configurable: false,
writable: false,<|fim▁hole|> enumerable: true,
value: extracted.length == 1 ? extracted[0] : extracted,
});
// Validity check
Object.defineProperty(output, 'valid', {
configurable: false,
enumerable: true,
get: (function() {
if (basicPassword == getHMAC(basicLogin)) {
return function() {
// Match: Valid until expiration
return Date.now() <= this.expiration;
};
} else {
// No match: Invalid
return function() { return false; }
}
})()
});
break;
default:
Object.defineProperty(output, 'valid', {
configurable: false,
writable: false,
enumerable: true,
value: false
});
break;
}
return Object.freeze(output);
}
};
// Set aliases (legacy API)
xiongxiong.create = xiongxiong.encode;
xiongxiong.extract = xiongxiong.decode;
return Object.freeze(xiongxiong);
};<|fim▁end|> | |
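The token scheme in `index.js` above is compact enough to restate: the bearer token is `base64(data:expiration:salt:HMAC(data:expiration:salt))`, and the HMAC doubles as the basic-auth password. A few lines of Python reproduce the core construction (a sketch of the scheme, not a port of the library):

```python
import base64, hashlib, hmac, os, time

def encode(data, private_key, lifetime=3600):
    expiration = int(time.time()) + lifetime
    salt = base64.b64encode(os.urandom(6)).decode()       # 48-bit salt
    message = ':'.join([data, str(expiration), salt])
    password = base64.b64encode(
        hmac.new(private_key, message.encode(), hashlib.sha1).digest()).decode()
    token = base64.b64encode((message + ':' + password).encode()).decode()
    return token, message, password

token, message, password = encode('user123', b'secret')
# Validation mirrors decode(): recompute the HMAC and compare.
expected = base64.b64encode(
    hmac.new(b'secret', message.encode(), hashlib.sha1).digest()).decode()
assert hmac.compare_digest(password, expected)
```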
<|file_name|>core.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright 2011 Sybren A. Stüvel <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Core mathematical operations.
This is the actual core RSA implementation, which is only defined<|fim▁hole|>'''
import types
def assert_int(var, name):
if type(var) in (types.IntType, types.LongType):
return
raise TypeError('%s should be an integer, not %s' % (name, var.__class__))
def encrypt_int(message, ekey, n):
"""Encrypts a message using encryption key 'ekey', working modulo n"""
assert_int(message, 'message')
assert_int(ekey, 'ekey')
assert_int(n, 'n')
if message < 0:
raise ValueError('Only non-negative numbers are supported')
if message >= n:
raise OverflowError("The message %i is too long for n=%i" % (message, n))
return pow(message, ekey, n)
def decrypt_int(cyphertext, dkey, n):
"""Decrypts a cypher text using the decryption key 'dkey', working
modulo n"""
assert_int(cyphertext, 'cyphertext')
assert_int(dkey, 'dkey')
assert_int(n, 'n')
message = pow(cyphertext, dkey, n)
return message<|fim▁end|> | mathematically on integers. |
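The two functions above are plain modular exponentiation, so a textbook key pair round-trips directly. With the classic toy parameters p=61, q=53 (so n=3233, e=17, d=2753, and e*d ≡ 1 mod φ(n)=3120):

```python
# Toy parameters only; never use key sizes like this in practice.
n, e, d = 3233, 17, 2753
message = 65
cipher = pow(message, e, n)          # what encrypt_int(message, e, n) returns
assert cipher == 2790
assert pow(cipher, d, n) == message  # decrypt_int inverts it
```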
<|file_name|>DefaultTrainingPlanService.java<|end_file_name|><|fim▁begin|>package org.apache.rave.portal.service.impl;
import org.apache.rave.model.ExcercicesHasTrainingPlan;
import org.apache.rave.model.Serie;
import org.apache.rave.model.TrainingPlan;
import org.apache.rave.portal.repository.ExcercicesHasTrainingPlanRepository;
import org.apache.rave.portal.repository.SerieRepository;
import org.apache.rave.portal.repository.TrainingPlanRepository;
import org.apache.rave.portal.service.TrainingPlanService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;<|fim▁hole|>
import java.util.ArrayList;
import java.util.Collection;
/**
* Created by fhernandez on 23/09/14.
*/
@Service
public class DefaultTrainingPlanService implements TrainingPlanService {
private final Logger logger = LoggerFactory.getLogger(DefaultTrainingPlanService.class);
private final TrainingPlanRepository trainingPlanRepository;
private final ExcercicesHasTrainingPlanRepository exercisesHasTrainingPlanRepository;
private final SerieRepository serieRepository;
@Autowired
public DefaultTrainingPlanService(TrainingPlanRepository trainingPlanRepository,ExcercicesHasTrainingPlanRepository exercisesHasTrainingPlanRepository,SerieRepository serieRepository) {
this.trainingPlanRepository = trainingPlanRepository;
this.exercisesHasTrainingPlanRepository = exercisesHasTrainingPlanRepository;
this.serieRepository = serieRepository;
}
@Override
@Transactional
public TrainingPlan getById(Long trainingPlanId) {
TrainingPlan trainingPlan =trainingPlanRepository.getById(trainingPlanId);
if(trainingPlan!=null) {
trainingPlan.getExercisesHasTrainingplans().size();
}
return trainingPlan;
}
@Transactional
public TrainingPlan save(TrainingPlan newPlan) {
Collection<ExcercicesHasTrainingPlan> exerciseList=newPlan.getExercisesHasTrainingplans();
try {
if(newPlan.getEntityId()==null) {
newPlan = trainingPlanRepository.save(newPlan);
}
for (ExcercicesHasTrainingPlan exerciseHasTraining : exerciseList) {
Serie serie = serieRepository.save(exerciseHasTraining.getSerie());
exerciseHasTraining.setSerie(serie);
exerciseHasTraining.setSerieId(serie.getEntityId());
exerciseHasTraining.setTrainingplanId(newPlan.getEntityId());
exerciseHasTraining.setTrainingPlan(newPlan);
}
exercisesHasTrainingPlanRepository.saveList(exerciseList);
}catch(Exception e){
logger.error("Exception saving plan " + e);
}
return newPlan;
}
public Collection<TrainingPlan> getByTrainerID(Long trainerId){
return trainingPlanRepository.getByTrainerID(trainerId);
}
}<|fim▁end|> | import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional; |
<|file_name|>oauth2.go<|end_file_name|><|fim▁begin|>package integrations
import (
"fmt"
"os"
"github.com/cloudfoundry-community/go-cfenv"
)
// New creates a new OAuth2 integration wrapper from the bound CF service.
func (s *MyOAuth2) New(appEnv *cfenv.App) *MyOAuth2 {
oauth2ServiceName := os.Getenv("OAUTH2_SERVICE_NAME")
clientIdName := os.Getenv("OAUTH2_CLIENT_ID")
clientSecretName := os.Getenv("OAUTH2_CLIENT_SECRET")
oauth2Service, err := appEnv.Services.WithName(oauth2ServiceName)
if err != nil {
panic(fmt.Sprintf("oauth2 client service name error: %s", err.Error()))
}<|fim▁hole|> s.Secret = oauth2Service.Credentials[clientSecretName]
return s
}<|fim▁end|> | s.ID = oauth2Service.Credentials[clientIdName] |
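go-cfenv in the sample above is ultimately just reading the `VCAP_SERVICES` JSON that Cloud Foundry injects. A rough Python equivalent of the same lookup, assuming the env-var conventions used by the Go code (the JSON structure shown is standard CF, but treat the details as illustrative):

```python
import json, os

def oauth2_credentials():
    services = json.loads(os.environ['VCAP_SERVICES'])  # {label: [instances]}
    wanted = os.environ['OAUTH2_SERVICE_NAME']
    for instances in services.values():
        for svc in instances:
            if svc.get('name') == wanted:
                creds = svc['credentials']
                return (creds[os.environ['OAUTH2_CLIENT_ID']],
                        creds[os.environ['OAUTH2_CLIENT_SECRET']])
    raise KeyError('oauth2 client service name error: %s' % wanted)
```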
<|file_name|>python.py<|end_file_name|><|fim▁begin|>""" Python test discovery, setup and run of test functions. """
import re
import fnmatch
import functools
import py
import inspect
import sys
import pytest
from _pytest.mark import MarkDecorator, MarkerError
from py._code.code import TerminalRepr
try:
import enum
except ImportError: # pragma: no cover
# Only available in Python 3.4+ or as a backport
enum = None
import _pytest
import pluggy
cutdir2 = py.path.local(_pytest.__file__).dirpath()
cutdir1 = py.path.local(pluggy.__file__.rstrip("oc"))
NoneType = type(None)
NOTSET = object()
isfunction = inspect.isfunction
isclass = inspect.isclass
callable = py.builtin.callable
# used to work around a python2 exception info leak
exc_clear = getattr(sys, 'exc_clear', lambda: None)
# The type of re.compile objects is not exposed in Python.
REGEX_TYPE = type(re.compile(''))
def filter_traceback(entry):
return entry.path != cutdir1 and not entry.path.relto(cutdir2)
def get_real_func(obj):
""" gets the real function object of the (possibly) wrapped object by
functools.wraps or functools.partial.
"""
while hasattr(obj, "__wrapped__"):
obj = obj.__wrapped__
if isinstance(obj, functools.partial):
obj = obj.func
return obj
def getfslineno(obj):
# xxx let decorators etc specify a sane ordering
obj = get_real_func(obj)
if hasattr(obj, 'place_as'):
obj = obj.place_as
fslineno = py.code.getfslineno(obj)
assert isinstance(fslineno[1], int), obj
return fslineno
def getimfunc(func):
try:
return func.__func__
except AttributeError:
try:
return func.im_func
except AttributeError:
return func
def safe_getattr(object, name, default):
""" Like getattr but return default upon any Exception.
Attribute access can potentially fail for 'evil' Python objects.
See issue214
"""
try:
return getattr(object, name, default)
except Exception:
return default
class FixtureFunctionMarker:
def __init__(self, scope, params,
autouse=False, yieldctx=False, ids=None):
self.scope = scope
self.params = params
self.autouse = autouse
self.yieldctx = yieldctx
self.ids = ids
def __call__(self, function):
if isclass(function):
raise ValueError(
"class fixtures not supported (may be in the future)")
function._pytestfixturefunction = self
return function
def fixture(scope="function", params=None, autouse=False, ids=None):
""" (return a) decorator to mark a fixture factory function.
This decorator can be used (with or without parameters) to define
a fixture function. The name of the fixture function can later be
referenced to cause its invocation ahead of running tests: test
modules or classes can use the pytest.mark.usefixtures(fixturename)
marker. Test functions can directly use fixture names as input
arguments in which case the fixture instance returned from the fixture
function will be injected.
:arg scope: the scope for which this fixture is shared, one of
"function" (default), "class", "module", "session".
:arg params: an optional list of parameters which will cause multiple
invocations of the fixture function and all of the tests
using it.
:arg autouse: if True, the fixture func is activated for all tests that
can see it. If False (the default) then an explicit
reference is needed to activate the fixture.
:arg ids: list of string ids each corresponding to the params
so that they are part of the test id. If no ids are provided
they will be generated automatically from the params.
"""
if callable(scope) and params is None and autouse == False:
# direct decoration
return FixtureFunctionMarker(
"function", params, autouse)(scope)
if params is not None and not isinstance(params, (list, tuple)):
params = list(params)
return FixtureFunctionMarker(scope, params, autouse, ids=ids)
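# Editorial aside, not part of the original module: a minimal usage sketch
# of the decorator defined above.
#
#     @pytest.fixture(scope="module", params=[0, 1])
#     def number(request):
#         return request.param      # one test invocation per param
#
#     def test_number(number):
#         assert number in (0, 1)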
def yield_fixture(scope="function", params=None, autouse=False, ids=None):
""" (return a) decorator to mark a yield-fixture factory function
(EXPERIMENTAL).
This takes the same arguments as :py:func:`pytest.fixture` but
expects a fixture function to use a ``yield`` instead of a ``return``
statement to provide a fixture. See
http://pytest.org/en/latest/yieldfixture.html for more info.
"""
if callable(scope) and params is None and autouse == False:
# direct decoration
return FixtureFunctionMarker(
"function", params, autouse, yieldctx=True)(scope)
else:
return FixtureFunctionMarker(scope, params, autouse,
yieldctx=True, ids=ids)
defaultfuncargprefixmarker = fixture()
def pyobj_property(name):
def get(self):
node = self.getparent(getattr(pytest, name))
if node is not None:
return node.obj
doc = "python %s object this node was collected from (can be None)." % (
name.lower(),)
return property(get, None, None, doc)
def pytest_addoption(parser):
group = parser.getgroup("general")
group.addoption('--fixtures', '--funcargs',
action="store_true", dest="showfixtures", default=False,
help="show available fixtures, sorted by plugin appearance")
parser.addini("usefixtures", type="args", default=[],
help="list of default fixtures to be used with this project")
parser.addini("python_files", type="args",
default=['test_*.py', '*_test.py'],
help="glob-style file patterns for Python test module discovery")
parser.addini("python_classes", type="args", default=["Test",],
help="prefixes or glob names for Python test class discovery")
parser.addini("python_functions", type="args", default=["test",],
help="prefixes or glob names for Python test function and "
"method discovery")
def pytest_cmdline_main(config):
if config.option.showfixtures:
showfixtures(config)
return 0
def pytest_generate_tests(metafunc):
# those alternative spellings are common - raise a specific error to alert
# the user
alt_spellings = ['parameterize', 'parametrise', 'parameterise']
for attr in alt_spellings:
if hasattr(metafunc.function, attr):
msg = "{0} has '{1}', spelling should be 'parametrize'"
raise MarkerError(msg.format(metafunc.function.__name__, attr))
try:
markers = metafunc.function.parametrize
except AttributeError:
return
for marker in markers:
metafunc.parametrize(*marker.args, **marker.kwargs)
def pytest_configure(config):
config.addinivalue_line("markers",
"parametrize(argnames, argvalues): call a test function multiple "
"times passing in different arguments in turn. argvalues generally "
"needs to be a list of values if argnames specifies only one name "
"or a list of tuples of values if argnames specifies multiple names. "
"Example: @parametrize('arg1', [1,2]) would lead to two calls of the "
"decorated test function, one with arg1=1 and another with arg1=2."
"see http://pytest.org/latest/parametrize.html for more info and "
"examples."
)
config.addinivalue_line("markers",
"usefixtures(fixturename1, fixturename2, ...): mark tests as needing "
"all of the specified fixtures. see http://pytest.org/latest/fixture.html#usefixtures "
)
def pytest_sessionstart(session):
session._fixturemanager = FixtureManager(session)
@pytest.hookimpl(trylast=True)
def pytest_namespace():
raises.Exception = pytest.fail.Exception
return {
'fixture': fixture,
'yield_fixture': yield_fixture,
'raises' : raises,
'collect': {
'Module': Module, 'Class': Class, 'Instance': Instance,
'Function': Function, 'Generator': Generator,
'_fillfuncargs': fillfixtures}
}
@fixture(scope="session")
def pytestconfig(request):
""" the pytest config object with access to command line opts."""
return request.config
@pytest.hookimpl(trylast=True)
def pytest_pyfunc_call(pyfuncitem):
testfunction = pyfuncitem.obj
if pyfuncitem._isyieldedfunction():
testfunction(*pyfuncitem._args)
else:
funcargs = pyfuncitem.funcargs
testargs = {}
for arg in pyfuncitem._fixtureinfo.argnames:
testargs[arg] = funcargs[arg]
testfunction(**testargs)
return True
def pytest_collect_file(path, parent):
ext = path.ext
if ext == ".py":
if not parent.session.isinitpath(path):
for pat in parent.config.getini('python_files'):
if path.fnmatch(pat):
break
else:
return
ihook = parent.session.gethookproxy(path)
return ihook.pytest_pycollect_makemodule(path=path, parent=parent)
def pytest_pycollect_makemodule(path, parent):
return Module(path, parent)
@pytest.hookimpl(hookwrapper=True)
def pytest_pycollect_makeitem(collector, name, obj):
outcome = yield
res = outcome.get_result()
if res is not None:
raise StopIteration
# nothing was collected elsewhere, let's do it here
if isclass(obj):
if collector.istestclass(obj, name):
Class = collector._getcustomclass("Class")
outcome.force_result(Class(name, parent=collector))
elif collector.istestfunction(obj, name):
# mock seems to store unbound methods (issue473), normalize it
obj = getattr(obj, "__func__", obj)
if not isfunction(obj):
collector.warn(code="C2", message=
"cannot collect %r because it is not a function."
% name, )
if getattr(obj, "__test__", True):
if is_generator(obj):
res = Generator(name, parent=collector)
else:
res = list(collector._genfunctions(name, obj))
outcome.force_result(res)
def is_generator(func):
try:
return py.code.getrawcode(func).co_flags & 32 # generator function
except AttributeError: # builtin functions have no bytecode
# assume them to not be generators
return False
class PyobjContext(object):
module = pyobj_property("Module")
cls = pyobj_property("Class")
instance = pyobj_property("Instance")
class PyobjMixin(PyobjContext):
def obj():
def fget(self):
try:
return self._obj
except AttributeError:
self._obj = obj = self._getobj()
return obj
def fset(self, value):
self._obj = value
return property(fget, fset, None, "underlying python object")
obj = obj()
def _getobj(self):
return getattr(self.parent.obj, self.name)
def getmodpath(self, stopatmodule=True, includemodule=False):
""" return python path relative to the containing module. """
chain = self.listchain()
chain.reverse()
parts = []
for node in chain:
if isinstance(node, Instance):
continue
name = node.name
if isinstance(node, Module):
assert name.endswith(".py")
name = name[:-3]
if stopatmodule:
if includemodule:
parts.append(name)
break
parts.append(name)
parts.reverse()
s = ".".join(parts)
return s.replace(".[", "[")
def _getfslineno(self):
return getfslineno(self.obj)
def reportinfo(self):
# XXX caching?
obj = self.obj
if hasattr(obj, 'compat_co_firstlineno'):
# nose compatibility
fspath = sys.modules[obj.__module__].__file__
if fspath.endswith(".pyc"):
fspath = fspath[:-1]
lineno = obj.compat_co_firstlineno
else:
fspath, lineno = getfslineno(obj)
modpath = self.getmodpath()
assert isinstance(lineno, int)
return fspath, lineno, modpath
class PyCollector(PyobjMixin, pytest.Collector):
def funcnamefilter(self, name):
return self._matches_prefix_or_glob_option('python_functions', name)
def isnosetest(self, obj):
""" Look for the __test__ attribute, which is applied by the
@nose.tools.istest decorator
"""
return safe_getattr(obj, '__test__', False)
def classnamefilter(self, name):
return self._matches_prefix_or_glob_option('python_classes', name)
def istestfunction(self, obj, name):
return (
(self.funcnamefilter(name) or self.isnosetest(obj))
and safe_getattr(obj, "__call__", False) and getfixturemarker(obj) is None
)
def istestclass(self, obj, name):
return self.classnamefilter(name) or self.isnosetest(obj)
def _matches_prefix_or_glob_option(self, option_name, name):
"""
checks if the given name matches the prefix or glob-pattern defined
in ini configuration.
"""
for option in self.config.getini(option_name):
if name.startswith(option):
return True
# check that name looks like a glob-string before calling fnmatch
# because this is called for every name in each collected module,
# and fnmatch is somewhat expensive to call
elif ('*' in option or '?' in option or '[' in option) and \
fnmatch.fnmatch(name, option):
return True
return False
def collect(self):
if not getattr(self.obj, "__test__", True):
return []
# NB. we avoid random getattrs and peek in the __dict__ instead
# (XXX originally introduced from a PyPy need, still true?)
dicts = [getattr(self.obj, '__dict__', {})]
for basecls in inspect.getmro(self.obj.__class__):
dicts.append(basecls.__dict__)
seen = {}
l = []
for dic in dicts:
for name, obj in dic.items():
if name in seen:
continue
seen[name] = True
res = self.makeitem(name, obj)
if res is None:
continue
if not isinstance(res, list):
res = [res]
l.extend(res)
l.sort(key=lambda item: item.reportinfo()[:2])
return l
def makeitem(self, name, obj):
#assert self.ihook.fspath == self.fspath, self
return self.ihook.pytest_pycollect_makeitem(
collector=self, name=name, obj=obj)
def _genfunctions(self, name, funcobj):
module = self.getparent(Module).obj
clscol = self.getparent(Class)
cls = clscol and clscol.obj or None
transfer_markers(funcobj, cls, module)
fm = self.session._fixturemanager
fixtureinfo = fm.getfixtureinfo(self, funcobj, cls)
metafunc = Metafunc(funcobj, fixtureinfo, self.config,
cls=cls, module=module)
methods = []
if hasattr(module, "pytest_generate_tests"):
methods.append(module.pytest_generate_tests)
if hasattr(cls, "pytest_generate_tests"):
methods.append(cls().pytest_generate_tests)
if methods:
self.ihook.pytest_generate_tests.call_extra(methods,
dict(metafunc=metafunc))
else:
self.ihook.pytest_generate_tests(metafunc=metafunc)
Function = self._getcustomclass("Function")
if not metafunc._calls:
yield Function(name, parent=self, fixtureinfo=fixtureinfo)
else:
# add funcargs() as fixturedefs to fixtureinfo.arg2fixturedefs
add_funcarg_pseudo_fixture_def(self, metafunc, fm)
for callspec in metafunc._calls:
subname = "%s[%s]" %(name, callspec.id)
yield Function(name=subname, parent=self,
callspec=callspec, callobj=funcobj,
fixtureinfo=fixtureinfo,
keywords={callspec.id:True})
def add_funcarg_pseudo_fixture_def(collector, metafunc, fixturemanager):
# this function will transform all collected calls to a functions
# if they use direct funcargs (i.e. direct parametrization)
# because we want later test execution to be able to rely on
# an existing FixtureDef structure for all arguments.
# XXX we can probably avoid this algorithm if we modify CallSpec2
# to directly care for creating the fixturedefs within its methods.
if not metafunc._calls[0].funcargs:
return # this function call does not have direct parametrization
# collect funcargs of all callspecs into a list of values
arg2params = {}
arg2scope = {}
for callspec in metafunc._calls:
for argname, argvalue in callspec.funcargs.items():
assert argname not in callspec.params
callspec.params[argname] = argvalue
arg2params_list = arg2params.setdefault(argname, [])
callspec.indices[argname] = len(arg2params_list)
arg2params_list.append(argvalue)
if argname not in arg2scope:
scopenum = callspec._arg2scopenum.get(argname,
scopenum_function)
arg2scope[argname] = scopes[scopenum]
callspec.funcargs.clear()
# register artificial FixtureDef's so that later at test execution
# time we can rely on a proper FixtureDef to exist for fixture setup.
arg2fixturedefs = metafunc._arg2fixturedefs
for argname, valuelist in arg2params.items():
# if we have a scope that is higher than function we need
# to make sure we only ever create an according fixturedef on
# a per-scope basis. We thus store and cache the fixturedef on the
# node related to the scope.
scope = arg2scope[argname]
node = None
if scope != "function":
node = get_scope_node(collector, scope)
if node is None:
assert scope == "class" and isinstance(collector, Module)
# use module-level collector for class-scope (for now)
node = collector
if node and argname in node._name2pseudofixturedef:
arg2fixturedefs[argname] = [node._name2pseudofixturedef[argname]]
else:
fixturedef = FixtureDef(fixturemanager, '', argname,
get_direct_param_fixture_func,
arg2scope[argname],
valuelist, False, False)
arg2fixturedefs[argname] = [fixturedef]
if node is not None:
node._name2pseudofixturedef[argname] = fixturedef
def get_direct_param_fixture_func(request):
return request.param
class FuncFixtureInfo:
def __init__(self, argnames, names_closure, name2fixturedefs):
self.argnames = argnames
self.names_closure = names_closure
self.name2fixturedefs = name2fixturedefs
def _marked(func, mark):
""" Returns True if :func: is already marked with :mark:, False otherwise.
This can happen if marker is applied to class and the test file is
invoked more than once.
"""
try:
func_mark = getattr(func, mark.name)
except AttributeError:
return False
return mark.args == func_mark.args and mark.kwargs == func_mark.kwargs
def transfer_markers(funcobj, cls, mod):
# XXX this should rather be code in the mark plugin or the mark
# plugin should merge with the python plugin.
for holder in (cls, mod):
try:
pytestmark = holder.pytestmark
except AttributeError:
continue
if isinstance(pytestmark, list):
for mark in pytestmark:
if not _marked(funcobj, mark):
mark(funcobj)
else:
if not _marked(funcobj, pytestmark):
pytestmark(funcobj)
class Module(pytest.File, PyCollector):
""" Collector for test classes and functions. """
def _getobj(self):
return self._memoizedcall('_obj', self._importtestmodule)
def collect(self):
self.session._fixturemanager.parsefactories(self)
return super(Module, self).collect()
def _importtestmodule(self):
# we assume we are only called once per module
try:
mod = self.fspath.pyimport(ensuresyspath="append")
except SyntaxError:
raise self.CollectError(
py.code.ExceptionInfo().getrepr(style="short"))
except self.fspath.ImportMismatchError:
e = sys.exc_info()[1]
raise self.CollectError(
"import file mismatch:\n"<|fim▁hole|> "which is not the same as the test file we want to collect:\n"
" %s\n"
"HINT: remove __pycache__ / .pyc files and/or use a "
"unique basename for your test file modules"
% e.args
)
#print "imported test module", mod
self.config.pluginmanager.consider_module(mod)
return mod
def setup(self):
setup_module = xunitsetup(self.obj, "setUpModule")
if setup_module is None:
setup_module = xunitsetup(self.obj, "setup_module")
if setup_module is not None:
#XXX: nose compat hack, move to nose plugin
# if it takes a positional arg, its probably a pytest style one
# so we pass the current module object
if inspect.getargspec(setup_module)[0]:
setup_module(self.obj)
else:
setup_module()
fin = getattr(self.obj, 'tearDownModule', None)
if fin is None:
fin = getattr(self.obj, 'teardown_module', None)
if fin is not None:
#XXX: nose compat hack, move to nose plugin
# if it takes a positional arg, it's probably a pytest style one
# so we pass the current module object
if inspect.getargspec(fin)[0]:
finalizer = lambda: fin(self.obj)
else:
finalizer = fin
self.addfinalizer(finalizer)
class Class(PyCollector):
""" Collector for test methods. """
def collect(self):
if hasinit(self.obj):
self.warn("C1", "cannot collect test class %r because it has a "
"__init__ constructor" % self.obj.__name__)
return []
return [self._getcustomclass("Instance")(name="()", parent=self)]
def setup(self):
setup_class = xunitsetup(self.obj, 'setup_class')
if setup_class is not None:
setup_class = getattr(setup_class, 'im_func', setup_class)
setup_class = getattr(setup_class, '__func__', setup_class)
setup_class(self.obj)
fin_class = getattr(self.obj, 'teardown_class', None)
if fin_class is not None:
fin_class = getattr(fin_class, 'im_func', fin_class)
fin_class = getattr(fin_class, '__func__', fin_class)
self.addfinalizer(lambda: fin_class(self.obj))
class Instance(PyCollector):
def _getobj(self):
obj = self.parent.obj()
return obj
def collect(self):
self.session._fixturemanager.parsefactories(self)
return super(Instance, self).collect()
def newinstance(self):
self.obj = self._getobj()
return self.obj
class FunctionMixin(PyobjMixin):
""" mixin for the code common to Function and Generator.
"""
def setup(self):
""" perform setup for this test function. """
if hasattr(self, '_preservedparent'):
obj = self._preservedparent
elif isinstance(self.parent, Instance):
obj = self.parent.newinstance()
self.obj = self._getobj()
else:
obj = self.parent.obj
if inspect.ismethod(self.obj):
setup_name = 'setup_method'
teardown_name = 'teardown_method'
else:
setup_name = 'setup_function'
teardown_name = 'teardown_function'
setup_func_or_method = xunitsetup(obj, setup_name)
if setup_func_or_method is not None:
setup_func_or_method(self.obj)
fin = getattr(obj, teardown_name, None)
if fin is not None:
self.addfinalizer(lambda: fin(self.obj))
def _prunetraceback(self, excinfo):
if hasattr(self, '_obj') and not self.config.option.fulltrace:
code = py.code.Code(get_real_func(self.obj))
path, firstlineno = code.path, code.firstlineno
traceback = excinfo.traceback
ntraceback = traceback.cut(path=path, firstlineno=firstlineno)
if ntraceback == traceback:
ntraceback = ntraceback.cut(path=path)
if ntraceback == traceback:
#ntraceback = ntraceback.cut(excludepath=cutdir2)
ntraceback = ntraceback.filter(filter_traceback)
if not ntraceback:
ntraceback = traceback
excinfo.traceback = ntraceback.filter()
# issue364: mark all but first and last frames to
# only show a single-line message for each frame
if self.config.option.tbstyle == "auto":
if len(excinfo.traceback) > 2:
for entry in excinfo.traceback[1:-1]:
entry.set_repr_style('short')
def _repr_failure_py(self, excinfo, style="long"):
if excinfo.errisinstance(pytest.fail.Exception):
if not excinfo.value.pytrace:
return str(excinfo.value)
return super(FunctionMixin, self)._repr_failure_py(excinfo,
style=style)
def repr_failure(self, excinfo, outerr=None):
assert outerr is None, "XXX outerr usage is deprecated"
style = self.config.option.tbstyle
if style == "auto":
style = "long"
return self._repr_failure_py(excinfo, style=style)
class Generator(FunctionMixin, PyCollector):
def collect(self):
# test generators are seen as collectors but they also
# invoke setup/teardown by popular request
# (induced by the common "test_*" naming shared with normal tests)
self.session._setupstate.prepare(self)
# see FunctionMixin.setup and test_setupstate_is_preserved_134
self._preservedparent = self.parent.obj
l = []
seen = {}
for i, x in enumerate(self.obj()):
name, call, args = self.getcallargs(x)
if not callable(call):
raise TypeError("%r yielded non callable test %r" %(self.obj, call,))
if name is None:
name = "[%d]" % i
else:
name = "['%s']" % name
if name in seen:
raise ValueError("%r generated tests with non-unique name %r" %(self, name))
seen[name] = True
l.append(self.Function(name, self, args=args, callobj=call))
return l
def getcallargs(self, obj):
if not isinstance(obj, (tuple, list)):
obj = (obj,)
# explicit naming
if isinstance(obj[0], py.builtin._basestring):
name = obj[0]
obj = obj[1:]
else:
name = None
call, args = obj[0], obj[1:]
return name, call, args
def hasinit(obj):
init = getattr(obj, '__init__', None)
if init:
if init != object.__init__:
return True
def fillfixtures(function):
""" fill missing funcargs for a test function. """
try:
request = function._request
except AttributeError:
# XXX this special code path is only expected to execute
# with the oejskit plugin. It uses classes with funcargs
# and we thus have to work a bit to allow this.
fm = function.session._fixturemanager
fi = fm.getfixtureinfo(function.parent, function.obj, None)
function._fixtureinfo = fi
request = function._request = FixtureRequest(function)
request._fillfixtures()
# prune out funcargs for jstests
newfuncargs = {}
for name in fi.argnames:
newfuncargs[name] = function.funcargs[name]
function.funcargs = newfuncargs
else:
request._fillfixtures()
_notexists = object()
class CallSpec2(object):
def __init__(self, metafunc):
self.metafunc = metafunc
self.funcargs = {}
self._idlist = []
self.params = {}
self._globalid = _notexists
self._globalid_args = set()
self._globalparam = _notexists
self._arg2scopenum = {} # used for sorting parametrized resources
self.keywords = {}
self.indices = {}
def copy(self, metafunc):
cs = CallSpec2(self.metafunc)
cs.funcargs.update(self.funcargs)
cs.params.update(self.params)
cs.keywords.update(self.keywords)
cs.indices.update(self.indices)
cs._arg2scopenum.update(self._arg2scopenum)
cs._idlist = list(self._idlist)
cs._globalid = self._globalid
cs._globalid_args = self._globalid_args
cs._globalparam = self._globalparam
return cs
def _checkargnotcontained(self, arg):
if arg in self.params or arg in self.funcargs:
raise ValueError("duplicate %r" %(arg,))
def getparam(self, name):
try:
return self.params[name]
except KeyError:
if self._globalparam is _notexists:
raise ValueError(name)
return self._globalparam
@property
def id(self):
return "-".join(map(str, filter(None, self._idlist)))
def setmulti(self, valtypes, argnames, valset, id, keywords, scopenum,
param_index):
for arg,val in zip(argnames, valset):
self._checkargnotcontained(arg)
valtype_for_arg = valtypes[arg]
getattr(self, valtype_for_arg)[arg] = val
self.indices[arg] = param_index
self._arg2scopenum[arg] = scopenum
if val is _notexists:
self._emptyparamspecified = True
self._idlist.append(id)
self.keywords.update(keywords)
def setall(self, funcargs, id, param):
for x in funcargs:
self._checkargnotcontained(x)
self.funcargs.update(funcargs)
if id is not _notexists:
self._idlist.append(id)
if param is not _notexists:
assert self._globalparam is _notexists
self._globalparam = param
for arg in funcargs:
self._arg2scopenum[arg] = scopenum_function
class FuncargnamesCompatAttr:
""" helper class so that Metafunc, Function and FixtureRequest
don't need to each define the "funcargnames" compatibility attribute.
"""
@property
def funcargnames(self):
""" alias attribute for ``fixturenames`` for pre-2.3 compatibility"""
return self.fixturenames
class Metafunc(FuncargnamesCompatAttr):
"""
Metafunc objects are passed to the ``pytest_generate_tests`` hook.
They help to inspect a test function and to generate tests according to
test configuration or values specified in the class or module where a
test function is defined.
:ivar fixturenames: set of fixture names required by the test function
:ivar function: underlying python test function
:ivar cls: class object in which the test function is defined, or ``None``.
:ivar module: the module object in which the test function is defined.
:ivar config: access to the :class:`_pytest.config.Config` object for the
test session.
:ivar funcargnames:
.. deprecated:: 2.3
Use ``fixturenames`` instead.
"""
def __init__(self, function, fixtureinfo, config, cls=None, module=None):
self.config = config
self.module = module
self.function = function
self.fixturenames = fixtureinfo.names_closure
self._arg2fixturedefs = fixtureinfo.name2fixturedefs
self.cls = cls
self._calls = []
self._ids = py.builtin.set()
def parametrize(self, argnames, argvalues, indirect=False, ids=None,
scope=None):
""" Add new invocations to the underlying test function using the list
of argvalues for the given argnames. Parametrization is performed
during the collection phase. If you need to set up expensive resources,
consider using indirect to do it at test setup time instead.
:arg argnames: a comma-separated string denoting one or more argument
names, or a list/tuple of argument strings.
:arg argvalues: The list of argvalues determines how often a
test is invoked with different argument values. If only one
argname was specified argvalues is a list of simple values. If N
argnames were specified, argvalues must be a list of N-tuples,
where each tuple-element specifies a value for its respective
argname.
:arg indirect: A list of argument names (a subset of argnames), or a
boolean. If True, all names from argnames are treated as indirect.
Each argvalue corresponding to an argname in this list will
be passed as request.param to its respective argname fixture
function so that it can perform more expensive setups during the
setup phase of a test rather than at collection time.
:arg ids: list of string ids, or a callable.
If strings, each corresponds to an entry in argvalues so that it becomes
part of the test id.
If callable, it should take one argument (a single argvalue) and return
a string or None. If it returns None, the automatically generated id for
that argument will be used.
If no ids are provided they will be generated automatically from
the argvalues.
:arg scope: if specified it denotes the scope of the parameters.
The scope is used for grouping tests by parameter instances.
It will also override any fixture-function defined scope, allowing
a dynamic scope to be set using test context or configuration.
"""
# individual parametrized argument sets can be wrapped in a series
# of markers in which case we unwrap the values and apply the mark
# at Function init
newkeywords = {}
unwrapped_argvalues = []
for i, argval in enumerate(argvalues):
while isinstance(argval, MarkDecorator):
newmark = MarkDecorator(argval.markname,
argval.args[:-1], argval.kwargs)
newmarks = newkeywords.setdefault(i, {})
newmarks[newmark.markname] = newmark
argval = argval.args[-1]
unwrapped_argvalues.append(argval)
argvalues = unwrapped_argvalues
if not isinstance(argnames, (tuple, list)):
argnames = [x.strip() for x in argnames.split(",") if x.strip()]
if len(argnames) == 1:
argvalues = [(val,) for val in argvalues]
if not argvalues:
argvalues = [(_notexists,) * len(argnames)]
if scope is None:
scope = "function"
scopenum = scopes.index(scope)
valtypes = {}
for arg in argnames:
if arg not in self.fixturenames:
raise ValueError("%r uses no fixture %r" %(self.function, arg))
if indirect is True:
valtypes = dict.fromkeys(argnames, "params")
elif indirect is False:
valtypes = dict.fromkeys(argnames, "funcargs")
elif isinstance(indirect, (tuple, list)):
valtypes = dict.fromkeys(argnames, "funcargs")
for arg in indirect:
if arg not in argnames:
raise ValueError("indirect given to %r: fixture %r doesn't exist" %(
self.function, arg))
valtypes[arg] = "params"
idfn = None
if callable(ids):
idfn = ids
ids = None
if ids and len(ids) != len(argvalues):
raise ValueError('%d tests specified with %d ids' %(
len(argvalues), len(ids)))
if not ids:
ids = idmaker(argnames, argvalues, idfn)
newcalls = []
for callspec in self._calls or [CallSpec2(self)]:
for param_index, valset in enumerate(argvalues):
assert len(valset) == len(argnames)
newcallspec = callspec.copy(self)
newcallspec.setmulti(valtypes, argnames, valset, ids[param_index],
newkeywords.get(param_index, {}), scopenum,
param_index)
newcalls.append(newcallspec)
self._calls = newcalls
def addcall(self, funcargs=None, id=_notexists, param=_notexists):
""" (deprecated, use parametrize) Add a new call to the underlying
test function during the collection phase of a test run. Note that
request.addcall() is called during the test collection phase, prior to
and independently of actual test execution. You should only use addcall()
if you need to specify multiple arguments of a test function.
:arg funcargs: argument keyword dictionary used when invoking
the test function.
:arg id: used for reporting and identification purposes. If you
don't supply an `id` an automatic unique id will be generated.
:arg param: a parameter which will be exposed to a later fixture function
invocation through the ``request.param`` attribute.
"""
assert funcargs is None or isinstance(funcargs, dict)
if funcargs is not None:
for name in funcargs:
if name not in self.fixturenames:
pytest.fail("funcarg %r not used in this function." % name)
else:
funcargs = {}
if id is None:
raise ValueError("id=None not allowed")
if id is _notexists:
id = len(self._calls)
id = str(id)
if id in self._ids:
raise ValueError("duplicate id %r" % id)
self._ids.add(id)
cs = CallSpec2(self)
cs.setall(funcargs, id, param)
self._calls.append(cs)
def _idval(val, argname, idx, idfn):
if idfn:
try:
s = idfn(val)
if s:
return s
except Exception:
pass
if isinstance(val, (float, int, str, bool, NoneType)):
return str(val)
elif isinstance(val, REGEX_TYPE):
return val.pattern
elif enum is not None and isinstance(val, enum.Enum):
return str(val)
elif isclass(val) and hasattr(val, '__name__'):
return val.__name__
return str(argname)+str(idx)
def _idvalset(idx, valset, argnames, idfn):
this_id = [_idval(val, argname, idx, idfn)
for val, argname in zip(valset, argnames)]
return "-".join(this_id)
def idmaker(argnames, argvalues, idfn=None):
ids = [_idvalset(valindex, valset, argnames, idfn)
for valindex, valset in enumerate(argvalues)]
if len(set(ids)) < len(ids):
# user may have provided a bad idfn which means the ids are not unique
ids = [str(i) + testid for i, testid in enumerate(ids)]
return ids
def showfixtures(config):
from _pytest.main import wrap_session
return wrap_session(config, _showfixtures_main)
def _showfixtures_main(config, session):
import _pytest.config
session.perform_collect()
curdir = py.path.local()
tw = _pytest.config.create_terminal_writer(config)
verbose = config.getvalue("verbose")
fm = session._fixturemanager
available = []
for argname, fixturedefs in fm._arg2fixturedefs.items():
assert fixturedefs is not None
if not fixturedefs:
continue
fixturedef = fixturedefs[-1]
loc = getlocation(fixturedef.func, curdir)
available.append((len(fixturedef.baseid),
fixturedef.func.__module__,
curdir.bestrelpath(loc),
fixturedef.argname, fixturedef))
available.sort()
currentmodule = None
for baseid, module, bestrel, argname, fixturedef in available:
if currentmodule != module:
if not module.startswith("_pytest."):
tw.line()
tw.sep("-", "fixtures defined from %s" %(module,))
currentmodule = module
if verbose <= 0 and argname[0] == "_":
continue
if verbose > 0:
funcargspec = "%s -- %s" %(argname, bestrel,)
else:
funcargspec = argname
tw.line(funcargspec, green=True)
loc = getlocation(fixturedef.func, curdir)
doc = fixturedef.func.__doc__ or ""
if doc:
for line in doc.strip().split("\n"):
tw.line(" " + line.strip())
else:
tw.line(" %s: no docstring available" %(loc,),
red=True)
def getlocation(function, curdir):
import inspect
fn = py.path.local(inspect.getfile(function))
lineno = py.builtin._getcode(function).co_firstlineno
if fn.relto(curdir):
fn = fn.relto(curdir)
return "%s:%d" %(fn, lineno+1)
# builtin pytest.raises helper
def raises(expected_exception, *args, **kwargs):
""" assert that a code block/function call raises @expected_exception
and raise a failure exception otherwise.
This helper produces a ``py.code.ExceptionInfo()`` object.
If using Python 2.5 or above, you may use this function as a
context manager::
>>> with raises(ZeroDivisionError):
... 1/0
Or you can specify a callable by passing a to-be-called lambda::
>>> raises(ZeroDivisionError, lambda: 1/0)
<ExceptionInfo ...>
or you can specify an arbitrary callable with arguments::
>>> def f(x): return 1/x
...
>>> raises(ZeroDivisionError, f, 0)
<ExceptionInfo ...>
>>> raises(ZeroDivisionError, f, x=0)
<ExceptionInfo ...>
A third possibility is to use a string to be executed::
>>> raises(ZeroDivisionError, "f(0)")
<ExceptionInfo ...>
Performance note:
-----------------
Similar to caught exception objects in Python, explicitly clearing
local references to returned ``py.code.ExceptionInfo`` objects can
help the Python interpreter speed up its garbage collection.
Clearing those references breaks a reference cycle
(``ExceptionInfo`` --> caught exception --> frame stack raising
the exception --> current frame stack --> local variables -->
``ExceptionInfo``) which makes Python keep all objects referenced
from that cycle (including all local variables in the current
frame) alive until the next cyclic garbage collection run. See the
official Python ``try`` statement documentation for more detailed
information.
"""
__tracebackhide__ = True
if expected_exception is AssertionError:
# we want to catch an AssertionError
# replace our subclass with the builtin one
# see https://github.com/pytest-dev/pytest/issues/176
from _pytest.assertion.util import BuiltinAssertionError \
as expected_exception
msg = ("exceptions must be old-style classes or"
" derived from BaseException, not %s")
if isinstance(expected_exception, tuple):
for exc in expected_exception:
if not isclass(exc):
raise TypeError(msg % type(exc))
elif not isclass(expected_exception):
raise TypeError(msg % type(expected_exception))
if not args:
return RaisesContext(expected_exception)
elif isinstance(args[0], str):
code, = args
assert isinstance(code, str)
frame = sys._getframe(1)
loc = frame.f_locals.copy()
loc.update(kwargs)
#print "raises frame scope: %r" % frame.f_locals
try:
code = py.code.Source(code).compile()
py.builtin.exec_(code, frame.f_globals, loc)
# XXX didn't f_globals == f_locals mean something special?
# this is destroyed here ...
except expected_exception:
return py.code.ExceptionInfo()
else:
func = args[0]
try:
func(*args[1:], **kwargs)
except expected_exception:
return py.code.ExceptionInfo()
pytest.fail("DID NOT RAISE")
class RaisesContext(object):
def __init__(self, expected_exception):
self.expected_exception = expected_exception
self.excinfo = None
def __enter__(self):
self.excinfo = object.__new__(py.code.ExceptionInfo)
return self.excinfo
def __exit__(self, *tp):
__tracebackhide__ = True
if tp[0] is None:
pytest.fail("DID NOT RAISE")
if sys.version_info < (2, 7):
# py26: on __exit__() exc_value often does not contain the
# exception value.
# http://bugs.python.org/issue7853
if not isinstance(tp[1], BaseException):
exc_type, value, traceback = tp
tp = exc_type, exc_type(value), traceback
self.excinfo.__init__(tp)
return issubclass(self.excinfo.type, self.expected_exception)
#
# the basic pytest Function item
#
class Function(FunctionMixin, pytest.Item, FuncargnamesCompatAttr):
""" a Function Item is responsible for setting up and executing a
Python test function.
"""
_genid = None
def __init__(self, name, parent, args=None, config=None,
callspec=None, callobj=NOTSET, keywords=None, session=None,
fixtureinfo=None):
super(Function, self).__init__(name, parent, config=config,
session=session)
self._args = args
if callobj is not NOTSET:
self.obj = callobj
self.keywords.update(self.obj.__dict__)
if callspec:
self.callspec = callspec
self.keywords.update(callspec.keywords)
if keywords:
self.keywords.update(keywords)
if fixtureinfo is None:
fixtureinfo = self.session._fixturemanager.getfixtureinfo(
self.parent, self.obj, self.cls,
funcargs=not self._isyieldedfunction())
self._fixtureinfo = fixtureinfo
self.fixturenames = fixtureinfo.names_closure
self._initrequest()
def _initrequest(self):
self.funcargs = {}
if self._isyieldedfunction():
assert not hasattr(self, "callspec"), (
"yielded functions (deprecated) cannot have funcargs")
else:
if hasattr(self, "callspec"):
callspec = self.callspec
assert not callspec.funcargs
self._genid = callspec.id
if hasattr(callspec, "param"):
self.param = callspec.param
self._request = FixtureRequest(self)
@property
def function(self):
"underlying python 'function' object"
return getattr(self.obj, 'im_func', self.obj)
def _getobj(self):
name = self.name
i = name.find("[") # parametrization
if i != -1:
name = name[:i]
return getattr(self.parent.obj, name)
@property
def _pyfuncitem(self):
"(compatonly) for code expecting pytest-2.2 style request objects"
return self
def _isyieldedfunction(self):
return getattr(self, "_args", None) is not None
def runtest(self):
""" execute the underlying test function. """
self.ihook.pytest_pyfunc_call(pyfuncitem=self)
def setup(self):
# check if parametrization happened with an empty list
try:
self.callspec._emptyparamspecified
except AttributeError:
pass
else:
fs, lineno = self._getfslineno()
pytest.skip("got empty parameter set, function %s at %s:%d" %(
self.function.__name__, fs, lineno))
super(Function, self).setup()
fillfixtures(self)
scope2props = dict(session=())
scope2props["module"] = ("fspath", "module")
scope2props["class"] = scope2props["module"] + ("cls",)
scope2props["instance"] = scope2props["class"] + ("instance", )
scope2props["function"] = scope2props["instance"] + ("function", "keywords")
def scopeproperty(name=None, doc=None):
def decoratescope(func):
scopename = name or func.__name__
def provide(self):
if func.__name__ in scope2props[self.scope]:
return func(self)
raise AttributeError("%s not available in %s-scoped context" % (
scopename, self.scope))
return property(provide, None, None, func.__doc__)
return decoratescope
class FixtureRequest(FuncargnamesCompatAttr):
""" A request for a fixture from a test or fixture function.
A request object gives access to the requesting test context
and has an optional ``param`` attribute in case
the fixture is parametrized indirectly.
"""
def __init__(self, pyfuncitem):
self._pyfuncitem = pyfuncitem
#: fixture for which this request is being performed
self.fixturename = None
#: Scope string, one of "function", "class", "module", "session"
self.scope = "function"
self._funcargs = {}
self._fixturedefs = {}
fixtureinfo = pyfuncitem._fixtureinfo
self._arg2fixturedefs = fixtureinfo.name2fixturedefs.copy()
self._arg2index = {}
self.fixturenames = fixtureinfo.names_closure
self._fixturemanager = pyfuncitem.session._fixturemanager
@property
def node(self):
""" underlying collection node (depends on current request scope)"""
return self._getscopeitem(self.scope)
def _getnextfixturedef(self, argname):
fixturedefs = self._arg2fixturedefs.get(argname, None)
if fixturedefs is None:
# we arrive here because of a dynamic call to
# getfuncargvalue(argname) usage which was naturally
# not known at parsing/collection time
fixturedefs = self._fixturemanager.getfixturedefs(
argname, self._pyfuncitem.parent.nodeid)
self._arg2fixturedefs[argname] = fixturedefs
# fixturedefs list is immutable so we maintain a decreasing index
index = self._arg2index.get(argname, 0) - 1
if fixturedefs is None or (-index > len(fixturedefs)):
raise FixtureLookupError(argname, self)
self._arg2index[argname] = index
return fixturedefs[index]
@property
def config(self):
""" the pytest config object associated with this request. """
return self._pyfuncitem.config
@scopeproperty()
def function(self):
""" test function object if the request has a per-function scope. """
return self._pyfuncitem.obj
@scopeproperty("class")
def cls(self):
""" class (can be None) where the test function was collected. """
clscol = self._pyfuncitem.getparent(pytest.Class)
if clscol:
return clscol.obj
@property
def instance(self):
""" instance (can be None) on which test function was collected. """
# unittest support hack, see _pytest.unittest.TestCaseFunction
try:
return self._pyfuncitem._testcase
except AttributeError:
function = getattr(self, "function", None)
if function is not None:
return py.builtin._getimself(function)
@scopeproperty()
def module(self):
""" python module object where the test function was collected. """
return self._pyfuncitem.getparent(pytest.Module).obj
@scopeproperty()
def fspath(self):
""" the file system path of the test module which collected this test. """
return self._pyfuncitem.fspath
@property
def keywords(self):
""" keywords/markers dictionary for the underlying node. """
return self.node.keywords
@property
def session(self):
""" pytest session object. """
return self._pyfuncitem.session
def addfinalizer(self, finalizer):
""" add finalizer/teardown function to be called after the
last test within the requesting test context finished
execution. """
# XXX usually this method is shadowed by fixturedef specific ones
self._addfinalizer(finalizer, scope=self.scope)
def _addfinalizer(self, finalizer, scope):
colitem = self._getscopeitem(scope)
self._pyfuncitem.session._setupstate.addfinalizer(
finalizer=finalizer, colitem=colitem)
def applymarker(self, marker):
""" Apply a marker to a single test function invocation.
This method is useful if you don't want to have a keyword/marker
on all function invocations.
:arg marker: a :py:class:`_pytest.mark.MarkDecorator` object
created by a call to ``pytest.mark.NAME(...)``.
"""
try:
self.node.keywords[marker.markname] = marker
except AttributeError:
raise ValueError(marker)
def raiseerror(self, msg):
""" raise a FixtureLookupError with the given message. """
raise self._fixturemanager.FixtureLookupError(None, self, msg)
def _fillfixtures(self):
item = self._pyfuncitem
fixturenames = getattr(item, "fixturenames", self.fixturenames)
for argname in fixturenames:
if argname not in item.funcargs:
item.funcargs[argname] = self.getfuncargvalue(argname)
def cached_setup(self, setup, teardown=None, scope="module", extrakey=None):
""" (deprecated) Return a testing resource managed by ``setup`` &
``teardown`` calls. ``scope`` and ``extrakey`` determine when the
``teardown`` function will be called so that subsequent calls to
``setup`` will recreate the resource. With pytest-2.3 you often
do not need ``cached_setup()`` as you can directly declare a scope
on a fixture function and register a finalizer through
``request.addfinalizer()``.
:arg teardown: function receiving a previously setup resource.
:arg setup: a no-argument function creating a resource.
:arg scope: a string value out of ``function``, ``class``, ``module``
or ``session`` indicating the caching lifecycle of the resource.
:arg extrakey: added to internal caching key of (funcargname, scope).
"""
if not hasattr(self.config, '_setupcache'):
self.config._setupcache = {} # XXX weakref?
cachekey = (self.fixturename, self._getscopeitem(scope), extrakey)
cache = self.config._setupcache
try:
val = cache[cachekey]
except KeyError:
self._check_scope(self.fixturename, self.scope, scope)
val = setup()
cache[cachekey] = val
if teardown is not None:
def finalizer():
del cache[cachekey]
teardown(val)
self._addfinalizer(finalizer, scope=scope)
return val
def getfuncargvalue(self, argname):
""" Dynamically retrieve a named fixture function argument.
As of pytest-2.3, it is easier and usually better to access other
fixture values by stating them as input arguments in the fixture
function. If you can only decide about using another fixture at test
setup time, you may use this function to retrieve it inside a fixture
function body.
"""
return self._get_active_fixturedef(argname).cached_result[0]
def _get_active_fixturedef(self, argname):
try:
return self._fixturedefs[argname]
except KeyError:
try:
fixturedef = self._getnextfixturedef(argname)
except FixtureLookupError:
if argname == "request":
class PseudoFixtureDef:
cached_result = (self, [0], None)
scope = "function"
return PseudoFixtureDef
raise
# remove indent to prevent the python3 exception
# from leaking into the call
result = self._getfuncargvalue(fixturedef)
self._funcargs[argname] = result
self._fixturedefs[argname] = fixturedef
return fixturedef
def _get_fixturestack(self):
current = self
l = []
while 1:
fixturedef = getattr(current, "_fixturedef", None)
if fixturedef is None:
l.reverse()
return l
l.append(fixturedef)
current = current._parent_request
def _getfuncargvalue(self, fixturedef):
# prepare a subrequest object before calling fixture function
# (latter managed by fixturedef)
argname = fixturedef.argname
funcitem = self._pyfuncitem
scope = fixturedef.scope
try:
param = funcitem.callspec.getparam(argname)
except (AttributeError, ValueError):
param = NOTSET
param_index = 0
else:
# indices might not be set if old-style metafunc.addcall() was used
param_index = funcitem.callspec.indices.get(argname, 0)
# if a parametrize invocation set a scope it will override
# the static scope defined with the fixture function
paramscopenum = funcitem.callspec._arg2scopenum.get(argname)
if paramscopenum is not None:
scope = scopes[paramscopenum]
subrequest = SubRequest(self, scope, param, param_index, fixturedef)
# check if a higher-level scoped fixture accesses a lower level one
subrequest._check_scope(argname, self.scope, scope)
# clear sys.exc_info before invoking the fixture (python bug?)
# if it's not explicitly cleared it will leak into the call
exc_clear()
try:
# call the fixture function
val = fixturedef.execute(request=subrequest)
finally:
# if fixture function failed it might have registered finalizers
self.session._setupstate.addfinalizer(fixturedef.finish,
subrequest.node)
return val
def _check_scope(self, argname, invoking_scope, requested_scope):
if argname == "request":
return
if scopemismatch(invoking_scope, requested_scope):
# try to report something helpful
lines = self._factorytraceback()
pytest.fail("ScopeMismatch: You tried to access the %r scoped "
"fixture %r with a %r scoped request object, "
"involved factories\n%s" %(
(requested_scope, argname, invoking_scope, "\n".join(lines))),
pytrace=False)
def _factorytraceback(self):
lines = []
for fixturedef in self._get_fixturestack():
factory = fixturedef.func
fs, lineno = getfslineno(factory)
p = self._pyfuncitem.session.fspath.bestrelpath(fs)
args = inspect.formatargspec(*inspect.getargspec(factory))
lines.append("%s:%d: def %s%s" %(
p, lineno, factory.__name__, args))
return lines
def _getscopeitem(self, scope):
if scope == "function":
# this might also be a non-function Item despite its attribute name
return self._pyfuncitem
node = get_scope_node(self._pyfuncitem, scope)
if node is None and scope == "class":
# fallback to function item itself
node = self._pyfuncitem
assert node
return node
def __repr__(self):
return "<FixtureRequest for %r>" %(self.node)
class SubRequest(FixtureRequest):
""" a sub request for handling getting a fixture from a
test function/fixture. """
def __init__(self, request, scope, param, param_index, fixturedef):
self._parent_request = request
self.fixturename = fixturedef.argname
if param is not NOTSET:
self.param = param
self.param_index = param_index
self.scope = scope
self._fixturedef = fixturedef
self.addfinalizer = fixturedef.addfinalizer
self._pyfuncitem = request._pyfuncitem
self._funcargs = request._funcargs
self._fixturedefs = request._fixturedefs
self._arg2fixturedefs = request._arg2fixturedefs
self._arg2index = request._arg2index
self.fixturenames = request.fixturenames
self._fixturemanager = request._fixturemanager
def __repr__(self):
return "<SubRequest %r for %r>" % (self.fixturename, self._pyfuncitem)
class ScopeMismatchError(Exception):
""" A fixture function tries to use a different fixture function which
which has a lower scope (e.g. a Session one calls a function one)
"""
scopes = "session module class function".split()
scopenum_function = scopes.index("function")
def scopemismatch(currentscope, newscope):
return scopes.index(newscope) > scopes.index(currentscope)
class FixtureLookupError(LookupError):
""" could not return a requested Fixture (missing or invalid). """
def __init__(self, argname, request, msg=None):
self.argname = argname
self.request = request
self.fixturestack = request._get_fixturestack()
self.msg = msg
def formatrepr(self):
tblines = []
addline = tblines.append
stack = [self.request._pyfuncitem.obj]
stack.extend(map(lambda x: x.func, self.fixturestack))
msg = self.msg
if msg is not None:
stack = stack[:-1] # the last fixture raised an error, let's present
# it at the requesting side
for function in stack:
fspath, lineno = getfslineno(function)
try:
lines, _ = inspect.getsourcelines(get_real_func(function))
except IOError:
error_msg = "file %s, line %s: source code not available"
addline(error_msg % (fspath, lineno+1))
else:
addline("file %s, line %s" % (fspath, lineno+1))
for i, line in enumerate(lines):
line = line.rstrip()
addline(" " + line)
if line.lstrip().startswith('def'):
break
if msg is None:
fm = self.request._fixturemanager
available = []
for name, fixturedef in fm._arg2fixturedefs.items():
parentid = self.request._pyfuncitem.parent.nodeid
faclist = list(fm._matchfactories(fixturedef, parentid))
if faclist:
available.append(name)
msg = "fixture %r not found" % (self.argname,)
msg += "\n available fixtures: %s" %(", ".join(available),)
msg += "\n use 'py.test --fixtures [testpath]' for help on them."
return FixtureLookupErrorRepr(fspath, lineno, tblines, msg, self.argname)
class FixtureLookupErrorRepr(TerminalRepr):
def __init__(self, filename, firstlineno, tblines, errorstring, argname):
self.tblines = tblines
self.errorstring = errorstring
self.filename = filename
self.firstlineno = firstlineno
self.argname = argname
def toterminal(self, tw):
#tw.line("FixtureLookupError: %s" %(self.argname), red=True)
for tbline in self.tblines:
tw.line(tbline.rstrip())
for line in self.errorstring.split("\n"):
tw.line(" " + line.strip(), red=True)
tw.line()
tw.line("%s:%d" % (self.filename, self.firstlineno+1))
class FixtureManager:
"""
pytest fixture definitions and information are stored and managed
by this class.
During collection fm.parsefactories() is called multiple times to parse
fixture function definitions into FixtureDef objects and internal
data structures.
During collection of test functions, metafunc-mechanics instantiate
a FuncFixtureInfo object which is cached per node/func-name.
This FuncFixtureInfo object is later retrieved by Function nodes
which themselves offer a fixturenames attribute.
The FuncFixtureInfo object holds information about fixtures and FixtureDefs
relevant for a particular function. An initial list of fixtures is
assembled like this:
- ini-defined usefixtures
- autouse-marked fixtures along the collection chain up from the function
- usefixtures markers at module/class/function level
- test function funcargs
Subsequently the funcfixtureinfo.fixturenames attribute is computed
as the closure of the fixtures needed to set up the initial fixtures,
i.e. fixtures needed by fixture functions themselves are appended
to the fixturenames list.
During the test-setup phase all fixturenames are instantiated, retrieved
by a lookup of their FuncFixtureInfo.
"""
_argprefix = "pytest_funcarg__"
FixtureLookupError = FixtureLookupError
FixtureLookupErrorRepr = FixtureLookupErrorRepr
def __init__(self, session):
self.session = session
self.config = session.config
self._arg2fixturedefs = {}
self._holderobjseen = set()
self._arg2finish = {}
self._nodeid_and_autousenames = [("", self.config.getini("usefixtures"))]
session.config.pluginmanager.register(self, "funcmanage")
def getfixtureinfo(self, node, func, cls, funcargs=True):
if funcargs and not hasattr(node, "nofuncargs"):
if cls is not None:
startindex = 1
else:
startindex = None
argnames = getfuncargnames(func, startindex)
else:
argnames = ()
usefixtures = getattr(func, "usefixtures", None)
initialnames = argnames
if usefixtures is not None:
initialnames = usefixtures.args + initialnames
fm = node.session._fixturemanager
names_closure, arg2fixturedefs = fm.getfixtureclosure(initialnames,
node)
return FuncFixtureInfo(argnames, names_closure, arg2fixturedefs)
def pytest_plugin_registered(self, plugin):
nodeid = None
try:
p = py.path.local(plugin.__file__)
except AttributeError:
pass
else:
# construct the base nodeid which is later used to check
# what fixtures are visible for particular tests (as denoted
# by their test id)
if p.basename.startswith("conftest.py"):
nodeid = p.dirpath().relto(self.config.rootdir)
if p.sep != "/":
nodeid = nodeid.replace(p.sep, "/")
self.parsefactories(plugin, nodeid)
def _getautousenames(self, nodeid):
""" return a tuple of fixture names to be used. """
autousenames = []
for baseid, basenames in self._nodeid_and_autousenames:
if nodeid.startswith(baseid):
if baseid:
i = len(baseid)
nextchar = nodeid[i:i+1]
if nextchar and nextchar not in ":/":
continue
autousenames.extend(basenames)
# make sure autousenames are sorted by scope, scopenum 0 is session
autousenames.sort(
key=lambda x: self._arg2fixturedefs[x][-1].scopenum)
return autousenames
def getfixtureclosure(self, fixturenames, parentnode):
# collect the closure of all fixtures, starting with the given
# fixturenames as the initial set. As we have to visit all
# factory definitions anyway, we also return an arg2fixturedefs
# mapping so that the caller can reuse it and does not have
# to re-discover fixturedefs again for each fixturename
# (discovering matching fixtures for a given name/node is expensive)
parentid = parentnode.nodeid
fixturenames_closure = self._getautousenames(parentid)
def merge(otherlist):
for arg in otherlist:
if arg not in fixturenames_closure:
fixturenames_closure.append(arg)
merge(fixturenames)
arg2fixturedefs = {}
lastlen = -1
while lastlen != len(fixturenames_closure):
lastlen = len(fixturenames_closure)
for argname in fixturenames_closure:
if argname in arg2fixturedefs:
continue
fixturedefs = self.getfixturedefs(argname, parentid)
if fixturedefs:
arg2fixturedefs[argname] = fixturedefs
merge(fixturedefs[-1].argnames)
return fixturenames_closure, arg2fixturedefs
def pytest_generate_tests(self, metafunc):
for argname in metafunc.fixturenames:
faclist = metafunc._arg2fixturedefs.get(argname)
if faclist:
fixturedef = faclist[-1]
if fixturedef.params is not None:
func_params = getattr(getattr(metafunc.function, 'parametrize', None), 'args', [[None]])
# skip directly parametrized arguments
if argname not in func_params:
metafunc.parametrize(argname, fixturedef.params,
indirect=True, scope=fixturedef.scope,
ids=fixturedef.ids)
else:
continue # will raise FixtureLookupError at setup time
def pytest_collection_modifyitems(self, items):
# separate parametrized setups
items[:] = reorder_items(items)
def parsefactories(self, node_or_obj, nodeid=NOTSET, unittest=False):
if nodeid is not NOTSET:
holderobj = node_or_obj
else:
holderobj = node_or_obj.obj
nodeid = node_or_obj.nodeid
if holderobj in self._holderobjseen:
return
self._holderobjseen.add(holderobj)
autousenames = []
for name in dir(holderobj):
obj = getattr(holderobj, name, None)
if not callable(obj):
continue
# fixture functions have a pytest_funcarg__ prefix (pre-2.3 style)
# or are "@pytest.fixture" marked
marker = getfixturemarker(obj)
if marker is None:
if not name.startswith(self._argprefix):
continue
marker = defaultfuncargprefixmarker
name = name[len(self._argprefix):]
elif not isinstance(marker, FixtureFunctionMarker):
# magic globals with __getattr__ might have got us a wrong
# fixture attribute
continue
else:
assert not name.startswith(self._argprefix)
fixturedef = FixtureDef(self, nodeid, name, obj,
marker.scope, marker.params,
yieldctx=marker.yieldctx,
unittest=unittest, ids=marker.ids)
faclist = self._arg2fixturedefs.setdefault(name, [])
if fixturedef.has_location:
faclist.append(fixturedef)
else:
# fixturedefs with no location are at the front
# so this inserts the current fixturedef after the
# existing fixturedefs from external plugins but
# before the fixturedefs provided in conftests.
i = len([f for f in faclist if not f.has_location])
faclist.insert(i, fixturedef)
if marker.autouse:
autousenames.append(name)
if autousenames:
self._nodeid_and_autousenames.append((nodeid or '', autousenames))
def getfixturedefs(self, argname, nodeid):
try:
fixturedefs = self._arg2fixturedefs[argname]
except KeyError:
return None
else:
return tuple(self._matchfactories(fixturedefs, nodeid))
def _matchfactories(self, fixturedefs, nodeid):
for fixturedef in fixturedefs:
if nodeid.startswith(fixturedef.baseid):
yield fixturedef
def fail_fixturefunc(fixturefunc, msg):
fs, lineno = getfslineno(fixturefunc)
location = "%s:%s" % (fs, lineno+1)
source = py.code.Source(fixturefunc)
pytest.fail(msg + ":\n\n" + str(source.indent()) + "\n" + location,
pytrace=False)
def call_fixture_func(fixturefunc, request, kwargs, yieldctx):
if yieldctx:
if not is_generator(fixturefunc):
fail_fixturefunc(fixturefunc,
msg="yield_fixture requires yield statement in function")
iter = fixturefunc(**kwargs)
next = getattr(iter, "__next__", None)
if next is None:
next = getattr(iter, "next")
res = next()
def teardown():
try:
next()
except StopIteration:
pass
else:
fail_fixturefunc(fixturefunc,
"yield_fixture function has more than one 'yield'")
request.addfinalizer(teardown)
else:
if is_generator(fixturefunc):
fail_fixturefunc(fixturefunc,
msg="pytest.fixture functions cannot use ``yield``. "
"Instead write and return an inner function/generator "
"and let the consumer call and iterate over it.")
res = fixturefunc(**kwargs)
return res
class FixtureDef:
""" A container for a factory definition. """
def __init__(self, fixturemanager, baseid, argname, func, scope, params,
yieldctx, unittest=False, ids=None):
self._fixturemanager = fixturemanager
self.baseid = baseid or ''
self.has_location = baseid is not None
self.func = func
self.argname = argname
self.scope = scope
self.scopenum = scopes.index(scope or "function")
self.params = params
startindex = unittest and 1 or None
self.argnames = getfuncargnames(func, startindex=startindex)
self.yieldctx = yieldctx
self.unittest = unittest
self.ids = ids
self._finalizer = []
def addfinalizer(self, finalizer):
self._finalizer.append(finalizer)
def finish(self):
try:
while self._finalizer:
func = self._finalizer.pop()
func()
finally:
# even if finalization fails, we invalidate
# the cached fixture value
if hasattr(self, "cached_result"):
del self.cached_result
def execute(self, request):
# get required arguments and register our own finish()
# with their finalization
kwargs = {}
for argname in self.argnames:
fixturedef = request._get_active_fixturedef(argname)
result, arg_cache_key, exc = fixturedef.cached_result
request._check_scope(argname, request.scope, fixturedef.scope)
kwargs[argname] = result
if argname != "request":
fixturedef.addfinalizer(self.finish)
my_cache_key = request.param_index
cached_result = getattr(self, "cached_result", None)
if cached_result is not None:
result, cache_key, err = cached_result
if my_cache_key == cache_key:
if err is not None:
py.builtin._reraise(*err)
else:
return result
# we have a previous but differently parametrized fixture instance
# so we need to tear it down before creating a new one
self.finish()
assert not hasattr(self, "cached_result")
fixturefunc = self.func
if self.unittest:
if request.instance is not None:
# bind the unbound method to the TestCase instance
fixturefunc = self.func.__get__(request.instance)
else:
# the fixture function needs to be bound to the actual
# request.instance so that code working with "self" behaves
# as expected.
if request.instance is not None:
fixturefunc = getimfunc(self.func)
if fixturefunc != self.func:
fixturefunc = fixturefunc.__get__(request.instance)
try:
result = call_fixture_func(fixturefunc, request, kwargs,
self.yieldctx)
except Exception:
self.cached_result = (None, my_cache_key, sys.exc_info())
raise
self.cached_result = (result, my_cache_key, None)
return result
def __repr__(self):
return ("<FixtureDef name=%r scope=%r baseid=%r >" %
(self.argname, self.scope, self.baseid))
def num_mock_patch_args(function):
""" return number of arguments used up by mock arguments (if any) """
patchings = getattr(function, "patchings", None)
if not patchings:
return 0
mock = sys.modules.get("mock", sys.modules.get("unittest.mock", None))
if mock is not None:
return len([p for p in patchings
if not p.attribute_name and p.new is mock.DEFAULT])
return len(patchings)
def getfuncargnames(function, startindex=None):
# XXX merge with main.py's varnames
#assert not inspect.isclass(function)
realfunction = function
while hasattr(realfunction, "__wrapped__"):
realfunction = realfunction.__wrapped__
if startindex is None:
startindex = inspect.ismethod(function) and 1 or 0
if realfunction != function:
startindex += num_mock_patch_args(function)
function = realfunction
if isinstance(function, functools.partial):
argnames = inspect.getargs(py.code.getrawcode(function.func))[0]
partial = function
argnames = argnames[len(partial.args):]
if partial.keywords:
for kw in partial.keywords:
argnames.remove(kw)
else:
argnames = inspect.getargs(py.code.getrawcode(function))[0]
defaults = getattr(function, 'func_defaults',
getattr(function, '__defaults__', None)) or ()
numdefaults = len(defaults)
if numdefaults:
return tuple(argnames[startindex:-numdefaults])
return tuple(argnames[startindex:])
# algorithm for sorting on a per-parametrized resource setup basis
# it is called for scopenum==0 (session) first and performs sorting
# down to the lower scopes such as to minimize number of "high scope"
# setups and teardowns
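# An illustrative effect (hypothetical items): with a session-scoped
# parametrized argument taking values A and B, items collected as
# [t1[A], t2[B], t3[A]] are reordered to [t1[A], t3[A], t2[B]] so the
# "A" setup is reused instead of being torn down and recreated between
# t1 and t3.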
def reorder_items(items):
argkeys_cache = {}
for scopenum in range(0, scopenum_function):
argkeys_cache[scopenum] = d = {}
for item in items:
keys = set(get_parametrized_fixture_keys(item, scopenum))
if keys:
d[item] = keys
return reorder_items_atscope(items, set(), argkeys_cache, 0)
def reorder_items_atscope(items, ignore, argkeys_cache, scopenum):
if scopenum >= scopenum_function or len(items) < 3:
return items
items_done = []
while 1:
items_before, items_same, items_other, newignore = \
slice_items(items, ignore, argkeys_cache[scopenum])
items_before = reorder_items_atscope(
items_before, ignore, argkeys_cache, scopenum+1)
if items_same is None:
# nothing to reorder in this scope
assert items_other is None
return items_done + items_before
items_done.extend(items_before)
items = items_same + items_other
ignore = newignore
def slice_items(items, ignore, scoped_argkeys_cache):
# we pick the first item which uses a fixture instance in the
# requested scope and which we haven't seen yet. We slice the input
# items list into a list of items_before, items_same and
# items_other
if scoped_argkeys_cache: # do we need to do work at all?
it = iter(items)
# first find a slicing key
for i, item in enumerate(it):
argkeys = scoped_argkeys_cache.get(item)
if argkeys is not None:
argkeys = argkeys.difference(ignore)
if argkeys: # found a slicing key
slicing_argkey = argkeys.pop()
items_before = items[:i]
items_same = [item]
items_other = []
# now slice the remainder of the list
for item in it:
argkeys = scoped_argkeys_cache.get(item)
if argkeys and slicing_argkey in argkeys and \
slicing_argkey not in ignore:
items_same.append(item)
else:
items_other.append(item)
newignore = ignore.copy()
newignore.add(slicing_argkey)
return (items_before, items_same, items_other, newignore)
return items, None, None, None
def get_parametrized_fixture_keys(item, scopenum):
""" return list of keys for all parametrized arguments which match
the specified scope. """
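# Illustrative keys for a hypothetical item: a module-scoped argname
# "db" with param_index 0 yields ("db", 0, item.fspath); the same
# argname at class scope would yield ("db", 0, item.fspath, item.cls).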
assert scopenum < scopenum_function # function
try:
cs = item.callspec
except AttributeError:
pass
else:
# cs.indices.items() is in random order of argnames, but
# then again different functions (items) can change the order of
# arguments anyway, so it probably doesn't matter much
for argname, param_index in cs.indices.items():
if cs._arg2scopenum[argname] != scopenum:
continue
if scopenum == 0: # session
key = (argname, param_index)
elif scopenum == 1: # module
key = (argname, param_index, item.fspath)
elif scopenum == 2: # class
key = (argname, param_index, item.fspath, item.cls)
yield key
def xunitsetup(obj, name):
meth = getattr(obj, name, None)
if getfixturemarker(meth) is None:
return meth
def getfixturemarker(obj):
""" return fixturemarker or None if it doesn't exist or raised
exceptions."""
try:
return getattr(obj, "_pytestfixturefunction", None)
except KeyboardInterrupt:
raise
except Exception:
# some objects raise errors like request (from flask import request)
# we don't expect them to be fixture functions
return None
scopename2class = {
'class': Class,
'module': Module,
'function': pytest.Item,
}
def get_scope_node(node, scope):
cls = scopename2class.get(scope)
if cls is None:
if scope == "session":
return node.session
raise ValueError("unknown scope")
return node.getparent(cls)<|fim▁end|> | "imported module %r has this __file__ attribute:\n"
" %s\n" |
<|file_name|>debug.js<|end_file_name|><|fim▁begin|>'use strict';
var fs = require('fs');
var path = require('path');
var util = require('util');
var dbg = require('debug');
// process.env.TABTAB_DEBUG = process.env.TABTAB_DEBUG || '/tmp/tabtab.log';
var out = process.env.TABTAB_DEBUG ? fs.createWriteStream(process.env.TABTAB_DEBUG, { flags: 'a' }) : null;
module.exports = debug;
// Internal: Facade to debug module, which provides the exact same interface.
//
// The added benefit comes with the TABTAB_DEBUG environment variable: when
// defined, debug output is written to the specified filename.
//
// Useful when debugging tab completion, as logs on stdout / stderr are either
// swallowed or used as tab completion results.
//
// namespace - The String namespace to use when TABTAB_DEBUG is not defined,
// delegates to debug module.
//
// Examples
//
// // Use with following command to redirect output to file
// // TABTAB_DEBUG="debug.log" tabtab ...
// debug('Foo');
function debug(namespace) {
var log = dbg(namespace);
return function () {
for (var _len = arguments.length, args = Array(_len), _key = 0; _key < _len; _key++) {
args[_key] = arguments[_key];
}
args = args.map(function (arg) {
if (typeof arg === 'string') return arg;
return JSON.stringify(arg);
});
out && out.write(util.format.apply(util, args) + '\n');
out || log.apply(null, args);
};<|fim▁hole|><|fim▁end|> | } |
<|file_name|>5ab66e956c6b_osversion_osmajor_id_non_nullable.py<|end_file_name|><|fim▁begin|># This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
"""Make osversion.osmajor_id non-NULLable
Revision ID: 5ab66e956c6b
Revises: 286ed23a5c1b<|fim▁hole|>Create Date: 2017-12-20 15:54:38.825703
"""
# revision identifiers, used by Alembic.
revision = '5ab66e956c6b'
down_revision = '286ed23a5c1b'
from alembic import op
from sqlalchemy import Integer
def upgrade():
op.alter_column('osversion', 'osmajor_id', existing_type=Integer, nullable=False)
def downgrade():
op.alter_column('osversion', 'osmajor_id', existing_type=Integer, nullable=True)<|fim▁end|> | |
<|file_name|>coala.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import os
import shutil
import sys
import dirtyjson as json
from ..decorators import linter
from ..parsers.base import ParserBase
@linter(
name="coala",
install=[
["pipx", "install", "--spec", "coala-bears", "coala"],
[sys.executable, "-m", "pip", "install", "-U", "coala-bears"],
],
help_cmd=["coala", "-h"],
run=["coala", "-C", "--json", "--log-json", "--limit-files", "5000"],
rundefault=["coala", "-C", "--json", "--log-json", "--limit-files", "5000"],
dotfiles=[".coafile"],
language="all",
autorun=True,
run_per_file=False,
concurrency=1,
)
class CoalaParser(ParserBase):
"""Parse json coala output."""
def install(self):<|fim▁hole|> ):
config_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), "config")
)
dotfile_name = self.config.get("dotfiles")[0]
shutil.copyfile(
os.path.join(config_dir, dotfile_name),
os.path.join(os.getcwd(), dotfile_name),
)
def parse(self, output):
messages = set()
lint_data = [
msg
for category in json.loads(output).get("results", {}).values()
for msg in category
]
for msgdata in lint_data:
try:
msgbody = msgdata["message"]
for line in msgdata.get("affected_code", []):
path = line.get("file")
line = line.get("start", {}).get("line")
messages.add((path, line, msgbody))
except (ValueError, KeyError):
print("Invalid message: {0}".format(msgdata))
return messages<|fim▁end|> | if not any(
dotfile.strip() in os.listdir(os.getcwd())
for dotfile in self.config.get("dotfiles") |
<|file_name|>ui.rs<|end_file_name|><|fim▁begin|>//
// imag - the personal information management suite for the commandline
// Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors
//<|fim▁hole|>// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; version
// 2.1 of the License.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//
use clap::App;
pub fn build_ui<'a>(app: App<'a, 'a>) -> App<'a, 'a> {
app
}<|fim▁end|> | |
<|file_name|>ReportConfigController.js<|end_file_name|><|fim▁begin|>/**
* Genji Scrum Tool and Issue Tracker
* Copyright (C) 2015 Steinbeis GmbH & Co. KG Task Management Solutions
* <a href="http://www.trackplus.com">Genji Scrum Tool</a>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by<|fim▁hole|> * the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/* $Id:$ */
Ext.define("com.trackplus.admin.customize.filter.ReportConfigController", {
extend: "Ext.app.ViewController",
alias: "controller.reportConfig",
mixins: {
baseController: "com.trackplus.admin.customize.category.CategoryBaseController"
},
//used in FieldExpressionAction (not directly here)
//issueFilter : true,
folderAction: "categoryConfig",
baseServerAction: "reportConfig",
entityDialog: "com.trackplus.admin.customize.report.ReportEdit",
enableDisableToolbarButtons : function(view, arrSelections) {
if (CWHF.isNull(arrSelections) || arrSelections.length === 0) {
this.getView().actionDeleteGridRow.setDisabled(true);
this.getView().actionEditGridRow.setDisabled(true);
this.getView().actionExecuteGridRow.setDisabled(true);
this.getView().actionDownloadGridRow.setDisabled(true);
} else {
if (arrSelections.length === 1) {
var selectedRecord = arrSelections[0];
var isLeaf = selectedRecord.get("leaf");
var modifiable = selectedRecord.get("modifiable");
this.getView().actionEditGridRow.setDisabled(!modifiable);
this.getView().actionExecuteGridRow.setDisabled(!isLeaf);
this.getView().actionDownloadGridRow.setDisabled(!isLeaf);
} else {
// more than one selection
this.getView().actionEditGridRow.setDisabled(true);
this.getView().actionExecuteGridRow.setDisabled(true);
this.getView().actionDownloadGridRow.setDisabled(true);
}
var allIsDeletable = true;
for (var i = 0; i < arrSelections.length; i++) {
var selectedRecord = arrSelections[i];
var deletable = selectedRecord.data.deletable;
if (!deletable) {
allIsDeletable = false;
}
}
this.getView().actionDeleteGridRow.setDisabled(!allIsDeletable);
}
},
/**
* Execute a leaf node
*/
onExecuteTreeNode: function() {
this.onExecute(true);
},
/**
* Execute a grid row
*/
onExecuteGridRow: function() {
this.onExecute(false);
},
/**
* Execute a tree node or a grid row
*/
onExecute: function(fromTree) {
var recordData = this.getView().getSingleSelectedRecordData(fromTree);
if (recordData ) {
var leaf = this.getView().selectedIsLeaf(fromTree);
var node = this.getRecordID(recordData, {
fromTree : fromTree
});
if (leaf) {
var lastIndex = node.lastIndexOf("_");
var objectID = node.substring(lastIndex + 1);
//customFeature: whether record configuration is needed
com.trackplus.admin.Report.executeReport(this, objectID, recordData["customFeature"], false);
}
}
},
/**
* Download the report for a tree node
*/
onDownloadTreeNode : function() {
this.downloadReport(true);
},
/**
* Download the report for the grid row
*/
onDownloadGridRow : function() {
this.downloadReport(false);
},
/**
* Downloads a report zip
*/
downloadReport : function(fromTree) {
var recordData = this.getView().getSingleSelectedRecordData(fromTree);
        if (recordData) {
var leaf = this.getView().selectedIsLeaf(fromTree);
if (leaf) {
var node = this.getRecordID(recordData, {
fromTree : fromTree
});
            var attachmentURI = "reportConfig!download.action?node=" + node;
window.open(attachmentURI);
}
}
}
});<|fim▁end|> | |
<|file_name|>test_counts.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from django.db import models
from django.test import TestCase
from django.utils import timezone
from analytics.lib.counts import CountStat, COUNT_STATS, process_count_stat, \
zerver_count_user_by_realm, zerver_count_message_by_user, \
zerver_count_message_by_stream, zerver_count_stream_by_realm, \
do_fill_count_stat_at_hour, ZerverCountQuery
from analytics.models import BaseCount, InstallationCount, RealmCount, \
UserCount, StreamCount, FillState, installation_epoch
from zerver.models import Realm, UserProfile, Message, Stream, Recipient, \
Huddle, Client, get_user_profile_by_email, get_client
from datetime import datetime, timedelta
from six.moves import range
from typing import Any, Dict, Type, Optional, Text, Tuple, List, Union
class AnalyticsTestCase(TestCase):
MINUTE = timedelta(seconds = 60)
HOUR = MINUTE * 60
DAY = HOUR * 24
TIME_ZERO = datetime(1988, 3, 14).replace(tzinfo=timezone.utc)
TIME_LAST_HOUR = TIME_ZERO - HOUR
def setUp(self):
# type: () -> None
self.default_realm = Realm.objects.create(
string_id='realmtest', name='Realm Test',
domain='test.analytics', date_created=self.TIME_ZERO - 2*self.DAY)
# used to generate unique names in self.create_*
self.name_counter = 100
# used as defaults in self.assertCountEquals
self.current_property = None # type: Optional[str]
# Lightweight creation of users, streams, and messages
def create_user(self, **kwargs):
# type: (**Any) -> UserProfile
self.name_counter += 1
defaults = {
'email': 'user%[email protected]' % (self.name_counter,),
'date_joined': self.TIME_LAST_HOUR,
'full_name': 'full_name',
'short_name': 'short_name',
'pointer': -1,
'last_pointer_updater': 'seems unused?',
'realm': self.default_realm,
'api_key': '42'}
for key, value in defaults.items():
kwargs[key] = kwargs.get(key, value)
return UserProfile.objects.create(**kwargs)
def create_stream_with_recipient(self, **kwargs):
# type: (**Any) -> Tuple[Stream, Recipient]
self.name_counter += 1
defaults = {'name': 'stream name %s' % (self.name_counter,),
'realm': self.default_realm,
'date_created': self.TIME_LAST_HOUR}
for key, value in defaults.items():
kwargs[key] = kwargs.get(key, value)
stream = Stream.objects.create(**kwargs)
recipient = Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
return stream, recipient
def create_huddle_with_recipient(self, **kwargs):
# type: (**Any) -> Tuple[Huddle, Recipient]
self.name_counter += 1
defaults = {'huddle_hash': 'hash%s' % (self.name_counter,)}
for key, value in defaults.items():
kwargs[key] = kwargs.get(key, value)
huddle = Huddle.objects.create(**kwargs)
recipient = Recipient.objects.create(type_id=huddle.id, type=Recipient.HUDDLE)
return huddle, recipient
def create_message(self, sender, recipient, **kwargs):
# type: (UserProfile, Recipient, **Any) -> Message
defaults = {
'sender': sender,
'recipient': recipient,
'subject': 'subject',
'content': 'hi',
'pub_date': self.TIME_LAST_HOUR,
'sending_client': get_client("website")}
for key, value in defaults.items():
kwargs[key] = kwargs.get(key, value)
return Message.objects.create(**kwargs)
# kwargs should only ever be a UserProfile or Stream.
def assertCountEquals(self, table, value, property=None, subgroup=None,
end_time=TIME_ZERO, realm=None, **kwargs):
# type: (Type[BaseCount], int, Optional[Text], Optional[Text], datetime, Optional[Realm], **models.Model) -> None
if property is None:
property = self.current_property
queryset = table.objects.filter(property=property, end_time=end_time).filter(**kwargs)
if table is not InstallationCount:
if realm is None:
realm = self.default_realm
queryset = queryset.filter(realm=realm)
if subgroup is not None:
queryset = queryset.filter(subgroup=subgroup)
self.assertEqual(queryset.values_list('value', flat=True)[0], value)
def assertTableState(self, table, arg_keys, arg_values):
# type: (Type[BaseCount], List[str], List[List[Union[int, str, Realm, UserProfile, Stream]]]) -> None
"""Assert that the state of a *Count table is what it should be.
Example usage:
self.assertTableState(RealmCount, ['property', 'subgroup', 'realm'],
[['p1', 4], ['p2', 10, self.alt_realm]])
table -- A *Count table.
arg_keys -- List of columns of <table>.
arg_values -- List of "rows" of <table>.
Each entry of arg_values (e.g. ['p1', 4]) represents a row of <table>.
The i'th value of the entry corresponds to the i'th arg_key, so e.g.
the first arg_values entry here corresponds to a row of RealmCount
        with property='p1' and subgroup=4.
Any columns not specified (in this case, every column of RealmCount
other than property and subgroup) are either set to default values,
or are ignored.
The function checks that every entry of arg_values matches exactly one
row of <table>, and that no additional rows exist. Note that this means
checking a table with duplicate rows is not supported.
"""
defaults = {
'property': self.current_property,
'subgroup': None,
'end_time': self.TIME_ZERO}
for values in arg_values:
kwargs = {} # type: Dict[str, Any]
for i in range(len(values)):
kwargs[arg_keys[i]] = values[i]
for key, value in defaults.items():
kwargs[key] = kwargs.get(key, value)
if table is not InstallationCount:
if 'realm' not in kwargs:
if 'user' in kwargs:
kwargs['realm'] = kwargs['user'].realm
elif 'stream' in kwargs:
kwargs['realm'] = kwargs['stream'].realm
else:
kwargs['realm'] = self.default_realm
self.assertEqual(table.objects.filter(**kwargs).count(), 1)
self.assertEqual(table.objects.count(), len(arg_values))
class TestProcessCountStat(AnalyticsTestCase):
def make_dummy_count_stat(self, current_time):
# type: (datetime) -> CountStat
dummy_query = """INSERT INTO analytics_realmcount (realm_id, property, end_time, value)
VALUES (1, 'test stat', '%(end_time)s', 22)""" % {'end_time': current_time}
count_stat = CountStat('test stat', ZerverCountQuery(Recipient, UserCount, dummy_query),
{}, None, CountStat.HOUR, False)
return count_stat
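    # The dummy query above inserts a single RealmCount row per fill;
    # process_count_stat is then expected to aggregate that row up to
    # InstallationCount, which is what test_process_stat counts below.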
def assertFillStateEquals(self, end_time, state = FillState.DONE, property = None):
# type: (datetime, int, Optional[Text]) -> None
count_stat = self.make_dummy_count_stat(end_time)
if property is None:
property = count_stat.property
fill_state = FillState.objects.filter(property=property).first()
self.assertEqual(fill_state.end_time, end_time)
self.assertEqual(fill_state.state, state)
def test_process_stat(self):
# type: () -> None
# process new stat
current_time = installation_epoch() + self.HOUR
count_stat = self.make_dummy_count_stat(current_time)
property = count_stat.property
process_count_stat(count_stat, current_time)
self.assertFillStateEquals(current_time)
self.assertEqual(InstallationCount.objects.filter(property=property).count(), 1)
# dirty stat
FillState.objects.filter(property=property).update(state=FillState.STARTED)
process_count_stat(count_stat, current_time)
self.assertFillStateEquals(current_time)
self.assertEqual(InstallationCount.objects.filter(property=property).count(), 1)
# clean stat, no update
process_count_stat(count_stat, current_time)
self.assertFillStateEquals(current_time)
self.assertEqual(InstallationCount.objects.filter(property=property).count(), 1)
# clean stat, with update
current_time = current_time + self.HOUR
count_stat = self.make_dummy_count_stat(current_time)
process_count_stat(count_stat, current_time)
self.assertFillStateEquals(current_time)
self.assertEqual(InstallationCount.objects.filter(property=property).count(), 2)
class TestCountStats(AnalyticsTestCase):
def setUp(self):
# type: () -> None
super(TestCountStats, self).setUp()
# This tests two things for each of the queries/CountStats: Handling
# more than 1 realm, and the time bounds (time_start and time_end in
# the queries).
self.second_realm = Realm.objects.create(
string_id='second-realm', name='Second Realm',
domain='second.analytics', date_created=self.TIME_ZERO-2*self.DAY)
for minutes_ago in [0, 1, 61, 60*24+1]:
creation_time = self.TIME_ZERO - minutes_ago*self.MINUTE
user = self.create_user(email='user-%[email protected]' % (minutes_ago,),
realm=self.second_realm, date_joined=creation_time)
recipient = self.create_stream_with_recipient(
name='stream %s' % (minutes_ago,), realm=self.second_realm,
date_created=creation_time)[1]
self.create_message(user, recipient, pub_date=creation_time)
self.hourly_user = UserProfile.objects.get(email='[email protected]')
self.daily_user = UserProfile.objects.get(email='[email protected]')
# This realm should not show up in the *Count tables for any of the<|fim▁hole|> self.no_message_realm = Realm.objects.create(
string_id='no-message-realm', name='No Message Realm',
domain='no.message', date_created=self.TIME_ZERO-2*self.DAY)
self.create_user(realm=self.no_message_realm)
self.create_stream_with_recipient(realm=self.no_message_realm)
# This huddle should not show up anywhere
self.create_huddle_with_recipient()
def test_active_users_by_is_bot(self):
# type: () -> None
stat = COUNT_STATS['active_users:is_bot:day']
self.current_property = stat.property
# To be included
self.create_user(is_bot=True)
self.create_user(is_bot=True, date_joined=self.TIME_ZERO-25*self.HOUR)
self.create_user(is_bot=False)
# To be excluded
self.create_user(is_active=False)
do_fill_count_stat_at_hour(stat, self.TIME_ZERO)
self.assertTableState(RealmCount, ['value', 'subgroup', 'realm'],
[[2, 'true'], [1, 'false'],
[3, 'false', self.second_realm],
[1, 'false', self.no_message_realm]])
self.assertTableState(InstallationCount, ['value', 'subgroup'], [[2, 'true'], [5, 'false']])
self.assertTableState(UserCount, [], [])
self.assertTableState(StreamCount, [], [])
def test_messages_sent_by_is_bot(self):
# type: () -> None
stat = COUNT_STATS['messages_sent:is_bot:hour']
self.current_property = stat.property
bot = self.create_user(is_bot=True)
human1 = self.create_user()
human2 = self.create_user()
recipient_human1 = Recipient.objects.create(type_id=human1.id, type=Recipient.PERSONAL)
recipient_stream = self.create_stream_with_recipient()[1]
recipient_huddle = self.create_huddle_with_recipient()[1]
self.create_message(bot, recipient_human1)
self.create_message(bot, recipient_stream)
self.create_message(bot, recipient_huddle)
self.create_message(human1, recipient_human1)
self.create_message(human2, recipient_human1)
do_fill_count_stat_at_hour(stat, self.TIME_ZERO)
self.assertTableState(UserCount, ['value', 'subgroup', 'user'],
[[1, 'false', human1], [1, 'false', human2], [3, 'true', bot],
[1, 'false', self.hourly_user]])
self.assertTableState(RealmCount, ['value', 'subgroup', 'realm'],
[[2, 'false'], [3, 'true'], [1, 'false', self.second_realm]])
self.assertTableState(InstallationCount, ['value', 'subgroup'], [[3, 'false'], [3, 'true']])
self.assertTableState(StreamCount, [], [])
def test_messages_sent_by_message_type(self):
# type: () -> None
stat = COUNT_STATS['messages_sent:message_type:day']
self.current_property = stat.property
# Nothing currently in this stat that is bot related, but so many of
# the rest of our stats make the human/bot distinction that one can
# imagine a later refactoring that will intentionally or
# unintentionally change this. So make one of our users a bot.
user1 = self.create_user(is_bot=True)
user2 = self.create_user()
user3 = self.create_user()
# private streams
recipient_stream1 = self.create_stream_with_recipient(invite_only=True)[1]
recipient_stream2 = self.create_stream_with_recipient(invite_only=True)[1]
self.create_message(user1, recipient_stream1)
self.create_message(user2, recipient_stream1)
self.create_message(user2, recipient_stream2)
# public streams
recipient_stream3 = self.create_stream_with_recipient()[1]
recipient_stream4 = self.create_stream_with_recipient()[1]
self.create_message(user1, recipient_stream3)
self.create_message(user1, recipient_stream4)
self.create_message(user2, recipient_stream3)
# huddles
recipient_huddle1 = self.create_huddle_with_recipient()[1]
recipient_huddle2 = self.create_huddle_with_recipient()[1]
self.create_message(user1, recipient_huddle1)
self.create_message(user2, recipient_huddle2)
# private messages
recipient_user1 = Recipient.objects.create(type_id=user1.id, type=Recipient.PERSONAL)
recipient_user2 = Recipient.objects.create(type_id=user2.id, type=Recipient.PERSONAL)
recipient_user3 = Recipient.objects.create(type_id=user3.id, type=Recipient.PERSONAL)
self.create_message(user1, recipient_user2)
self.create_message(user2, recipient_user1)
self.create_message(user3, recipient_user3)
do_fill_count_stat_at_hour(stat, self.TIME_ZERO)
self.assertTableState(UserCount, ['value', 'subgroup', 'user'],
[[1, 'private_stream', user1],
[2, 'private_stream', user2],
[2, 'public_stream', user1],
[1, 'public_stream', user2],
[2, 'private_message', user1],
[2, 'private_message', user2],
[1, 'private_message', user3],
[1, 'public_stream', self.hourly_user],
[1, 'public_stream', self.daily_user]])
self.assertTableState(RealmCount, ['value', 'subgroup', 'realm'],
[[3, 'private_stream'], [3, 'public_stream'], [5, 'private_message'],
[2, 'public_stream', self.second_realm]])
self.assertTableState(InstallationCount, ['value', 'subgroup'],
[[3, 'private_stream'], [5, 'public_stream'], [5, 'private_message']])
self.assertTableState(StreamCount, [], [])
def test_messages_sent_to_recipients_with_same_id(self):
# type: () -> None
stat = COUNT_STATS['messages_sent:message_type:day']
self.current_property = stat.property
user = self.create_user(id=1000)
user_recipient = Recipient.objects.create(type_id=user.id, type=Recipient.PERSONAL)
stream_recipient = self.create_stream_with_recipient(id=1000)[1]
huddle_recipient = self.create_huddle_with_recipient(id=1000)[1]
self.create_message(user, user_recipient)
self.create_message(user, stream_recipient)
self.create_message(user, huddle_recipient)
do_fill_count_stat_at_hour(stat, self.TIME_ZERO)
self.assertCountEquals(UserCount, 2, subgroup='private_message')
self.assertCountEquals(UserCount, 1, subgroup='public_stream')
def test_messages_sent_by_client(self):
# type: () -> None
stat = COUNT_STATS['messages_sent:client:day']
self.current_property = stat.property
user1 = self.create_user(is_bot=True)
user2 = self.create_user()
recipient_user2 = Recipient.objects.create(type_id=user2.id, type=Recipient.PERSONAL)
recipient_stream = self.create_stream_with_recipient()[1]
recipient_huddle = self.create_huddle_with_recipient()[1]
client2 = Client.objects.create(name='client2')
self.create_message(user1, recipient_user2, sending_client=client2)
self.create_message(user1, recipient_stream)
self.create_message(user1, recipient_huddle)
self.create_message(user2, recipient_user2, sending_client=client2)
self.create_message(user2, recipient_user2, sending_client=client2)
do_fill_count_stat_at_hour(stat, self.TIME_ZERO)
client2_id = str(client2.id)
website_client_id = str(get_client('website').id) # default for self.create_message
self.assertTableState(UserCount, ['value', 'subgroup', 'user'],
[[2, website_client_id, user1],
[1, client2_id, user1], [2, client2_id, user2],
[1, website_client_id, self.hourly_user],
[1, website_client_id, self.daily_user]])
self.assertTableState(RealmCount, ['value', 'subgroup', 'realm'],
[[2, website_client_id], [3, client2_id],
[2, website_client_id, self.second_realm]])
self.assertTableState(InstallationCount, ['value', 'subgroup'],
[[4, website_client_id], [3, client2_id]])
self.assertTableState(StreamCount, [], [])
def test_messages_sent_to_stream_by_is_bot(self):
# type: () -> None
stat = COUNT_STATS['messages_sent_to_stream:is_bot:hour']
self.current_property = stat.property
bot = self.create_user(is_bot=True)
human1 = self.create_user()
human2 = self.create_user()
recipient_human1 = Recipient.objects.create(type_id=human1.id, type=Recipient.PERSONAL)
stream1, recipient_stream1 = self.create_stream_with_recipient()
stream2, recipient_stream2 = self.create_stream_with_recipient()
# To be included
self.create_message(human1, recipient_stream1)
self.create_message(human2, recipient_stream1)
self.create_message(human1, recipient_stream2)
self.create_message(bot, recipient_stream2)
self.create_message(bot, recipient_stream2)
# To be excluded
self.create_message(human2, recipient_human1)
self.create_message(bot, recipient_human1)
recipient_huddle = self.create_huddle_with_recipient()[1]
self.create_message(human1, recipient_huddle)
do_fill_count_stat_at_hour(stat, self.TIME_ZERO)
self.assertTableState(StreamCount, ['value', 'subgroup', 'stream'],
[[2, 'false', stream1], [1, 'false', stream2], [2, 'true', stream2],
# "hourly" stream, from TestCountStats.setUp
[1, 'false', Stream.objects.get(name='stream 1')]])
self.assertTableState(RealmCount, ['value', 'subgroup', 'realm'],
[[3, 'false'], [2, 'true'], [1, 'false', self.second_realm]])
self.assertTableState(InstallationCount, ['value', 'subgroup'], [[4, 'false'], [2, 'true']])
self.assertTableState(UserCount, [], [])<|fim▁end|> | # messages_* CountStats |
<|file_name|>round_to_multiple.rs<|end_file_name|><|fim▁begin|>use malachite_base::num::arithmetic::traits::{RoundToMultiple, RoundToMultipleAssign};
use malachite_base_test_util::bench::{run_benchmark, BenchmarkType};
use malachite_base_test_util::generators::common::{GenConfig, GenMode};
use malachite_base_test_util::runner::Runner;
use malachite_nz_test_util::bench::bucketers::triple_1_natural_bit_bucketer;
use malachite_nz_test_util::generators::natural_natural_rounding_mode_triple_gen_var_2;
pub(crate) fn register(runner: &mut Runner) {
register_demo!(runner, demo_natural_round_to_multiple_assign);
register_demo!(runner, demo_natural_round_to_multiple_assign_ref);
register_demo!(runner, demo_natural_round_to_multiple);
register_demo!(runner, demo_natural_round_to_multiple_val_ref);
register_demo!(runner, demo_natural_round_to_multiple_ref_val);
register_demo!(runner, demo_natural_round_to_multiple_ref_ref);
register_bench!(
runner,
benchmark_natural_round_to_multiple_assign_evaluation_strategy
);
register_bench!(
runner,
benchmark_natural_round_to_multiple_evaluation_strategy
);
}
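// The demo and benchmark functions below exercise the four receiver/argument
// ownership variants of `round_to_multiple`; schematically, for `Natural`s
// `x` and `y`:
//
//     x.round_to_multiple(y, rm)        // both by value
//     x.round_to_multiple(&y, rm)       // value receiver, borrowed argument
//     (&x).round_to_multiple(y, rm)     // borrowed receiver, value argument
//     (&x).round_to_multiple(&y, rm)    // both borrowed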
fn demo_natural_round_to_multiple_assign(gm: GenMode, config: GenConfig, limit: usize) {
for (mut x, y, rm) in natural_natural_rounding_mode_triple_gen_var_2()
.get(gm, &config)
.take(limit)
{
let x_old = x.clone();
let y_old = y.clone();
x.round_to_multiple_assign(y, rm);
println!(
"x := {}; x.round_to_multiple_assign({}, {}); x = {}",
x_old, y_old, rm, x
);
}
}
fn demo_natural_round_to_multiple_assign_ref(gm: GenMode, config: GenConfig, limit: usize) {
for (mut x, y, rm) in natural_natural_rounding_mode_triple_gen_var_2()
.get(gm, &config)
.take(limit)
{
let x_old = x.clone();
x.round_to_multiple_assign(&y, rm);
println!(
"x := {}; x.round_to_multiple_assign(&{}, {}); x = {}",
x_old, y, rm, x
);<|fim▁hole|>}
fn demo_natural_round_to_multiple(gm: GenMode, config: GenConfig, limit: usize) {
for (x, y, rm) in natural_natural_rounding_mode_triple_gen_var_2()
.get(gm, &config)
.take(limit)
{
let x_old = x.clone();
let y_old = y.clone();
println!(
"{}.round_to_multiple({}, {}) = {}",
x_old,
y_old,
rm,
x.round_to_multiple(y, rm)
);
}
}
fn demo_natural_round_to_multiple_val_ref(gm: GenMode, config: GenConfig, limit: usize) {
for (x, y, rm) in natural_natural_rounding_mode_triple_gen_var_2()
.get(gm, &config)
.take(limit)
{
let x_old = x.clone();
println!(
"{}.round_to_multiple(&{}, {}) = {}",
x_old,
y,
rm,
x.round_to_multiple(&y, rm)
);
}
}
fn demo_natural_round_to_multiple_ref_val(gm: GenMode, config: GenConfig, limit: usize) {
for (x, y, rm) in natural_natural_rounding_mode_triple_gen_var_2()
.get(gm, &config)
.take(limit)
{
let y_old = y.clone();
println!(
"(&{}).round_to_multiple({}, {}) = {}",
x,
y_old,
rm,
(&x).round_to_multiple(y, rm)
);
}
}
fn demo_natural_round_to_multiple_ref_ref(gm: GenMode, config: GenConfig, limit: usize) {
for (x, y, rm) in natural_natural_rounding_mode_triple_gen_var_2()
.get(gm, &config)
.take(limit)
{
println!(
"(&{}).round_to_multiple(&{}, {}) = {}",
x,
y,
rm,
(&x).round_to_multiple(&y, rm)
);
}
}
fn benchmark_natural_round_to_multiple_assign_evaluation_strategy(
gm: GenMode,
config: GenConfig,
limit: usize,
file_name: &str,
) {
run_benchmark(
"Natural.round_to_multiple_assign(Natural, RoundingMode)",
BenchmarkType::EvaluationStrategy,
natural_natural_rounding_mode_triple_gen_var_2().get(gm, &config),
gm.name(),
limit,
file_name,
&triple_1_natural_bit_bucketer("x"),
&mut [
(
"Natural.round_to_multiple_assign(Natural, RoundingMode)",
&mut |(mut x, y, rm)| x.round_to_multiple_assign(y, rm),
),
(
"Natural.round_to_multiple_assign(&Natural, RoundingMode)",
&mut |(mut x, y, rm)| x.round_to_multiple_assign(&y, rm),
),
],
);
}
fn benchmark_natural_round_to_multiple_evaluation_strategy(
gm: GenMode,
config: GenConfig,
limit: usize,
file_name: &str,
) {
run_benchmark(
"Natural.round_to_multiple(Natural, RoundingMode)",
BenchmarkType::EvaluationStrategy,
natural_natural_rounding_mode_triple_gen_var_2().get(gm, &config),
gm.name(),
limit,
file_name,
&triple_1_natural_bit_bucketer("x"),
&mut [
(
"Natural.round_to_multiple(Natural, RoundingMode)",
&mut |(x, y, rm)| no_out!(x.round_to_multiple(y, rm)),
),
(
"Natural.round_to_multiple(&Natural, RoundingMode)",
&mut |(x, y, rm)| no_out!(x.round_to_multiple(&y, rm)),
),
(
"(&Natural).round_to_multiple(Natural, RoundingMode)",
&mut |(x, y, rm)| no_out!((&x).round_to_multiple(y, rm)),
),
(
"(&Natural).round_to_multiple(&Natural, RoundingMode)",
&mut |(x, y, rm)| no_out!((&x).round_to_multiple(&y, rm)),
),
],
);
}<|fim▁end|> | } |
<|file_name|>test_notebook.py<|end_file_name|><|fim▁begin|>#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2019, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
import pytest ; pytest
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
import json
from mock import MagicMock, patch, PropertyMock
# External imports
# Bokeh imports
from bokeh.document.document import Document
from bokeh.io.state import State
<|fim▁hole|># Module under test
import bokeh.io.notebook as binb
#-----------------------------------------------------------------------------
# Setup
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
def test_install_notebook_hook():
binb.install_notebook_hook("foo", "load", "doc", "app")
assert binb._HOOKS["foo"]['load'] == "load"
assert binb._HOOKS["foo"]['doc'] == "doc"
assert binb._HOOKS["foo"]['app'] == "app"
with pytest.raises(RuntimeError):
binb.install_notebook_hook("foo", "load2", "doc2", "app2")
binb.install_notebook_hook("foo", "load2", "doc2", "app2", overwrite=True)
assert binb._HOOKS["foo"]['load'] == "load2"
assert binb._HOOKS["foo"]['doc'] == "doc2"
assert binb._HOOKS["foo"]['app'] == "app2"
@patch('bokeh.io.notebook.get_comms')
@patch('bokeh.io.notebook.publish_display_data')
@patch('bokeh.embed.notebook.notebook_content')
def test_show_doc_no_server(mock_notebook_content,
mock__publish_display_data,
mock_get_comms):
mock_get_comms.return_value = "comms"
s = State()
d = Document()
mock_notebook_content.return_value = ["notebook_script", "notebook_div", d]
class Obj(object):
id = None
def references(self): return []
assert mock__publish_display_data.call_count == 0
binb.show_doc(Obj(), s, True)
expected_args = ({'application/javascript': 'notebook_script', 'application/vnd.bokehjs_exec.v0+json': ''},)
expected_kwargs = {'metadata': {'application/vnd.bokehjs_exec.v0+json': {'id': None}}}
assert d._hold is not None
assert mock__publish_display_data.call_count == 2 # two mime types
assert mock__publish_display_data.call_args[0] == expected_args
assert mock__publish_display_data.call_args[1] == expected_kwargs
class Test_push_notebook(object):
@patch('bokeh.io.notebook.CommsHandle.comms', new_callable=PropertyMock)
def test_no_events(self, mock_comms):
mock_comms.return_value = MagicMock()
d = Document()
handle = binb.CommsHandle("comms", d)
binb.push_notebook(d, None, handle)
assert mock_comms.call_count == 0
@patch('bokeh.io.notebook.CommsHandle.comms', new_callable=PropertyMock)
def test_with_events(self, mock_comms):
mock_comm = MagicMock()
mock_send = MagicMock(return_value="junk")
mock_comm.send = mock_send
mock_comms.return_value = mock_comm
d = Document()
handle = binb.CommsHandle("comms", d)
d.title = "foo"
binb.push_notebook(d, None, handle)
assert mock_comms.call_count > 0
assert mock_send.call_count == 3 # sends header, metadata, then content
assert json.loads(mock_send.call_args[0][0]) == {u"events": [{u"kind": u"TitleChanged", u"title": u"foo"}], u"references": []}
assert mock_send.call_args[1] == {}
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
def test__origin_url():
assert binb._origin_url("foo.com:8888") == "foo.com:8888"
assert binb._origin_url("http://foo.com:8888") == "foo.com:8888"
assert binb._origin_url("https://foo.com:8888") == "foo.com:8888"
def test__server_url():
assert binb._server_url("foo.com:8888", 10) == "http://foo.com:10/"
assert binb._server_url("http://foo.com:8888", 10) == "http://foo.com:10/"
assert binb._server_url("https://foo.com:8888", 10) == "https://foo.com:10/"
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------<|fim▁end|> | |
<|file_name|>app.py<|end_file_name|><|fim▁begin|>import flask
import json<|fim▁hole|>import sys
from fontana import twitter
import pymongo
DEFAULT_PORT = 2014
DB = 'fontana'
connection = pymongo.Connection("localhost", 27017)
db = connection[DB]
latest_headers = {}
MODERATED_SIZE = 40
class MongoEncoder(json.JSONEncoder):
def default(self, obj, **kwargs):
if isinstance(obj, bson.ObjectId):
return str(obj)
else:
            return json.JSONEncoder.default(self, obj, **kwargs)
app = flask.Flask('fontana')
def twitter_authorisation_begin():
"""
Step 1 and 2 of the Twitter oAuth flow.
"""
callback = absolute_url('twitter_signin')
if 'next' in flask.request.args:
callback = '%s?next=%s' % (callback, flask.request.args['next'])
try:
token = twitter.request_token(app.config, callback)
flask.session['twitter_oauth_token'] = token['oauth_token']
flask.session['twitter_oauth_token_secret'] = token['oauth_token_secret']
return flask.redirect(twitter.authenticate_url(token, callback))
except twitter.TwitterException, e:
return flask.abort(403, str(e))
def twitter_authorisation_done():
"""
Step 3 of the Twitter oAuth flow.
"""
if 'oauth_token' in flask.request.args:
token = flask.request.args
if flask.session['twitter_oauth_token'] != token['oauth_token']:
return flask.abort(403, 'oauth_token mismatch!')
auth = twitter.access_token(app.config, token)
flask.session['twitter_oauth_token'] = auth['oauth_token']
flask.session['twitter_oauth_token_secret'] = auth['oauth_token_secret']
flask.session['twitter_user_id'] = auth['user_id']
flask.session['twitter_screen_name'] = auth['screen_name']
if 'next' in flask.request.args:
return flask.redirect(flask.request.args['next'])
else:
return 'OK'
elif 'denied' in flask.request.args:
return flask.abort(403, 'oauth denied')
else:
return flask.abort(403, 'unknown sign in failure')
@app.route('/api/twitter/session/new/')
def twitter_signin():
"""
Handles the Twitter oAuth flow.
"""
args = flask.request.args
if not args or (len(args) == 1 and 'next' in args):
return twitter_authorisation_begin()
else:
return twitter_authorisation_done()
@app.route('/api/twitter/session/')
def twitter_session():
"""
Check for an active Twitter session. Returns a JSON response with the
active sceen name or a 403 if there is no active session.
"""
if not flask.session.get('twitter_user_id'):
return flask.abort(403, 'no active session')
return (json.dumps({
'screen_name': flask.session['twitter_screen_name']
}), 200, {'content-type': 'application/json'})
@app.route('/api/twitter/search/')
def twitter_search():
"""
Perform a Twitter search
"""
global latest_headers
if not flask.session.get('twitter_user_id'):
return flask.abort(403, 'no active session')
token = {
'oauth_token': flask.session['twitter_oauth_token'],
'oauth_token_secret': flask.session['twitter_oauth_token_secret']
}
# Find out last id
last = db['tweets'].aggregate( { '$group': { '_id':"", 'last': { '$max': "$id" } } } )
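    # Under the legacy pymongo command-style API this script appears to
    # target, aggregate() returns a dict shaped roughly like
    #     {u'ok': 1, u'result': [{u'_id': u'', u'last': 1234567890}]}
    # which is why 'ok' and 'result' are inspected below.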
    since_id = long(flask.request.args.get('since_id', 0))
params = dict(flask.request.args)
if last.get("ok") == 1 and last['result']:
last = long(last['result'][0]['last'])
params['since_id'] = max(last, since_id)
# Query twitter and cache result into DB
(text, status_code, headers) = twitter.search(app.config, token, params)
data = json.loads(text)
for s in data['statuses']:
s['exclude'] = s['text'].startswith('RT ')
s['classes'] = []
if s['text'].startswith('RT '):
s['classes'].append('RT')
if '?' in s['text']:
s['classes'].append('question')
# Use tweet id as _id so that save will replace existing tweets if necessary
s['_id'] = s['id']
db['tweets'].save(s)
latest_headers = dict(headers)
return (text, status_code, headers)
@app.route('/moderated')
def twitter_moderated():
"""
Return moderated posts
"""
return (json.dumps({ 'statuses': [ s for s in db['tweets'].find({ 'exclude': False }).sort([('id', -1)]).limit(MODERATED_SIZE) ]},
indent=None if request.is_xhr else 2,
cls=MongoEncoder),
200,
{'content-type': 'application/json'})
@app.route('/all')
def twitter_all():
"""
Return all cached posts
"""
since_id = long(request.values.get('since_id', 0))
return (json.dumps({ 'statuses': [ s for s in db['tweets'].find({ 'id': { '$gt': since_id } }).sort([ ('id', -1) ]) ]},
indent=None if request.is_xhr else 2,
cls=MongoEncoder),
200,
latest_headers)
@app.route('/exclude/<path:ident>')
def exclude(ident):
"""Exclude given post.
"""
db['tweets'].update( { 'id_str': ident },
{ '$set': { 'exclude': True } })
return redirect('/admin.html')
@app.route('/set_moderated/<int:length>')
def set_moderated_length(length):
"""Set moderated queue length
"""
global MODERATED_SIZE
if length > 2 and length < 100:
MODERATED_SIZE = length
return redirect('/admin.html')
@app.route('/include/<path:ident>')
def include(ident):
"""Include given post.
"""
db['tweets'].update( { 'id_str': ident },
{ '$set': { 'exclude': False } })
return redirect('/admin.html')
@app.route('/api/session/clear/', methods=['POST'])
def signout():
"""
Perform a sign out, clears the user's session.
"""
flask.session.clear()
return 'OK'
def absolute_url(name):
"""
Flask's url_for with added SERVER_NAME
"""
host = app.config['SERVER_NAME'] or ('localhost:' + str(DEFAULT_PORT))
url = flask.url_for(name)
return 'http://%s%s' % (host, url)
def devserver(extra_conf=None):
"""
Start a development server
"""
from werkzeug.wsgi import SharedDataMiddleware
# Load the "example" conf
root = app.root_path.split(os.path.dirname(__file__))[0]
conf = os.path.join(root, 'backend', 'var', 'conf', 'fontana-example.conf')
app.config.from_pyfile(conf)
if extra_conf:
app.config.from_pyfile(os.path.join(root, extra_conf))
# Serve the frontend files
app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {
'/': app.config['STATIC_DIR']
})
# Setup a index.html redirect for convenience sake.
app.route('/')(lambda: flask.redirect('index.html'))
# Run the development or production server
if app.config.get('PROD'):
app.run(debug=False, host='0.0.0.0', port=DEFAULT_PORT)
else:
app.run()
if __name__ == "__main__":
# This will get invoked when you run `python backend/src/fontana.py`
if len(sys.argv) == 2:
devserver(sys.argv[1])
else:
devserver()<|fim▁end|> | import bson
import os
from flask import request, redirect |
<|file_name|>usb_policy_allowed_devices.cc<|end_file_name|><|fim▁begin|>// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/usb/usb_policy_allowed_devices.h"
#include <string>
#include <vector>
#include "base/bind.h"
#include "base/strings/string_split.h"
#include "base/values.h"
#include "components/content_settings/core/common/pref_names.h"
#include "components/prefs/pref_service.h"
#include "services/device/public/mojom/usb_device.mojom.h"
#include "services/device/public/mojom/usb_manager.mojom.h"
#include "url/gurl.h"
namespace {
constexpr char kPrefDevicesKey[] = "devices";
constexpr char kPrefUrlsKey[] = "urls";
constexpr char kPrefVendorIdKey[] = "vendor_id";
constexpr char kPrefProductIdKey[] = "product_id";
} // namespace
UsbPolicyAllowedDevices::UsbPolicyAllowedDevices(PrefService* pref_service) {
pref_change_registrar_.Init(pref_service);
// Add an observer for |kManagedWebUsbAllowDevicesForUrls| to call
// CreateOrUpdateMap when the value is changed. The lifetime of
// |pref_change_registrar_| is managed by this class, therefore it is safe to
// use base::Unretained here.
pref_change_registrar_.Add(
prefs::kManagedWebUsbAllowDevicesForUrls,
base::BindRepeating(&UsbPolicyAllowedDevices::CreateOrUpdateMap,
base::Unretained(this)));
CreateOrUpdateMap();
}
UsbPolicyAllowedDevices::~UsbPolicyAllowedDevices() {}
bool UsbPolicyAllowedDevices::IsDeviceAllowed(
const url::Origin& origin,
const device::mojom::UsbDeviceInfo& device_info) {
return IsDeviceAllowed(
origin, std::make_pair(device_info.vendor_id, device_info.product_id));
}
bool UsbPolicyAllowedDevices::IsDeviceAllowed(
const url::Origin& origin,
const std::pair<int, int>& device_ids) {
// Search through each set of URL pair that match the given device. The
// keys correspond to the following URL pair sets:
// * (vendor_id, product_id): A set corresponding to the exact device.
// * (vendor_id, -1): A set corresponding to any device with |vendor_id|.
// * (-1, -1): A set corresponding to any device.
const std::pair<int, int> set_keys[] = {
std::make_pair(device_ids.first, device_ids.second),
std::make_pair(device_ids.first, -1), std::make_pair(-1, -1)};
for (const auto& key : set_keys) {
const auto entry = usb_device_ids_to_urls_.find(key);
if (entry == usb_device_ids_to_urls_.cend())
continue;
if (entry->second.find(origin) != entry->second.end())
return true;
}
return false;
}
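// For reference, a minimal sketch of a policy value this class is assumed to
// consume, using the pref keys defined above (ids and URLs are hypothetical):
//
//   [ {
//       "devices": [ { "vendor_id": 1234, "product_id": 5678 } ],
//       "urls": [ "https://requesting.example,https://embedding.example" ]
//   } ]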
void UsbPolicyAllowedDevices::CreateOrUpdateMap() {
const base::Value* pref_value = pref_change_registrar_.prefs()->Get(
prefs::kManagedWebUsbAllowDevicesForUrls);
usb_device_ids_to_urls_.clear();
// A policy has not been assigned.
if (!pref_value) {
return;
}
// The pref value has already been validated by the policy handler, so it is
// safe to assume that |pref_value| follows the policy template.
for (const auto& item : pref_value->GetList()) {
const base::Value* urls_list = item.FindKey(kPrefUrlsKey);
std::set<url::Origin> parsed_set;
    // A urls item can contain a pair of URLs delimited by a comma. If it does
    // not contain a second URL, the embedding origin is left unset to signify
    // a wildcard embedding origin.
for (const auto& urls_value : urls_list->GetList()) {
std::vector<std::string> urls =
base::SplitString(urls_value.GetString(), ",", base::TRIM_WHITESPACE,
base::SPLIT_WANT_ALL);
// Skip invalid URL entries.
if (urls.empty())
continue;
auto requesting_origin = url::Origin::Create(GURL(urls[0]));
absl::optional<url::Origin> embedding_origin;
if (urls.size() == 2 && !urls[1].empty())
embedding_origin = url::Origin::Create(GURL(urls[1]));
// In order to be compatible with legacy (requesting,embedding) entries
// without breaking any access specified, we will grant the permission to
// the embedder if present because under permission delegation the
// top-level origin has the permission. If only the requesting origin is
// present, use that instead.
auto origin = embedding_origin.has_value() ? embedding_origin.value()
: requesting_origin;
<|fim▁hole|>
// Ignore items with empty parsed URLs.
if (parsed_set.empty())
continue;
// For each device entry in the map, create or update its respective URL
// set.
const base::Value* devices = item.FindKey(kPrefDevicesKey);
for (const auto& device : devices->GetList()) {
// A missing ID signifies a wildcard for that ID, so a sentinel value of
// -1 is assigned.
const base::Value* vendor_id_value = device.FindKey(kPrefVendorIdKey);
const base::Value* product_id_value = device.FindKey(kPrefProductIdKey);
int vendor_id = vendor_id_value ? vendor_id_value->GetInt() : -1;
int product_id = product_id_value ? product_id_value->GetInt() : -1;
DCHECK(vendor_id != -1 || product_id == -1);
auto key = std::make_pair(vendor_id, product_id);
usb_device_ids_to_urls_[key].insert(parsed_set.begin(), parsed_set.end());
}
}
}<|fim▁end|> | parsed_set.insert(std::move(origin));
} |
<|file_name|>p05_remove_search_criteria_reports.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import webnotes
def execute():
try:
webnotes.conn.sql("""delete from `tabSearch Criteria` where ifnull(standard, 'No') = 'Yes'""")<|fim▁hole|> pass<|fim▁end|> | except Exception, e: |
<|file_name|>wrap-layout.d.ts<|end_file_name|><|fim▁begin|>declare module "ui/layouts/wrap-layout" {
import layout = require("ui/layouts/layout-base");
import dependencyObservable = require("ui/core/dependency-observable");
/**
     * WrapLayout positions children in rows or columns, depending on the
     * orientation property, until space is filled, and then wraps them onto a
     * new row or column.
*/
class WrapLayout extends layout.LayoutBase {
/**
* Represents the observable property backing the orientation property of each WrapLayout instance.
*/<|fim▁hole|> */
public static itemWidthProperty: dependencyObservable.Property;
/**
* Represents the observable property backing the itemHeight property of each WrapLayout instance.
*/
public static itemHeightProperty: dependencyObservable.Property;
/**
* Gets or sets the flow direction. Default value is horizontal.
* If orientation is horizontal items are arranged in rows, else items are arranged in columns.
*/
orientation: string;
/**
* Gets or sets the width used to measure and layout each child.
* Default value is Number.NaN which does not restrict children.
*/
itemWidth: number;
/**
* Gets or sets the height used to measure and layout each child.
* Default value is Number.NaN which does not restrict children.
*/
itemHeight: number;
}
}<|fim▁end|> | public static orientationProperty: dependencyObservable.Property;
/**
* Represents the observable property backing the itemWidth property of each WrapLayout instance. |
<|file_name|>test_worker_medium.py<|end_file_name|><|fim▁begin|># -*- Mode: Python; test-case-name:flumotion.test.test_worker_worker -*-
# vi:si:et:sw=4:sts=4:ts=4
# Flumotion - a streaming media server
# Copyright (C) 2004,2005,2006,2007,2008,2009 Fluendo, S.L.
# Copyright (C) 2010,2011 Flumotion Services, S.A.
# All rights reserved.
#
# This file may be distributed and/or modified under the terms of
# the GNU Lesser General Public License version 2.1 as published by
# the Free Software Foundation.
# This file is distributed without any warranty; without even the implied
# warranty of merchantability or fitness for a particular purpose.
# See "LICENSE.LGPL" in the source distribution for more information.
#
# Headers in this file shall remain intact.
from twisted.internet import defer
from twisted.spread import pb
from flumotion.common import testsuite
from flumotion.test import realm
from flumotion.twisted import pb as fpb
from flumotion.worker import medium
class TestWorkerAvatar(fpb.PingableAvatar):
def __init__(self, avatarId, mind):
fpb.PingableAvatar.__init__(self, avatarId)
self.setMind(mind)
class TestWorkerRealm(realm.TestRealm):
deferredAvatar = None
deferredLogout = None
def getDeferredAvatar(self):
if self.deferredAvatar is None:
self.deferredAvatar = defer.Deferred()
return self.deferredAvatar
def getDeferredLogout(self):
if self.deferredLogout is None:
self.deferredLogout = defer.Deferred()
return self.deferredLogout
def requestAvatar(self, avatarId, keycard, mind, *ifaces):
avatar = TestWorkerAvatar(avatarId, mind)
self.getDeferredAvatar().callback(avatar)
return (pb.IPerspective, avatar,
lambda: self.avatarLogout(avatar))
def avatarLogout(self, avatar):
self.debug('worker logged out: %s', avatar.avatarId)
self.getDeferredLogout().callback(avatar)
class TestWorkerMedium(testsuite.TestCase):<|fim▁hole|> def tearDown(self):
return self.realm.shutdown()
def testConnect(self):
m = medium.WorkerMedium(None)
connectionInfo = self.realm.getConnectionInfo()
connectionInfo.authenticator.avatarId = 'foo'
m.startConnecting(connectionInfo)
def connected(avatar):
m.stopConnecting()
return self.realm.getDeferredLogout()
def disconnected(avatar):
self.assertEquals(avatar.avatarId, 'foo')
d = self.realm.getDeferredAvatar()
d.addCallback(connected)
d.addCallback(disconnected)
return d<|fim▁end|> |
def setUp(self):
self.realm = TestWorkerRealm()
|
<|file_name|>variables_2.js<|end_file_name|><|fim▁begin|>var searchData=
[
['slot_5fcount',['SLOT_COUNT',['../classmastermind_1_1_mastermind.html#ad4cfc8127641ff8dfe89d65ae232331c',1,'mastermind::Mastermind']]]<|fim▁hole|><|fim▁end|> | ]; |
<|file_name|>test_closeness_centrality.py<|end_file_name|><|fim▁begin|>"""
Tests for closeness centrality.
"""
import pytest
import networkx as nx
from networkx.testing import almost_equal
class TestClosenessCentrality:
@classmethod
def setup_class(cls):
cls.K = nx.krackhardt_kite_graph()
cls.P3 = nx.path_graph(3)
cls.P4 = nx.path_graph(4)
cls.K5 = nx.complete_graph(5)
cls.C4 = nx.cycle_graph(4)
cls.T = nx.balanced_tree(r=2, h=2)
cls.Gb = nx.Graph()
cls.Gb.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)])
F = nx.florentine_families_graph()
cls.F = F
cls.LM = nx.les_miserables_graph()
# Create random undirected, unweighted graph for testing incremental version
cls.undirected_G = nx.fast_gnp_random_graph(n=100, p=0.6, seed=123)
cls.undirected_G_cc = nx.closeness_centrality(cls.undirected_G)
def test_wf_improved(self):
G = nx.union(self.P4, nx.path_graph([4, 5, 6]))
c = nx.closeness_centrality(G)
cwf = nx.closeness_centrality(G, wf_improved=False)
res = {0: 0.25, 1: 0.375, 2: 0.375, 3: 0.25, 4: 0.222, 5: 0.333, 6: 0.222}
wf_res = {0: 0.5, 1: 0.75, 2: 0.75, 3: 0.5, 4: 0.667, 5: 1.0, 6: 0.667}
for n in G:
assert almost_equal(c[n], res[n], places=3)
assert almost_equal(cwf[n], wf_res[n], places=3)
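        # With wf_improved=True (the default used for c), closeness is scaled
        # by (reachable - 1) / (n - 1): here 3/6 for the 4-node component and
        # 2/6 for the 3-node one, which is why res is exactly half (resp. a
        # third) of wf_res.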
def test_digraph(self):
G = nx.path_graph(3, create_using=nx.DiGraph())
c = nx.closeness_centrality(G)
cr = nx.closeness_centrality(G.reverse())
d = {0: 0.0, 1: 0.500, 2: 0.667}
dr = {0: 0.667, 1: 0.500, 2: 0.0}
for n in sorted(self.P3):
assert almost_equal(c[n], d[n], places=3)
assert almost_equal(cr[n], dr[n], places=3)
def test_k5_closeness(self):
c = nx.closeness_centrality(self.K5)
d = {0: 1.000, 1: 1.000, 2: 1.000, 3: 1.000, 4: 1.000}
for n in sorted(self.K5):
assert almost_equal(c[n], d[n], places=3)
def test_p3_closeness(self):
c = nx.closeness_centrality(self.P3)
d = {0: 0.667, 1: 1.000, 2: 0.667}
for n in sorted(self.P3):
assert almost_equal(c[n], d[n], places=3)
def test_krackhardt_closeness(self):
c = nx.closeness_centrality(self.K)
d = {
0: 0.529,
1: 0.529,
2: 0.500,
3: 0.600,
4: 0.500,
5: 0.643,
6: 0.643,
7: 0.600,
8: 0.429,
9: 0.310,
}
for n in sorted(self.K):
assert almost_equal(c[n], d[n], places=3)
def test_florentine_families_closeness(self):
c = nx.closeness_centrality(self.F)
d = {
"Acciaiuoli": 0.368,
"Albizzi": 0.483,
"Barbadori": 0.4375,
"Bischeri": 0.400,
"Castellani": 0.389,
"Ginori": 0.333,
"Guadagni": 0.467,
"Lamberteschi": 0.326,
"Medici": 0.560,
"Pazzi": 0.286,
"Peruzzi": 0.368,
"Ridolfi": 0.500,
"Salviati": 0.389,
"Strozzi": 0.4375,
"Tornabuoni": 0.483,
}
for n in sorted(self.F):
assert almost_equal(c[n], d[n], places=3)
def test_les_miserables_closeness(self):
c = nx.closeness_centrality(self.LM)
d = {
"Napoleon": 0.302,
"Myriel": 0.429,
"MlleBaptistine": 0.413,
"MmeMagloire": 0.413,
"CountessDeLo": 0.302,
"Geborand": 0.302,
"Champtercier": 0.302,
"Cravatte": 0.302,
"Count": 0.302,
"OldMan": 0.302,
"Valjean": 0.644,
"Labarre": 0.394,
"Marguerite": 0.413,
"MmeDeR": 0.394,
"Isabeau": 0.394,
"Gervais": 0.394,
"Listolier": 0.341,
"Tholomyes": 0.392,
"Fameuil": 0.341,
"Blacheville": 0.341,
"Favourite": 0.341,
"Dahlia": 0.341,
"Zephine": 0.341,
"Fantine": 0.461,
"MmeThenardier": 0.461,
"Thenardier": 0.517,
"Cosette": 0.478,
"Javert": 0.517,
"Fauchelevent": 0.402,
"Bamatabois": 0.427,
"Perpetue": 0.318,
"Simplice": 0.418,
"Scaufflaire": 0.394,
"Woman1": 0.396,
"Judge": 0.404,
"Champmathieu": 0.404,
"Brevet": 0.404,
"Chenildieu": 0.404,
"Cochepaille": 0.404,
"Pontmercy": 0.373,
"Boulatruelle": 0.342,
"Eponine": 0.396,
"Anzelma": 0.352,
"Woman2": 0.402,
"MotherInnocent": 0.398,
"Gribier": 0.288,
"MmeBurgon": 0.344,
"Jondrette": 0.257,
"Gavroche": 0.514,
"Gillenormand": 0.442,
"Magnon": 0.335,
"MlleGillenormand": 0.442,
"MmePontmercy": 0.315,
"MlleVaubois": 0.308,
"LtGillenormand": 0.365,
"Marius": 0.531,
"BaronessT": 0.352,
"Mabeuf": 0.396,
"Enjolras": 0.481,
"Combeferre": 0.392,
"Prouvaire": 0.357,
"Feuilly": 0.392,
"Courfeyrac": 0.400,
"Bahorel": 0.394,
"Bossuet": 0.475,
"Joly": 0.394,
"Grantaire": 0.358,
"MotherPlutarch": 0.285,
"Gueulemer": 0.463,
"Babet": 0.463,
"Claquesous": 0.452,
"Montparnasse": 0.458,
"Toussaint": 0.402,
"Child1": 0.342,
"Child2": 0.342,
"Brujon": 0.380,
"MmeHucheloup": 0.353,
}
for n in sorted(self.LM):
assert almost_equal(c[n], d[n], places=3)
def test_weighted_closeness(self):
edges = [
("s", "u", 10),
("s", "x", 5),
("u", "v", 1),
("u", "x", 2),
("v", "y", 1),
("x", "u", 3),
("x", "v", 5),
("x", "y", 2),
("y", "s", 7),
("y", "v", 6),
]
XG = nx.Graph()
XG.add_weighted_edges_from(edges)
c = nx.closeness_centrality(XG, distance="weight")
d = {"y": 0.200, "x": 0.286, "s": 0.138, "u": 0.235, "v": 0.200}
for n in sorted(XG):
assert almost_equal(c[n], d[n], places=3)
#<|fim▁hole|> @staticmethod
def pick_add_edge(g):
u = nx.utils.arbitrary_element(g)
possible_nodes = set(g.nodes())
neighbors = list(g.neighbors(u)) + [u]
possible_nodes.difference_update(neighbors)
v = nx.utils.arbitrary_element(possible_nodes)
return (u, v)
@staticmethod
def pick_remove_edge(g):
u = nx.utils.arbitrary_element(g)
possible_nodes = list(g.neighbors(u))
v = nx.utils.arbitrary_element(possible_nodes)
return (u, v)
def test_directed_raises(self):
with pytest.raises(nx.NetworkXNotImplemented):
dir_G = nx.gn_graph(n=5)
prev_cc = None
edge = self.pick_add_edge(dir_G)
insert = True
nx.incremental_closeness_centrality(dir_G, edge, prev_cc, insert)
def test_wrong_size_prev_cc_raises(self):
with pytest.raises(nx.NetworkXError):
G = self.undirected_G.copy()
edge = self.pick_add_edge(G)
insert = True
prev_cc = self.undirected_G_cc.copy()
prev_cc.pop(0)
nx.incremental_closeness_centrality(G, edge, prev_cc, insert)
def test_wrong_nodes_prev_cc_raises(self):
with pytest.raises(nx.NetworkXError):
G = self.undirected_G.copy()
edge = self.pick_add_edge(G)
insert = True
prev_cc = self.undirected_G_cc.copy()
num_nodes = len(prev_cc)
prev_cc.pop(0)
prev_cc[num_nodes] = 0.5
nx.incremental_closeness_centrality(G, edge, prev_cc, insert)
def test_zero_centrality(self):
G = nx.path_graph(3)
prev_cc = nx.closeness_centrality(G)
edge = self.pick_remove_edge(G)
test_cc = nx.incremental_closeness_centrality(G, edge, prev_cc, insertion=False)
G.remove_edges_from([edge])
real_cc = nx.closeness_centrality(G)
shared_items = set(test_cc.items()) & set(real_cc.items())
assert len(shared_items) == len(real_cc)
assert 0 in test_cc.values()
def test_incremental(self):
# Check that incremental and regular give same output
G = self.undirected_G.copy()
prev_cc = None
for i in range(5):
if i % 2 == 0:
# Remove an edge
insert = False
edge = self.pick_remove_edge(G)
else:
# Add an edge
insert = True
edge = self.pick_add_edge(G)
# start = timeit.default_timer()
test_cc = nx.incremental_closeness_centrality(G, edge, prev_cc, insert)
# inc_elapsed = (timeit.default_timer() - start)
# print(f"incremental time: {inc_elapsed}")
if insert:
G.add_edges_from([edge])
else:
G.remove_edges_from([edge])
# start = timeit.default_timer()
real_cc = nx.closeness_centrality(G)
# reg_elapsed = (timeit.default_timer() - start)
# print(f"regular time: {reg_elapsed}")
# Example output:
# incremental time: 0.208
# regular time: 0.276
# incremental time: 0.00683
# regular time: 0.260
# incremental time: 0.0224
# regular time: 0.278
# incremental time: 0.00804
# regular time: 0.208
# incremental time: 0.00947
# regular time: 0.188
assert set(test_cc.items()) == set(real_cc.items())
prev_cc = test_cc<|fim▁end|> | # Tests for incremental closeness centrality.
# |
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | import { isTypedArray } from "../index";
export = isTypedArray; |
<|file_name|>plotting.py<|end_file_name|><|fim▁begin|>import numpy as np
import matplotlib.pyplot as plt
from stimulus import *
from myintegrator import *
from functions import *
import matplotlib.gridspec as gridspec
import cPickle as pickle
#-------------------------------------------------------------------
#-------------------------------------------------------------------
#-----------------Stimulation of Populations------------------------
#-------------------------------------------------------------------
# setting up the simulation
#times = 100
#delta = 50
#period = 30
patterns=np.identity(n)
patterns=[patterns[:,i] for i in range(n)]
mystim=stimulus(patterns,lagStim,delta,period,times)
mystim.inten=amp
#integrator
npts=int(np.floor(delay/dt)+1) # points delay
tmax=times*(lagStim+n*(period+delta))+100.+mystim.delay_begin
thetmax=tmax+40000
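# tmax is presumably the end of the stimulation protocol: `times` sequence
# presentations (n patterns of length period+delta each, after a lagStim gap),
# plus a 100 ms margin and the stimulus onset delay; thetmax then adds 40 s of
# stimulus-free dynamics so the learned weights can settle.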
#t = np.linspace(0,thetmax,100000)
u,uI,connectivity,WEI,t = pickle.load(open('dyn_stimulation_SA.p','rb'))
#-----------------------------------------------------------------------------------------
#-------------------------------- Dynamics-----------------------------------------------
#----------------------------------------------------------------------------------------
#initial conditions
tmaxdyn=500
mystim.inten=0.
theintegrator=myintegrator(delay,dt,n,tmaxdyn)
theintegrator.fast=False
#integration
u_ret,uI_ret,connectivity_ret,WEI_ret,t_ret = pickle.load(open('dyn_retrieval_SA.p','rb'))
u_ret_PA,uI_ret_PA,connectivity_ret_PA,WEI_ret_PA,t_ret_PA = pickle.load(open('dyn_retrieval_PA.p','rb'))
#-------------------------------------------------------------------
#-----------------Stimulation of Populations------------------------
#-------------------------------------------------------------------
rc={'axes.labelsize': 32, 'font.size': 30, 'legend.fontsize': 25.0, 'axes.titlesize': 35}
plt.rcParams.update(**rc)
plt.rcParams['image.cmap'] = 'jet'
fig = plt.figure(figsize=(19, 11))
gs = gridspec.GridSpec(2, 2)#height_ratios=[3,3,2])
gs.update(wspace=0.44,hspace=0.03)
gs0 = gridspec.GridSpec(2, 2)
gs0.update(wspace=0.05,hspace=0.4,left=0.54,right=1.,top=0.88,bottom=0.1106)
#gs1.update(wspace=0.05,hspace=0.4,left=0.1245,right=1.,top=0.21,bottom=0.05)
# Excitatory and Inhibitory weights
ax1A = plt.subplot(gs[0,0])
ax1B = plt.subplot(gs[1,0])
#sequence
axSA = plt.subplot(gs0[1,0])
axPA = plt.subplot(gs0[1,1])
#stimulation
ax2B= plt.subplot(gs0[0,0])
ax2C= plt.subplot(gs0[0,1])
colormap = plt.cm.Accent
ax2B.set_prop_cycle(plt.cycler('color',[colormap(i) for i in np.linspace(0, 0.9,n)]))
ax2B.plot(t,phi(u[:,:],theta,uc),lw=3)
mystim.inten=.1
elstim=np.array([sum(mystim.stim(x)) for x in t])
ax2B.plot(t,elstim,'k',lw=3)
ax2B.fill_between(t,np.zeros(len(t)),elstim,alpha=0.5,edgecolor='k', facecolor='darkgrey')
ax2B.set_ylim([0,1.2])
ax2B.set_xlim([0,600])
ax2B.set_yticks([0.5,1])
ax2B.set_xticks([0,200,400])
ax2B.set_xticklabels([0.,.2,.4])
ax2B.set_xlabel('Time (s)')
ax2B.set_ylabel('Rate')<|fim▁hole|>
ax2C.set_prop_cycle(plt.cycler('color',[colormap(i) for i in np.linspace(0, 0.9,n)]))
ax2C.plot(t,phi(u[:,:],theta,uc),lw=3)
mystim.inten=.1
elstim=np.array([sum(mystim.stim(x)) for x in t])
ax2C.plot(t,elstim,'k',lw=3)
ax2C.fill_between(t,np.zeros(len(t)),elstim,alpha=0.5,edgecolor='k', facecolor='darkgrey')
ax2C.set_xlim([89475,90075])
ax2C.set_xticks([89500,89700,89900])
ax2C.set_xticklabels([89.5,89.7,89.9])
ax2C.set_ylim([0,1.2])
ax2C.set_yticks([])
ax2C.set_xlabel('Time (s)')
#ax2C.set_ylabel('Rate')
#----------------------------------------------------------------------
#------------Synaptic Weights------------------------------------------
#----------------------------------------------------------------------
for i in range(10):
ax1A.plot(t,connectivity[:,i,i],'c',lw=3)
for i in range(0,9):
ax1A.plot(t,connectivity[:,i+1,i],'y',lw=3)
for i in range(8):
ax1A.plot(t,connectivity[:,i+2,i],'g',lw=3)
for i in range(9):
ax1A.plot(t,connectivity[:,i,i+1],'r',lw=3)
for i in range(8):
ax1A.plot(t,connectivity[:,i,i+2],'b',lw=3)
ax1A.set_xticks([])
ax1A.axvline(x=tmax,ymin=0,ymax=2.,linewidth=2,ls='--',color='gray',alpha=0.7)
#ax1A.set_xticklabels([0,50,100,150])
ax1A.set_ylim([0,1.8])
ax1A.set_xlim([0,250000])
ax1A.set_yticks([0,0.5,1.,1.5])
#ax1A.set_xlabel('Time (s)')
ax1A.set_ylabel('Synaptic Weights')
ax1A.set_title('(A)',y=1.04)
#------------------------------------------------------------------------
#-------------Homeostatic Variable --------------------------------------
#------------------------------------------------------------------------
ax1B.set_prop_cycle(plt.cycler('color',[colormap(i) for i in np.linspace(0, 0.9,n)]))
ax1B.plot(t,WEI[:],lw=3)
ax1B.axvline(x=tmax,ymin=0,ymax=2.,linewidth=2,ls='--',color='gray',alpha=0.7)
ax1B.set_ylim([0.,3.4])
ax1B.set_yticks([0.,1.,2.,3.])
ax1B.set_xlim([0,250000])
ax1B.set_xticks([0,50000,100000,150000,200000,250000])
ax1B.set_xticklabels([0,50,100,150,200,250])
ax1B.set_xlabel('Time (s)')
ax1B.set_ylabel(r'$W_{EI}$')
#plot sequence
axSA.set_prop_cycle(plt.cycler('color',[colormap(i) for i in np.linspace(0, 0.9,n)]))
axSA.plot(t_ret,phi(u_ret[:,:],theta,uc),lw=5)
axSA.set_ylim([0,1.2])
axSA.set_xlim([0,370])
axSA.set_xticks([0,100,200,300])
axSA.set_yticks([0.5,1])
axSA.set_xlabel('Time (ms)')
axSA.set_ylabel('Rate')
#axSA.set_title('(C)',y=1.04)
axSA.set_title('(C)',x=1.028,y=1.04)
# plot PA
axPA.set_prop_cycle(plt.cycler('color',[colormap(i) for i in np.linspace(0, 0.9,n)]))
axPA.plot(t_ret_PA,phi(u_ret_PA[:,:],theta,uc),lw=5)
axPA.set_ylim([0,1.2])
axPA.set_xlim([0,370])
axPA.set_xticks([0,100,200,300])
axPA.set_yticks([])
axPA.set_xlabel('Time (ms)')
#plt.show()
plt.savefig('fig6.pdf', bbox_inches='tight')<|fim▁end|> | ax2B.set_title('(B)',x=1.028,y=1.04) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import config
from . import state
from . import vrrp
class address(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-interfaces - based on the path /interfaces/interface/subinterfaces/subinterface/ipv4/addresses/address. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: The list of configured IPv4 addresses on the interface.
"""
__slots__ = ("_path_helper", "_extmethods", "__ip", "__config", "__state", "__vrrp")
_yang_name = "address"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__ip = YANGDynClass(
base=six.text_type,
is_leaf=True,
yang_name="ip",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/interfaces/ip",
defining_module="openconfig-if-ip",
yang_type="leafref",
is_config=True,
)
self.__config = YANGDynClass(
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/interfaces/ip",
defining_module="openconfig-if-ip",
yang_type="container",
is_config=True,
)
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/interfaces/ip",
defining_module="openconfig-if-ip",
yang_type="container",
is_config=True,
)
self.__vrrp = YANGDynClass(
base=vrrp.vrrp,
is_container="container",
yang_name="vrrp",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/interfaces/ip",
defining_module="openconfig-if-ip",
yang_type="container",
is_config=True,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"interfaces",
"interface",
"subinterfaces",
"subinterface",
"ipv4",
"addresses",
"address",
]
def _get_ip(self):
"""
Getter method for ip, mapped from YANG variable /interfaces/interface/subinterfaces/subinterface/ipv4/addresses/address/ip (leafref)
YANG Description: References the configured IP address
"""
return self.__ip
def _set_ip(self, v, load=False):
"""
Setter method for ip, mapped from YANG variable /interfaces/interface/subinterfaces/subinterface/ipv4/addresses/address/ip (leafref)
If this variable is read-only (config: false) in the
source YANG file, then _set_ip is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ip() directly.
YANG Description: References the configured IP address
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError(
"Cannot set keys directly when" + " within an instantiated list"
)
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=six.text_type,
is_leaf=True,
yang_name="ip",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/interfaces/ip",
defining_module="openconfig-if-ip",
yang_type="leafref",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """ip must be of a type compatible with leafref""",
"defined-type": "leafref",<|fim▁hole|>
self.__ip = t
if hasattr(self, "_set"):
self._set()
def _unset_ip(self):
self.__ip = YANGDynClass(
base=six.text_type,
is_leaf=True,
yang_name="ip",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/interfaces/ip",
defining_module="openconfig-if-ip",
yang_type="leafref",
is_config=True,
)
def _get_config(self):
"""
Getter method for config, mapped from YANG variable /interfaces/interface/subinterfaces/subinterface/ipv4/addresses/address/config (container)
YANG Description: Configuration data for each configured IPv4
address on the interface
"""
return self.__config
def _set_config(self, v, load=False):
"""
Setter method for config, mapped from YANG variable /interfaces/interface/subinterfaces/subinterface/ipv4/addresses/address/config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config() directly.
YANG Description: Configuration data for each configured IPv4
address on the interface
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/interfaces/ip",
defining_module="openconfig-if-ip",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """config must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)""",
}
)
self.__config = t
if hasattr(self, "_set"):
self._set()
def _unset_config(self):
self.__config = YANGDynClass(
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/interfaces/ip",
defining_module="openconfig-if-ip",
yang_type="container",
is_config=True,
)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /interfaces/interface/subinterfaces/subinterface/ipv4/addresses/address/state (container)
YANG Description: Operational state data for each IPv4 address
configured on the interface
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /interfaces/interface/subinterfaces/subinterface/ipv4/addresses/address/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: Operational state data for each IPv4 address
configured on the interface
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/interfaces/ip",
defining_module="openconfig-if-ip",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """state must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)""",
}
)
self.__state = t
if hasattr(self, "_set"):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/interfaces/ip",
defining_module="openconfig-if-ip",
yang_type="container",
is_config=True,
)
def _get_vrrp(self):
"""
Getter method for vrrp, mapped from YANG variable /interfaces/interface/subinterfaces/subinterface/ipv4/addresses/address/vrrp (container)
YANG Description: Enclosing container for VRRP groups handled by this
IP interface
"""
return self.__vrrp
def _set_vrrp(self, v, load=False):
"""
Setter method for vrrp, mapped from YANG variable /interfaces/interface/subinterfaces/subinterface/ipv4/addresses/address/vrrp (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_vrrp is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_vrrp() directly.
YANG Description: Enclosing container for VRRP groups handled by this
IP interface
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=vrrp.vrrp,
is_container="container",
yang_name="vrrp",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/interfaces/ip",
defining_module="openconfig-if-ip",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """vrrp must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=vrrp.vrrp, is_container='container', yang_name="vrrp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)""",
}
)
self.__vrrp = t
if hasattr(self, "_set"):
self._set()
def _unset_vrrp(self):
self.__vrrp = YANGDynClass(
base=vrrp.vrrp,
is_container="container",
yang_name="vrrp",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/interfaces/ip",
defining_module="openconfig-if-ip",
yang_type="container",
is_config=True,
)
ip = __builtin__.property(_get_ip, _set_ip)
config = __builtin__.property(_get_config, _set_config)
state = __builtin__.property(_get_state, _set_state)
vrrp = __builtin__.property(_get_vrrp, _set_vrrp)
_pyangbind_elements = OrderedDict(
[("ip", ip), ("config", config), ("state", state), ("vrrp", vrrp)]
)<|fim▁end|> | "generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='leafref', is_config=True)""",
}
) |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>#![crate_type="staticlib"]
#![feature(alloc)]
#![feature(allocator)]
#![feature(arc_counts)]
#![feature(augmented_assignments)]
#![feature(asm)]
#![feature(box_syntax)]
#![feature(collections)]
#![feature(const_fn)]
#![feature(core_intrinsics)]
#![feature(core_str_ext)]
#![feature(core_slice_ext)]
#![feature(fnbox)]
#![feature(fundamental)]
#![feature(lang_items)]
#![feature(op_assign_traits)]
#![feature(unboxed_closures)]
#![feature(unsafe_no_drop_flag)]
#![feature(unwind_attributes)]
#![feature(vec_push_all)]
#![feature(zero_one)]
#![feature(collections_range)]
#![feature(old_wrapping)]
#![no_std]
#[macro_use]
extern crate alloc;
#[macro_use]
extern crate collections;
extern crate system;
use acpi::Acpi;
use alloc::boxed::Box;
use collections::string::{String, ToString};
use collections::vec::Vec;
use core::cell::UnsafeCell;
use core::{ptr, mem, usize};
use core::slice::SliceExt;
use common::event::{self, EVENT_KEY, EventOption};
use common::memory;
use common::paging::Page;
use common::time::Duration;
use drivers::pci;
use drivers::io::{Io, Pio};
use drivers::ps2::*;
use drivers::rtc::*;
use drivers::serial::*;
use env::Environment;
use graphics::display;
use scheduler::{Context, Regs, TSS};
use scheduler::context::context_switch;
use schemes::Url;
use schemes::context::*;
use schemes::debug::*;
use schemes::display::*;
use schemes::interrupt::*;
use schemes::memory::*;
use schemes::test::*;
use syscall::execute::execute;
use syscall::handle::*;
pub use system::externs::*;
/// Common std-like functionality
#[macro_use]
pub mod common;
#[macro_use]
pub mod macros;
/// Allocation
pub mod alloc_system;
/// ACPI
pub mod acpi;
/// Disk drivers
pub mod disk;
/// Various drivers
pub mod drivers;
/// Environment
pub mod env;
/// Filesystems
pub mod fs;
/// Various graphical methods
pub mod graphics;
/// Panic
pub mod panic;
/// Schemes
pub mod schemes;
/// Scheduling
pub mod scheduler;
/// Sync primitives
pub mod sync;
/// System calls
pub mod syscall;
/// USB input/output
pub mod usb;
pub static mut TSS_PTR: Option<&'static mut TSS> = None;
pub static mut ENV_PTR: Option<&'static mut Environment> = None;
pub fn env() -> &'static Environment {
unsafe {
match ENV_PTR {
Some(&mut ref p) => p,
None => unreachable!(),
}
}
}
/// Pit duration
static PIT_DURATION: Duration = Duration {
secs: 0,
nanos: 2250286,
};
/// Idle loop (active while idle)
unsafe fn idle_loop() {
loop {
asm!("cli" : : : : "intel", "volatile");
let mut halt = true;
for i in env().contexts.lock().iter().skip(1) {
if i.interrupted {
halt = false;
break;
}
}
if halt {
asm!("sti
hlt"
:
:
:
: "intel", "volatile");
} else {
asm!("sti"
:
:
:
: "intel", "volatile");
}
context_switch(false);
}
}
/// Event poll loop
fn poll_loop() {
loop {
env().on_poll();
unsafe { context_switch(false) };
}
}
/// Event loop
fn event_loop() {
{
let mut console = env().console.lock();
console.instant = false;
}
let mut cmd = String::new();
loop {
loop {
let mut console = env().console.lock();
match env().events.lock().pop_front() {
Some(event) => {
if console.draw {
match event.to_option() {
EventOption::Key(key_event) => {
if key_event.pressed {
match key_event.scancode {
event::K_F2 => {
console.draw = false;
}
event::K_BKSP => if !cmd.is_empty() {
console.write(&[8]);
cmd.pop();
},
_ => match key_event.character {
'\0' => (),
'\n' => {
console.command = Some(cmd.clone());
cmd.clear();
console.write(&[10]);
}
_ => {
cmd.push(key_event.character);
console.write(&[key_event.character as u8]);
}
},
}
}
}
_ => (),
}
} else {
if event.code == EVENT_KEY && event.b as u8 == event::K_F1 && event.c > 0 {
console.draw = true;
console.redraw = true;
} else {
// TODO: Magical orbital hack
}
}
}
None => break,
}
}
{
let mut console = env().console.lock();
console.instant = false;
if console.draw && console.redraw {
console.redraw = false;
console.display.flip();
}
}
unsafe { context_switch(false) };
}
}
static BSS_TEST_ZERO: usize = 0;
static BSS_TEST_NONZERO: usize = usize::MAX;
/// Initialize kernel
unsafe fn init(tss_data: usize) {
// Test
assume!(true);
// Zero BSS, this initializes statics that are set to 0
{
extern {
static mut __bss_start: u8;
static mut __bss_end: u8;
}
let start_ptr = &mut __bss_start;
let end_ptr = &mut __bss_end;
if start_ptr as *const _ as usize <= end_ptr as *const _ as usize {
let size = end_ptr as *const _ as usize - start_ptr as *const _ as usize;
memset(start_ptr, 0, size);
}
assert_eq!(BSS_TEST_ZERO, 0);
assert_eq!(BSS_TEST_NONZERO, usize::MAX);
}
// Setup paging, this allows for memory allocation
Page::init();
memory::cluster_init();
// Unmap first page to catch null pointer errors (after reading memory map)
Page::new(0).unmap();
TSS_PTR = Some(&mut *(tss_data as *mut TSS));
ENV_PTR = Some(&mut *Box::into_raw(Environment::new()));
match ENV_PTR {
Some(ref mut env) => {
env.contexts.lock().push(Context::root());
env.console.lock().draw = true;
debug!("Redox {} bits\n", mem::size_of::<usize>() * 8);
if let Some(acpi) = Acpi::new() {
env.schemes.push(UnsafeCell::new(acpi));
}
*(env.clock_realtime.lock()) = Rtc::new().time();
env.schemes.push(UnsafeCell::new(Ps2::new()));
env.schemes.push(UnsafeCell::new(Serial::new(0x3F8, 0x4)));
pci::pci_init(env);
env.schemes.push(UnsafeCell::new(DebugScheme::new()));
env.schemes.push(UnsafeCell::new(box DisplayScheme));
env.schemes.push(UnsafeCell::new(box ContextScheme));
env.schemes.push(UnsafeCell::new(box InterruptScheme));
env.schemes.push(UnsafeCell::new(box MemoryScheme));
env.schemes.push(UnsafeCell::new(box TestScheme));
Context::spawn("kpoll".to_string(),
box move || {
poll_loop();
});
Context::spawn("kevent".to_string(),
box move || {
event_loop();
});
env.contexts.lock().enabled = true;
Context::spawn("kinit".to_string(),
box move || {
{
let wd_c = "file:/\0";
do_sys_chdir(wd_c.as_ptr());
let stdio_c = "debug:\0";
do_sys_open(stdio_c.as_ptr(), 0);
do_sys_open(stdio_c.as_ptr(), 0);
do_sys_open(stdio_c.as_ptr(), 0);
}
execute(Url::from_str("file:/apps/init/main.bin"), Vec::new());
debug!("INIT: Failed to execute\n");
loop {
context_switch(false);
}
});
},
None => unreachable!(),
}
}
#[cold]
#[inline(never)]
#[no_mangle]<|fim▁hole|> {
let contexts = ::env().contexts.lock();
if let Some(context) = contexts.current() {
debugln!("PID {}: {}", context.pid, context.name);
}
}
debugln!(" INT {:X}: {}", interrupt, $name);
debugln!(" CS: {:08X} IP: {:08X} FLG: {:08X}", regs.cs, regs.ip, regs.flags);
debugln!(" SS: {:08X} SP: {:08X} BP: {:08X}", regs.ss, regs.sp, regs.bp);
debugln!(" AX: {:08X} BX: {:08X} CX: {:08X} DX: {:08X}", regs.ax, regs.bx, regs.cx, regs.dx);
debugln!(" DI: {:08X} SI: {:08X}", regs.di, regs.di);
let cr0: usize;
let cr2: usize;
let cr3: usize;
let cr4: usize;
unsafe {
asm!("mov $0, cr0" : "=r"(cr0) : : : "intel", "volatile");
asm!("mov $0, cr2" : "=r"(cr2) : : : "intel", "volatile");
asm!("mov $0, cr3" : "=r"(cr3) : : : "intel", "volatile");
asm!("mov $0, cr4" : "=r"(cr4) : : : "intel", "volatile");
}
debugln!(" CR0: {:08X} CR2: {:08X} CR3: {:08X} CR4: {:08X}", cr0, cr2, cr3, cr4);
let sp = regs.sp as *const u32;
for y in -15..16 {
debug!(" {:>3}:", y * 8 * 4);
for x in 0..8 {
debug!(" {:08X}", unsafe { ptr::read(sp.offset(-(x + y * 8))) });
}
debug!("\n");
}
})
};
macro_rules! exception {
($name:expr) => ({
exception_inner!($name);
loop {
do_sys_exit(usize::MAX);
}
})
};
macro_rules! exception_error {
($name:expr) => ({
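        // For exceptions that push an error code, the CPU pushes it before IP,
        // so the captured register snapshot is shifted by one word; un-shift
        // the registers and recover the error code here.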
let error = regs.ip;
regs.ip = regs.cs;
regs.cs = regs.flags;
regs.flags = regs.sp;
regs.sp = regs.ss;
regs.ss = 0;
//regs.ss = regs.error;
exception_inner!($name);
debugln!(" ERR: {:08X}", error);
loop {
do_sys_exit(usize::MAX);
}
})
};
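    // Acknowledge hardware IRQs: send End-Of-Interrupt (0x20) to the slave
    // PIC (port 0xA0) for IRQs 8-15, then always to the master PIC (port 0x20).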
if interrupt >= 0x20 && interrupt < 0x30 {
if interrupt >= 0x28 {
Pio::<u8>::new(0xA0).write(0x20);
}
Pio::<u8>::new(0x20).write(0x20);
}
//Do not catch init interrupt
if interrupt < 0xFF {
env().interrupts.lock()[interrupt as usize] += 1;
}
match interrupt {
0x20 => {
{
let mut clock_monotonic = env().clock_monotonic.lock();
*clock_monotonic = *clock_monotonic + PIT_DURATION;
}
{
let mut clock_realtime = env().clock_realtime.lock();
*clock_realtime = *clock_realtime + PIT_DURATION;
}
let switch = {
let mut contexts = ::env().contexts.lock();
if let Some(mut context) = contexts.current_mut() {
context.slices -= 1;
context.slice_total += 1;
context.slices == 0
} else {
false
}
};
if switch {
unsafe { context_switch(true) };
}
}
i @ 0x21 ... 0x2F => env().on_irq(i as u8 - 0x20),
0x80 => if !syscall_handle(regs) {
exception!("Unknown Syscall");
},
0xFF => {
unsafe {
init(regs.ax);
idle_loop();
}
},
0x0 => exception!("Divide by zero exception"),
0x1 => exception!("Debug exception"),
0x2 => exception!("Non-maskable interrupt"),
0x3 => exception!("Breakpoint exception"),
0x4 => exception!("Overflow exception"),
0x5 => exception!("Bound range exceeded exception"),
0x6 => exception!("Invalid opcode exception"),
0x7 => exception!("Device not available exception"),
0x8 => exception_error!("Double fault"),
0x9 => exception!("Coprocessor Segment Overrun"), // legacy
0xA => exception_error!("Invalid TSS exception"),
0xB => exception_error!("Segment not present exception"),
0xC => exception_error!("Stack-segment fault"),
0xD => exception_error!("General protection fault"),
0xE => exception_error!("Page fault"),
0x10 => exception!("x87 floating-point exception"),
0x11 => exception_error!("Alignment check exception"),
0x12 => exception!("Machine check exception"),
0x13 => exception!("SIMD floating-point exception"),
0x14 => exception!("Virtualization exception"),
0x1E => exception_error!("Security exception"),
_ => exception!("Unknown Interrupt"),
}
}<|fim▁end|> | /// Take regs for kernel calls and exceptions
pub extern "cdecl" fn kernel(interrupt: usize, mut regs: &mut Regs) {
macro_rules! exception_inner {
($name:expr) => ({ |
<|file_name|>types.py<|end_file_name|><|fim▁begin|>from typing import Any, Dict, List, Union
from flask import abort, flash, g, render_template, url_for
from flask_babel import format_number, lazy_gettext as _
from werkzeug.utils import redirect
from werkzeug.wrappers import Response
from openatlas import app
from openatlas.database.connect import Transaction
from openatlas.forms.form import build_move_form
from openatlas.models.entity import Entity
from openatlas.models.node import Node
from openatlas.util.table import Table<|fim▁hole|> items = []
for id_ in nodes:
item = g.nodes[id_]
count_subs = f' ({format_number(item.count_subs)})' \
if item.count_subs else ''
items.append({
'id': item.id,
'href': url_for('entity_view', id_=item.id),
'a_attr': {'href': url_for('entity_view', id_=item.id)},
'text':
item.name.replace("'", "'") +
f' {format_number(item.count)}{count_subs}',
'children': walk_tree(item.subs)})
return items
@app.route('/types')
@required_group('readonly')
def node_index() -> str:
nodes: Dict[str, Dict[Entity, str]] = \
{'standard': {}, 'custom': {}, 'places': {}, 'value': {}}
for node in g.nodes.values():
if node.root:
continue
type_ = 'custom'
if node.class_.name == 'administrative_unit':
type_ = 'places'
elif node.standard:
type_ = 'standard'
elif node.value_type:
type_ = 'value'
nodes[type_][node] = render_template(
'forms/tree_select_item.html',
name=sanitize(node.name),
data=walk_tree(Node.get_nodes(node.name)))
return render_template(
'types/index.html',
nodes=nodes,
title=_('types'),
crumbs=[_('types')])
@app.route('/types/delete/<int:id_>', methods=['POST', 'GET'])
@required_group('editor')
def node_delete(id_: int) -> Response:
node = g.nodes[id_]
root = g.nodes[node.root[-1]] if node.root else None
if node.standard or node.subs or node.count or (root and root.locked):
abort(403)
node.delete()
flash(_('entity deleted'), 'info')
return redirect(
url_for('entity_view', id_=root.id) if root else url_for('node_index'))
@app.route('/types/move/<int:id_>', methods=['POST', 'GET'])
@required_group('editor')
def node_move_entities(id_: int) -> Union[str, Response]:
node = g.nodes[id_]
root = g.nodes[node.root[-1]]
if root.value_type: # pragma: no cover
abort(403)
form = build_move_form(node)
if form.validate_on_submit():
Transaction.begin()
Node.move_entities(
node,
getattr(form, str(root.id)).data,
form.checkbox_values.data)
Transaction.commit()
flash(_('Entities were updated'), 'success')
if node.class_.name == 'administrative_unit':
tab = 'places'
elif root.standard:
tab = 'standard'
elif node.value_type: # pragma: no cover
tab = 'value'
else:
tab = 'custom'
return redirect(
f"{url_for('node_index')}#menu-tab-{tab}_collapse-{root.id}")
getattr(form, str(root.id)).data = node.id
return render_template(
'types/move.html',
table=Table(
header=['#', _('selection')],
rows=[[item, item.label.text] for item in form.selection]),
root=root,
form=form,
entity=node,
crumbs=[
[_('types'), url_for('node_index')],
root,
node,
_('move entities')])
@app.route('/types/untyped/<int:id_>')
@required_group('editor')
def show_untyped_entities(id_: int) -> str:
hierarchy = g.nodes[id_]
table = Table(['name', 'class', 'first', 'last', 'description'])
for entity in Node.get_untyped(hierarchy.id):
table.rows.append([
link(entity),
entity.class_.label,
entity.first,
entity.last,
entity.description])
return render_template(
'table.html',
entity=hierarchy,
table=table,
crumbs=[
[_('types'),
url_for('node_index')],
link(hierarchy),
_('untyped entities')])<|fim▁end|> | from openatlas.util.util import link, required_group, sanitize
def walk_tree(nodes: List[int]) -> List[Dict[str, Any]]: |
<|file_name|>prob11_DiamondInTheRough-lee.js<|end_file_name|><|fim▁begin|>// Diamond-in-the-Rough
// Code Wars program written in JavaScript for the RingoJS environment
//
<|fim▁hole|>// The MIT License (MIT)
//
// Copyright (c) 2015 Lee Jenkins
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
var stdin = require("system").stdin;
var stdout = require("system").stdout;
"use strict";
(function SpiralTriangles() {
function run() {
var inputData = readDiamondInfo();
while( inputData.size > 0 ) {
printDiamonds( inputData );
inputData = readDiamondInfo();
}
};
function printDiamonds( inputData ) {
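        // Render a rows x cols grid of diamonds, each size x size characters:
        // '/' and '\' trace the diamond edges around the vertical midline,
        // and every other cell is filled with '#'.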
var midSize = inputData.size / 2;
for( var gridRow=0; gridRow<inputData.rows; ++gridRow ) {
for( var diamondRow=0; diamondRow<inputData.size; ++diamondRow ) {
var line = "";
for( var gridCol=0; gridCol<inputData.cols; ++gridCol ) {
for( var diamondCol=0; diamondCol<inputData.size; ++diamondCol ) {
var c = "#";
if( diamondRow < midSize ) { // top half
if( diamondCol >= (midSize-(diamondRow+1)) && diamondCol < midSize ) {
c = "/";
}
else if( diamondCol >= midSize && diamondCol <= (midSize+diamondRow) ) {
c = "\\";
}
}
else { // bottom half
if( diamondCol >= (diamondRow-midSize) && diamondCol < midSize ) {
c = "\\";
}
else if( diamondCol >= midSize && diamondCol < (inputData.size+midSize-diamondRow) ) {
c = "/";
}
}
line += c;
}
}
print( line );
}
}
};
function readDiamondInfo() {
var tokens = stdin.readLine().split(/\s+/);
return {
size: parseInt( tokens[0] ),
rows: parseInt( tokens[1] ),
cols: parseInt( tokens[2] )
};
};
run();
}) ();<|fim▁end|> | |
<|file_name|>LintabDevice.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright (C) 2010 Lucas Madar and Peter Brewer
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* Contributors:
* Lucas Madar
* Peter Brewer
******************************************************************************/
package org.tellervo.desktop.hardware.device;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tellervo.desktop.admin.model.GroupsWithPermissionsTableModel;
import org.tellervo.desktop.hardware.AbstractMeasuringDevice;
import org.tellervo.desktop.hardware.AbstractSerialMeasuringDevice;
import org.tellervo.desktop.hardware.MeasuringSampleIOEvent;
import gnu.io.SerialPortEvent;
/**
* The LINTAB platform is made by RINNTECH. The original platform uses a protocol
 * that RINNTECH claims is proprietary. An agreement was made whereby RINNTECH
* would produce and supply new boxes that would attach to LINTAB platforms that
* communicate with a non-proprietary ASCII-based protocol. Users must purchase
* such an adapter to use LINTAB platforms with Tellervo (or any other software
* other than TSAP-Win).
*
* These new boxes include a serial and USB connection. The USB connection is
* provided by an internal USB-to-serial adapter (driver from www.ftdichip.com).
*
* Both serial and virtual-serial connections use the following parameters:
* - Baud: 1200
* - Data bits : 8
* - Stop bits : 1
* - Parity : none
* - Flow control : none
*
* There is no way for the user to alter these settings.
*
* Data is transmitted by LINTAB whenever the platform is moved. The data is
* as follows:
 * [integer position in 1/1000mm];[add button state '0' or '1'][reset button state '0' or '1'][LF]
*
* LINTAB also accepts commands. To force a manual data record output the ASCII-command
* GETDATA should be sent to LINTAB. A reset of the counter is done by sending the
* ASCII-command RESET to LINTAB. After a command a linefeed (0x0A) or carriage return
 * (0x0D) must be sent to execute the command.
*
* @author peterbrewer
*
*/
public class LintabDevice extends AbstractSerialMeasuringDevice{
private static final int EVE_ENQ = 5;
private Boolean fireOnNextValue = false;
private String previousFireState = "0";
private Boolean resetting = false;
private Boolean isInitialized = false;
int resetCounter = 0;
private final static Logger log = LoggerFactory.getLogger(LintabDevice.class);
@Override
public void setDefaultPortParams()
{
baudRate = BaudRate.B_1200;
dataBits = DataBits.DATABITS_8;
stopBits = StopBits.STOPBITS_1;
parity = PortParity.NONE;
flowControl = FlowControl.NONE;
lineFeed = LineFeed.NONE;
unitMultiplier = UnitMultiplier.TIMES_1;
this.correctionMultiplier = 1.0;
this.measureInReverse = true;
this.measureCumulatively = true;
}
@Override
public String toString() {
return "LINTAB with ASCII adapter";
}
@Override
public boolean doesInitialize() {
return false;
}
@Override
public void serialEvent(SerialPortEvent e) {
if(e.getEventType() == SerialPortEvent.DATA_AVAILABLE) {
InputStream input;
try {
input = getSerialPort().getInputStream();
StringBuffer readBuffer = new StringBuffer();
int intReadFromPort;
/* LINTAB data appears in the following format:
             * [integer position in 1/1000mm];[add button state '0' or '1'][reset button state '0' or '1'][LF]
* It should look like "140;10" or "46;10" with a LF.
* With every change of the LINTAB table state (move table, button press, button release) a
* new data record is sent with a line feed (0x0A) at the end of the line.
* This means that a lot of the data needs to be ignored.
*/
//Read from port into buffer while not LF (10)
while ((intReadFromPort=input.read()) != 10){
//System.out.println(intReadFromPort);
//If a timeout then show bad sample
if(intReadFromPort == -1) {
fireMeasuringSampleEvent(this, MeasuringSampleIOEvent.BAD_SAMPLE_EVENT, null);
return;
}
readBuffer.append((char) intReadFromPort);
}
String strReadBuffer = readBuffer.toString();
fireMeasuringSampleEvent(this, MeasuringSampleIOEvent.RAW_DATA, String.valueOf(strReadBuffer), DataDirection.RECEIVED);
// Ignore "0;10" data as this is a side-effect of Lintab's hardware button
if (strReadBuffer.equals("0;10")) return;
// Ignore repeated 'fire' requests
String thisFireState = strReadBuffer.substring(strReadBuffer.indexOf(";")+1, strReadBuffer.indexOf(";")+2);
if (previousFireState.equals("1") && thisFireState.equals("1")) return;
// Keep track of the state of the 'fire' button
previousFireState = thisFireState;
//Chop the three characters off the right side of the string to leave the number.
String strReadPosition = strReadBuffer.substring(0,(strReadBuffer.length())-3);
// Check that Lintab has actually reset when we asked. Sometimes if a hardware
// switch is used quickly, it doesn't hear the reset request
if(resetting)
{
if(!strReadPosition.equals("0"))
{
log.debug("Platform reset request ignored... retrying (attempt "+resetCounter+")");
zeroMeasurement();
return;
}
else if (resetCounter>10)
{
log.error("Lintab appears to be continually ignoring reset requests!");
}
resetRequestTrack(false);
}
isInitialized = true;
// Round up to integer of 1/1000th mm
Float fltValue = new Float(strReadPosition);
Integer intValue = Math.round(fltValue);
// Inverse if reverse measuring mode is on
if(getReverseMeasuring())
{
intValue = 0 - intValue;
}
// Handle any correction factor
intValue = getCorrectedValue(intValue);
//Only process the data if the add button is set and the reset button is not set.
if( strReadBuffer.endsWith(";10") || fireOnNextValue)
{
// Do calculation if working in cumulative mode
if(this.measureCumulatively)
{
Integer cumValue = intValue;
intValue = intValue - getPreviousPosition();
setPreviousPosition(cumValue);
}
fireOnNextValue = false;
fireMeasuringSampleEvent(this, MeasuringSampleIOEvent.NEW_SAMPLE_EVENT, intValue);
// Only zero the measurement if we're not measuring cumulatively
if(!measureCumulatively)
{
zeroMeasurement();
}
}
else if( strReadBuffer.endsWith(";01") || strReadBuffer.endsWith(";11"))
{
zeroMeasurement();
}
else
{
// Not recording this value just updating current value counter
fireMeasuringSampleEvent(this, MeasuringSampleIOEvent.UPDATED_CURRENT_VALUE_EVENT, intValue);
}
}
catch (IOException ioe) {
fireMeasuringSampleEvent(this, MeasuringSampleIOEvent.ERROR, "Error reading from serial port");
}
}
}
/**
* Lintab boxes sometimes ignore reset requests, so we need to ask several
* times to make sure it is accepted. This function keeps track of requests,
* to ensure we don't enter an infinite loop.
*
* @param reset
*/
private void resetRequestTrack(Boolean reset)
{
if (reset == true)
{
resetting = true;
resetCounter++;
}
else
{
resetting = false;
resetCounter = 0;
}
}
/**
* Send zero command to LINTAB 6
*/
@Override
public void zeroMeasurement()
{
if(isInitialized)
{
String strCommand = "RESET";
resetRequestTrack(true);
this.sendData(strCommand);
this.setPreviousPosition(0);
}<|fim▁hole|> /**
* Send request for data to LINTAB 6
*/
@Override
public void requestMeasurement()
{
fireOnNextValue=true;
String strCommand = "GETDATA";
this.sendData(strCommand);
}
/**
* Send a command to the LINTAB 6 platform.
*
* @param strCommand
*/
private void sendData(String strCommand)
{
//After a command a linefeed (0x0A) or carriage return (0x0D) must be sent to execute the command.
strCommand = strCommand+"\r";
OutputStream output;
try {
output = getSerialPort().getOutputStream();
OutputStream outToPort=new DataOutputStream(output);
byte[] command = strCommand.getBytes();
outToPort.write(command);
fireMeasuringSampleEvent(this, MeasuringSampleIOEvent.RAW_DATA, strCommand, DataDirection.SENT);
}
catch (IOException ioe) {
fireMeasuringSampleEvent(this, MeasuringSampleIOEvent.ERROR, "Error writing to serial port", DataDirection.SENT);
}
}
@Override
public Boolean isRequestDataCapable() {
return true;
}
@Override
public Boolean isCurrentValueCapable() {
return true;
}
@Override
public Boolean isBaudEditable() {
return false;
}
@Override
public Boolean isDatabitsEditable() {
return false;
}
@Override
public Boolean isLineFeedEditable() {
return false;
}
@Override
public Boolean isParityEditable() {
return false;
}
@Override
public Boolean isStopbitsEditable() {
return false;
}
@Override
public Boolean isFlowControlEditable(){
return false;
}
@Override
public Boolean isUnitsEditable() {
return false;
}
@Override
public Boolean isMeasureCumulativelyConfigurable() {
return true;
}
@Override
public Boolean isReverseMeasureCapable() {
return true;
}
@Override
public Boolean isCorrectionFactorEditable() {
return true;
}
}<|fim▁end|> | }
|
<|file_name|>test.spec.js<|end_file_name|><|fim▁begin|>describe('Home Page', () => {<|fim▁hole|> it('Should load correctly', () => {
cy.visit('/')
cy.get('div.marketing-content')
.should('contain', 'Real-time Retrospectives')
});
it('Should login and write a post', () => {
cy.get('.MuiButton-root').click();
cy.get('.MuiTabs-flexContainer > [tabindex="-1"]').click();
cy.get('.MuiInput-input').focus().type('Zelensky');
cy.get('.MuiDialogContent-root .MuiButton-root').click();
// Home page should display the user name
cy.get('#content').should('contain', 'Welcome, Zelensky');
// And then allow creating a new session
cy.get('button').contains('Create a new session').click();
// And write a post
cy.get('input[placeholder*="What went well"]').focus().type('Slava Ukraini!{enter}');
// Reload the page
cy.reload();
// The post should still be there
cy.get('#content').should('contain', 'Slava Ukraini!');
});
});<|fim▁end|> | |
<|file_name|>repl.rs<|end_file_name|><|fim▁begin|>use commands;
use error::*;
use std::io::{self, Write};
use std::path::Path;
use std::u64;
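/// Minimal line-oriented REPL over the editing commands. Recognised input:
/// `openFile <name>`, `writeFile <name>`, `get <start> <end>`,
/// `insert <line> <column> <text>`, `delete <line> <column> <length>`,
/// `replace <line> <column> <length> <char>` and `quit`.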
pub fn run<P>(path: P) -> Result<()>
where P: AsRef<Path>
{
let path = path.as_ref();
io::stdout().write(b"\n>>> ")?;
io::stdout().flush()?;
loop {
let mut buffer = String::new();
io::stdin().read_line(&mut buffer)?;
let cmd = commands::Command::new(path).unwrap();
if buffer.starts_with("openFile") {
let mut input = buffer.split_whitespace();
input.next();
let file_name = input.next().unwrap();
cmd.open_file(file_name)?;
} else if buffer.starts_with("writeFile") {
let mut input = buffer.split_whitespace();
input.next();
let file_name = input.next().unwrap();
cmd.write_file(file_name)?;
} else if buffer.starts_with("get") {
let mut input = buffer.split_whitespace();
input.next();
let start = input.next().unwrap();
let end = input.next().unwrap();
let start = u64::from_str_radix(start, 10).unwrap();
let end = u64::from_str_radix(end, 10).unwrap();
let lines = cmd.get(start, end)?;
io::stdout().write(lines.as_bytes())?;
io::stdout().write(b"\n")?;
} else if buffer.starts_with("insert") {
let mut input = buffer.split_whitespace();
input.next();
let line = input.next().unwrap();
let column = input.next().unwrap();
let line = u64::from_str_radix(line, 10).unwrap();
let column = u64::from_str_radix(column, 10).unwrap();
let text = input.next().unwrap();
cmd.insert(line, column, text)?;
} else if buffer.starts_with("delete") {
let mut input = buffer.split_whitespace();
input.next();
let line = input.next().unwrap();
let column = input.next().unwrap();
let length = input.next().unwrap();
let line = u64::from_str_radix(line, 10).unwrap();
let column = u64::from_str_radix(column, 10).unwrap();
let length = u64::from_str_radix(length, 10).unwrap();
cmd.delete(line, column, length)?;
} else if buffer.starts_with("replace") {
let mut input = buffer.split_whitespace();<|fim▁hole|> input.next();
let line = input.next().unwrap();
let column = input.next().unwrap();
let length = input.next().unwrap();
let line = u64::from_str_radix(line, 10).unwrap();
let column = u64::from_str_radix(column, 10).unwrap();
let length = u64::from_str_radix(length, 10).unwrap();
let character = input.next().unwrap().chars().nth(0).unwrap();
cmd.replace(line, column, length, character)?;
} else if buffer.starts_with("quit") {
cmd.quit()?;
break;
} else {
io::stdout().write(b"Invalid input\n")?;
}
io::stdout().write(b">>> ")?;
io::stdout().flush()?;
}
Ok(())
}<|fim▁end|> | |
<|file_name|>plot_throughput_factor_experiment.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-<|fim▁hole|># All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the project, the name of copyright holder nor the names
# of its contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import math
import sys, re
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
from utils import *
from matplotlib import pylab
from scipy.stats import scoreatpercentile
pkt_size = 256
train_length = 6
# @author: Aaron Blankstein, with modifications by Malte Schwarzkopf
class boxplotter(object):
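    """Draws a single percentile box (median, box, whiskers and extreme
    marker) onto an axis using broken_barh spans rather than a stock
    matplotlib boxplot, so that each element can be styled individually."""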
def __init__(self, median, top, bottom, whisk_top=None,
whisk_bottom=None, extreme_top=None):
self.median = median
self.top = top
self.bott = bottom
self.whisk_top = whisk_top
self.whisk_bott = whisk_bottom
self.extreme_top = extreme_top
def draw_on(self, ax, index, box_color = "blue",
median_color = "red", whisker_color = "black"):
width = .7
w2 = width / 2
ax.broken_barh([(index - w2, width)],
(self.bott,self.top - self.bott),
facecolor="white",edgecolor=box_color, lw=0.5)
ax.broken_barh([(index - w2, width)],
(self.median,0),
facecolor="white", edgecolor=median_color, lw=0.5)
if self.whisk_top is not None:
ax.broken_barh([(index - w2, width)],
(self.whisk_top,0),
facecolor="white", edgecolor=whisker_color, lw=0.5)
ax.broken_barh([(index , 0)],
(self.whisk_top, self.top-self.whisk_top),
edgecolor=box_color,linestyle="solid", lw=0.5)
if self.whisk_bott is not None:
ax.broken_barh([(index - w2, width)],
(self.whisk_bott,0),
facecolor="white", edgecolor=whisker_color, lw=0.5)
ax.broken_barh([(index , 0)],
(self.whisk_bott,self.bott-self.whisk_bott),
edgecolor=box_color,linestyle="solid", lw=0.5)
if self.extreme_top is not None:
ax.scatter([index], [self.extreme_top], marker='*',
lw=0.5)
def percentile_box_plot(ax, data, indexer=None, box_top=75,
box_bottom=25,whisker_top=99,whisker_bottom=1):
if indexer is None:
indexed_data = zip(range(1,len(data)+1), data)
else:
indexed_data = [(indexer(datum), datum) for datum in data]
def get_whisk(vector, w):
if w is None:
return None
return scoreatpercentile(vector, w)
for index, x in indexed_data:
bp = boxplotter(scoreatpercentile(x, 50),
scoreatpercentile(x, box_top),
scoreatpercentile(x, box_bottom),
get_whisk(x, whisker_top),
get_whisk(x, whisker_bottom),
scoreatpercentile(x, 100))
bp.draw_on(ax, index)
def worst_case_approx(setups, trainlength, plength):
base_worst = 4.0 * 3
#base_worst = 0.566
#packet_time = (plength + 18.0) * 8.0 / 10.0 / 1000.0
packet_time = plength * 8.0 / 10.0 / 1000.0
tmp = [x * (packet_time * trainlength) for x in setups]
worst = [x + base_worst for x in tmp]
for i in range(len(worst)):
print "WORST CASE %d: %f" % (setups[i], worst[i])
return worst
######################################
if len(sys.argv) < 2:
print "usage: plot_switch_experiment.py <input dir1> <input1 label> " \
"<input dir2> <input2 label> ... <output file>"
sys.exit(1)
paper_mode = True
if paper_mode:
set_paper_rcs()
# arg processing
if (len(sys.argv) - 1) % 2 == 1:
# odd number of args, have output name
outname = sys.argv[-1]
print "Output name specified: %s" % (outname)
else:
print "Please specify an output name!"
sys.exit(1)
inputdirs = []
labels = []
for i in range(1, len(sys.argv)-1, 2):
inputdirs.append(sys.argv[i])
labels.append(sys.argv[i+1])
# parsing
data = []
negs_ignored = 0
for indir in inputdirs:
ds = []
for line in open(indir).readlines():
if line.strip() == "":
continue
val = float(line.strip()) / 1000.0
if val > 0:
ds.append(val)
else:
negs_ignored += 1
data.append(ds)
print "Ignored %d negative latency values!" % (negs_ignored)
# plotting
fig = plt.figure(figsize=(3.33,2.22))
#plt.rc("font", size=7.0)
fig, ax = plt.subplots(figsize=(3.33,2.22))
pos = np.array(range(len(data)))+1
#bp = percentile_box_plot(ax, data)
plt.plot(pos, [np.mean(x) for x in data], marker='+', label='average',
lw=1.0, color='g')
plt.plot(pos, [np.percentile(x, 99) for x in data], marker='v',
label='99\\textsuperscript{th}\%ile',
lw=1.0, color='y', mfc='none', mec='y', mew=1.0)
plt.scatter(pos, [max(x) for x in data], marker='x',
label='100\\textsuperscript{th}\%ile',
lw=1.0, color='r')
# worst-case analytical approximation
#plt.plot(range(1, len(data)+1),
# worst_case_approx(range(0, len(data)), train_length, pkt_size),
# ':', color='r', label="modelled worst case", lw=1.0)
worst_case_approximation = worst_case_approx([10], train_length, pkt_size)[0]
wc_line = plt.axhline(worst_case_approximation, ls=':', color='r', lw=1.0)
#plt.axvline(worst_case_approx([10], train_length, pkt_size)[0] - 8, ls='--',
# color='k', lw=1.0, label="optimal network epoch")
first_legend = plt.legend(loc='upper left', frameon=False, handletextpad=0.1,
borderaxespad=0.05)
plt.gca().add_artist(first_legend)
plt.legend([wc_line], ["latency bound"], frameon=False, loc='upper center',
borderaxespad=0.05, handletextpad=0.1)
ax.set_xlabel('Throughput factor $f$')
ax.set_ylabel('End-to-end latency [$\mu$s]')
plt.ylim(0, 30.0)
plt.yticks(range(0, 31, 5), [str(x) for x in range(0, 31, 5)])
plt.xlim(0, len(inputdirs) + 1)
plt.xticks(range(pos[0], pos[-1] + 1, len(pos) / 5),
[round(worst_case_approximation / float(labels[i-1]), 1)
for i in range(pos[0], pos[-1] + 1, len(pos) / 5)])
plt.axvspan(0, 5, facecolor='0.8', alpha=0.5, zorder=0, lw=0.0)
plt.axvspan(20.5, 23, facecolor='0.8', alpha=0.5, zorder=0, lw=0.0)
plt.text(2, 31, "\\textbf{A}", fontsize=12)
plt.text(13, 31, "\\textbf{B}", fontsize=12)
plt.text(21.3, 31, "\\textbf{C}", fontsize=12)
#plt.setp(bp['whiskers'], color='k', linestyle='-' )
#plt.setp(bp['fliers'], markersize=3.0)
plt.savefig(outname, format="pdf", bbox_inches='tight', pad_inches=0.01)<|fim▁end|> | # Simple script which takes a file with one packet latency (expressed as a
# signed integer) per line and plots a trivial histogram.
# Copyright (c) 2015, Malte Schwarzkopf |
<|file_name|>other.py<|end_file_name|><|fim▁begin|>""" Contains functions to fetch info from different simple online APIs."""
import util.web
def urbandictionary_search(search):
"""
Searches urbandictionary's API for a given search term.
:param search: The search term str to search for.
    :return: definition str or None on no match or error.
"""
if str(search).strip():
urban_api_url = 'http://api.urbandictionary.com/v0/define?term=%s' % search
response = util.web.http_get(url=urban_api_url, json=True)
if response['json'] is not None:
try:
definition = response['json']['list'][0]['definition']
return definition.encode('ascii', 'ignore')
except (KeyError, IndexError):
return None
else:
return None
def weather_search(city):
"""
Searches worldweatheronline's API for weather data for a given city.<|fim▁hole|> :param city: The city str to search for.
:return: weather data str or None on no match or error.
"""
if str(city).strip():
api_key = ''
if not api_key:
return 'Missing api key.'
else:
weather_api_url = 'http://api.worldweatheronline.com/premium/v1/weather.ashx?key=%s&q=%s&format=json' % \
(api_key, city)
response = util.web.http_get(url=weather_api_url, json=True)
if response['json'] is not None:
try:
pressure = response['json']['data']['current_condition'][0]['pressure']
temp_c = response['json']['data']['current_condition'][0]['temp_C']
temp_f = response['json']['data']['current_condition'][0]['temp_F']
query = response['json']['data']['request'][0]['query'].encode('ascii', 'ignore')
result = '%s. Temperature: %sC (%sF) Pressure: %s millibars' % (query, temp_c, temp_f, pressure)
return result
except (IndexError, KeyError):
return None
else:
return None
def whois(ip):
"""
Searches ip-api for information about a given IP.
:param ip: The ip str to search for.
:return: information str or None on error.
"""
if str(ip).strip():
url = 'http://ip-api.com/json/%s' % ip
response = util.web.http_get(url=url, json=True)
if response['json'] is not None:
try:
city = response['json']['city']
country = response['json']['country']
isp = response['json']['isp']
org = response['json']['org']
region = response['json']['regionName']
zipcode = response['json']['zip']
info = country + ', ' + city + ', ' + region + ', Zipcode: ' + zipcode + ' Isp: ' + isp + '/' + org
return info
except KeyError:
return None
else:
return None
def chuck_norris():
"""
Finds a random Chuck Norris joke/quote.
:return: joke str or None on failure.
"""
url = 'http://api.icndb.com/jokes/random/?escape=javascript'
response = util.web.http_get(url=url, json=True)
if response['json'] is not None:
if response['json']['type'] == 'success':
joke = response['json']['value']['joke']
return joke
return None<|fim▁end|> | You must have a working API key to be able to use this function.
|
<|file_name|>Tests.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2011-2016 The Flericoin developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "Globals.h"
#include "CryptoNoteCore/Account.h"
#include "CryptoNoteCore/CryptoNoteFormatUtils.h"
#include "CryptoNoteCore/CryptoNoteTools.h"
#include "CryptoNoteCore/TransactionApi.h"
#include "Transfers/TransfersSynchronizer.h"
#include "Transfers/BlockchainSynchronizer.h"
#include <mutex>
#include <condition_variable>
#include <future>
#include <atomic>
#include "../IntegrationTestLib/TestWalletLegacy.h"
using namespace CryptoNote;
using namespace Crypto;
using namespace Tests::Common;
class IInterruptable {
public:
virtual void interrupt() = 0;
};
class WalletLegacyObserver : public IWalletLegacyObserver {
public:
virtual void actualBalanceUpdated(uint64_t actualBalance) override {
std::cout << "Actual balance updated = " << currency.formatAmount(actualBalance) << std::endl;
m_actualBalance = actualBalance;
m_sem.notify();
}
virtual void sendTransactionCompleted(TransactionId transactionId, std::error_code result) override {
std::cout << "Transaction sent, result = " << result << std::endl;
}
std::atomic<uint64_t> m_actualBalance;
Tests::Common::Semaphore m_sem;
};
class TransactionConsumer : public IBlockchainConsumer {
public:
TransactionConsumer() {
syncStart.timestamp = time(nullptr);
syncStart.height = 0;
}
virtual SynchronizationStart getSyncStart() override {
return syncStart;
}
virtual void onBlockchainDetach(uint32_t height) override {
std::lock_guard<std::mutex> lk(m_mutex);
auto it = m_transactions.lower_bound(height);
m_transactions.erase(it, m_transactions.end());
}
virtual bool onNewBlocks(const CompleteBlock* blocks, uint32_t startHeight, uint32_t count) override {
std::lock_guard<std::mutex> lk(m_mutex);
for(size_t i = 0; i < count; ++i) {
for (const auto& tx : blocks[i].transactions) {
m_transactions[startHeight + i].insert(tx->getTransactionHash());
}
}
m_cv.notify_all();
return true;
}
bool waitForTransaction(const Hash& txHash) {
std::unique_lock<std::mutex> lk(m_mutex);
while (!hasTransaction(txHash)) {
m_cv.wait_for(lk, std::chrono::seconds(1));
}
return true;
}
std::error_code onPoolUpdated(const std::vector<std::unique_ptr<ITransactionReader>>& addedTransactions, const std::vector<Crypto::Hash>& deletedTransactions) override {
//stub
return std::error_code();
}
const std::unordered_set<Crypto::Hash>& getKnownPoolTxIds() const override {
//stub
static std::unordered_set<Crypto::Hash> empty;
return empty;
}
std::error_code addUnconfirmedTransaction(const ITransactionReader& /*transaction*/) override {
throw std::runtime_error("Not implemented");
}
void removeUnconfirmedTransaction(const Crypto::Hash& /*transactionHash*/) override {
throw std::runtime_error("Not implemented");
}
virtual void addObserver(IBlockchainConsumerObserver* observer) override {
//stub
}
virtual void removeObserver(IBlockchainConsumerObserver* observer) override {
//stub
}
private:
bool hasTransaction(const Hash& txHash) {
for (const auto& kv : m_transactions) {
if (kv.second.count(txHash) > 0)
return true;
}
return false;
}
std::mutex m_mutex;
std::condition_variable m_cv;
std::map<uint64_t, std::unordered_set<Hash>> m_transactions;
SynchronizationStart syncStart;
};
class TransfersObserver : public ITransfersObserver, public IInterruptable {
public:
virtual void onTransactionUpdated(ITransfersSubscription* object, const Hash& transactionHash) override {
{
std::lock_guard<std::mutex> lk(m_mutex);
m_transfers.push_back(transactionHash);
auto key = object->getAddress().spendPublicKey;
std::string address = Common::toHex(&key, sizeof(key));
LOG_DEBUG("Transfer to " + address);
}
m_cv.notify_all();
}
bool waitTransfer() {
std::unique_lock<std::mutex> lk(m_mutex);
size_t prevSize = m_transfers.size();
while (!m_interrupted && m_transfers.size() == prevSize) {
m_cv.wait_for(lk, std::chrono::seconds(10));
}
return true;
}
bool waitTransactionTransfer(const Hash& transactionHash) {
std::unique_lock<std::mutex> lk(m_mutex);
while (!m_interrupted) {
auto it = std::find(m_transfers.begin(), m_transfers.end(), transactionHash);
if (it == m_transfers.end()) {
m_cv.wait_for(lk, std::chrono::seconds(10));
} else {
m_transfers.erase(it);
break;
}
}
return true;
}
private:
bool hasTransaction(const Hash& transactionHash) {
return std::find(m_transfers.begin(), m_transfers.end(), transactionHash) != m_transfers.end();
}
void interrupt() override {
std::lock_guard<std::mutex> lock(m_mutex);
m_interrupted = true;
m_cv.notify_all();
}
private:
std::mutex m_mutex;
std::condition_variable m_cv;
std::vector<Hash> m_transfers;
bool m_interrupted = false;
};
class AccountGroup {
public:
enum {
TRANSACTION_SPENDABLE_AGE = 5
};
AccountGroup(ITransfersSynchronizer& sync) :
m_sync(sync) {}
void generateAccounts(size_t count) {
CryptoNote::AccountBase acc;
while (count--) {
acc.generate();
AccountSubscription sub;
sub.keys = reinterpret_cast<const AccountKeys&>(acc.getAccountKeys());
sub.syncStart.timestamp = 0;
sub.syncStart.height = 0;
sub.transactionSpendableAge = TRANSACTION_SPENDABLE_AGE;
m_accounts.push_back(sub);
m_addresses.push_back(currency.accountAddressAsString(acc));
}
}
void subscribeAll() {
m_observers.reset(new TransfersObserver[m_accounts.size()]);
for (size_t i = 0; i < m_accounts.size(); ++i) {
m_sync.addSubscription(m_accounts[i]).addObserver(&m_observers[i]);
}
}
std::vector<AccountPublicAddress> getAddresses() {
std::vector<AccountPublicAddress> addr;
for (const auto& acc : m_accounts) {
addr.push_back(acc.keys.address);
}
return addr;
}
ITransfersContainer& getTransfers(size_t idx) {
return m_sync.getSubscription(m_accounts[idx].keys.address)->getContainer();
}
std::vector<AccountSubscription> m_accounts;
std::vector<std::string> m_addresses;
ITransfersSynchronizer& m_sync;
std::unique_ptr<TransfersObserver[]> m_observers;
};
class MultisignatureTest : public TransfersTest {
public:
virtual void SetUp() override {
launchTestnet(2);
}
};
template <typename R>
class FutureGuard {
public:
FutureGuard(std::future<R>&& f) : m_future(std::move(f)) {
}
~FutureGuard() {
if (m_future.valid()) {
try {
m_future.get();
} catch (...) {
}
}
}
R get() {
return m_future.get();
}
private:
std::future<R> m_future;
};
class Interrupter {
public:
Interrupter(IInterruptable& interrpuptable) : m_interrpuptable(interrpuptable) {
}
~Interrupter() {
if (!m_cancelled) {
m_interrpuptable.interrupt();
}
}
void cancel() {
m_cancelled = true;
}
private:
IInterruptable& m_interrpuptable;
bool m_cancelled = false;
};
TEST_F(TransfersTest, base) {
uint64_t TRANSFER_AMOUNT;
currency.parseAmount("500000.5", TRANSFER_AMOUNT);
launchTestnet(2);
std::unique_ptr<CryptoNote::INode> node1;
std::unique_ptr<CryptoNote::INode> node2;
nodeDaemons[0]->makeINode(node1);
nodeDaemons[1]->makeINode(node2);
CryptoNote::AccountBase dstAcc;
dstAcc.generate();
AccountKeys dstKeys = reinterpret_cast<const AccountKeys&>(dstAcc.getAccountKeys());
BlockchainSynchronizer blockSync(*node2.get(), currency.genesisBlockHash());
TransfersSyncronizer transferSync(currency, blockSync, *node2.get());
TransfersObserver transferObserver;
WalletLegacyObserver walletObserver;
AccountSubscription sub;
sub.syncStart.timestamp = 0;
sub.syncStart.height = 0;
sub.keys = dstKeys;
sub.transactionSpendableAge = 5;
ITransfersSubscription& transferSub = transferSync.addSubscription(sub);
ITransfersContainer& transferContainer = transferSub.getContainer();
transferSub.addObserver(&transferObserver);
Tests::Common::TestWalletLegacy wallet1(m_dispatcher, m_currency, *node1);
ASSERT_FALSE(static_cast<bool>(wallet1.init()));
wallet1.wallet()->addObserver(&walletObserver);
ASSERT_TRUE(mineBlocks(*nodeDaemons[0], wallet1.address(), 1));
ASSERT_TRUE(mineBlocks(*nodeDaemons[0], wallet1.address(), currency.minedMoneyUnlockWindow()));
wallet1.waitForSynchronizationToHeight(static_cast<uint32_t>(2 + currency.minedMoneyUnlockWindow()));
// start syncing and wait for a transfer
FutureGuard<bool> waitFuture(std::async(std::launch::async, [&transferObserver] { return transferObserver.waitTransfer(); }));
Interrupter transferObserverInterrupter(transferObserver);
blockSync.start();
Hash txId;
ASSERT_FALSE(static_cast<bool>(wallet1.sendTransaction(currency.accountAddressAsString(dstAcc), TRANSFER_AMOUNT, txId)));
ASSERT_TRUE(mineBlocks(*nodeDaemons[0], wallet1.address(), 1));
ASSERT_TRUE(waitFuture.get());
transferObserverInterrupter.cancel();
std::cout << "Received transfer: " << currency.formatAmount(transferContainer.balance(ITransfersContainer::IncludeAll)) << std::endl;
ASSERT_EQ(TRANSFER_AMOUNT, transferContainer.balance(ITransfersContainer::IncludeAll));
ASSERT_GT(transferContainer.getTransactionOutputs(txId, ITransfersContainer::IncludeAll).size(), 0);
blockSync.stop();
}
std::unique_ptr<ITransaction> createTransferToMultisignature(
ITransfersContainer& tc, // money source
uint64_t amount,
uint64_t fee,
const AccountKeys& senderKeys,
const std::vector<AccountPublicAddress>& recipients,
uint32_t requiredSignatures) {
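  // Flow: greedily collect unlocked outputs from the source container until
  // they cover amount + fee, emit a single multisignature output for the
  // recipients, return any change to the sender, then sign all key inputs.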
std::vector<TransactionOutputInformation> transfers;
tc.getOutputs(transfers, ITransfersContainer::IncludeAllUnlocked | ITransfersContainer::IncludeStateSoftLocked);
auto tx = createTransaction();
std::vector<std::pair<TransactionTypes::InputKeyInfo, KeyPair>> inputs;
uint64_t foundMoney = 0;
for (const auto& t : transfers) {
TransactionTypes::InputKeyInfo info;
info.amount = t.amount;
TransactionTypes::GlobalOutput globalOut;
globalOut.outputIndex = t.globalOutputIndex;
globalOut.targetKey = t.outputKey;
info.outputs.push_back(globalOut);
info.realOutput.outputInTransaction = t.outputInTransaction;
info.realOutput.transactionIndex = 0;
info.realOutput.transactionPublicKey = t.transactionPublicKey;
KeyPair kp;
tx->addInput(senderKeys, info, kp);
inputs.push_back(std::make_pair(info, kp));
foundMoney += info.amount;
if (foundMoney >= amount + fee) {
break;
}
}
// output to receiver
tx->addOutput(amount, recipients, requiredSignatures);
// change
uint64_t change = foundMoney - amount - fee;
if (change) {
tx->addOutput(change, senderKeys.address);
}
for (size_t inputIdx = 0; inputIdx < inputs.size(); ++inputIdx) {
tx->signInputKey(inputIdx, inputs[inputIdx].first, inputs[inputIdx].second);
}
return tx;
}
std::error_code submitTransaction(INode& node, ITransactionReader& tx) {
auto data = tx.getTransactionData();
CryptoNote::Transaction outTx;
fromBinaryArray(outTx, data);
LOG_DEBUG("Submitting transaction " + Common::toHex(tx.getTransactionHash().data, 32));
std::promise<std::error_code> result;
node.relayTransaction(outTx, [&result](std::error_code ec) { result.set_value(ec); });
auto err = result.get_future().get();
if (err) {
LOG_DEBUG("Error: " + err.message());
} else {
LOG_DEBUG("Submitted successfully");
}
return err;
}
std::unique_ptr<ITransaction> createTransferFromMultisignature(
AccountGroup& consilium, const AccountPublicAddress& receiver, const Hash& txHash, uint64_t amount, uint64_t fee) {
auto& tc = consilium.getTransfers(0);
std::vector<TransactionOutputInformation> transfers = tc.getTransactionOutputs(txHash,
ITransfersContainer::IncludeTypeMultisignature |
ITransfersContainer::IncludeStateSoftLocked |
ITransfersContainer::IncludeStateUnlocked);
EXPECT_FALSE(transfers.empty());
const TransactionOutputInformation& out = transfers[0];
auto tx = createTransaction();
MultisignatureInput msigInput;
msigInput.amount = out.amount;
msigInput.outputIndex = out.globalOutputIndex;
msigInput.signatureCount = out.requiredSignatures;
tx->addInput(msigInput);
tx->addOutput(amount, receiver);
uint64_t change = out.amount - amount - fee;
tx->addOutput(change, consilium.getAddresses(), out.requiredSignatures);
for (size_t i = 0; i < out.requiredSignatures; ++i) {
tx->signInputMultisignature(0, out.transactionPublicKey, out.outputInTransaction, consilium.m_accounts[i].keys);
}
return tx;
}
TEST_F(MultisignatureTest, createMultisignatureTransaction) {
std::unique_ptr<CryptoNote::INode> node1;
std::unique_ptr<CryptoNote::INode> node2;
nodeDaemons[0]->makeINode(node1);
nodeDaemons[1]->makeINode(node2);
BlockchainSynchronizer blockSync(*node2.get(), currency.genesisBlockHash());
TransfersSyncronizer transferSync(currency, blockSync, *node2.get());
// add transaction collector
TransactionConsumer txConsumer;
blockSync.addConsumer(&txConsumer);
AccountGroup sender(transferSync);
AccountGroup consilium(transferSync);
sender.generateAccounts(1);
sender.subscribeAll();
consilium.generateAccounts(3);<|fim▁hole|>
auto senderSubscription = transferSync.getSubscription(sender.m_accounts[0].keys.address);
auto& senderContainer = senderSubscription->getContainer();
blockSync.start();
AccountPublicAddress senderAddress;
ASSERT_TRUE(currency.parseAccountAddressString(sender.m_addresses[0], senderAddress));
ASSERT_TRUE(mineBlocks(*nodeDaemons[0], senderAddress, 1 + currency.minedMoneyUnlockWindow()));
// wait for incoming transfer
while (senderContainer.balance() == 0) {
sender.m_observers[0].waitTransfer();
auto unlockedBalance = senderContainer.balance(ITransfersContainer::IncludeAllUnlocked | ITransfersContainer::IncludeStateSoftLocked);
auto totalBalance = senderContainer.balance(ITransfersContainer::IncludeAll);
LOG_DEBUG("Balance: " + currency.formatAmount(unlockedBalance) + " (" + currency.formatAmount(totalBalance) + ")");
}
uint64_t fundBalance = 0;
for (int iteration = 1; iteration <= 3; ++iteration) {
LOG_DEBUG("***** Iteration " + std::to_string(iteration) + " ******");
auto sendAmount = senderContainer.balance() / 2;
LOG_DEBUG("Creating transaction with amount = " + currency.formatAmount(sendAmount));
auto tx2msig = createTransferToMultisignature(
senderContainer, sendAmount, currency.minimumFee(), sender.m_accounts[0].keys, consilium.getAddresses(), 3);
auto txHash = tx2msig->getTransactionHash();
    // Use node1 so that the tx is in its pool when the next block is created
auto err = submitTransaction(*node1, *tx2msig);
ASSERT_EQ(std::error_code(), err);
ASSERT_TRUE(mineBlocks(*nodeDaemons[0], senderAddress, 1));
LOG_DEBUG("Waiting for transaction to be included in block...");
txConsumer.waitForTransaction(txHash);
LOG_DEBUG("Transaction in blockchain, waiting for observers to receive transaction...");
uint64_t expectedFundBalance = fundBalance + sendAmount;
// wait for consilium to receive the transfer
for (size_t i = 0; i < consilium.m_accounts.size(); ++i) {
auto& observer = consilium.m_observers[i];
auto sub = transferSync.getSubscription(consilium.m_accounts[i].keys.address);
ASSERT_TRUE(sub != nullptr);
while (true) {
observer.waitTransactionTransfer(txHash);
uint64_t unlockedBalance = sub->getContainer().balance(ITransfersContainer::IncludeTypeMultisignature |
ITransfersContainer::IncludeStateSoftLocked | ITransfersContainer::IncludeStateUnlocked);
if (unlockedBalance == expectedFundBalance) {
break;
}
}
}
LOG_DEBUG("Creating transaction to spend multisignature output");
uint64_t returnAmount = sendAmount / 2;
auto spendMsigTx = createTransferFromMultisignature(
consilium, sender.m_accounts[0].keys.address, txHash, returnAmount, currency.minimumFee());
auto spendMsigTxHash = spendMsigTx->getTransactionHash();
err = submitTransaction(*node1, *spendMsigTx);
ASSERT_EQ(std::error_code(), err);
ASSERT_TRUE(mineBlocks(*nodeDaemons[0], senderAddress, 1));
LOG_DEBUG("Waiting for transaction to be included in block...");
txConsumer.waitForTransaction(spendMsigTxHash);
LOG_DEBUG("Checking left balances");
uint64_t leftAmount = expectedFundBalance - returnAmount - currency.minimumFee();
for (size_t i = 0; i < consilium.m_accounts.size(); ++i) {
auto& observer = consilium.m_observers[i];
for (uint64_t unlockedBalance = leftAmount + 1; unlockedBalance != leftAmount;) {
observer.waitTransactionTransfer(spendMsigTxHash);
unlockedBalance = consilium.getTransfers(i).balance(ITransfersContainer::IncludeTypeMultisignature |
ITransfersContainer::IncludeStateSoftLocked | ITransfersContainer::IncludeStateUnlocked);
}
}
fundBalance = leftAmount;
}
blockSync.stop();
LOG_DEBUG("Success!!!");
}<|fim▁end|> | consilium.subscribeAll(); |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import patterns, include, url
from django.contrib import admin
import views
urlpatterns = patterns('',
url(r'^add_model_abox', 'tadaa.views.add_model_abox'),
url(r'^add_model', 'tadaa.views.add_model'),
url(r'^list_models', 'tadaa.views.list_models', name='list_models'),
url(r'^about', 'tadaa.views.about'),
url(r'^predict', 'tadaa.views.predict', name='predict'),
url(r'^list_predictions', 'tadaa.views.list_predictionruns', name='list_predictionruns'),
url(r'^list_memberships/([0-9]+)', 'tadaa.views.list_memberships'),
url(r'^get_classes', 'tadaa.views.get_classes'),
url(r'^online_entity_annotation', views.OnlineEntityAnnotation.as_view()),
url(r'^view_classes_stat', views.online_annotation_entity_stat),
url(r'^view_annotation_stat', views.online_annotation_annotation_stat),
url(r'^view_annotation', views.view_annotation),
url(r'^list_annotations', views.list_annotations),
url(r'^annotation_results', views.annotation_results),
url(r'^advanced_annotation', views.advance_annotation),
url(r'^do_type', views.do_type),
url(r'^annotation_stats', views.annotation_stats),
    url(r'^live_monitor', views.live_monitor),
url(r'^admin/', include(admin.site.urls)),<|fim▁hole|><|fim▁end|> | url(r'^home', 'tadaa.views.home'),
url('', 'tadaa.views.home'),
) |
<|file_name|>Mp4AudioHeader.java<|end_file_name|><|fim▁begin|>package org.jaudiotagger.audio.mp4;
import org.jaudiotagger.audio.generic.GenericAudioHeader;
import org.jaudiotagger.audio.mp4.atom.Mp4EsdsBox;
/**
* Store some additional attributes not available for all audio types
*/
public class Mp4AudioHeader extends GenericAudioHeader {
/**
* The key for the kind field<br>
*
* @see #content
*/
public final static String FIELD_KIND = "KIND";
/**
* The key for the profile<br>
*
* @see #content
*/
public final static String FIELD_PROFILE = "PROFILE";
/**
* The key for the ftyp brand<br>
*
* @see #content
*/
public final static String FIELD_BRAND = "BRAND";
public void setKind(Mp4EsdsBox.Kind kind) {
content.put(FIELD_KIND, kind);
}
/**
* @return kind
*/
public Mp4EsdsBox.Kind getKind() {
return (Mp4EsdsBox.Kind) content.get(FIELD_KIND);
}
/**
* The key for the profile
*
* @param profile
*/
public void setProfile(Mp4EsdsBox.AudioProfile profile) {
content.put(FIELD_PROFILE, profile);
}
/**
* @return audio profile
*/
public Mp4EsdsBox.AudioProfile getProfile() {<|fim▁hole|> /**
* @param brand
*/
public void setBrand(String brand) {
content.put(FIELD_BRAND, brand);
}
/**
* @return brand
*/
public String getBrand() {
return (String) content.get(FIELD_BRAND);
}
}<|fim▁end|> | return (Mp4EsdsBox.AudioProfile) content.get(FIELD_PROFILE);
}
|
<|file_name|>firebase.py<|end_file_name|><|fim▁begin|>try:
import urlparse
except ImportError:
#py3k
from urllib import parse as urlparse
import json
from .firebase_token_generator import FirebaseTokenGenerator
from .decorators import http_connection
from .multiprocess_pool import process_pool
from .jsonutil import JSONEncoder
__all__ = ['FirebaseAuthentication', 'FirebaseApplication']
@http_connection(60)
def make_get_request(url, params, headers, connection):
"""
Helper function that makes an HTTP GET request to the given firebase
endpoint. Timeout is 60 seconds.
`url`: The full URL of the firebase endpoint (DSN appended.)
`params`: Python dict that is appended to the URL like a querystring.
`headers`: Python dict. HTTP request headers.
`connection`: Predefined HTTP connection instance. If not given, it
is supplied by the `decorators.http_connection` function.
    The return value is a Python dict deserialized by the JSON decoder. However,
    if the status code is not 2xx or 403, a requests.HTTPError is raised.
connection = connection_pool.get_available_connection()
    response = make_get_request('http://firebase.localhost/users', {'print': 'silent'},
{'X_FIREBASE_SOMETHING': 'Hi'}, connection)
response => {'1': 'John Doe', '2': 'Jane Doe'}
"""
timeout = getattr(connection, 'timeout')
response = connection.get(url, params=params, headers=headers, timeout=timeout)
if response.ok or response.status_code == 403:
return response.json() if response.content else None
else:
response.raise_for_status()
@http_connection(60)
def make_put_request(url, data, params, headers, connection):
"""
Helper function that makes an HTTP PUT request to the given firebase
endpoint. Timeout is 60 seconds.
`url`: The full URL of the firebase endpoint (DSN appended.)
`data`: JSON serializable dict that will be stored in the remote storage.
`params`: Python dict that is appended to the URL like a querystring.
`headers`: Python dict. HTTP request headers.
`connection`: Predefined HTTP connection instance. If not given, it
is supplied by the `decorators.http_connection` function.
    The return value is a Python dict deserialized by the JSON decoder. However,
    if the status code is not 2xx or 403, a requests.HTTPError is raised.
connection = connection_pool.get_available_connection()
response = make_put_request('http://firebase.localhost/users',
'{"1": "Ozgur Vatansever"}',
{'X_FIREBASE_SOMETHING': 'Hi'}, connection)
response => {'1': 'Ozgur Vatansever'} or {'error': 'Permission denied.'}
"""
timeout = getattr(connection, 'timeout')
response = connection.put(url, data=data, params=params, headers=headers,
timeout=timeout)
if response.ok or response.status_code == 403:
return response.json() if response.content else None
else:
response.raise_for_status()
@http_connection(60)
def make_post_request(url, data, params, headers, connection):
"""
Helper function that makes an HTTP POST request to the given firebase
endpoint. Timeout is 60 seconds.
`url`: The full URL of the firebase endpoint (DSN appended.)
`data`: JSON serializable dict that will be stored in the remote storage.
`params`: Python dict that is appended to the URL like a querystring.
`headers`: Python dict. HTTP request headers.
`connection`: Predefined HTTP connection instance. If not given, it
is supplied by the `decorators.http_connection` function.
    The return value is a Python dict deserialized by the JSON decoder. However,
    if the status code is not 2xx or 403, a requests.HTTPError is raised.
connection = connection_pool.get_available_connection()
    response = make_post_request('http://firebase.localhost/users/',
'{"Ozgur Vatansever"}', {'X_FIREBASE_SOMETHING': 'Hi'}, connection)
response => {u'name': u'-Inw6zol_2f5ThHwVcSe'} or {'error': 'Permission denied.'}
"""
timeout = getattr(connection, 'timeout')
response = connection.post(url, data=data, params=params, headers=headers,
timeout=timeout)
if response.ok or response.status_code == 403:
return response.json() if response.content else None
else:
response.raise_for_status()
@http_connection(60)
def make_patch_request(url, data, params, headers, connection):
"""
Helper function that makes an HTTP PATCH request to the given firebase
endpoint. Timeout is 60 seconds.
`url`: The full URL of the firebase endpoint (DSN appended.)
`data`: JSON serializable dict that will be stored in the remote storage.
`params`: Python dict that is appended to the URL like a querystring.
`headers`: Python dict. HTTP request headers.
`connection`: Predefined HTTP connection instance. If not given, it
is supplied by the `decorators.http_connection` function.
    The return value is a Python dict deserialized by the JSON decoder. However,
    if the status code is not 2xx or 403, a requests.HTTPError is raised.
connection = connection_pool.get_available_connection()
    response = make_patch_request('http://firebase.localhost/users/1',
'{"Ozgur Vatansever"}', {'X_FIREBASE_SOMETHING': 'Hi'}, connection)
response => {'Ozgur Vatansever'} or {'error': 'Permission denied.'}
"""
timeout = getattr(connection, 'timeout')
response = connection.patch(url, data=data, params=params, headers=headers,
timeout=timeout)
if response.ok or response.status_code == 403:
return response.json() if response.content else None
else:
response.raise_for_status()
@http_connection(60)
def make_delete_request(url, params, headers, connection):
"""
Helper function that makes an HTTP DELETE request to the given firebase
endpoint. Timeout is 60 seconds.
`url`: The full URL of the firebase endpoint (DSN appended.)
`params`: Python dict that is appended to the URL like a querystring.
`headers`: Python dict. HTTP request headers.
`connection`: Predefined HTTP connection instance. If not given, it
is supplied by the `decorators.http_connection` function.
    The return value is None. However, if the status code is not 2xx or 403,
    a requests.HTTPError is raised.
connection = connection_pool.get_available_connection()
    response = make_delete_request('http://firebase.localhost/users/1',
{'X_FIREBASE_SOMETHING': 'Hi'}, connection)
response => NULL or {'error': 'Permission denied.'}
"""
timeout = getattr(connection, 'timeout')
response = connection.delete(url, params=params, headers=headers, timeout=timeout)
if response.ok or response.status_code == 403:
return response.json() if response.content else None
else:
response.raise_for_status()
class FirebaseUser(object):
"""
Class that wraps the credentials of the authenticated user. Think of
this as a container that holds authentication related data.
"""
def __init__(self, email, firebase_auth_token, provider, id=None):
self.email = email
self.firebase_auth_token = firebase_auth_token
self.provider = provider
self.id = id
class FirebaseAuthentication(object):
"""
    Class that wraps the Firebase SimpleLogin mechanism. This class does
    not trigger a connection; it simply fakes the auth action locally.
    The provided email is not validated and never appears in the
    ``auth`` variable at the server.
"""
def __init__(self, secret, email, debug=False, admin=False, extra=None):
self.authenticator = FirebaseTokenGenerator(secret, debug, admin)
self.email = email
self.provider = 'password'
self.extra = (extra or {}).copy()
self.extra.update({'debug': debug, 'admin': admin,
'email': self.email, 'provider': self.provider})
def get_user(self):
"""
Method that gets the authenticated user. The returning user has
the token, email and the provider data.
"""
token = self.authenticator.create_token(self.extra)
user_id = self.extra.get('id')
return FirebaseUser(self.email, token, self.provider, user_id)
class FirebaseApplication(object):
"""
Class that actually connects with the Firebase backend via HTTP calls.
It fully implements the RESTful specifications defined by Firebase. Data
is transmitted as in JSON format in both ways. This class needs a DSN value
that defines the base URL of the backend, and if needed, authentication
credentials are accepted and then are taken into consideration while
constructing HTTP requests.
There are also the corresponding asynchronous versions of each HTTP method.
The async calls make use of the on-demand process pool defined under the
module `async`.
auth = FirebaseAuthentication(FIREBASE_SECRET, '[email protected]', 'fbpw')
firebase = FirebaseApplication('https://firebase.localhost', auth)
That's all there is. Then you start connecting with the backend:
json_dict = firebase.get('/users', '1', {'print': 'pretty'})
print json_dict
{'1': 'John Doe', '2': 'Jane Doe', ...}
Async version is:
firebase.get('/users', '1', {'print': 'pretty'}, callback=log_json_dict)
The callback method is fed with the returning response.
"""
NAME_EXTENSION = '.json'
URL_SEPERATOR = '/'
def __init__(self, dsn, authentication=None):
assert dsn.startswith('https://'), 'DSN must be a secure URL'
self.dsn = dsn
self.authentication = authentication
def _build_endpoint_url(self, url, name=None):
"""
Method that constructs a full url with the given url and the
snapshot name.
Example:
full_url = _build_endpoint_url('/users', '1')
full_url => 'http://firebase.localhost/users/1.json'
"""
if not url.endswith(self.URL_SEPERATOR):
url = url + self.URL_SEPERATOR
if name is None:
name = ''
return '%s%s%s' % (urlparse.urljoin(self.dsn, url), name,
self.NAME_EXTENSION)
def _authenticate(self, params, headers):
"""
Method that simply adjusts authentication credentials for the
request.
`params` is the querystring of the request.
`headers` is the header of the request.
If auth instance is not provided to this class, this method simply
returns without doing anything.
"""
if self.authentication:
user = self.authentication.get_user()
params.update({'auth': user.firebase_auth_token})
headers.update(self.authentication.authenticator.HEADERS)
@http_connection(60)
def get(self, url, name, connection, params=None, headers=None):
"""
Synchronous GET request.
"""
if name is None: name = ''
params = params or {}
headers = headers or {}
endpoint = self._build_endpoint_url(url, name)
self._authenticate(params, headers)
return make_get_request(endpoint, params, headers, connection=connection)
def get_async(self, url, name, callback=None, params=None, headers=None):
"""
Asynchronous GET request with the process pool.
"""
if name is None: name = ''
params = params or {}
headers = headers or {}
endpoint = self._build_endpoint_url(url, name)
self._authenticate(params, headers)
process_pool.apply_async(make_get_request,
args=(endpoint, params, headers), callback=callback)
@http_connection(60)
def put(self, url, name, data, connection, params=None, headers=None):
"""
        Synchronous PUT request. There will be no output returned from
        the server, because the request is made with the ``silent``
        parameter. ``data`` must be a JSONable value.
"""
assert name, 'Snapshot name must be specified'
params = params or {}
headers = headers or {}
endpoint = self._build_endpoint_url(url, name)
self._authenticate(params, headers)
data = json.dumps(data, cls=JSONEncoder)
return make_put_request(endpoint, data, params, headers,
connection=connection)
def put_async(self, url, name, data, callback=None, params=None, headers=None):
"""
Asynchronous PUT request with the process pool.
"""
if name is None: name = ''
params = params or {}
headers = headers or {}
endpoint = self._build_endpoint_url(url, name)
self._authenticate(params, headers)
data = json.dumps(data, cls=JSONEncoder)
process_pool.apply_async(make_put_request,
args=(endpoint, data, params, headers),
callback=callback)
@http_connection(60)
def post(self, url, data, connection, params=None, headers=None):
"""
Synchronous POST request. ``data`` must be a JSONable value.
"""
params = params or {}
headers = headers or {}
endpoint = self._build_endpoint_url(url, None)
self._authenticate(params, headers)
data = json.dumps(data, cls=JSONEncoder)
return make_post_request(endpoint, data, params, headers,
connection=connection)
def post_async(self, url, data, callback=None, params=None, headers=None):
"""
Asynchronous POST request with the process pool.
"""
params = params or {}
headers = headers or {}
endpoint = self._build_endpoint_url(url, None)
self._authenticate(params, headers)
data = json.dumps(data, cls=JSONEncoder)
process_pool.apply_async(make_post_request,
args=(endpoint, data, params, headers),
callback=callback)
@http_connection(60)
def patch(self, url, data, connection, params=None, headers=None):
"""
        Synchronous PATCH request. ``data`` must be a JSONable value.
"""
params = params or {}
headers = headers or {}
endpoint = self._build_endpoint_url(url, None)
self._authenticate(params, headers)
data = json.dumps(data, cls=JSONEncoder)
return make_patch_request(endpoint, data, params, headers,
connection=connection)
def patch_async(self, url, data, callback=None, params=None, headers=None):
"""
Asynchronous PATCH request with the process pool.
"""
params = params or {}
headers = headers or {}
endpoint = self._build_endpoint_url(url, None)
self._authenticate(params, headers)
data = json.dumps(data, cls=JSONEncoder)
process_pool.apply_async(make_patch_request,
args=(endpoint, data, params, headers),
callback=callback)
@http_connection(60)
def delete(self, url, name, connection, params=None, headers=None):
"""
        Synchronous DELETE request.
"""<|fim▁hole|> if not name: name = ''
params = params or {}
headers = headers or {}
endpoint = self._build_endpoint_url(url, name)
self._authenticate(params, headers)
return make_delete_request(endpoint, params, headers, connection=connection)
def delete_async(self, url, name, callback=None, params=None, headers=None):
"""
Asynchronous DELETE request with the process pool.
"""
if not name: name = ''
params = params or {}
headers = headers or {}
endpoint = self._build_endpoint_url(url, name)
self._authenticate(params, headers)
process_pool.apply_async(make_delete_request,
args=(endpoint, params, headers), callback=callback)<|fim▁end|> | |
<|file_name|>platformstyle.js<|end_file_name|><|fim▁begin|>// Copyright (C) 2015 Sam Parkinson
// This program is free software; you can redistribute it and/or
// modify it under the terms of the The GNU Affero General Public
// License as published by the Free Software Foundation; either
// version 3 of the License, or (at your option) any later version.
//
// You should have received a copy of the GNU Affero General Public
// License along with this library; if not, write to the Free Software
// Foundation, 51 Franklin Street, Suite 500 Boston, MA 02110-1335 USA
//
window.platform = {
android: /Android/i.test(navigator.userAgent),
FF: /Firefox/i.test(navigator.userAgent),
    mobile: /Mobi/i.test(navigator.userAgent),
    tablet: /Tablet/i.test(navigator.userAgent)
}
platform.androidWebkit = platform.android && !platform.FF;
platform.FFOS = platform.FF
&& (platform.mobile || platform.tablet)
&& !platform.android;
console.log('On platform: ', platform);
window.platformColor = {
header: platform.FF? '#00539F' : '#2196F3',
doHeaderShadow: !platform.FF,<|fim▁hole|> background: platform.FF? '#00CAF2' : '#96D3F3'
}
document.querySelector('meta[name=theme-color]')
.content = platformColor.header;
function showButtonHighlight(x, y, r, event, scale, stage) {
if (platform.FFOS) return {};
return showMaterialHighlight(x, y, r, event, scale, stage);
}<|fim▁end|> | |
<|file_name|>plotf.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
import sys
import numpy as np
from spc import SPC
import matplotlib.pyplot as plt
def plot(files, fac=1.0):
for f in files:
if f.split('.')[-1] == 'xy':
td = np.loadtxt(f)
plt.plot(td[:, 0], np.log(1. / td[:, 1]) * fac, label=f)
elif f.split('.')[-1] == 'spc':
td = SPC(f)
plt.plot(td.xdata, np.log(1. / np.array(td.ydata)), label=f)
plt.legend()
plt.show()
if __name__ == '__main__':
files = sys.argv[2:]
fac = float(sys.argv[1])
plot(files, fac)<|fim▁end|> | #!/usr/bin/env python3 |
<|file_name|>gen_utils.py<|end_file_name|><|fim▁begin|>import json
import shutil
import sys
import warnings
from itertools import zip_longest
import s3fs
from smart_open import open
from tqdm import tqdm
def session_type():
if 'IPython' not in sys.modules:
# IPython hasn't been imported, definitely not
return "python"
from IPython import get_ipython
# check for `kernel` attribute on the IPython instance
if getattr(get_ipython(), 'kernel', None) is not None:
return "kernel"
return "ipython"
def make_tqdm_iterator(**kwargs):
options = {
"file": sys.stdout,
"leave": True
}
options.update(kwargs)
if session_type() == 'kernel':
# from IPython import display
# capture_stderr = StringIO()
# with RedirectStdStreams(stderr=capture_stderr):
# try:
# iterator = tqdm_notebook(**options)
# except:
# failed = True
# else:
# failed = False
# err_out = capture_stderr.getvalue()
# capture_stderr.close()
# if failed or err_out.lower().find("widget javascript not detected") \
# >-1:
# display.clear_output(wait=True)
# iterator = tqdm(**options)
iterator = tqdm(**options)
else:
iterator = tqdm(**options)
return iterator
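# Builds the nested link-variable id for a backward relationship path by
# prefixing each parent entity id, e.g. a two-relationship path can yield
# something like "grandparent.parent.child_key" (names here are illustrative).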
def get_relationship_variable_id(path):
_, r = path[0]
child_link_name = r.child_variable.id
for _, r in path[1:]:
parent_link_name = child_link_name
child_link_name = '%s.%s' % (r.parent_entity.id,
parent_link_name)
return child_link_name
def find_descendents(cls):
"""
A generator which yields all descendent classes of the given class
(including the given class)
Args:
cls (Class): the class to find descendents of
"""
yield cls
for sub in cls.__subclasses__():
for c in find_descendents(sub):
yield c
def check_schema_version(cls, cls_type):
if isinstance(cls_type, str):
if cls_type == 'entityset':
from featuretools.entityset.serialize import SCHEMA_VERSION
version_string = cls.get('schema_version')
elif cls_type == 'features':
from featuretools.feature_base.features_serializer import SCHEMA_VERSION
version_string = cls.features_dict['schema_version']
current = SCHEMA_VERSION.split('.')
saved = version_string.split('.')
    warning_text_upgrade = ('The schema version of the saved %s '
'(%s) is greater than the latest supported (%s). '
'You may need to upgrade featuretools. Attempting to load %s ...'
% (cls_type, version_string, SCHEMA_VERSION, cls_type))
    for c_num, s_num in zip_longest(current, saved, fillvalue=0):
        # Compare version components numerically, not lexicographically.
        c_num, s_num = int(c_num), int(s_num)
        if c_num > s_num:
            break
        elif c_num < s_num:
            warnings.warn(warning_text_upgrade)
            break
    warning_text_outdated = ('The schema version of the saved %s '
                             '(%s) is no longer supported by this version '
                             'of featuretools. Attempting to load %s ...'
% (cls_type, version_string, cls_type))
# Check if saved has older major version.
    if int(current[0]) > int(saved[0]):
warnings.warn(warning_text_outdated)
def use_smartopen_es(file_path, path, transport_params=None, read=True):
if read:
with open(path, "rb", transport_params=transport_params) as fin:
with open(file_path, 'wb') as fout:
shutil.copyfileobj(fin, fout)
else:
with open(file_path, 'rb') as fin:<|fim▁hole|>def use_s3fs_es(file_path, path, read=True):
s3 = s3fs.S3FileSystem(anon=True)
if read:
s3.get(path, file_path)
else:
s3.put(file_path, path)
def use_smartopen_features(path, features_dict=None, transport_params=None, read=True):
if read:
with open(path, 'r', encoding='utf-8', transport_params=transport_params) as f:
features_dict = json.load(f)
return features_dict
else:
with open(path, "w", transport_params=transport_params) as f:
json.dump(features_dict, f)
def use_s3fs_features(file_path, features_dict=None, read=True):
s3 = s3fs.S3FileSystem(anon=True)
if read:
with s3.open(file_path, "r", encoding='utf-8') as f:
features_dict = json.load(f)
return features_dict
else:
with s3.open(file_path, "w") as f:
features = json.dumps(features_dict, ensure_ascii=False)
f.write(features)<|fim▁end|> | with open(path, 'wb', transport_params=transport_params) as fout:
shutil.copyfileobj(fin, fout)
|
<|file_name|>D3Utils.d.ts<|end_file_name|><|fim▁begin|>import { IDataType } from 'phovea_core';<|fim▁hole|> static transform(x?: number, y?: number, rotate?: number, scaleX?: number, scaleY?: number): d3.Transform;
/**
* utility function to handle selections
* @param data
* @param $data
     * @param selector what type of object the data are bound to
* @returns {function(any, any): undefined} the click handler
*/
static selectionUtil(data: IDataType, $data: d3.Selection<any>, selector: string): (d: any, i: number) => void;
/**
* utility function to define a vis
* @param name the name of the vis - will be used during toString
* @param defaultOptions a function or an object containing the default options of this vis
* @param initialSize a function or the size to compute the initial size of this vis
* @param build the builder function
* @param functions an object of additional functions to the vis
* @returns a function class for this vis
*/
static defineVis(name: string, defaultOptions: any, initialSize: number[], build: ($parent: d3.Selection<any>, data: IDataType, size: number[]) => d3.Selection<any>, functions?: any): any;
static defineVis(name: string, defaultOptions: (data: IDataType, options: any) => any, initialSize: number[], build: ($parent: d3.Selection<any>, data: IDataType, size: number[]) => d3.Selection<any>, functions?: any): any;
static defineVis(name: string, defaultOptions: any, initialSize: (data: IDataType) => number[], build: ($parent: d3.Selection<any>, data: IDataType) => d3.Selection<any>, functions?: any): any;
static defineVis(name: string, defaultOptions: (data: IDataType, options: any) => any, initialSize: (data: IDataType) => number[], build: ($parent: d3.Selection<any>, data: IDataType, size: number[]) => d3.Selection<any>, functions?: any): any;
}<|fim▁end|> | import * as d3 from 'd3';
export declare class D3Utils { |
<|file_name|>hashmap-lifetimes.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
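// Borrow-check test: `it` holds an immutable borrow of `my_stuff`, so the
// mutable borrow required by `swap` below must be rejected.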
fn main() {<|fim▁hole|> let mut it = my_stuff.iter();
my_stuff.swap(1, 43); //~ ERROR cannot borrow
}<|fim▁end|> | let mut my_stuff = std::collections::HashMap::new();
my_stuff.insert(0i, 42i);
|
<|file_name|>gizmos.py<|end_file_name|><|fim▁begin|># BlenderBIM Add-on - OpenBIM Blender Add-on
# Copyright (C) 2020, 2021 Maxim Vasilyev <[email protected]>
#
# This file is part of BlenderBIM Add-on.
#
# BlenderBIM Add-on is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# BlenderBIM Add-on is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BlenderBIM Add-on. If not, see <http://www.gnu.org/licenses/>.
import bpy
import blf
import math
import gpu, bgl
from bpy import types
from mathutils import Vector, Matrix
from mathutils import geometry
from bpy_extras import view3d_utils
from blenderbim.bim.module.drawing.shaders import DotsGizmoShader, ExtrusionGuidesShader, BaseLinesShader
from ifcopenshell.util.unit import si_conversions
"""Gizmos under the hood
## Transforms:
source/blender/windowmanager/gizmo/WM_gizmo_types.h
matrix_basis -- "Transformation of this gizmo." = placement in scene
matrix_offset -- "Custom offset from origin." = local transforms according to state/value
matrix_space -- "The space this gizmo is being modified in." used by some gizmos for undefined purposes
matrix_world -- final matrix, scaled according to viewport zoom and custom scale_basis
source/blender/windowmanager/gizmo/intern/wm_gizmo.c:WM_gizmo_calc_matrix_final_params
final = space @ (autoscale * (basis @ offset))
final = space @ (basis @ offset) -- if gizmo.use_draw_scale == False
final = space @ ((autoscale * basis) @ offset) -- if gizmo.use_draw_offset_scale
source/blender/windowmanager/gizmo/intern/wm_gizmo.c:wm_gizmo_calculate_scale
autoscale = gizmo.scale_basis * magic(preferences, matrix_space, matrix_basis, context.region_data)
magic -- making 1.0 to match preferences.view.gizmo_size pixels (75 by default)
## Selection
select_id -- apparently, id of a selectable part
test_select -- expected to return id of selection, doesn't seem to work
draw_select -- fake-draw of selection geometry for gpu-side cursor tracking
"""
# some geometries for Gizmo.custom_shape shaders
CUBE = (
(+1, +1, +1),
(-1, +1, +1),
(+1, -1, +1), # top<|fim▁hole|> (+1, -1, +1),
(-1, +1, +1),
(-1, -1, +1),
(+1, +1, +1),
(+1, -1, +1),
(+1, +1, -1), # right
(+1, +1, -1),
(+1, -1, +1),
(+1, -1, -1),
(+1, +1, +1),
(+1, +1, -1),
(-1, +1, +1), # back
(-1, +1, +1),
(+1, +1, -1),
(-1, +1, -1),
(-1, -1, -1),
(-1, +1, -1),
(+1, -1, -1), # bot
(+1, -1, -1),
(-1, +1, -1),
(+1, +1, -1),
(-1, -1, -1),
(-1, -1, +1),
(-1, +1, -1), # left
(-1, +1, -1),
(-1, -1, +1),
(-1, +1, +1),
(-1, -1, -1),
(+1, -1, -1),
(-1, -1, +1), # front
(-1, -1, +1),
(+1, -1, -1),
(+1, -1, +1),
)
DISC = (
(0.0, 0.0, 0.0),
(1.0, 0.0, 0),
(0.8660254037844387, 0.49999999999999994, 0),
(0.0, 0.0, 0.0),
(0.8660254037844387, 0.49999999999999994, 0),
(0.5000000000000001, 0.8660254037844386, 0),
(0.0, 0.0, 0.0),
(0.5000000000000001, 0.8660254037844386, 0),
(6.123233995736766e-17, 1.0, 0),
(0.0, 0.0, 0.0),
(6.123233995736766e-17, 1.0, 0),
(-0.4999999999999998, 0.8660254037844387, 0),
(0.0, 0.0, 0.0),
(-0.4999999999999998, 0.8660254037844387, 0),
(-0.8660254037844385, 0.5000000000000003, 0),
(0.0, 0.0, 0.0),
(-0.8660254037844385, 0.5000000000000003, 0),
(-1.0, 1.2246467991473532e-16, 0),
(0.0, 0.0, 0.0),
(-1.0, 1.2246467991473532e-16, 0),
(-0.8660254037844388, -0.4999999999999997, 0),
(0.0, 0.0, 0.0),
(-0.8660254037844388, -0.4999999999999997, 0),
(-0.5000000000000004, -0.8660254037844384, 0),
(0.0, 0.0, 0.0),
(-0.5000000000000004, -0.8660254037844384, 0),
(-1.8369701987210297e-16, -1.0, 0),
(0.0, 0.0, 0.0),
(-1.8369701987210297e-16, -1.0, 0),
(0.49999999999999933, -0.866025403784439, 0),
(0.0, 0.0, 0.0),
(0.49999999999999933, -0.866025403784439, 0),
(0.8660254037844384, -0.5000000000000004, 0),
(0.0, 0.0, 0.0),
(0.8660254037844384, -0.5000000000000004, 0),
(1.0, 0.0, 0),
)
X3DISC = (
(0.0, 0.0, 0.0),
(1.0, 0.0, 0),
(0.8660254037844387, 0.49999999999999994, 0),
(0.0, 0.0, 0.0),
(0.8660254037844387, 0.49999999999999994, 0),
(0.5000000000000001, 0.8660254037844386, 0),
(0.0, 0.0, 0.0),
(0.5000000000000001, 0.8660254037844386, 0),
(6.123233995736766e-17, 1.0, 0),
(0.0, 0.0, 0.0),
(6.123233995736766e-17, 1.0, 0),
(-0.4999999999999998, 0.8660254037844387, 0),
(0.0, 0.0, 0.0),
(-0.4999999999999998, 0.8660254037844387, 0),
(-0.8660254037844385, 0.5000000000000003, 0),
(0.0, 0.0, 0.0),
(-0.8660254037844385, 0.5000000000000003, 0),
(-1.0, 1.2246467991473532e-16, 0),
(0.0, 0.0, 0.0),
(-1.0, 1.2246467991473532e-16, 0),
(-0.8660254037844388, -0.4999999999999997, 0),
(0.0, 0.0, 0.0),
(-0.8660254037844388, -0.4999999999999997, 0),
(-0.5000000000000004, -0.8660254037844384, 0),
(0.0, 0.0, 0.0),
(-0.5000000000000004, -0.8660254037844384, 0),
(-1.8369701987210297e-16, -1.0, 0),
(0.0, 0.0, 0.0),
(-1.8369701987210297e-16, -1.0, 0),
(0.49999999999999933, -0.866025403784439, 0),
(0.0, 0.0, 0.0),
(0.49999999999999933, -0.866025403784439, 0),
(0.8660254037844384, -0.5000000000000004, 0),
(0.0, 0.0, 0.0),
(0.8660254037844384, -0.5000000000000004, 0),
(1.0, 0.0, 0),
(0.0, 0.0, 0.0),
(0, 1.0, 0.0),
(0, 0.8660254037844387, 0.49999999999999994),
(0.0, 0.0, 0.0),
(0, 0.8660254037844387, 0.49999999999999994),
(0, 0.5000000000000001, 0.8660254037844386),
(0.0, 0.0, 0.0),
(0, 0.5000000000000001, 0.8660254037844386),
(0, 6.123233995736766e-17, 1.0),
(0.0, 0.0, 0.0),
(0, 6.123233995736766e-17, 1.0),
(0, -0.4999999999999998, 0.8660254037844387),
(0.0, 0.0, 0.0),
(0, -0.4999999999999998, 0.8660254037844387),
(0, -0.8660254037844385, 0.5000000000000003),
(0.0, 0.0, 0.0),
(0, -0.8660254037844385, 0.5000000000000003),
(0, -1.0, 1.2246467991473532e-16),
(0.0, 0.0, 0.0),
(0, -1.0, 1.2246467991473532e-16),
(0, -0.8660254037844388, -0.4999999999999997),
(0.0, 0.0, 0.0),
(0, -0.8660254037844388, -0.4999999999999997),
(0, -0.5000000000000004, -0.8660254037844384),
(0.0, 0.0, 0.0),
(0, -0.5000000000000004, -0.8660254037844384),
(0, -1.8369701987210297e-16, -1.0),
(0.0, 0.0, 0.0),
(0, -1.8369701987210297e-16, -1.0),
(0, 0.49999999999999933, -0.866025403784439),
(0.0, 0.0, 0.0),
(0, 0.49999999999999933, -0.866025403784439),
(0, 0.8660254037844384, -0.5000000000000004),
(0.0, 0.0, 0.0),
(0, 0.8660254037844384, -0.5000000000000004),
(0, 1.0, 0.0),
(0.0, 0.0, 0.0),
(0.0, 0, 1.0),
(0.49999999999999994, 0, 0.8660254037844387),
(0.0, 0.0, 0.0),
(0.49999999999999994, 0, 0.8660254037844387),
(0.8660254037844386, 0, 0.5000000000000001),
(0.0, 0.0, 0.0),
(0.8660254037844386, 0, 0.5000000000000001),
(1.0, 0, 6.123233995736766e-17),
(0.0, 0.0, 0.0),
(1.0, 0, 6.123233995736766e-17),
(0.8660254037844387, 0, -0.4999999999999998),
(0.0, 0.0, 0.0),
(0.8660254037844387, 0, -0.4999999999999998),
(0.5000000000000003, 0, -0.8660254037844385),
(0.0, 0.0, 0.0),
(0.5000000000000003, 0, -0.8660254037844385),
(1.2246467991473532e-16, 0, -1.0),
(0.0, 0.0, 0.0),
(1.2246467991473532e-16, 0, -1.0),
(-0.4999999999999997, 0, -0.8660254037844388),
(0.0, 0.0, 0.0),
(-0.4999999999999997, 0, -0.8660254037844388),
(-0.8660254037844384, 0, -0.5000000000000004),
(0.0, 0.0, 0.0),
(-0.8660254037844384, 0, -0.5000000000000004),
(-1.0, 0, -1.8369701987210297e-16),
(0.0, 0.0, 0.0),
(-1.0, 0, -1.8369701987210297e-16),
(-0.866025403784439, 0, 0.49999999999999933),
(0.0, 0.0, 0.0),
(-0.866025403784439, 0, 0.49999999999999933),
(-0.5000000000000004, 0, 0.8660254037844384),
(0.0, 0.0, 0.0),
(-0.5000000000000004, 0, 0.8660254037844384),
(0.0, 0, 1.0),
)
class CustomGizmo:
# FIXME: highliting/selection doesnt work
def draw_very_custom_shape(self, ctx, custom_shape, select_id=None):
# similar to draw_custom_shape
shape, batch, shader = custom_shape
shader.bind()
if select_id is not None:
gpu.select.load_id(select_id)
else:
if self.is_highlight:
color = (*self.color_highlight, self.alpha_highlight)
else:
color = (*self.color, self.alpha)
shader.uniform_float("color", color)
shape.glenable()
shape.uniform_region(ctx)
# shader.uniform_float('modelMatrix', self.matrix_world)
with gpu.matrix.push_pop():
gpu.matrix.multiply_matrix(self.matrix_world)
batch.draw()
bgl.glDisable(bgl.GL_BLEND)
class OffsetHandle:
"""Handling mouse to offset gizmo from base along Z axis"""
# FIXME: works a bit weird for rotated objects
def invoke(self, ctx, event):
self.init_value = self.target_get_value("offset") / self.scale_value
coordz = self.project_mouse(ctx, event)
if coordz is None:
return {"CANCELLED"}
self.init_coordz = coordz
return {"RUNNING_MODAL"}
def modal(self, ctx, event, tweak):
coordz = self.project_mouse(ctx, event)
if coordz is None:
return {"CANCELLED"}
delta = coordz - self.init_coordz
if "PRECISE" in tweak:
delta /= 10.0
value = max(0, self.init_value + delta)
value *= self.scale_value
# ctx.area.header_text_set(f"coords: {self.init_coordz} - {coordz}, delta: {delta}, value: {value}")
ctx.area.header_text_set(f"Depth: {value}")
self.target_set_value("offset", value)
return {"RUNNING_MODAL"}
def project_mouse(self, ctx, event):
"""Projecting mouse coords to local axis Z"""
# logic from source/blender/editors/gizmo_library/gizmo_types/arrow3d_gizmo.c:gizmo_arrow_modal
mouse = Vector((event.mouse_region_x, event.mouse_region_y))
region = ctx.region
region3d = ctx.region_data
ray_orig = view3d_utils.region_2d_to_origin_3d(region, region3d, mouse)
ray_norm = view3d_utils.region_2d_to_vector_3d(region, region3d, mouse)
# 'arrow' origin and direction
base = Vector((0, 0, 0))
axis = Vector((0, 0, 1))
        # projection of the arrow onto a plane perpendicular to the view ray
axis_proj = axis - ray_norm * axis.dot(ray_norm)
# intersection of the axis with the plane through view origin perpendicular to the arrow projection
coords = geometry.intersect_line_plane(base, axis, ray_orig, axis_proj)
return coords.z
def exit(self, ctx, cancel):
if cancel:
self.target_set_value("offset", self.init_value)
else:
self.group.update(ctx)
class UglyDotGizmo(OffsetHandle, types.Gizmo):
"""three orthogonal circles"""
bl_idname = "BIM_GT_uglydot_3d"
bl_target_properties = ({"id": "offset", "type": "FLOAT", "array_length": 1},)
__slots__ = (
"scale_value",
"custom_shape",
"init_value",
"init_coordz",
)
def setup(self):
self.custom_shape = self.new_custom_shape(type="TRIS", verts=X3DISC)
def refresh(self):
offset = self.target_get_value("offset") / self.scale_value
self.matrix_offset.col[3][2] = offset # z-shift
def draw(self, ctx):
self.refresh()
self.draw_custom_shape(self.custom_shape)
def draw_select(self, ctx, select_id):
self.refresh()
self.draw_custom_shape(self.custom_shape, select_id=select_id)
class DotGizmo(CustomGizmo, OffsetHandle, types.Gizmo):
"""Single dot viewport-aligned"""
# FIXME: make it selectable
bl_idname = "BIM_GT_dot_2d"
bl_target_properties = ({"id": "offset", "type": "FLOAT", "array_length": 1},)
__slots__ = (
"scale_value",
"custom_shape",
)
def setup(self):
shader = DotsGizmoShader()
self.custom_shape = shader, shader.batch(pos=((0, 0, 0),)), shader.prog
self.use_draw_scale = False
def refresh(self):
offset = self.target_get_value("offset") / self.scale_value
self.matrix_offset.col[3][2] = offset # z-shifted
def draw(self, ctx):
self.refresh()
self.draw_very_custom_shape(ctx, self.custom_shape)
def draw_select(self, ctx, select_id):
self.refresh()
self.draw_very_custom_shape(ctx, self.custom_shape, select_id=select_id)
# doesn't get called
# def test_select(self, ctx, location):
# pass
class ExtrusionGuidesGizmo(CustomGizmo, types.Gizmo):
"""Extrusion guides
Noninteractive gizmo to indicate extrusion depth and planes.
Draws main segment and orthogonal cross at endpoints.
"""
bl_idname = "BIM_GT_extrusion_guides"
bl_target_properties = ({"id": "depth", "type": "FLOAT", "array_length": 1},)
__slots__ = ("scale_value", "custom_shape")
def setup(self):
shader = ExtrusionGuidesShader()
self.custom_shape = shader, shader.batch(pos=((0, 0, 0), (0, 0, 1))), shader.prog
self.use_draw_scale = False
def refresh(self):
depth = self.target_get_value("depth") / self.scale_value
self.matrix_offset.col[2][2] = depth # z-scaled
def draw(self, ctx):
self.refresh()
self.draw_very_custom_shape(ctx, self.custom_shape)
class DimensionLabelGizmo(types.Gizmo):
"""Text label for a dimension"""
# does not work properly, fonts are totally screwed up
bl_idname = "BIM_GT_dimension_label"
bl_target_properties = ({"id": "value", "type": "FLOAT", "array_length": 1},)
__slots__ = "text_label"
def setup(self):
pass
def refresh(self, ctx):
value = self.target_get_value("value")
self.matrix_offset.col[3][2] = value * 0.5
unit_system = ctx.scene.unit_settings.system
self.text_label = bpy.utils.units.to_string(unit_system, "LENGTH", value, 3, split_unit=False)
def draw(self, ctx):
self.refresh(ctx)
self.draw_text(ctx)
def draw_text(self, ctx):
font_id = 0
font_size = 16
dpi = ctx.preferences.system.dpi
# pos = self.matrix_world @ Vector((0, 0, 0, 1))
# pos = Vector((0, 0, 0.5))
# region = ctx.region
# region3d = ctx.region_data
# pos = view3d_utils.location_3d_to_region_2d(region, region3d, pos)
# text = self.text_label
blf.size(font_id, font_size, dpi)
blf.position(font_id, 0, 0, 0)
blf.color(font_id, *self.color, self.alpha)
blf.draw(font_id, "ABC")
class ExtrusionWidget(types.GizmoGroup):
bl_idname = "bim.extrusion_widget"
bl_label = "Extrusion Gizmos"
bl_space_type = "VIEW_3D"
bl_region_type = "WINDOW"
bl_options = {"3D", "PERSISTENT", "SHOW_MODAL_ALL"}
@classmethod
def poll(cls, ctx):
obj = ctx.object
return (
obj
and obj.type == "MESH"
and obj.data.BIMMeshProperties.ifc_parameters.get("IfcExtrudedAreaSolid/Depth") is not None
)
def setup(self, ctx):
target = ctx.object
prop = target.data.BIMMeshProperties.ifc_parameters.get("IfcExtrudedAreaSolid/Depth")
basis = target.matrix_world.normalized()
theme = ctx.preferences.themes[0].user_interface
scale_value = self.get_scale_value(ctx.scene.unit_settings.system, ctx.scene.unit_settings.length_unit)
gz = self.handle = self.gizmos.new("BIM_GT_uglydot_3d")
gz.matrix_basis = basis
gz.scale_basis = 0.1
gz.color = gz.color_highlight = tuple(theme.gizmo_primary)
gz.alpha = 0.5
gz.alpha_highlight = 1.0
gz.use_draw_modal = True
gz.target_set_prop("offset", prop, "value")
gz.scale_value = scale_value
gz = self.guides = self.gizmos.new("BIM_GT_extrusion_guides")
gz.matrix_basis = basis
gz.color = gz.color_highlight = tuple(theme.gizmo_secondary)
gz.alpha = gz.alpha_highlight = 0.5
gz.use_draw_modal = True
gz.target_set_prop("depth", prop, "value")
gz.scale_value = scale_value
# gz = self.label = self.gizmos.new('GIZMO_GT_dimension_label')
# gz.matrix_basis = basis
# gz.color = tuple(theme.gizmo_secondary)
# gz.alpha = 0.5
# gz.use_draw_modal = True
# gz.target_set_prop('value', target.demo, 'depth')
def refresh(self, ctx):
"""updating gizmos"""
target = ctx.object
basis = target.matrix_world.normalized()
self.handle.matrix_basis = basis
self.guides.matrix_basis = basis
def update(self, ctx):
"""updating object"""
bpy.ops.bim.update_parametric_representation()
target = ctx.object
prop = target.data.BIMMeshProperties.ifc_parameters.get("IfcExtrudedAreaSolid/Depth")
self.handle.target_set_prop("offset", prop, "value")
self.guides.target_set_prop("depth", prop, "value")
@staticmethod
def get_scale_value(system, length_unit):
scale_value = 1
if system == "METRIC":
if length_unit == "KILOMETERS":
scale_value /= 1000
elif length_unit == "CENTIMETERS":
scale_value *= 100
elif length_unit == "MILLIMETERS":
scale_value *= 1000
elif length_unit == "MICROMETERS":
scale_value *= 1000000
elif system == "IMPERIAL":
if length_unit == "MILES":
scale_value /= si_conversions["mile"]
elif length_unit == "FEET":
scale_value /= si_conversions["foot"]
elif length_unit == "INCHES":
scale_value /= si_conversions["inch"]
elif length_unit == "THOU":
scale_value /= si_conversions["thou"]
return scale_value<|fim▁end|> | |
<|file_name|>styles.js<|end_file_name|><|fim▁begin|>import { StyleSheet } from 'react-native'
const s = StyleSheet.create({
flexRowAround: {
flexDirection: 'row',
justifyContent: 'space-around',
},
dot: {
height: 7,
width: 7,
borderRadius: 3.5,
},
green: {
color: '#50d2c2',
},
flexWrap: {
flexWrap: 'wrap',
},
textCenter: {
textAlign: 'center',
},
flex: {
flex: 1,<|fim▁hole|> },
justifyCenter: {
justifyContent: 'center',
},
alignCenter: {
alignItems: 'center',
},
row: {
flexDirection: 'row',
},
column: {
flexDirection: 'column',
},
flexAround: {
justifyContent: 'space-around',
},
flexBetween: {
justifyContent: 'space-between',
},
activeWeek: {
backgroundColor: '#50d2c2',
},
activeCalender: {
backgroundColor: '#fff',
},
pTop: {
paddingTop: 13,
paddingBottom: 5,
},
pBottom: {
paddingBottom: 13,
},
p: {
paddingTop: 13,
paddingBottom: 13,
},
weekView: {
backgroundColor: '#f8f8f8',
},
calenderView: {
backgroundColor: '#50d2c2',
},
disabledMonth: {
color: '#9be5db',
},
white: {
color: '#fff',
},
backWhite: {
backgroundColor: '#fff',
},
});
export default s
export const setHeight = height => ({ height });
export const setWidth = width => ({ width });
export const setPaddingTop = paddingTop => ({ paddingTop });
export const setPaddingBottom = paddingBottom => ({ paddingBottom });
export const setPaddingLeft = paddingLeft => ({ paddingLeft });
export const setPaddingRight = paddingRight => ({ paddingRight });
export const setFontSize = fontSize => ({ fontSize });
export const setFlex = flex => ({ flex });
export const setColor = color => ({ color });
export const setBackGroundColor = backgroundColor => ({ backgroundColor });
export const setBorderColor = borderColor => ({ borderColor });
export const setPosition = position => ({ position });
export const setBottom = bottom => ({ bottom });
export const setLeft = left => ({ left });
export const setRight = right => ({ right });
export const setTop = top => ({ top });
export const setMarginTop = marginTop => ({ marginTop });
export const setMarginBottom = marginBottom => ({ marginBottom });
export const setMarginLeft = marginLeft => ({ marginLeft });
export const setMarginRight = marginRight => ({ marginRight });
export const setPadding = function() {
switch (arguments.length) {
case 1:
      return { paddingTop: arguments[0] }
case 2:
return { paddingTop: arguments[0], paddingRight: arguments[1] }
case 3:
return { paddingTop: arguments[0], paddingRight: arguments[1], paddingBottom: arguments[2] }
case 4:
return { paddingTop: arguments[0], paddingRight: arguments[1], paddingBottom: arguments[2], paddingLeft: arguments[3] }
default:
return { padding: arguments[0] }
}
}
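// Usage sketch (CSS shorthand order: top, right, bottom, left), e.g.
// setPadding(10, 20) -> { paddingTop: 10, paddingRight: 20 };
// setMargin below follows the same pattern with margin* keys.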
export const setMargin = function() {
switch (arguments.length) {
case 1:
      return { marginTop: arguments[0] }
case 2:
return { marginTop: arguments[0], marginRight: arguments[1] }
case 3:
return { marginTop: arguments[0], marginRight: arguments[1], marginBottom: arguments[2] }
case 4:
return { marginTop: arguments[0], marginRight: arguments[1], marginBottom: arguments[2], marginLeft: arguments[3] }
default:
return { margin: arguments[0] }
}
}<|fim▁end|> | },
activeMonth:{
backgroundColor: '#50d2c2',
borderRadius: 5 |
<|file_name|>pnl_mrf_example.cpp<|end_file_name|><|fim▁begin|>//#include "stdafx.h"
#include <pnl_dll.hpp>
#include <boost/smart_ptr.hpp>
#include <iostream>
namespace {
namespace local {
// [ref] testGetFactorsMRF2() in ${PNL_ROOT}/c_pgmtk/tests/src/AGetParametersTest.cpp
<|fim▁hole|> const int numNodeTypes = 2;
const int numCliques = 6;
#if 0
const int cliqueSizes[] = { 2, 2, 2, 2, 2, 2 };
const int clique0[] = { 0, 1 };
const int clique1[] = { 1, 2 };
const int clique2[] = { 1, 3 };
const int clique3[] = { 2, 4 };
const int clique4[] = { 2, 5 };
const int clique5[] = { 3, 6 };
const int *cliques[] = { clique0, clique1, clique2, clique3, clique4, clique5 };
pnl::CNodeType *nodeTypes = new pnl::CNodeType [numNodeTypes];
nodeTypes[0].SetType(true, 3);
nodeTypes[1].SetType(true, 4);
int *nodeAssociation = new int [numNodes];
for (int i = 0; i < numNodes; ++i)
nodeAssociation[i] = i % 2;
pnl::CMRF2 *mrf2 = pnl::CMRF2::Create(numNodes, numNodeTypes, nodeTypes, nodeAssociation, numCliques, cliqueSizes, cliques);
#else
pnl::intVecVector cliques;
{
const int clique0[] = { 0, 1 };
const int clique1[] = { 1, 2 };
const int clique2[] = { 1, 3 };
const int clique3[] = { 2, 4 };
const int clique4[] = { 2, 5 };
const int clique5[] = { 3, 6 };
cliques.push_back(pnl::intVector(clique0, clique0 + sizeof(clique0) / sizeof(clique0[0])));
cliques.push_back(pnl::intVector(clique1, clique1 + sizeof(clique1) / sizeof(clique1[0])));
cliques.push_back(pnl::intVector(clique2, clique2 + sizeof(clique2) / sizeof(clique2[0])));
cliques.push_back(pnl::intVector(clique3, clique3 + sizeof(clique3) / sizeof(clique3[0])));
cliques.push_back(pnl::intVector(clique4, clique4 + sizeof(clique4) / sizeof(clique4[0])));
cliques.push_back(pnl::intVector(clique5, clique5 + sizeof(clique5) / sizeof(clique5[0])));
}
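		// The cliques above describe a tree over the seven nodes:
		//   0 - 1 - 2 - 4
		//       |   |
		//       3   5
		//       |
		//       6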
//pnl::nodeTypeVector nodeTypes;
//nodeTypes.push_back(pnl::CNodeType(true, 3));
//nodeTypes.push_back(pnl::CNodeType(true, 4));
pnl::nodeTypeVector nodeTypes(numNodeTypes);
nodeTypes[0].SetType(true, 3);
nodeTypes[1].SetType(true, 4);
pnl::intVector nodeAssociation(numNodes);
for (int i = 0; i < numNodes; ++i)
nodeAssociation[i] = i % 2;
pnl::CMRF2 *mrf2 = pnl::CMRF2::Create(numNodes, nodeTypes, nodeAssociation, cliques);
#endif
mrf2->AllocFactors();
for (int i = 0; i < numCliques; ++i)
mrf2->AllocFactor(i);
// get content of Graph
mrf2->GetGraph()->Dump();
//
const int numQueries = 13;
const int queryLength[] = { 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2 };
const int queries[13][2] =
{
{ 0 },
{ 1 },
{ 2 },
{ 3 },
{ 4 },
{ 5 },
{ 6 },
{ 0, 1 },
{ 1, 3 },
{ 1, 2 },
{ 4, 2 },
{ 2, 5 },
{ 6, 3 }
};
pnl::pFactorVector params;
for (int i = 0; i < numQueries; ++i)
{
mrf2->GetFactors(queryLength[i], queries[i], ¶ms);
// TODO [add] >>
params.clear();
}
#if 0
delete [] nodeAssociation;
delete [] nodeTypes;
#endif
return mrf2;
}
} // namespace local
} // unnamed namespace
namespace my_pnl {
void mrf_example()
{
// simple pairwise MRF
std::cout << "========== simple pairwise MRF" << std::endl;
{
const boost::scoped_ptr<pnl::CMRF2> mrf2(local::create_simple_pairwise_mrf());
if (!mrf2)
{
std::cout << "fail to create a probabilistic graphical model at " << __LINE__ << " in " << __FILE__ << std::endl;
return;
}
}
}
} // namespace my_pnl<|fim▁end|> | pnl::CMRF2 * create_simple_pairwise_mrf()
{
const int numNodes = 7;
|
<|file_name|>make_bb_spectrum_plot.py<|end_file_name|><|fim▁begin|>import ROOT
from math import pi, sqrt, pow, exp
import scipy.integrate
import numpy
from array import array
alpha = 7.2973e-3
m_e = 0.51099892
Z_Xe = 54
Q = 2.4578
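# Constants above are in MeV: m_e is the electron rest mass and Q is the
# double-beta-decay Q-value of 136Xe (Z_Xe = 54 is xenon's proton number).
# F(Z, KE) below appears to be an empirical fit to the Fermi function, the
# Coulomb correction for a beta electron of kinetic energy KE.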
def F(Z, KE):
E = KE + m_e
W = E/m_e
Z0 = Z + 2
if W <= 1:
W = 1 + 1e-4
if W > 2.2:
a = -8.46e-2 + 2.48e-2*Z0 + 2.37e-4*Z0**2
b = 1.15e-2 + 3.58e-4*Z0 - 6.17e-5*Z0**2
else:
a = -0.811 + 4.46e-2*Z0 + 1.08e-4*Z0**2
b = 0.673 - 1.82e-2*Z0 + 6.38e-5*Z0**2
x = sqrt(W-1)
p = sqrt(W**2 - 1)
if (p <= 0):
result = 1
else:
result = W/p*exp(a + b*x)
return result
def D(D, K, i):
Z = Z_Xe
T0 = Q/m_e
E1 = 0.5*(K+D) + 1<|fim▁hole|> p1 = sqrt(E1**2 - 1)
p2 = sqrt(E2**2 - 1)
T1 = E1 - 1
T2 = E2 - 1
return p1*E1*F(Z, T1*m_e)*p2*E2*F(Z, T2*m_e)*pow(T0 - K, i)
def SumSpectrum(K, i):
if K < 0:
return 0
elif K > Q:
return 0
a = -K/m_e
b = K/m_e
x = scipy.integrate.quad(D, a, b, (K/m_e, i))[0]
if x < 0:
return 0
else:
return x
def gauss_conv(x, y, res):
N = len(x)
mu = numpy.mean(x)
s = res*mu
gauss = [1.0/(s*sqrt(2*pi))*exp(-0.5*((a-mu)/s)**2) for a in x]
convolution = numpy.convolve(y, gauss,'same')
return convolution
def normalize(y, eps, f):
return [a*f for a in y]
N = 1000
min_E = 0.0
max_E = 1.2
E_scaled = array('d', numpy.linspace(min_E, max_E, N, False))
Es = array('d', (E*Q for E in E_scaled))
eps = (max_E - min_E)/N
bb0n = [0.5/eps if abs(E-Q)<eps else 0 for E in Es]
bb2n = [SumSpectrum(E, 5) for E in Es]
bb0n_smeared = gauss_conv(Es, bb0n, 0.02)
bb2n_smeared = gauss_conv(Es, bb2n, 0.02)
bb0n_int = scipy.integrate.simps(bb0n_smeared, None, eps)
bb0n_norm = array('d', normalize(bb0n_smeared, eps, 1e-2/bb0n_int))
bb2n_int = scipy.integrate.simps(bb2n_smeared, None, eps)
bb2n_norm = array('d', normalize(bb2n_smeared, eps, 1/bb2n_int))
g_bb0n = ROOT.TGraph(N, E_scaled, bb0n_norm)
g_bb0n.SetTitle("")
g_bb0n.SetLineStyle(ROOT.kDashed)
g_bb2n = ROOT.TGraph(N, E_scaled, bb2n_norm)
g_bb2n.SetTitle("")
bb0nX = []
bb0nX.append([0.5/eps if abs(E-Q)<eps else 0 for E in Es])
for i in [1, 2, 3, 5, 7]:
bb0nX.append([SumSpectrum(E, i) for E in Es])
bb0nX_graphs = []
for bb0nXn in bb0nX:
bb0nX_int = scipy.integrate.simps(bb0nXn, None, eps)
bb0nX_norm = array('d', normalize(bb0nXn, eps, 1/bb0nX_int))
g_bb0nX = ROOT.TGraph(N, E_scaled, bb0nX_norm)
bb0nX_graphs.append(g_bb0nX)
min_E = 0.9
max_E = 1.1
E_scaled_z = array('d', numpy.linspace(min_E, max_E, N, False))
Es_z = array('d', (E*Q for E in E_scaled_z))
eps_z = (max_E - min_E)/N
bb0n_z = [0.5/eps_z if abs(E-Q)<eps_z else 0 for E in Es_z]
bb2n_z = [SumSpectrum(E, 5) for E in Es_z]
bb0n_smeared_z = gauss_conv(Es_z, bb0n_z, 0.02)
bb2n_smeared_z = gauss_conv(Es_z, bb2n_z, 0.02)
bb0n_norm_z = array('d', normalize(bb0n_smeared_z, eps, 1e-6/bb0n_int))
bb2n_norm_z = array('d', normalize(bb2n_smeared_z, eps, 1.0/bb2n_int))
g_bb0n_z = ROOT.TGraph(N, E_scaled_z, bb0n_norm_z)
g_bb0n_z.SetTitle("")
g_bb0n_z.SetLineStyle(ROOT.kDashed)
g_bb2n_z = ROOT.TGraph(N, E_scaled_z, bb2n_norm_z)
g_bb2n_z.SetTitle("")
#print("bb0n %f"%(sum((y*eps for y in bb0n_norm))))
#print("bb2n %f"%(sum((y*eps for y in bb2n_norm))))
c_both = ROOT.TCanvas("c_both","c_both")
p = ROOT.TPad("p", "p", 0, 0, 1, 1)
p.SetRightMargin(0.02)
p.SetTopMargin(0.02)
p.Draw()
p.cd()
g_bb2n.Draw("AL")
g_bb0n.Draw("L")
g_bb2n.GetYaxis().SetTitle("dN/dE")
g_bb2n.GetXaxis().SetTitle("Sum e^{-} Energy (E/Q)")
c_both.cd()
p_inset = ROOT.TPad("p_inset","p_inset",0.5, 0.5, 0.995, 0.995)
p_inset.SetRightMargin(0.05)
p_inset.SetTopMargin(0.05)
p_inset.Draw()
p_inset.cd()
g_bb2n_z.Draw("AL")
g_bb0n_z.Draw("L")
g_bb2n_z.GetYaxis().SetTitle("dN/dE")
g_bb2n_z.GetXaxis().SetTitle("Sum e^{-} Energy (E/Q)")
g_bb2n_z.GetYaxis().SetNoExponent(False)
# Zoom in so we can't see edge effects of the convolution
g_bb2n_z.GetXaxis().SetRangeUser(1-0.25*(1-min_E), 1+0.25*(max_E-1))
g_bb2n_z.GetYaxis().SetRangeUser(0, 0.0004)
c_z = ROOT.TCanvas("c_z","c_z")
c_z.SetRightMargin(0.05)
c_z.SetTopMargin(0.05)
g_bb2n_z.Draw("AL")
g_bb0n_z.Draw("L")
c = ROOT.TCanvas("c","c")
c.SetRightMargin(0.05)
c.SetTopMargin(0.05)
g_bb2n.Draw("AL")
g_bb0n.Draw("L")
c_majoron = ROOT.TCanvas("c_majoron")
c_majoron.SetRightMargin(0.05)
c_majoron.SetTopMargin(0.05)
colors = [ROOT.kBlack, ROOT.kRed, ROOT.kGreen, ROOT.kBlue,
ROOT.kMagenta, ROOT.kCyan]
draw_opt = "AL"
for i in xrange(len(bb0nX_graphs)):
bb0nX_graphs[-(i+1)].SetLineColor(colors[-(i+1)])
bb0nX_graphs[-(i+1)].Draw(draw_opt)
draw_opt = "L"
# Draw bb0n last so it doesn't scale others to 0
bb0nX_graphs[-1].SetTitle("")
bb0nX_graphs[-1].GetXaxis().SetRangeUser(0, 1.1)
bb0nX_graphs[-1].GetXaxis().SetTitle("Sum e^{-} Energy (E/Q)")
bb0nX_graphs[-1].GetYaxis().SetTitle("dN/dE")
l_majoron = ROOT.TLegend(0.45, 0.77, 0.85, 0.94)
l_majoron.SetFillColor(ROOT.kWhite)
l_majoron.SetNColumns(2)
l_majoron.AddEntry(bb0nX_graphs[0], "0#nu#beta#beta", "l")
l_majoron.AddEntry(bb0nX_graphs[1], "0#nu#beta#beta#chi^{0} (n=1)", "l")
l_majoron.AddEntry(bb0nX_graphs[4], "2#nu#beta#beta (n=5)", "l")
l_majoron.AddEntry(bb0nX_graphs[2], "0#nu#beta#beta#chi^{0} (n=2)", "l")
l_majoron.AddEntry(None, "", "")
l_majoron.AddEntry(bb0nX_graphs[3], "0#nu#beta#beta#chi^{0}(#chi^{0}) (n=3)", "l")
l_majoron.AddEntry(None, "", "")
l_majoron.AddEntry(bb0nX_graphs[5], "0#nu#beta#beta#chi^{0}#chi^{0} (n=7)", "l")
l_majoron.Draw()
dummy = raw_input("Press Enter...")<|fim▁end|> E2 = 0.5*(K-D) + 1
|
<|file_name|>Geo.tsx<|end_file_name|><|fim▁begin|>import {getConfig} from "../../helpers/getConfig";
import {Post} from "../facebook/Post";
import {getQuestionTitleByType} from "../../helpers/getQuestionTitleByType";
import * as Model from "../../../common/models/questions/geolocation/Marker";
import {Button} from "react-bootstrap";
import {QuestionProps} from "./QuestionProps";
import * as _ from "lodash";
import {Location} from "../../../common/models/questions/geolocation/Location";
import {GeoAnswer} from "../../../common/models/questions/answers/GeoAnswer";
import {Col} from 'react-bootstrap';
import {GeoNameEntity} from "../../../common/models/GeoNameEntity";
import {GeoMarker} from "./GeoMarker";
import {Marker} from "../../../common/models/questions/geolocation/Marker";
import {GoogleMap} from "../GoogleMap";
const Loader = require('react-loader');
const Autosuggest = require('react-autosuggest');
interface Configuration {
zoom: number;
apiKey: string;
sensor: boolean;
marker: any;
}
interface GeoProps extends QuestionProps {
defaultLocation: Model.Marker;
userAnswer?: GeoAnswer;
answer?: GeoAnswer;
correct?: boolean;
}
interface GeoState {
value?: string;
place?: string;<|fim▁hole|> countryCode?: string;
conf?: Configuration;
suggestions?: GeoNameEntity[];
isLoading?: boolean;
latitude?: number;
longitude?: number;
selectedSuggestion?: GeoNameEntity;
locationText?: string;
zoom?: number;
markers?: Model.Marker[];
map?: google.maps.Map;
}
const selectedZoomlevel = 12;
const fullyZoomedOut = 1;
export class Geo extends React.Component<GeoProps, GeoState> {
private userMarker: Model.Marker;
private conf: Configuration;
constructor(props: GeoProps) {
super(props);
this.state = {
value: '',
place: '',
countryCode: null,
suggestions: [],
isLoading: false,
latitude: 0,
longitude: 0,
selectedSuggestion: null,
zoom: fullyZoomedOut,
markers: []
};
this.conf = getConfig('gmaps');
}
componentWillReceiveProps(props: GeoProps) {
this.setState({
latitude: 0,
longitude: 0,
place: '',
suggestions: [],
isLoading: false,
selectedSuggestion: null,
zoom: fullyZoomedOut,
markers: []
});
}
componentDidMount() {
}
loadSuggestions(place: string, countryCode?: string) {
this.setState({
isLoading: true
});
Meteor.call('Geolocation.getSuggestions', place, countryCode, function (error: Meteor.Error, result: GeoNameEntity[]) {
if (place === this.state.place && countryCode === this.state.countryCode) {
this.setState({
isLoading: false,
suggestions: result
});
}
}.bind(this));
}
onChange(event, {newValue}) {
const {place, countryCode} = this.decomposeEntry(newValue);
this.setState({
value: newValue,
place: place,
countryCode: countryCode
});
}
onSuggestionsUpdateRequested({value}) {
const {place, countryCode} = this.decomposeEntry(value);
this.loadSuggestions(place, countryCode);
}
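// Splits free-text input on the first comma, e.g. "Zurich, CH" becomes
// { place: "Zurich", countryCode: "CH" }; without a comma the country code is null.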
decomposeEntry(entry: string): {place: string, countryCode: string} {
const entries: string[] = entry.split(',');
const place = entries[0].trim();
const countryCode = entries.length > 1 ? entries[1].trim() : null;
return {place: place, countryCode: countryCode};
}
getSuggestionValue(suggestion: GeoNameEntity) {
return `${suggestion.name}, ${suggestion.countryCode}`;
}
renderSuggestion(suggestion: GeoNameEntity) {
return (
<span className="location-suggestion">{suggestion.name}, <span className="admin">{suggestion.countryCode}</span></span>
);
}
onSuggestionSelected(event, {suggestion, suggestionValue, sectionIndex, method}) {
const entity: GeoNameEntity = suggestion;
this.setState({
selectedSuggestion: entity,
latitude: suggestion.latitude,
longitude: suggestion.longitude,
zoom: selectedZoomlevel,
markers: [new Marker(suggestion.latitude, suggestion.longitude)]
});
this.getLocationText(entity.latitude, entity.longitude).then((text) => this.setState({locationText: text}));
}
render() {
if (this.props.userAnswer) {
return this.renderAnswer();
} else {
return this.renderQuestions();
}
}
renderQuestions() {
const {value, suggestions} = this.state;
const inputProps = {
placeholder: 'Type a location',
value,
onChange: this.onChange.bind(this)
};
const lat = this.state.latitude ? this.state.latitude : 0;
const long = this.state.longitude ? this.state.longitude : 0;
return (
<div className="question question-geo">
<h4>{getQuestionTitleByType(this.props.type.toString()) }</h4>
<Col sm={12}>
<Post post={this.props.subject}/>
</Col>
<Col sm={12}>
<Autosuggest suggestions={suggestions}
onSuggestionsUpdateRequested={this.onSuggestionsUpdateRequested.bind(this)}
getSuggestionValue={this.getSuggestionValue.bind(this)}
renderSuggestion={this.renderSuggestion.bind(this)}
onSuggestionSelected={this.onSuggestionSelected.bind(this)}
inputProps={inputProps}
/>
<Loader loaded={!this.state.isLoading} scale={0.5} left="93%"/>
</Col>
<Col sm={12}>
<div className="map">
<GoogleMap latitude={lat} longitude={long}
zoom={this.state.zoom}
width={"auto"} height={250}
apiKey={this.conf.apiKey}
onClick={this.onMapClick.bind(this)}
markers={this.state.markers}
/>
</div>
<Button onClick={this.onDone.bind(this) }>Done</Button>
</Col>
</div>
);
}
renderAnswer() {
const markers = [new Marker(this.props.answer.data.latitude, this.props.answer.data.longitude, "green")];
const lat = this.state.latitude ? this.state.latitude : 0;
const long = this.state.longitude ? this.state.longitude : 0;
let styleName = "correct-geo";
if (!this.props.correct) {
markers.push(new Marker(this.props.userAnswer.data.latitude, this.props.userAnswer.data.longitude, "red"))
styleName = "wrong-geo";
}
return (
<div className="question question-geo">
<h4>{getQuestionTitleByType(this.props.type.toString()) }</h4>
<Col sm={12}>
<Post post={this.props.subject}/>
</Col>
<Col sm={12}>
<div className={`map ${styleName}`}>
<GoogleMap latitude={lat} longitude={long}
zoom={this.state.zoom}
width={"auto"} height={250}
apiKey={this.conf.apiKey}
onClick={this.onMapClick.bind(this)}
markers={markers}
/>
</div>
</Col>
</div>
);
}
getLocationText(lat: number, lng: number) {
return new Promise<string>((resolve, reject) =>
Meteor.call('Geolocation.getLocationName', new Marker(lat, lng), (error: Meteor.Error, result: string) => {
if (!error) {
resolve(result)
} else {
reject(error)
}
}));
}
onDone(e) {
if (this.state.markers.length > 0) {
const marker = this.state.markers[0];
this.props.onDone(new Location(marker.latitude, marker.longitude));
this.setState({
markers: [],
value: ''
});
}
}
onMapClick(lat: number, lng: number, zoom: number, center: google.maps.LatLng) {
// if the user hasn't answered the question yet, we create or move the marker on the map
if (!this.props.userAnswer) {
const markers = this.state.markers;
if (markers.length == 0) {
markers.push(new Marker(lat, lng));
} else {
markers[0].latitude = lat;
markers[0].longitude = lng;
}
this.setState({
latitude: center.lat(),
longitude: center.lng(),
markers: markers,
zoom: zoom
});
}
}
}<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright 2015-2017 Intecture Developers.
//
// Licensed under the Mozilla Public License 2.0 <LICENSE or
// https://www.tldrlegal.com/l/mpl-2.0>. This file may not be copied,
// modified, or distributed except according to those terms.
//! OS abstractions for `Telemetry`.
mod centos;
mod debian;
mod fedora;
mod freebsd;
mod macos;
mod nixos;
mod ubuntu;
pub use self::centos::Centos;<|fim▁hole|>pub use self::fedora::Fedora;
pub use self::freebsd::Freebsd;
pub use self::macos::Macos;
pub use self::nixos::Nixos;
pub use self::ubuntu::Ubuntu;
use errors::*;
use futures::Future;
use super::Telemetry;
pub trait TelemetryProvider {
fn available() -> bool where Self: Sized;
fn load(&self) -> Box<Future<Item = Telemetry, Error = Error>>;
}
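// Probes the distros in the order below and returns the first provider whose
// `available()` check passes.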
#[doc(hidden)]
pub fn factory() -> Result<Box<TelemetryProvider>> {
if Centos::available() {
Ok(Box::new(Centos))
}
else if Debian::available() {
Ok(Box::new(Debian))
}
else if Fedora::available() {
Ok(Box::new(Fedora))
}
else if Freebsd::available() {
Ok(Box::new(Freebsd))
}
else if Macos::available() {
Ok(Box::new(Macos))
}
else if Nixos::available() {
Ok(Box::new(Nixos))
}
else if Ubuntu::available() {
Ok(Box::new(Ubuntu))
} else {
Err(ErrorKind::ProviderUnavailable("Telemetry").into())
}
}<|fim▁end|> | pub use self::debian::Debian; |
<|file_name|>collection-versions.js<|end_file_name|><|fim▁begin|>const jwt = require("jwt-simple");
const co = require('co');
const config = require('../config');
const dbX = require('../db');
const coForEach = require('co-foreach');
<|fim▁hole|> let token = socket.handshake.query.token;
// let isReconnect = socket.handshake.query.isReconnect;
// console.log('isReconnect:', isReconnect);
let decoded = null;
try {
decoded = jwt.decode(token, config.jwtSecret);
} catch(error) {
switch (error.message) { // jwt-simple signals failures through the Error message
case 'Signature verification failed':
return next(new Error('authentication error: the jwt has been falsified'));
case 'Token expired':
return next(new Error('authentication error: the jwt has been expired'));
}
}
console.log('decoded:', decoded);
return next();
})
//
collectionVersionsNS.on('connection', (socket) => {
// const roomId = socket.client.id;
console.log(`${new Date()}: ${socket.client.id} connected to socket /collectionVersions`);
socket.on('clientCollectionVersions', (data) => {
const versionsClient = data['versions'];
co(function*() {
const db = yield dbX.dbPromise;
const versionsLatest = yield db.collection('versions').find({}).toArray();
const clientCollectionUpdates = {};
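// Compare each client-reported collection version against the latest server
// version and collect the stale collections (keyed by collection name) so
// fresh data can be attached below.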
// console.log('versionsClient', versionsClient);
versionsClient.reduce((acc, curr) => {
switch (true) {
case curr['collection'] === 'gd': // prices is called gd at client
const pricesVersionLatest = versionsLatest.find(v => v['collection'] === 'prices');
if (curr['version'] !== pricesVersionLatest['version']) {
acc['gd'] = {version: pricesVersionLatest['version']};
}
break;
default:
const versionLatest = versionsLatest.find(v => {
return v['collection'] === curr['collection'];
});
if (curr['version'] !== versionLatest['version']) {
acc[curr['collection']] = {version: versionLatest['version']};
}
}
return acc;
}, clientCollectionUpdates);
const hasUpdates = Object.keys(clientCollectionUpdates).length;
if (hasUpdates) {
const collectionsToUpdate = Object.keys(clientCollectionUpdates);
// types, titles, staffs
yield coForEach(Object.keys(clientCollectionUpdates), function*(k) {
console.log('adding to clientCollectionUpdates:', k);
switch (k) {
case 'gd':
clientCollectionUpdates[k]['data'] = JSON.stringify(yield db.collection('prices').find({}, {
createdAt: 0, createdBy: 0, modifiedAt: 0, modifiedBy: 0
}).toArray());
break;
default:
// need two stringifies, otherwise, error at heroku without details
clientCollectionUpdates[k]['data'] = [{a: 1}];
// clientCollectionUpdates[k]['data'] = JSON.stringify(JSON.stringify(yield db.collection(k).find({}).toArray()));
}
});
socket.emit('collectionUpdate', clientCollectionUpdates);
} else {
socket.send({message: 'all collections up-to-date'});
}
}).catch(error => {
console.log(error.stack);
socket.emit('error', {
error: error.stack
})
})
})
// after connection, client sends collectionVersions, then server compares
// each time a collection is updated, update its version in the 'versions' collection
})
}<|fim▁end|> | module.exports = (io) => {
const collectionVersionsNS = io.of('/collectionVersions');
collectionVersionsNS.use((socket, next) => { |
<|file_name|>0003_convert_recomended_articles.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from molo.core.models import ArticlePage, ArticlePageRecommendedSections
from wagtail.wagtailcore.blocks import StreamValue
def create_recomended_articles(main_article, article_list):
'''
Creates recommended article objects from article_list
and _prepends_ to existing recommended articles.
'''
existing_recommended_articles = [
ra.recommended_article.specific
for ra in main_article.recommended_articles.all()]
ArticlePageRecommendedSections.objects.filter(page=main_article).delete()
for hyperlinked_article in article_list:
ArticlePageRecommendedSections(
page=main_article,
recommended_article=hyperlinked_article).save()
# re-create existing recommended articles
for article in existing_recommended_articles:
if article not in article_list:
ArticlePageRecommendedSections(
page=main_article,
recommended_article=article).save()
def convert_articles(apps, schema_editor):
'''
Derived from https://github.com/wagtail/wagtail/issues/2110
'''
articles = ArticlePage.objects.all().exact_type(ArticlePage)
for article in articles:
stream_data = []
linked_articles = []
for block in article.body.stream_data:
if block['type'] == 'page':
if ArticlePage.objects.filter(id=block['value']):
linked_articles.append(ArticlePage.objects.get(
id=block['value']))
else:
# add block to new stream_data
stream_data.append(block)
if linked_articles:
create_recomended_articles(article, linked_articles)
stream_block = article.body.stream_block
article.body = StreamValue(stream_block, stream_data, is_lazy=True)
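# is_lazy=True keeps stream_data as raw dicts that Wagtail only deserializes
# into block values on first access.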
article.save()
section = article.get_parent().specific
section.enable_recommended_section = True
section.enable_next_section = True
section.save()
class Migration(migrations.Migration):
dependencies = [
('iogt', '0002_create_importers_group'),
]
operations = [
migrations.RunPython(convert_articles),<|fim▁hole|><|fim▁end|> | ] |
<|file_name|>noParameterPropertiesRule.d.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright 2013 Palantir Technologies, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.<|fim▁hole|> * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as ts from "typescript";
import * as Lint from "../index";
export declare class Rule extends Lint.Rules.AbstractRule {
static metadata: Lint.IRuleMetadata;
static FAILURE_STRING_FACTORY(ident: string): string;
apply(sourceFile: ts.SourceFile): Lint.RuleFailure[];
}<|fim▁end|> | * You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* |
<|file_name|>server.js<|end_file_name|><|fim▁begin|>var util = require('util');
var async = require('async');
var path = require('path');
var Router = require('../utils/router.js');
var sandboxHelper = require('../utils/sandbox.js');
// Private fields
var modules, library, self, private = {}, shared = {};
private.loaded = false
// Constructor
function Server(cb, scope) {
library = scope;
self = this;
self.__private = private;
private.attachApi();
setImmediate(cb, null, self);
}
// Private methods
private.attachApi = function() {
var router = new Router();
router.use(function (req, res, next) {
if (modules) return next();<|fim▁hole|> });
router.get('/', function (req, res) {
if (private.loaded) {
res.render('wallet.html', {layout: false});
} else {
res.render('index.html');
}
});
router.get('/dapps/:id', function (req, res) {
res.render('dapps/' + req.params.id + '/index.html');
});
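// Catch-all: any request that is neither an /api/ nor a /peer/ endpoint is
// redirected back to the wallet root.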
router.use(function (req, res, next) {
if (req.url.indexOf('/api/') == -1 && req.url.indexOf('/peer/') == -1) {
return res.redirect('/');
}
next();
// res.status(500).send({ success: false, error: 'api not found' });
});
library.network.app.use('/', router);
}
// Public methods
Server.prototype.sandboxApi = function (call, args, cb) {
sandboxHelper.callMethod(shared, call, args, cb);
}
// Events
Server.prototype.onBind = function (scope) {
modules = scope;
}
Server.prototype.onBlockchainReady = function () {
private.loaded = true;
}
Server.prototype.cleanup = function (cb) {
private.loaded = false;
cb();
}
// Shared
// Export
module.exports = Server;<|fim▁end|> | res.status(500).send({success: false, error: "Blockchain is loading"}); |
<|file_name|>notifications.rs<|end_file_name|><|fim▁begin|>use std::path::Path;
use std::fmt::{self, Display};
use url::Url;
use notify::NotificationLevel;
#[derive(Debug)]
pub enum Notification<'a> {
CreatingDirectory(&'a str, &'a Path),
LinkingDirectory(&'a Path, &'a Path),
CopyingDirectory(&'a Path, &'a Path),
RemovingDirectory(&'a str, &'a Path),
DownloadingFile(&'a Url, &'a Path),
/// Received the Content-Length of the to-be downloaded data.
DownloadContentLengthReceived(u64),
/// Received some data.
DownloadDataReceived(&'a [u8]),
/// Download has finished.
DownloadFinished,
NoCanonicalPath(&'a Path),
ResumingPartialDownload,
UsingCurl,
UsingHyper,
UsingRustls,
}
impl<'a> Notification<'a> {
pub fn level(&self) -> NotificationLevel {
use self::Notification::*;
match *self {
CreatingDirectory(_, _) | RemovingDirectory(_, _) => NotificationLevel::Verbose,
LinkingDirectory(_, _) |
CopyingDirectory(_, _) |
DownloadingFile(_, _) |
DownloadContentLengthReceived(_) |
DownloadDataReceived(_) |
DownloadFinished |
ResumingPartialDownload |
UsingCurl | UsingHyper | UsingRustls => NotificationLevel::Verbose,
NoCanonicalPath(_) => NotificationLevel::Warn,
}
}
}
impl<'a> Display for Notification<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> ::std::result::Result<(), fmt::Error> {
use self::Notification::*;
match *self {<|fim▁hole|> }
LinkingDirectory(_, dest) => write!(f, "linking directory to: '{}'", dest.display()),
CopyingDirectory(src, _) => write!(f, "copying directory from: '{}'", src.display()),
RemovingDirectory(name, path) => {
write!(f, "removing {} directory: '{}'", name, path.display())
}
DownloadingFile(url, _) => write!(f, "downloading file from: '{}'", url),
DownloadContentLengthReceived(len) => write!(f, "download size is: '{}'", len),
DownloadDataReceived(data) => write!(f, "received some data of size {}", data.len()),
DownloadFinished => write!(f, "download finished"),
NoCanonicalPath(path) => write!(f, "could not canonicalize path: '{}'", path.display()),
ResumingPartialDownload => write!(f, "resuming partial download"),
UsingCurl => write!(f, "downloading with curl"),
UsingHyper => write!(f, "downloading with hyper + native_tls"),
UsingRustls => write!(f, "downloading with hyper + rustls"),
}
}
}<|fim▁end|> | CreatingDirectory(name, path) => {
write!(f, "creating {} directory: '{}'", name, path.display()) |
<|file_name|>planetName.py<|end_file_name|><|fim▁begin|>def get_planet_name(id):
switch = {
1: "Mercury",<|fim▁hole|> 6: "Saturn",
7: "Uranus" ,
8: "Neptune"}
return switch[id]<|fim▁end|> | 2: "Venus",
3: "Earth",
4: "Mars",
5: "Jupiter", |
<|file_name|>footer.component.ts<|end_file_name|><|fim▁begin|>import { Component } from '@angular/core';
import { TodoStore } from '../shared/services/todo.store';
import { EmitterService } from '../shared/services/emitter.service';
import {
LogMethod,
LogProperty,
LogPropertyWithArgs,
LogClass,
LogClassWithArgs
} from '../shared/decorators/log.decorator';
import { FooterComponentSchema } from './footer-component.metadata';
<|fim▁hole|> */
@LogClassWithArgs('toto')
@Component(FooterComponentSchema)
export class FooterComponent {
/**
* Local reference of TodoStore
*/
todoStore: TodoStore;
/**
* Local id for EmitterService
*/
@LogProperty
id: string = 'FooterComponent';
/**
* Starting filter param
*/
@LogPropertyWithArgs('theCurrentFilter')
currentFilter: string = 'all';
/**
* The "constructor"
*
* @param {TodoStore} todoStore A TodoStore -> see {@link TodoStore}
*/
constructor(todoStore: TodoStore) {
this.todoStore = todoStore;
}
/**
* Removes all the completed todos
*/
@LogMethod
removeCompleted() {
this.todoStore.removeCompleted();
}
/**
* Display only completed todos
*/
displayCompleted() {
this.currentFilter = 'completed';
EmitterService.get(this.id).emit('displayCompleted');
}
/**
* Display only remaining todos
*/
displayRemaining() {
this.currentFilter = 'remaining';
EmitterService.get(this.id).emit('displayRemaining');
}
/**
* Display all todos
*/
displayAll() {
this.currentFilter = 'all';
EmitterService.get(this.id).emit('displayAll');
}
}<|fim▁end|> | /**
* The footer component |
<|file_name|>Image.py<|end_file_name|><|fim▁begin|># coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from extensions.rich_text_components import base
<|fim▁hole|>
name = 'Image'
category = 'Basic Input'
description = 'An image.'
frontend_name = 'image'
tooltip = 'Insert image'
_customization_arg_specs = [{
'name': 'filepath',
'description': (
'The name of the image file. (Allowed extensions: gif, jpeg, jpg, '
'png.)'),
'schema': {
'type': 'custom',
'obj_type': 'Filepath',
},
'default_value': '',
}, {
'name': 'alt',
'description': 'Alt text (for screen readers)',
'schema': {
'type': 'unicode',
},
'default_value': '',
}]
icon_data_url = (
'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAA'
'ABGdBTUEAAK/INwWK6QAAABl0RVh0%0AU29mdHdhcmUAQWRvYmUgSW1hZ2VSZWFkeXHJZ'
'TwAAAHwSURBVDjLpZM9a1RBFIafM/fevfcmC7uQ%0AjWEjUZKAYBHEVEb/gIWFjVVSWEj'
'6gI0/wt8gprPQykIsTP5BQLAIhBVBzRf52Gw22bk7c8YiZslu%0AgggZppuZ55z3nfdIC'
'IHrrBhg%2BePaa1WZPyk0s%2B6KWwM1khiyhDcvns4uxQAaZOHJo4nRLMtEJPpn%0AxY6'
'Cd10%2BfNl4DpwBTqymaZrJ8uoBHfZoyTqTYzvkSRMXlP2jnG8bFYbCXWJGePlsEq8iPQ'
'mFA2Mi%0AjEBhtpis7ZCWftC0LZx3xGnK1ESd741hqqUaqgMeAChgjGDDLqXkgMPTJtZ3'
'KJzDhTZpmtK2OSO5%0AIRB6xvQDRAhOsb5Lx1lOu5ZCHV4B6RLUExvh4s%2BZntHhDJAx'
'Sqs9TCDBqsc6j0iJdqtMuTROFBkI%0AcllCCGcSytFNfm1tU8k2GRo2pOI43h9ie6tOvT'
'JFbORyDsJFQHKD8fw%2BP9dWqJZ/I96TdEa5Nb1A%0AOavjVfti0dfB%2Bt4iXhWvyh27'
'y9zEbRRobG7z6fgVeqSoKvB5oIMQEODx7FLvIJo55KS9R7b5ldrD%0AReajpC%2BZ5z7G'
'AHJFXn1exedVbG36ijwOmJgl0kS7lXtjD0DkLyqc70uPnSuIIwk9QCmWd%2B9XGnOF%0A'
'DzP/M5xxBInhLYBcd5z/AAZv2pOvFcS/AAAAAElFTkSuQmCC%0A'
)<|fim▁end|> | class Image(base.BaseRichTextComponent):
"""A rich-text component representing an inline image.""" |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>"""config URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^cards/', include('cards.urls')),
url(r'^tournaments/', include('tournaments.urls')),<|fim▁hole|><|fim▁end|> | url(r'^stats/', include('stats.urls'))
] |
<|file_name|>htmlappletelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::HTMLAppletElementBinding;
use dom::bindings::codegen::InheritTypes::HTMLAppletElementDerived;
use dom::bindings::js::JS;
use dom::bindings::error::ErrorResult;
use dom::document::Document;
use dom::element::HTMLAppletElementTypeId;
use dom::eventtarget::{EventTarget, NodeTargetTypeId};
use dom::htmlelement::HTMLElement;
use dom::node::{Node, ElementNodeTypeId};
use servo_util::str::DOMString;
#[deriving(Encodable)]
pub struct HTMLAppletElement {
htmlelement: HTMLElement
}
impl HTMLAppletElementDerived for EventTarget {
fn is_htmlappletelement(&self) -> bool {
match self.type_id {
NodeTargetTypeId(ElementNodeTypeId(HTMLAppletElementTypeId)) => true,
_ => false
}
}
}
impl HTMLAppletElement {
pub fn new_inherited(localName: DOMString, document: JS<Document>) -> HTMLAppletElement {
HTMLAppletElement {
htmlelement: HTMLElement::new_inherited(HTMLAppletElementTypeId, localName, document)
}
}
pub fn new(localName: DOMString, document: &JS<Document>) -> JS<HTMLAppletElement> {
let element = HTMLAppletElement::new_inherited(localName, document.clone());
Node::reflect_node(~element, document, HTMLAppletElementBinding::Wrap)
}
}
impl HTMLAppletElement {
pub fn Align(&self) -> DOMString {
~""
}
pub fn SetAlign(&mut self, _align: DOMString) -> ErrorResult {
Ok(())
}
pub fn Alt(&self) -> DOMString {
~""
}
pub fn SetAlt(&self, _alt: DOMString) -> ErrorResult {
Ok(())
}
pub fn Archive(&self) -> DOMString {
~""
}
pub fn SetArchive(&self, _archive: DOMString) -> ErrorResult {
Ok(())
}
pub fn Code(&self) -> DOMString {
~""
}
pub fn SetCode(&self, _code: DOMString) -> ErrorResult {
Ok(())
}
pub fn CodeBase(&self) -> DOMString {
~""
}
pub fn SetCodeBase(&self, _code_base: DOMString) -> ErrorResult {
Ok(())
}
pub fn Height(&self) -> DOMString {
~""
}
pub fn SetHeight(&self, _height: DOMString) -> ErrorResult {
Ok(())
}
pub fn Hspace(&self) -> u32 {
0
}<|fim▁hole|>
pub fn SetHspace(&mut self, _hspace: u32) -> ErrorResult {
Ok(())
}
pub fn Name(&self) -> DOMString {
~""
}
pub fn SetName(&mut self, _name: DOMString) -> ErrorResult {
Ok(())
}
pub fn Object(&self) -> DOMString {
~""
}
pub fn SetObject(&mut self, _object: DOMString) -> ErrorResult {
Ok(())
}
pub fn Vspace(&self) -> u32 {
0
}
pub fn SetVspace(&mut self, _vspace: u32) -> ErrorResult {
Ok(())
}
pub fn Width(&self) -> DOMString {
~""
}
pub fn SetWidth(&mut self, _width: DOMString) -> ErrorResult {
Ok(())
}
}<|fim▁end|> | |
<|file_name|>Channeling.ts<|end_file_name|><|fim▁begin|>import SPELLS from 'common/SPELLS';
import { AbilityEvent, Ability, CastEvent, EndChannelEvent } from 'parser/core/Events';
import { Options } from 'parser/core/Module';
import CoreChanneling from 'parser/shared/modules/Channeling';
import Penance from '../spells/Penance';
const PENANCE_MINIMUM_RECAST_TIME = 3500; // Minimum duration from one Penance to Another
const debug = false;
class Channeling extends CoreChanneling {
_previousPenanceTimestamp = this.owner.fight.start_time - PENANCE_MINIMUM_RECAST_TIME;
_hasCastigation: boolean = false;
_bolt: number = 0;
constructor(options: Options) {
super(options);
this._hasCastigation = this.selectedCombatant.hasTalent(SPELLS.CASTIGATION_TALENT.id);
}
isNewPenanceCast(timestamp: number) {
return timestamp - this._previousPenanceTimestamp > PENANCE_MINIMUM_RECAST_TIME;
}
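// Penance emits one cast event per bolt; any Penance cast inside the minimum
// recast window is therefore treated as another bolt of the current channel
// rather than the start of a new one.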
onCast(event: CastEvent) {
if (!Penance.isPenance(event.ability.guid)) {
super.onCast(event);
return;
}
// Handle the first bolt of each cast
if (this.isNewPenanceCast(event.timestamp)) {
this._bolt = 0;
this._previousPenanceTimestamp = event.timestamp;
this.beginChannel(event, {
guid: SPELLS.PENANCE.id,
name: SPELLS.PENANCE.name,
type: 2,
abilityIcon: SPELLS.PENANCE.icon,
});
return;
}
// Handle following bolts
const maxBolts = this._hasCastigation ? 4 : 3;
this._bolt += 1;
// Bolt is 0 indexed, so we've fired all bolts when _bolt equals maxBolts - 1
if (this._bolt === maxBolts - 1) {
debug && console.log('Ending Penance channel due to last bolt being fired');
this.endChannel(event);
}
}
cancelChannel(event: EndChannelEvent, ability: Ability) {
if (this.isChannelingSpell(SPELLS.PENANCE.id)) {<|fim▁hole|> ((this._currentChannel as unknown) as AbilityEvent<any> | null)?.ability.name,
'as ended since we started casting something else:',
event.ability.name,
);
this.endChannel(event);
} else {
super.cancelChannel(event, ability);
}
}
}
export default Channeling;<|fim▁end|> | // If a channeling spell is "canceled" it was actually just ended, so if it looks canceled then instead just mark it as ended
debug &&
this.debug(
'Marking', |
<|file_name|>gregorian.js.uncompressed.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1
oid sha256:07e25b6c05d06d085c2840d85f2966476dc38544be904c978d7c66dbe688decb<|fim▁hole|><|fim▁end|> | size 4672 |
<|file_name|>dev.ts<|end_file_name|><|fim▁begin|>'use strict';
import { readFileAsString } from '../helpers/readFileAsString';
(function () {
const JavaScriptObfuscator: any = require('../../index');<|fim▁hole|> {
identifierNamesGenerator: 'mangled',
compact: false,
stringArray: true,
seed: 429105580
}
);
let obfuscatedCode: string = obfuscationResult.getObfuscatedCode();
let identifierNamesCache = obfuscationResult.getIdentifierNamesCache();
console.log(obfuscatedCode);
console.log(eval(obfuscatedCode));
console.log(identifierNamesCache);
})();<|fim▁end|> | const code: string = readFileAsString(__dirname + '/../functional-tests/javascript-obfuscator/fixtures/custom-nodes-identifier-names-collision.js');
let obfuscationResult = JavaScriptObfuscator.obfuscate(
code, |
<|file_name|>vmhc.py<|end_file_name|><|fim▁begin|>import sys
import os
import commands
import nipype.pipeline.engine as pe
import nipype.algorithms.rapidart as ra
import nipype.interfaces.fsl as fsl
import nipype.interfaces.io as nio
import nipype.interfaces.utility as util
from utils import *
from CPAC.vmhc import *
from nipype.interfaces.afni import preprocess
from CPAC.registration import create_wf_calculate_ants_warp, \
create_wf_c3d_fsl_to_itk, \
create_wf_collect_transforms, \
create_wf_apply_ants_warp
def create_vmhc(use_ants):
"""
Compute the map of brain functional homotopy, the high degree of synchrony in spontaneous activity between geometrically corresponding interhemispheric (i.e., homotopic) regions.
Parameters
----------
None
Returns
-------
vmhc_workflow : workflow
Voxel Mirrored Homotopic Connectivity Analysis Workflow
Notes
-----
`Source <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/vmhc/vmhc.py>`_
Workflow Inputs::
inputspec.brain : string (existing nifti file)
Anatomical image (without skull)
inputspec.brain_symmetric : string (existing nifti file)
MNI152_T1_2mm_brain_symmetric.nii.gz
inputspec.rest_res_filt : string (existing nifti file)
Band-passed image with nuisance signal regressed out (and optionally scrubbed). Recommended bandpass filter: (0.001, 0.1)
inputspec.reorient : string (existing nifti file)
RPI oriented anatomical data
inputspec.example_func2highres_mat : string (existing affine transformation .mat file)
Specifies an affine transform that should be applied to the example_func before non linear warping
inputspec.standard : string (existing nifti file)
MNI152_T1_standard_resolution_brain.nii.gz
inputspec.symm_standard : string (existing nifti file)
MNI152_T1_2mm_symmetric.nii.gz
inputspec.twomm_brain_mask_dil : string (existing nifti file)
MNI152_T1_2mm_brain_mask_symmetric_dil.nii.gz
inputspec.config_file_twomm_symmetric : string (existing .cnf file)
T1_2_MNI152_2mm_symmetric.cnf
inputspec.rest_mask : string (existing nifti file)
A mask functional volume (derived by dilation from the motion-corrected functional volume)
fwhm_input.fwhm : list (float)
For spatial smoothing the Z-transformed correlations in MNI space.
Generally the value of this parameter is 1.5 or 2 times the voxel size of the input Image.
inputspec.mean_functional : string (existing nifti file)
The mean functional image for use in the func-to-anat registration matrix conversion
to ITK (ANTS) format, if the user selects to use ANTS.
Workflow Outputs::
outputspec.highres2symmstandard : string (nifti file)
Linear registration of T1 image to symmetric standard image
outputspec.highres2symmstandard_mat : string (affine transformation .mat file)
An affine transformation .mat file from linear registration and used in non linear registration
outputspec.highres2symmstandard_warp : string (nifti file)
warp file from Non Linear registration of T1 to symmetrical standard brain
outputspec.fnirt_highres2symmstandard : string (nifti file)
Non Linear registration of T1 to symmetrical standard brain
outputspec.highres2symmstandard_jac : string (nifti file)
jacobian determinant image from Non Linear registration of T1 to symmetrical standard brain
outputspec.rest_res_2symmstandard : string (nifti file)
nonlinear registration (func to standard) image
outputspec.VMHC_FWHM_img : string (nifti file)
pearson correlation between res2standard and flipped res2standard
outputspec.VMHC_Z_FWHM_img : string (nifti file)
Fisher Z transform map
outputspec.VMHC_Z_stat_FWHM_img : string (nifti file)
Z statistic map
Order of commands:
- Perform linear registration of Anatomical brain in T1 space to symmetric standard space. For details see `flirt <http://www.fmrib.ox.ac.uk/fsl/flirt/index.html>`_::
flirt
-ref MNI152_T1_2mm_brain_symmetric.nii.gz
-in mprage_brain.nii.gz
-out highres2symmstandard.nii.gz
-omat highres2symmstandard.mat
-cost corratio
-searchcost corratio
-dof 12
-interp trilinear
- Perform nonlinear registration (higres to standard) to symmetric standard brain. For details see `fnirt <http://fsl.fmrib.ox.ac.uk/fsl/fnirt/>`_::
fnirt
--in=head.nii.gz
--aff=highres2symmstandard.mat
--cout=highres2symmstandard_warp.nii.gz
--iout=fnirt_highres2symmstandard.nii.gz
--jout=highres2symmstandard_jac.nii.gz
--config=T1_2_MNI152_2mm_symmetric.cnf
--ref=MNI152_T1_2mm_symmetric.nii.gz
--refmask=MNI152_T1_2mm_brain_mask_symmetric_dil.nii.gz
--warpres=10,10,10
- Perform spatial smoothing on the input functional image(inputspec.rest_res_filt). For details see `PrinciplesSmoothing <http://imaging.mrc-cbu.cam.ac.uk/imaging/PrinciplesSmoothing>`_ `fslmaths <http://www.fmrib.ox.ac.uk/fslcourse/lectures/practicals/intro/index.htm>`_::
fslmaths rest_res_filt.nii.gz
-kernel gauss FWHM/ sqrt(8-ln(2))
-fmean -mas rest_mask.nii.gz
rest_res_filt_FWHM.nii.gz
- Apply nonlinear registration (func to standard). For details see `applywarp <http://www.fmrib.ox.ac.uk/fsl/fnirt/warp_utils.html#applywarp>`_::
applywarp
--ref=MNI152_T1_2mm_symmetric.nii.gz
--in=rest_res_filt_FWHM.nii.gz
--out=rest_res_2symmstandard.nii.gz
--warp=highres2symmstandard_warp.nii.gz
--premat=example_func2highres.mat
- Copy and L/R swap the output of applywarp command (rest_res_2symmstandard.nii.gz). For details see `fslswapdim <http://fsl.fmrib.ox.ac.uk/fsl/fsl4.0/avwutils/index.html>`_::
fslswapdim
rest_res_2symmstandard.nii.gz
-x y z
tmp_LRflipped.nii.gz
- Calculate pearson correlation between rest_res_2symmstandard.nii.gz and flipped rest_res_2symmstandard.nii.gz(tmp_LRflipped.nii.gz). For details see `3dTcorrelate <http://afni.nimh.nih.gov/pub/dist/doc/program_help/3dTcorrelate.html>`_::
3dTcorrelate
-pearson
-polort -1
-prefix VMHC_FWHM.nii.gz
rest_res_2symmstandard.nii.gz
tmp_LRflipped.nii.gz
- Fisher Z Transform the correlation. For details see `3dcalc <http://afni.nimh.nih.gov/pub/dist/doc/program_help/3dcalc.html>`_::
3dcalc
-a VMHC_FWHM.nii.gz
-expr 'log((a+1)/(1-a))/2'
-prefix VMHC_FWHM_Z.nii.gz
- Calculate the number of volumes(nvols) in flipped rest_res_2symmstandard.nii.gz(tmp_LRflipped.nii.gz) ::
-Use Nibabel to do this
- Compute the Z statistic map ::
3dcalc
-a VMHC_FWHM_Z.nii.gz
-expr 'a*sqrt('${nvols}'-3)'
-prefix VMHC_FWHM_Z_stat.nii.gz
Workflow:
.. image:: ../images/vmhc_graph.dot.png
:width: 500
Workflow Detailed:
.. image:: ../images/vmhc_detailed_graph.dot.png
:width: 500
References
----------
.. [1] Zuo, X.-N., Kelly, C., Di Martino, A., Mennes, M., Margulies, D. S., Bangaru, S., Grzadzinski, R., et al. (2010). Growing together and growing apart: regional and sex differences in the lifespan developmental trajectories of functional homotopy. The Journal of neuroscience : the official journal of the Society for Neuroscience, 30(45), 15034-43. doi:10.1523/JNEUROSCI.2612-10.2010
Examples
--------
>>> vmhc_w = create_vmhc()
>>> vmhc_w.inputs.inputspec.brain_symmetric = 'MNI152_T1_2mm_brain_symmetric.nii.gz'
>>> vmhc_w.inputs.inputspec.symm_standard = 'MNI152_T1_2mm_symmetric.nii.gz'
>>> vmhc_w.inputs.inputspec.twomm_brain_mask_dil = 'MNI152_T1_2mm_brain_mask_symmetric_dil.nii.gz'
>>> vmhc_w.inputs.inputspec.config_file_twomm = 'T1_2_MNI152_2mm_symmetric.cnf'
>>> vmhc_w.inputs.inputspec.standard = 'MNI152_T1_2mm.nii.gz'
>>> vmhc_w.inputs.fwhm_input.fwhm = [4.5, 6]
>>> vmhc_w.get_node('fwhm_input').iterables = ('fwhm', [4.5, 6])
>>> vmhc_w.inputs.inputspec.rest_res = os.path.abspath('/home/data/Projects/Pipelines_testing/Dickstein/subjects/s1001/func/original/rest_res_filt.nii.gz')
>>> vmhc_w.inputs.inputspec.reorient = os.path.abspath('/home/data/Projects/Pipelines_testing/Dickstein/subjects/s1001/anat/mprage_RPI.nii.gz')
>>> vmhc_w.inputs.inputspec.brain = os.path.abspath('/home/data/Projects/Pipelines_testing/Dickstein/subjects/s1001/anat/mprage_brain.nii.gz')
>>> vmhc_w.inputs.inputspec.example_func2highres_mat = os.path.abspath('/home/data/Projects/Pipelines_testing/Dickstein/subjects/s1001/func/original/reg/example_func2highres.mat')
>>> vmhc_w.inputs.inputspec.rest_mask = os.path.abspath('/home/data/Projects/Pipelines_testing/Dickstein/subjects/s1001/func/original/rest_mask.nii.gz')
>>> vmhc_w.run() # doctest: +SKIP
"""
vmhc = pe.Workflow(name='vmhc_workflow')
inputNode = pe.Node(util.IdentityInterface(fields=['brain',
'brain_symmetric',
'rest_res',
'reorient',
'example_func2highres_mat',
'symm_standard',
'twomm_brain_mask_dil',
'config_file_twomm',
'rest_mask',
'standard',
'mean_functional']),
name='inputspec')
outputNode = pe.Node(util.IdentityInterface(fields=['highres2symmstandard',
'highres2symmstandard_mat',
'highres2symmstandard_warp',
'fnirt_highres2symmstandard',
'highres2symmstandard_jac',
'rest_res_2symmstandard',
'VMHC_FWHM_img',
'VMHC_Z_FWHM_img',
'VMHC_Z_stat_FWHM_img'
]),
name='outputspec')
inputnode_fwhm = pe.Node(util.IdentityInterface(fields=['fwhm']),
name='fwhm_input')
if use_ants == False:
## Linear registration of T1 --> symmetric standard
linear_T1_to_symmetric_standard = pe.Node(interface=fsl.FLIRT(),
name='linear_T1_to_symmetric_standard')
linear_T1_to_symmetric_standard.inputs.cost = 'corratio'
linear_T1_to_symmetric_standard.inputs.cost_func = 'corratio'
linear_T1_to_symmetric_standard.inputs.dof = 12
linear_T1_to_symmetric_standard.inputs.interp = 'trilinear'
## Perform nonlinear registration
##(highres to standard) to symmetric standard brain
nonlinear_highres_to_symmetric_standard = pe.Node(interface=fsl.FNIRT(),
name='nonlinear_highres_to_symmetric_standard')
nonlinear_highres_to_symmetric_standard.inputs.fieldcoeff_file = True
nonlinear_highres_to_symmetric_standard.inputs.jacobian_file = True
nonlinear_highres_to_symmetric_standard.inputs.warp_resolution = (10, 10, 10)
# needs new inputs. needs input from resources for the field coeff of the template->symmetric.
# and needs the field coeff of the anatomical-to-template registration
## Apply nonlinear registration (func to standard)
nonlinear_func_to_standard = pe.Node(interface=fsl.ApplyWarp(),
name='nonlinear_func_to_standard')
elif use_ants == True:
# ANTS warp image etc.
calculate_ants_xfm_vmhc = create_wf_calculate_ants_warp(name='calculate_ants_xfm_vmhc')
fsl_to_itk_vmhc = create_wf_c3d_fsl_to_itk(0, name='fsl_to_itk_vmhc')
collect_transforms_vmhc = create_wf_collect_transforms(0, name='collect_transforms_vmhc')
apply_ants_xfm_vmhc = create_wf_apply_ants_warp(0,name='apply_ants_xfm_vmhc')
calculate_ants_xfm_vmhc.inputs.inputspec.dimension = 3
calculate_ants_xfm_vmhc.inputs.inputspec. \
use_histogram_matching = True
calculate_ants_xfm_vmhc.inputs.inputspec. \
winsorize_lower_quantile = 0.01
calculate_ants_xfm_vmhc.inputs.inputspec. \
winsorize_upper_quantile = 0.99
calculate_ants_xfm_vmhc.inputs.inputspec. \
metric = ['MI','MI','CC']
calculate_ants_xfm_vmhc.inputs.inputspec.metric_weight = [1,1,1]
calculate_ants_xfm_vmhc.inputs.inputspec. \
radius_or_number_of_bins = [32,32,4]
calculate_ants_xfm_vmhc.inputs.inputspec. \
sampling_strategy = ['Regular','Regular',None]
calculate_ants_xfm_vmhc.inputs.inputspec. \
sampling_percentage = [0.25,0.25,None]
calculate_ants_xfm_vmhc.inputs.inputspec. \
number_of_iterations = [[1000,500,250,100], \
[1000,500,250,100], [100,100,70,20]]
calculate_ants_xfm_vmhc.inputs.inputspec. \
convergence_threshold = [1e-8,1e-8,1e-9]
calculate_ants_xfm_vmhc.inputs.inputspec. \
convergence_window_size = [10,10,15]
calculate_ants_xfm_vmhc.inputs.inputspec. \
transforms = ['Rigid','Affine','SyN']
calculate_ants_xfm_vmhc.inputs.inputspec. \
transform_parameters = [[0.1],[0.1],[0.1,3,0]]
calculate_ants_xfm_vmhc.inputs.inputspec. \
shrink_factors = [[8,4,2,1],[8,4,2,1],[6,4,2,1]]
calculate_ants_xfm_vmhc.inputs.inputspec. \
smoothing_sigmas = [[3,2,1,0],[3,2,1,0],[3,2,1,0]]
apply_ants_xfm_vmhc.inputs.inputspec.interpolation = 'Gaussian'
apply_ants_xfm_vmhc.inputs.inputspec.input_image_type = 3
## copy and L/R swap file
copy_and_L_R_swap = pe.Node(interface=fsl.SwapDimensions(),
name='copy_and_L_R_swap')
copy_and_L_R_swap.inputs.new_dims = ('-x', 'y', 'z')
## caculate vmhc
pearson_correlation = pe.Node(interface=preprocess.TCorrelate(),
name='pearson_correlation')
pearson_correlation.inputs.pearson = True
pearson_correlation.inputs.polort = -1
pearson_correlation.inputs.outputtype = 'NIFTI_GZ'
z_trans = pe.Node(interface=preprocess.Calc(),
name='z_trans')
z_trans.inputs.expr = 'log((1+a)/(1-a))/2'
z_trans.inputs.outputtype = 'NIFTI_GZ'
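# Fisher r-to-z transform: z = atanh(r) = 0.5*log((1+r)/(1-r)). The z_stat
# node below multiplies this by sqrt(nvols - 3) (the expression built by
# generateEXP) to yield an approximate Z statistic.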
z_stat = pe.Node(interface=preprocess.Calc(),
name='z_stat')
z_stat.inputs.outputtype = 'NIFTI_GZ'
NVOLS = pe.Node(util.Function(input_names=['in_files'],
output_names=['nvols'],
function=get_img_nvols),
name='NVOLS')
generateEXP = pe.Node(util.Function(input_names=['nvols'],
output_names=['expr'],
function=get_operand_expression),
name='generateEXP')
smooth = pe.Node(interface=fsl.MultiImageMaths(),
name='smooth')
if use_ants == False:
vmhc.connect(inputNode, 'brain',
linear_T1_to_symmetric_standard, 'in_file')
vmhc.connect(inputNode, 'brain_symmetric',
linear_T1_to_symmetric_standard, 'reference')
vmhc.connect(inputNode, 'reorient',
nonlinear_highres_to_symmetric_standard, 'in_file')
vmhc.connect(linear_T1_to_symmetric_standard, 'out_matrix_file',
nonlinear_highres_to_symmetric_standard, 'affine_file')
vmhc.connect(inputNode, 'symm_standard',
nonlinear_highres_to_symmetric_standard, 'ref_file')
vmhc.connect(inputNode, 'twomm_brain_mask_dil',
nonlinear_highres_to_symmetric_standard, 'refmask_file')
vmhc.connect(inputNode, 'config_file_twomm',
nonlinear_highres_to_symmetric_standard, 'config_file')
vmhc.connect(inputNode, 'rest_res',
smooth, 'in_file')
vmhc.connect(inputnode_fwhm, ('fwhm', set_gauss),
smooth, 'op_string')
vmhc.connect(inputNode, 'rest_mask',
smooth, 'operand_files')
vmhc.connect(smooth, 'out_file',
nonlinear_func_to_standard, 'in_file')
vmhc.connect(inputNode, 'standard',
nonlinear_func_to_standard, 'ref_file')
vmhc.connect(nonlinear_highres_to_symmetric_standard, 'fieldcoeff_file',
nonlinear_func_to_standard, 'field_file')
## func->anat matrix (bbreg)
vmhc.connect(inputNode, 'example_func2highres_mat',
nonlinear_func_to_standard, 'premat')
vmhc.connect(nonlinear_func_to_standard, 'out_file',
copy_and_L_R_swap, 'in_file')
vmhc.connect(nonlinear_func_to_standard, 'out_file',
pearson_correlation, 'xset')
elif use_ants == True:
# connections for ANTS stuff
# registration calculation stuff -- might go out the window
vmhc.connect(inputNode, 'brain',
calculate_ants_xfm_vmhc, 'inputspec.anatomical_brain')
vmhc.connect(inputNode, 'brain_symmetric',
calculate_ants_xfm_vmhc, 'inputspec.reference_brain')
# functional apply warp stuff
vmhc.connect(inputNode, 'rest_res',
smooth, 'in_file')
vmhc.connect(inputnode_fwhm, ('fwhm', set_gauss),
smooth, 'op_string')
vmhc.connect(inputNode, 'rest_mask',
smooth, 'operand_files')
vmhc.connect(smooth, 'out_file',
apply_ants_xfm_vmhc, 'inputspec.input_image')
vmhc.connect(calculate_ants_xfm_vmhc, 'outputspec.ants_rigid_xfm',
collect_transforms_vmhc, 'inputspec.linear_rigid')
vmhc.connect(calculate_ants_xfm_vmhc, 'outputspec.ants_affine_xfm',
collect_transforms_vmhc, 'inputspec.linear_affine')
vmhc.connect(calculate_ants_xfm_vmhc, 'outputspec.warp_field',
collect_transforms_vmhc, 'inputspec.warp_file')
## func->anat matrix (bbreg)
vmhc.connect(inputNode, 'example_func2highres_mat',
fsl_to_itk_vmhc, 'inputspec.affine_file')
vmhc.connect(inputNode, 'brain', fsl_to_itk_vmhc,
'inputspec.reference_file')
vmhc.connect(inputNode, 'mean_functional', fsl_to_itk_vmhc,
'inputspec.source_file')
vmhc.connect(fsl_to_itk_vmhc, 'outputspec.itk_transform',
collect_transforms_vmhc, 'inputspec.fsl_to_itk_affine')
<|fim▁hole|> vmhc.connect(inputNode, 'brain',
apply_ants_xfm_vmhc, 'inputspec.conversion_reference')
vmhc.connect(inputNode, 'mean_functional',
apply_ants_xfm_vmhc, 'inputspec.conversion_source')
'''
vmhc.connect(inputNode, 'brain_symmetric',
apply_ants_xfm_vmhc, 'inputspec.reference_image')
vmhc.connect(collect_transforms_vmhc, \
'outputspec.transformation_series', \
apply_ants_xfm_vmhc, 'inputspec.transforms')
vmhc.connect(apply_ants_xfm_vmhc, 'outputspec.output_image',
copy_and_L_R_swap, 'in_file')
vmhc.connect(apply_ants_xfm_vmhc, 'outputspec.output_image',
pearson_correlation, 'xset')
vmhc.connect(copy_and_L_R_swap, 'out_file',
pearson_correlation, 'yset')
vmhc.connect(pearson_correlation, 'out_file',
z_trans, 'in_file_a')
vmhc.connect(copy_and_L_R_swap, 'out_file',
NVOLS, 'in_files')
vmhc.connect(NVOLS, 'nvols',
generateEXP, 'nvols')
vmhc.connect(z_trans, 'out_file',
z_stat, 'in_file_a')
vmhc.connect(generateEXP, 'expr',
z_stat, 'expr')
if use_ants == False:
vmhc.connect(linear_T1_to_symmetric_standard, 'out_file',
outputNode, 'highres2symmstandard')
vmhc.connect(linear_T1_to_symmetric_standard, 'out_matrix_file',
outputNode, 'highres2symmstandard_mat')
vmhc.connect(nonlinear_highres_to_symmetric_standard, 'jacobian_file',
outputNode, 'highres2symmstandard_jac')
vmhc.connect(nonlinear_highres_to_symmetric_standard, 'fieldcoeff_file',
outputNode, 'highres2symmstandard_warp')
vmhc.connect(nonlinear_highres_to_symmetric_standard, 'warped_file',
outputNode, 'fnirt_highres2symmstandard')
vmhc.connect(nonlinear_func_to_standard, 'out_file',
outputNode, 'rest_res_2symmstandard')
elif use_ants == True:
# ANTS warp outputs to outputnode
vmhc.connect(calculate_ants_xfm_vmhc, 'outputspec.ants_affine_xfm',
outputNode, 'highres2symmstandard_mat')
vmhc.connect(calculate_ants_xfm_vmhc, 'outputspec.warp_field',
outputNode, 'highres2symmstandard_warp')
vmhc.connect(calculate_ants_xfm_vmhc, 'outputspec.normalized_output_brain',
outputNode, 'fnirt_highres2symmstandard')
vmhc.connect(apply_ants_xfm_vmhc, 'outputspec.output_image',
outputNode, 'rest_res_2symmstandard')
vmhc.connect(pearson_correlation, 'out_file',
outputNode, 'VMHC_FWHM_img')
vmhc.connect(z_trans, 'out_file',
outputNode, 'VMHC_Z_FWHM_img')
vmhc.connect(z_stat, 'out_file',
outputNode, 'VMHC_Z_stat_FWHM_img')
return vmhc<|fim▁end|> | ''' |
<|file_name|>html_diff.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from typing import Callable, List, Tuple, Text
from django.conf import settings
from diff_match_patch import diff_match_patch
import platform
import logging
# TODO: handle changes in link hrefs
def highlight_with_class(klass, text):
# type: (Text, Text) -> Text
return '<span class="%s">%s</span>' % (klass, text)
def highlight_inserted(text):
# type: (Text) -> Text
return highlight_with_class('highlight_text_inserted', text)
def highlight_deleted(text):
# type: (Text) -> Text
return highlight_with_class('highlight_text_deleted', text)
def highlight_replaced(text):
# type: (Text) -> Text
return highlight_with_class('highlight_text_replaced', text)
def chunkize(text, in_tag):
# type: (Text, bool) -> Tuple[List[Tuple[Text, Text]], bool]
start = 0
idx = 0
chunks = [] # type: List[Tuple[Text, Text]]
for c in text:
if c == '<':
in_tag = True
if start != idx:
chunks.append(('text', text[start:idx]))
start = idx
elif c == '>':
in_tag = False
if start != idx + 1:
chunks.append(('tag', text[start:idx + 1]))
start = idx + 1
idx += 1
if start != idx:
chunks.append(('tag' if in_tag else 'text', text[start:idx]))
return chunks, in_tag
def highlight_chunks(chunks, highlight_func):
# type: (List[Tuple[Text, Text]], Callable[[Text], Text]) -> Text
retval = u''
for type, text in chunks:
if type == 'text':
retval += highlight_func(text)
else:
retval += text
return retval
def verify_html(html):
# type: (Text) -> bool
# TODO: Actually parse the resulting HTML to ensure we don't
# create mal-formed markup. This is unfortunately hard because
# we both want pretty strict parsing and we want to parse html5
# fragments. For now, we do a basic sanity check.
in_tag = False
for c in html:
if c == '<':
if in_tag:<|fim▁hole|> return False
in_tag = True
elif c == '>':
if not in_tag:
return False
in_tag = False
if in_tag:
return False
return True
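# e.g. verify_html('<b>ok</b>') -> True, verify_html('<b<i>') -> False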
def highlight_html_differences(s1, s2):
# type: (Text, Text) -> Text
differ = diff_match_patch()
ops = differ.diff_main(s1, s2)
differ.diff_cleanupSemantic(ops)
retval = u''
in_tag = False
idx = 0
while idx < len(ops):
op, text = ops[idx]
next_op = None
if idx != len(ops) - 1:
next_op, next_text = ops[idx + 1]
if op == diff_match_patch.DIFF_DELETE and next_op == diff_match_patch.DIFF_INSERT:
# Replace operation
chunks, in_tag = chunkize(next_text, in_tag)
retval += highlight_chunks(chunks, highlight_replaced)
idx += 1
elif op == diff_match_patch.DIFF_INSERT and next_op == diff_match_patch.DIFF_DELETE:
# Replace operation
# I have no idea whether diff_match_patch generates inserts followed
# by deletes, but it doesn't hurt to handle them
chunks, in_tag = chunkize(text, in_tag)
retval += highlight_chunks(chunks, highlight_replaced)
idx += 1
elif op == diff_match_patch.DIFF_DELETE:
retval += highlight_deleted(' ')
elif op == diff_match_patch.DIFF_INSERT:
chunks, in_tag = chunkize(text, in_tag)
retval += highlight_chunks(chunks, highlight_inserted)
elif op == diff_match_patch.DIFF_EQUAL:
chunks, in_tag = chunkize(text, in_tag)
retval += text
idx += 1
if not verify_html(retval):
from zerver.lib.actions import internal_send_message
# We probably want more information here
logging.getLogger('').error('HTML diff produced mal-formed HTML')
if settings.ERROR_BOT is not None:
subject = "HTML diff failure on %s" % (platform.node(),)
internal_send_message(settings.ERROR_BOT, "stream",
"errors", subject, "HTML diff produced malformed HTML")
return s2
return retval<|fim▁end|> | |
<|file_name|>triggers.py<|end_file_name|><|fim▁begin|>from django.db import transaction
from denorm.db import base
class RandomBigInt(base.RandomBigInt):
def sql(self):
return '(9223372036854775806::INT8 * ((RANDOM()-0.5)*2.0) )::INT8'
class TriggerNestedSelect(base.TriggerNestedSelect):
def sql(self):
columns = self.columns
table = self.table
where = ",".join(["%s = %s" % (k, v) for k, v in self.kwargs.iteritems()])
return 'SELECT DISTINCT %(columns)s FROM %(table)s WHERE %(where)s' % locals(), tuple()
class TriggerActionInsert(base.TriggerActionInsert):
def sql(self):
table = self.model._meta.db_table
columns = "(" + ",".join(self.columns) + ")"
params = []
if isinstance(self.values, TriggerNestedSelect):
sql, nested_params = self.values.sql()
values = "(" + sql + ")"
params.extend(nested_params)
else:
values = "VALUES(" + ",".join(self.values) + ")"
sql = (
'BEGIN\n'
'INSERT INTO %(table)s %(columns)s %(values)s;\n'<|fim▁hole|> return sql, params
class TriggerActionUpdate(base.TriggerActionUpdate):
def sql(self):
table = self.model._meta.db_table
params = []
updates = ','.join(["%s=%s" % (k, v) for k, v in zip(self.columns, self.values)])
if isinstance(self.where, tuple):
where, where_params = self.where
else:
where, where_params = self.where, []
params.extend(where_params)
return 'UPDATE %(table)s SET %(updates)s WHERE %(where)s' % locals(), params
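# A hypothetical rendering: columns=('dirty',), values=('true',) and
# where='id = NEW.id' yield
#
#     UPDATE <table> SET dirty=true WHERE id = NEW.id
#
# with any bind parameters from a (where, params) tuple returned alongside.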
class Trigger(base.Trigger):
def name(self):
name = base.Trigger.name(self)
if self.content_type_field:
name += "_%s" % self.content_type
return name
def sql(self):
name = self.name()
params = []
action_set = set()
for a in self.actions:
sql, action_params = a.sql()
if sql:
action_set.add(sql)
params.extend(action_params)
actions = ";\n ".join(action_set) + ';'
table = self.db_table
time = self.time.upper()
event = self.event.upper()
content_type = self.content_type
ct_field = self.content_type_field
conditions = []
if event == "UPDATE":
for field, native_type in self.fields:
if native_type is None:
# If Django didn't know what this field type should be,
# compare it as text - this fixes a problem with trying to
# compare PostGIS geometry fields.
conditions.append("(OLD.%(f)s::%(t)s IS DISTINCT FROM NEW.%(f)s::%(t)s)" % {'f': field, 't': 'text'})
else:
conditions.append("( OLD.%(f)s IS DISTINCT FROM NEW.%(f)s )" % {'f': field})
conditions = ["(%s)" % "OR".join(conditions)]
if ct_field:
if event == "UPDATE":
conditions.append("(OLD.%(ctf)s=%(ct)s)OR(NEW.%(ctf)s=%(ct)s)" % {'ctf': ct_field, 'ct': content_type})
elif event == "INSERT":
conditions.append("(NEW.%s=%s)" % (ct_field, content_type))
elif event == "DELETE":
conditions.append("(OLD.%s=%s)" % (ct_field, content_type))
if not conditions:
cond = "TRUE"
else:
cond = "AND".join(conditions)
sql = """
CREATE OR REPLACE FUNCTION func_%(name)s()
RETURNS TRIGGER AS $$
BEGIN
IF %(cond)s THEN
%(actions)s
END IF;
RETURN NULL;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER %(name)s
%(time)s %(event)s ON %(table)s
FOR EACH ROW EXECUTE PROCEDURE func_%(name)s();
""" % locals()
return sql, params
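# Sketch of the DDL this renders for a hypothetical trigger watching one
# field of forum_post on UPDATE (names invented, whitespace condensed):
#
#     CREATE OR REPLACE FUNCTION func_denorm_..._forum_post()
#     RETURNS TRIGGER AS $$
#     BEGIN
#         IF (( OLD.title IS DISTINCT FROM NEW.title )) THEN
#             <the queued INSERT/UPDATE actions>;
#         END IF;
#         RETURN NULL;
#     END;
#     $$ LANGUAGE plpgsql;
#     CREATE TRIGGER denorm_..._forum_post
#     AFTER UPDATE ON forum_post
#     FOR EACH ROW EXECUTE PROCEDURE func_denorm_..._forum_post();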
class TriggerSet(base.TriggerSet):
def drop(self):
cursor = self.cursor()
cursor.execute("SELECT pg_class.relname, pg_trigger.tgname FROM pg_trigger LEFT JOIN pg_class ON (pg_trigger.tgrelid = pg_class.oid) WHERE pg_trigger.tgname LIKE 'denorm_%%';")
for table_name, trigger_name in cursor.fetchall():
cursor.execute('DROP TRIGGER %s ON %s;' % (trigger_name, table_name))
transaction.commit_unless_managed(using=self.using)
def install(self):
cursor = self.cursor()
cursor.execute("SELECT lanname FROM pg_catalog.pg_language WHERE lanname ='plpgsql'")
if not cursor.fetchall():
cursor.execute('CREATE LANGUAGE plpgsql')
for name, trigger in self.triggers.iteritems():
sql, args = trigger.sql()
cursor.execute(sql, args)
transaction.commit_unless_managed(using=self.using)<|fim▁end|> | 'EXCEPTION WHEN unique_violation THEN -- do nothing\n'
'END\n'
) % locals() |
<|file_name|>get_landmines.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright 2014 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This file emits the list of reasons why a particular build needs to be clobbered
(or a list of 'landmines').
"""
import sys
def main():
"""
ALL LANDMINES ARE EMITTED FROM HERE.
"""
print 'Need to clobber after ICU52 roll.'
print 'Landmines test.'
print 'Activating MSVS 2013.'
print 'Revert activation of MSVS 2013.'
print 'Activating MSVS 2013 again.'
print 'Clobber after ICU roll.'<|fim▁hole|> print 'Clobbering to hopefully resolve problem with mksnapshot'
print 'Clobber after ICU roll.'
print 'Clobber after Android NDK update.'
print 'Clobber to fix windows build problems.'
print 'Clobber again to fix windows build problems.'
print 'Clobber to possibly resolve failure on win-32 bot.'
return 0
if __name__ == '__main__':
sys.exit(main())<|fim▁end|> | print 'Moar clobbering...'
print 'Remove build/android.gypi'
print 'Cleanup after windows ninja switch attempt.'
print 'Switching to pinned msvs toolchain.' |
<|file_name|>AuthWrapper.jest.tsx<|end_file_name|><|fim▁begin|>import React from "react"
import { mount } from "enzyme"<|fim▁hole|>
jest.mock("sharify")
jest.mock("querystring", () => ({
parse: jest.fn().mockReturnValue({}),
}))
jest.mock("desktop/components/cookies/index.coffee", () => ({
get: jest.fn(),
set: jest.fn(),
}))
jest.mock("desktop/lib/mediator.coffee", () => ({
trigger: jest.fn(),
on: jest.fn(),
}))
const mediatorTrigger = require("desktop/lib/mediator.coffee")
.trigger as jest.Mock
const mediatorOn = require("desktop/lib/mediator.coffee").on as jest.Mock
const qsMock = require("querystring").parse as jest.Mock
const CookiesGetMock = require("desktop/components/cookies/index.coffee")
.get as jest.Mock
const CookiesSetMock = require("desktop/components/cookies/index.coffee")
.set as jest.Mock
jest.spyOn(helpers, "handleScrollingAuthModal")
const handleScrollingAuthModal = require("desktop/lib/openAuthModal")
.handleScrollingAuthModal as jest.Mock
jest.useFakeTimers()
describe("AuthWrapper", () => {
beforeEach(() => {
delete sd.IS_MOBILE
delete sd.CURRENT_USER
window.addEventListener = jest.fn()
})
afterEach(() => {
mediatorOn.mockClear()
mediatorTrigger.mockClear()
// FIXME: reaction migration
// @ts-ignore
sharify.mockClear()
handleScrollingAuthModal.mockClear()
})
const getWrapper = () => {
return mount(<AuthWrapper />)
}
describe("#componentWillMount", () => {
it("does nothing if IS_MOBILE", () => {
sd.IS_MOBILE = true
getWrapper()
expect(mediatorOn).not.toBeCalled()
expect(handleScrollingAuthModal).not.toBeCalled()
})
it("does nothing if CURRENT_USER", () => {
sd.CURRENT_USER = { name: "Carter" }
getWrapper()
expect(mediatorOn).not.toBeCalled()
expect(handleScrollingAuthModal).not.toBeCalled()
})
it("does nothing if user has dismiss cookie", () => {
CookiesGetMock.mockReturnValueOnce(1)
getWrapper()
expect(mediatorOn).not.toBeCalled()
expect(handleScrollingAuthModal).not.toBeCalled()
})
it("does nothing if source is sailthru", () => {
qsMock.mockReturnValueOnce({
utm_source: "sailthru",
})
getWrapper()
expect(mediatorOn).not.toBeCalled()
expect(handleScrollingAuthModal).not.toBeCalled()
})
it("sets up scrolling auth modal", () => {
const component = getWrapper().instance() as AuthWrapper
expect(mediatorOn).toBeCalledWith(
"modal:closed",
component.setDismissCookie
)
expect(mediatorOn).toBeCalledWith(
"auth:sign_up:success",
component.setDismissCookie
)
expect(window.addEventListener).toBeCalled()
expect(handleScrollingAuthModal).toBeCalledWith({
afterSignUpAction: { action: "editorialSignup" },
contextModule: "popUpModal",
copy: "Sign up for the best stories in art and visual culture",
destination: "https://artsy.net/",
intent: "viewEditorial",
})
})
})
describe("#isFromSailthru", () => {
afterEach(() => {
qsMock.mockReturnValue({})
})
it("returns true if utm_source is sailthru", () => {
qsMock.mockReturnValue({
utm_source: "sailthru",
})
const component = getWrapper().instance() as AuthWrapper
expect(component.isFromSailthru()).toBe(true)
})
it("returns true if utm_content is sailthru", () => {
qsMock.mockReturnValue({
utm_content: "st-",
})
const component = getWrapper().instance() as AuthWrapper
expect(component.isFromSailthru()).toBe(true)
})
})
it("#setDismissCookie sets a cookie", () => {
const component = getWrapper().instance() as AuthWrapper
component.setDismissCookie()
expect(CookiesSetMock).toBeCalledWith("editorial-signup-dismissed", 1, {
expires: 31536000,
})
})
})<|fim▁end|> | import { AuthWrapper } from "../AuthWrapper"
import sharify, { data as sd } from "sharify"
import * as helpers from "desktop/lib/openAuthModal" |
<|file_name|>wizardcontroller.rs<|end_file_name|><|fim▁begin|>use super::Wizard;
use super::prelude::*;<|fim▁hole|> fn reset(&mut self) {}
}<|fim▁end|> |
pub trait WizardController {
fn choose_spell<'a>(&mut self, wizard: &'a Wizard) -> Option<&'a Spell>; |
<|file_name|>cssCode.directive.spec.js<|end_file_name|><|fim▁begin|>'use strict';
describe('Directive: cssCode', function () {
// load the directive's module and view
beforeEach(module('googleWebfontsHelperApp'));
beforeEach(module('app/cssCode/cssCode.html'));
var element, scope;
beforeEach(inject(function ($rootScope) {
scope = $rootScope.$new();
}));
it('should make hidden element visible', inject(function ($compile) {
element = angular.element('<css-code></css-code>');
element = $compile(element)(scope);<|fim▁hole|> }));
});<|fim▁end|> | scope.$apply();
expect(element.text()).toBe('this is the cssCode directive'); |
<|file_name|>assertStatement.go<|end_file_name|><|fim▁begin|>package source
//
// An AssertionStatement declares the existence of a class or instance.
//
type AssertionStatement struct {
fields AssertionFields
source Code
}<|fim▁hole|>type AssertionFields struct {
Owner string // base type or class
Called string // name of reference being asserted into existence
Options // ex. called
}
//
func (ts AssertionStatement) Fields() AssertionFields {
return ts.fields
}
//
func (ts AssertionStatement) Source() Code {
return ts.source
}<|fim▁end|> | |
<|file_name|>deep_copy_generated.go<|end_file_name|><|fim▁begin|>/*
Copyright 2015 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// DO NOT EDIT. THIS FILE IS AUTO-GENERATED BY $KUBEROOT/hack/update-generated-deep-copies.sh.
package v1
import (
time "time"
api "k8s.io/kubernetes/pkg/api"
resource "k8s.io/kubernetes/pkg/api/resource"
conversion "k8s.io/kubernetes/pkg/conversion"
runtime "k8s.io/kubernetes/pkg/runtime"
util "k8s.io/kubernetes/pkg/util"
inf "speter.net/go/exp/math/dec/inf"
)
func deepCopy_resource_Quantity(in resource.Quantity, out *resource.Quantity, c *conversion.Cloner) error {
if in.Amount != nil {
if newVal, err := c.DeepCopy(in.Amount); err != nil {
return err
} else if newVal == nil {
out.Amount = nil
} else {
out.Amount = newVal.(*inf.Dec)
}
} else {
out.Amount = nil
}
out.Format = in.Format
return nil
}
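// Sketch of how callers typically drive these generated helpers (variable
// names invented): each helper fills a caller-owned *out from in, e.g.
//
//     var dst resource.Quantity
//     cloner := conversion.NewCloner()
//     if err := deepCopy_resource_Quantity(src, &dst, cloner); err != nil {
//         // handle error; dst may be partially written
//     }
//
// Pointer fields such as Amount are re-allocated (via c.DeepCopy) rather
// than aliased, so mutating the copy never touches the original.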
func deepCopy_v1_APIVersion(in APIVersion, out *APIVersion, c *conversion.Cloner) error {
out.Name = in.Name
out.APIGroup = in.APIGroup
return nil
}
func deepCopy_v1_AWSElasticBlockStoreVolumeSource(in AWSElasticBlockStoreVolumeSource, out *AWSElasticBlockStoreVolumeSource, c *conversion.Cloner) error {
out.VolumeID = in.VolumeID
out.FSType = in.FSType
out.Partition = in.Partition
out.ReadOnly = in.ReadOnly
return nil
}
func deepCopy_v1_Binding(in Binding, out *Binding, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectReference(in.Target, &out.Target, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_Capabilities(in Capabilities, out *Capabilities, c *conversion.Cloner) error {
if in.Add != nil {
out.Add = make([]Capability, len(in.Add))
for i := range in.Add {
out.Add[i] = in.Add[i]
}
} else {
out.Add = nil
}
if in.Drop != nil {
out.Drop = make([]Capability, len(in.Drop))
for i := range in.Drop {
out.Drop[i] = in.Drop[i]
}
} else {
out.Drop = nil
}
return nil
}
func deepCopy_v1_ComponentCondition(in ComponentCondition, out *ComponentCondition, c *conversion.Cloner) error {
out.Type = in.Type
out.Status = in.Status
out.Message = in.Message
out.Error = in.Error
return nil
}
func deepCopy_v1_ComponentStatus(in ComponentStatus, out *ComponentStatus, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if in.Conditions != nil {
out.Conditions = make([]ComponentCondition, len(in.Conditions))
for i := range in.Conditions {
if err := deepCopy_v1_ComponentCondition(in.Conditions[i], &out.Conditions[i], c); err != nil {
return err
}
}
} else {
out.Conditions = nil
}
return nil
}
func deepCopy_v1_ComponentStatusList(in ComponentStatusList, out *ComponentStatusList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]ComponentStatus, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_ComponentStatus(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_Container(in Container, out *Container, c *conversion.Cloner) error {
out.Name = in.Name
out.Image = in.Image
if in.Command != nil {
out.Command = make([]string, len(in.Command))
for i := range in.Command {
out.Command[i] = in.Command[i]
}
} else {
out.Command = nil
}
if in.Args != nil {
out.Args = make([]string, len(in.Args))
for i := range in.Args {
out.Args[i] = in.Args[i]
}
} else {
out.Args = nil
}
out.WorkingDir = in.WorkingDir
if in.Ports != nil {
out.Ports = make([]ContainerPort, len(in.Ports))
for i := range in.Ports {
if err := deepCopy_v1_ContainerPort(in.Ports[i], &out.Ports[i], c); err != nil {
return err
}
}
} else {
out.Ports = nil
}
if in.Env != nil {
out.Env = make([]EnvVar, len(in.Env))
for i := range in.Env {
if err := deepCopy_v1_EnvVar(in.Env[i], &out.Env[i], c); err != nil {
return err
}
}
} else {
out.Env = nil
}
if err := deepCopy_v1_ResourceRequirements(in.Resources, &out.Resources, c); err != nil {
return err
}
if in.VolumeMounts != nil {
out.VolumeMounts = make([]VolumeMount, len(in.VolumeMounts))
for i := range in.VolumeMounts {
if err := deepCopy_v1_VolumeMount(in.VolumeMounts[i], &out.VolumeMounts[i], c); err != nil {
return err
}
}
} else {
out.VolumeMounts = nil
}
if in.LivenessProbe != nil {
out.LivenessProbe = new(Probe)
if err := deepCopy_v1_Probe(*in.LivenessProbe, out.LivenessProbe, c); err != nil {
return err
}
} else {
out.LivenessProbe = nil
}
if in.ReadinessProbe != nil {
out.ReadinessProbe = new(Probe)
if err := deepCopy_v1_Probe(*in.ReadinessProbe, out.ReadinessProbe, c); err != nil {
return err
}
} else {
out.ReadinessProbe = nil
}
if in.Lifecycle != nil {
out.Lifecycle = new(Lifecycle)
if err := deepCopy_v1_Lifecycle(*in.Lifecycle, out.Lifecycle, c); err != nil {
return err
}
} else {
out.Lifecycle = nil
}
out.TerminationMessagePath = in.TerminationMessagePath
out.ImagePullPolicy = in.ImagePullPolicy
if in.SecurityContext != nil {
out.SecurityContext = new(SecurityContext)
if err := deepCopy_v1_SecurityContext(*in.SecurityContext, out.SecurityContext, c); err != nil {
return err
}
} else {
out.SecurityContext = nil
}
out.Stdin = in.Stdin
out.TTY = in.TTY
return nil
}
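// deepCopy_v1_Container above shows the generated pattern at its fullest:
// scalar fields are assigned directly, slices are re-made and copied
// element by element, and optional pointer fields (LivenessProbe,
// Lifecycle, SecurityContext, ...) are either freshly allocated and
// recursed into or explicitly set to nil, so the copy shares no mutable
// state with its source.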
func deepCopy_v1_ContainerPort(in ContainerPort, out *ContainerPort, c *conversion.Cloner) error {
out.Name = in.Name
out.HostPort = in.HostPort
out.ContainerPort = in.ContainerPort
out.Protocol = in.Protocol
out.HostIP = in.HostIP
return nil
}
func deepCopy_v1_ContainerState(in ContainerState, out *ContainerState, c *conversion.Cloner) error {
if in.Waiting != nil {
out.Waiting = new(ContainerStateWaiting)
if err := deepCopy_v1_ContainerStateWaiting(*in.Waiting, out.Waiting, c); err != nil {
return err
}
} else {
out.Waiting = nil
}
if in.Running != nil {
out.Running = new(ContainerStateRunning)
if err := deepCopy_v1_ContainerStateRunning(*in.Running, out.Running, c); err != nil {
return err
}
} else {
out.Running = nil
}
if in.Terminated != nil {
out.Terminated = new(ContainerStateTerminated)
if err := deepCopy_v1_ContainerStateTerminated(*in.Terminated, out.Terminated, c); err != nil {
return err
}
} else {
out.Terminated = nil
}
return nil
}
func deepCopy_v1_ContainerStateRunning(in ContainerStateRunning, out *ContainerStateRunning, c *conversion.Cloner) error {
if err := deepCopy_util_Time(in.StartedAt, &out.StartedAt, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_ContainerStateTerminated(in ContainerStateTerminated, out *ContainerStateTerminated, c *conversion.Cloner) error {
out.ExitCode = in.ExitCode
out.Signal = in.Signal
out.Reason = in.Reason
out.Message = in.Message
if err := deepCopy_util_Time(in.StartedAt, &out.StartedAt, c); err != nil {
return err
}
if err := deepCopy_util_Time(in.FinishedAt, &out.FinishedAt, c); err != nil {
return err
}
out.ContainerID = in.ContainerID
return nil
}
func deepCopy_v1_ContainerStateWaiting(in ContainerStateWaiting, out *ContainerStateWaiting, c *conversion.Cloner) error {
out.Reason = in.Reason
return nil
}
func deepCopy_v1_ContainerStatus(in ContainerStatus, out *ContainerStatus, c *conversion.Cloner) error {
out.Name = in.Name
if err := deepCopy_v1_ContainerState(in.State, &out.State, c); err != nil {
return err
}
if err := deepCopy_v1_ContainerState(in.LastTerminationState, &out.LastTerminationState, c); err != nil {
return err
}
out.Ready = in.Ready
out.RestartCount = in.RestartCount
out.Image = in.Image
out.ImageID = in.ImageID
out.ContainerID = in.ContainerID
return nil
}
func deepCopy_v1_Daemon(in Daemon, out *Daemon, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_DaemonSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
if err := deepCopy_v1_DaemonStatus(in.Status, &out.Status, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_DaemonList(in DaemonList, out *DaemonList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]Daemon, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_Daemon(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_DaemonSpec(in DaemonSpec, out *DaemonSpec, c *conversion.Cloner) error {
if in.Selector != nil {
out.Selector = make(map[string]string)
for key, val := range in.Selector {
out.Selector[key] = val
}
} else {
out.Selector = nil
}
if in.Template != nil {
out.Template = new(PodTemplateSpec)
if err := deepCopy_v1_PodTemplateSpec(*in.Template, out.Template, c); err != nil {
return err
}
} else {
out.Template = nil
}
return nil
}
func deepCopy_v1_DaemonStatus(in DaemonStatus, out *DaemonStatus, c *conversion.Cloner) error {
out.CurrentNumberScheduled = in.CurrentNumberScheduled
out.NumberMisscheduled = in.NumberMisscheduled
out.DesiredNumberScheduled = in.DesiredNumberScheduled
return nil
}
func deepCopy_v1_DeleteOptions(in DeleteOptions, out *DeleteOptions, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if in.GracePeriodSeconds != nil {
out.GracePeriodSeconds = new(int64)
*out.GracePeriodSeconds = *in.GracePeriodSeconds
} else {
out.GracePeriodSeconds = nil
}
return nil
}
func deepCopy_v1_EmptyDirVolumeSource(in EmptyDirVolumeSource, out *EmptyDirVolumeSource, c *conversion.Cloner) error {
out.Medium = in.Medium
return nil
}
func deepCopy_v1_EndpointAddress(in EndpointAddress, out *EndpointAddress, c *conversion.Cloner) error {
out.IP = in.IP
if in.TargetRef != nil {
out.TargetRef = new(ObjectReference)
if err := deepCopy_v1_ObjectReference(*in.TargetRef, out.TargetRef, c); err != nil {
return err
}
} else {
out.TargetRef = nil
}
return nil
}
func deepCopy_v1_EndpointPort(in EndpointPort, out *EndpointPort, c *conversion.Cloner) error {
out.Name = in.Name
out.Port = in.Port
out.Protocol = in.Protocol
return nil
}
func deepCopy_v1_EndpointSubset(in EndpointSubset, out *EndpointSubset, c *conversion.Cloner) error {
if in.Addresses != nil {
out.Addresses = make([]EndpointAddress, len(in.Addresses))
for i := range in.Addresses {
if err := deepCopy_v1_EndpointAddress(in.Addresses[i], &out.Addresses[i], c); err != nil {
return err
}
}
} else {
out.Addresses = nil
}
if in.Ports != nil {
out.Ports = make([]EndpointPort, len(in.Ports))
for i := range in.Ports {
if err := deepCopy_v1_EndpointPort(in.Ports[i], &out.Ports[i], c); err != nil {
return err
}
}
} else {
out.Ports = nil
}
return nil
}
func deepCopy_v1_Endpoints(in Endpoints, out *Endpoints, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if in.Subsets != nil {
out.Subsets = make([]EndpointSubset, len(in.Subsets))
for i := range in.Subsets {
if err := deepCopy_v1_EndpointSubset(in.Subsets[i], &out.Subsets[i], c); err != nil {
return err
}
}
} else {
out.Subsets = nil
}
return nil
}
func deepCopy_v1_EndpointsList(in EndpointsList, out *EndpointsList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]Endpoints, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_Endpoints(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_EnvVar(in EnvVar, out *EnvVar, c *conversion.Cloner) error {
out.Name = in.Name
out.Value = in.Value
if in.ValueFrom != nil {
out.ValueFrom = new(EnvVarSource)
if err := deepCopy_v1_EnvVarSource(*in.ValueFrom, out.ValueFrom, c); err != nil {
return err
}
} else {
out.ValueFrom = nil
}
return nil
}
func deepCopy_v1_EnvVarSource(in EnvVarSource, out *EnvVarSource, c *conversion.Cloner) error {
if in.FieldRef != nil {
out.FieldRef = new(ObjectFieldSelector)
if err := deepCopy_v1_ObjectFieldSelector(*in.FieldRef, out.FieldRef, c); err != nil {
return err
}
} else {
out.FieldRef = nil
}
return nil
}
func deepCopy_v1_Event(in Event, out *Event, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectReference(in.InvolvedObject, &out.InvolvedObject, c); err != nil {
return err
}
out.Reason = in.Reason
out.Message = in.Message
if err := deepCopy_v1_EventSource(in.Source, &out.Source, c); err != nil {
return err
}
if err := deepCopy_util_Time(in.FirstTimestamp, &out.FirstTimestamp, c); err != nil {
return err
}
if err := deepCopy_util_Time(in.LastTimestamp, &out.LastTimestamp, c); err != nil {
return err
}
out.Count = in.Count
return nil
}
func deepCopy_v1_EventList(in EventList, out *EventList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]Event, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_Event(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_EventSource(in EventSource, out *EventSource, c *conversion.Cloner) error {
out.Component = in.Component
out.Host = in.Host
return nil
}
func deepCopy_v1_ExecAction(in ExecAction, out *ExecAction, c *conversion.Cloner) error {
if in.Command != nil {
out.Command = make([]string, len(in.Command))
for i := range in.Command {
out.Command[i] = in.Command[i]
}
} else {
out.Command = nil
}
return nil
}
func deepCopy_v1_GCEPersistentDiskVolumeSource(in GCEPersistentDiskVolumeSource, out *GCEPersistentDiskVolumeSource, c *conversion.Cloner) error {
out.PDName = in.PDName
out.FSType = in.FSType
out.Partition = in.Partition
out.ReadOnly = in.ReadOnly
return nil
}
func deepCopy_v1_GitRepoVolumeSource(in GitRepoVolumeSource, out *GitRepoVolumeSource, c *conversion.Cloner) error {
out.Repository = in.Repository
out.Revision = in.Revision
return nil
}
func deepCopy_v1_GlusterfsVolumeSource(in GlusterfsVolumeSource, out *GlusterfsVolumeSource, c *conversion.Cloner) error {
out.EndpointsName = in.EndpointsName
out.Path = in.Path
out.ReadOnly = in.ReadOnly
return nil
}
func deepCopy_v1_HTTPGetAction(in HTTPGetAction, out *HTTPGetAction, c *conversion.Cloner) error {
out.Path = in.Path
if err := deepCopy_util_IntOrString(in.Port, &out.Port, c); err != nil {
return err
}
out.Host = in.Host
out.Scheme = in.Scheme
return nil
}
func deepCopy_v1_Handler(in Handler, out *Handler, c *conversion.Cloner) error {
if in.Exec != nil {
out.Exec = new(ExecAction)
if err := deepCopy_v1_ExecAction(*in.Exec, out.Exec, c); err != nil {
return err
}
} else {
out.Exec = nil
}
if in.HTTPGet != nil {
out.HTTPGet = new(HTTPGetAction)
if err := deepCopy_v1_HTTPGetAction(*in.HTTPGet, out.HTTPGet, c); err != nil {
return err
}
} else {
out.HTTPGet = nil
}
if in.TCPSocket != nil {
out.TCPSocket = new(TCPSocketAction)
if err := deepCopy_v1_TCPSocketAction(*in.TCPSocket, out.TCPSocket, c); err != nil {
return err
}
} else {
out.TCPSocket = nil
}
return nil
}
func deepCopy_v1_HostPathVolumeSource(in HostPathVolumeSource, out *HostPathVolumeSource, c *conversion.Cloner) error {
out.Path = in.Path
return nil
}
func deepCopy_v1_ISCSIVolumeSource(in ISCSIVolumeSource, out *ISCSIVolumeSource, c *conversion.Cloner) error {
out.TargetPortal = in.TargetPortal
out.IQN = in.IQN
out.Lun = in.Lun
out.FSType = in.FSType
out.ReadOnly = in.ReadOnly
return nil
}
func deepCopy_v1_Lifecycle(in Lifecycle, out *Lifecycle, c *conversion.Cloner) error {
if in.PostStart != nil {
out.PostStart = new(Handler)
if err := deepCopy_v1_Handler(*in.PostStart, out.PostStart, c); err != nil {
return err
}
} else {
out.PostStart = nil
}
if in.PreStop != nil {
out.PreStop = new(Handler)
if err := deepCopy_v1_Handler(*in.PreStop, out.PreStop, c); err != nil {
return err
}
} else {
out.PreStop = nil
}
return nil
}
func deepCopy_v1_LimitRange(in LimitRange, out *LimitRange, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_LimitRangeSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_LimitRangeItem(in LimitRangeItem, out *LimitRangeItem, c *conversion.Cloner) error {
out.Type = in.Type
if in.Max != nil {
out.Max = make(ResourceList)
for key, val := range in.Max {
newVal := new(resource.Quantity)
if err := deepCopy_resource_Quantity(val, newVal, c); err != nil {
return err
}
out.Max[key] = *newVal
}
} else {
out.Max = nil
}
if in.Min != nil {
out.Min = make(ResourceList)
for key, val := range in.Min {
newVal := new(resource.Quantity)
if err := deepCopy_resource_Quantity(val, newVal, c); err != nil {
return err
}
out.Min[key] = *newVal
}
} else {
out.Min = nil
}
if in.Default != nil {
out.Default = make(ResourceList)
for key, val := range in.Default {
newVal := new(resource.Quantity)
if err := deepCopy_resource_Quantity(val, newVal, c); err != nil {
return err
}
out.Default[key] = *newVal
}
} else {
out.Default = nil
}
return nil
}
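// Note why the ResourceList maps above are not copied with a plain
// assignment: each resource.Quantity value carries an *inf.Dec pointer,
// so every entry is round-tripped through deepCopy_resource_Quantity to
// avoid aliasing the underlying Amount between source and copy.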
func deepCopy_v1_LimitRangeList(in LimitRangeList, out *LimitRangeList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]LimitRange, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_LimitRange(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_LimitRangeSpec(in LimitRangeSpec, out *LimitRangeSpec, c *conversion.Cloner) error {
if in.Limits != nil {
out.Limits = make([]LimitRangeItem, len(in.Limits))
for i := range in.Limits {
if err := deepCopy_v1_LimitRangeItem(in.Limits[i], &out.Limits[i], c); err != nil {
return err
}
}
} else {
out.Limits = nil
}
return nil
}
func deepCopy_v1_List(in List, out *List, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]runtime.RawExtension, len(in.Items))
for i := range in.Items {
if err := deepCopy_runtime_RawExtension(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_ListMeta(in ListMeta, out *ListMeta, c *conversion.Cloner) error {
out.SelfLink = in.SelfLink
out.ResourceVersion = in.ResourceVersion
return nil
}
func deepCopy_v1_ListOptions(in ListOptions, out *ListOptions, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
out.LabelSelector = in.LabelSelector
out.FieldSelector = in.FieldSelector
out.Watch = in.Watch
out.ResourceVersion = in.ResourceVersion
return nil
}
func deepCopy_v1_LoadBalancerIngress(in LoadBalancerIngress, out *LoadBalancerIngress, c *conversion.Cloner) error {
out.IP = in.IP
out.Hostname = in.Hostname
return nil
}
func deepCopy_v1_LoadBalancerStatus(in LoadBalancerStatus, out *LoadBalancerStatus, c *conversion.Cloner) error {
if in.Ingress != nil {
out.Ingress = make([]LoadBalancerIngress, len(in.Ingress))
for i := range in.Ingress {
if err := deepCopy_v1_LoadBalancerIngress(in.Ingress[i], &out.Ingress[i], c); err != nil {
return err
}
}
} else {
out.Ingress = nil
}
return nil
}
func deepCopy_v1_LocalObjectReference(in LocalObjectReference, out *LocalObjectReference, c *conversion.Cloner) error {
out.Name = in.Name
return nil
}
func deepCopy_v1_NFSVolumeSource(in NFSVolumeSource, out *NFSVolumeSource, c *conversion.Cloner) error {
out.Server = in.Server
out.Path = in.Path
out.ReadOnly = in.ReadOnly
return nil
}
func deepCopy_v1_Namespace(in Namespace, out *Namespace, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_NamespaceSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
if err := deepCopy_v1_NamespaceStatus(in.Status, &out.Status, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_NamespaceList(in NamespaceList, out *NamespaceList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]Namespace, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_Namespace(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_NamespaceSpec(in NamespaceSpec, out *NamespaceSpec, c *conversion.Cloner) error {
if in.Finalizers != nil {
out.Finalizers = make([]FinalizerName, len(in.Finalizers))
for i := range in.Finalizers {
out.Finalizers[i] = in.Finalizers[i]
}
} else {
out.Finalizers = nil
}
return nil
}
func deepCopy_v1_NamespaceStatus(in NamespaceStatus, out *NamespaceStatus, c *conversion.Cloner) error {
out.Phase = in.Phase
return nil
}
func deepCopy_v1_Node(in Node, out *Node, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_NodeSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
if err := deepCopy_v1_NodeStatus(in.Status, &out.Status, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_NodeAddress(in NodeAddress, out *NodeAddress, c *conversion.Cloner) error {
out.Type = in.Type
out.Address = in.Address
return nil
}
func deepCopy_v1_NodeCondition(in NodeCondition, out *NodeCondition, c *conversion.Cloner) error {
out.Type = in.Type
out.Status = in.Status
if err := deepCopy_util_Time(in.LastHeartbeatTime, &out.LastHeartbeatTime, c); err != nil {
return err
}
if err := deepCopy_util_Time(in.LastTransitionTime, &out.LastTransitionTime, c); err != nil {
return err
}
out.Reason = in.Reason
out.Message = in.Message
return nil
}
func deepCopy_v1_NodeList(in NodeList, out *NodeList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]Node, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_Node(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_NodeSpec(in NodeSpec, out *NodeSpec, c *conversion.Cloner) error {
out.PodCIDR = in.PodCIDR
out.ExternalID = in.ExternalID
out.ProviderID = in.ProviderID
out.Unschedulable = in.Unschedulable
return nil
}
func deepCopy_v1_NodeStatus(in NodeStatus, out *NodeStatus, c *conversion.Cloner) error {
if in.Capacity != nil {
out.Capacity = make(ResourceList)
for key, val := range in.Capacity {
newVal := new(resource.Quantity)
if err := deepCopy_resource_Quantity(val, newVal, c); err != nil {
return err
}
out.Capacity[key] = *newVal
}
} else {
out.Capacity = nil
}
out.Phase = in.Phase
if in.Conditions != nil {
out.Conditions = make([]NodeCondition, len(in.Conditions))
for i := range in.Conditions {
if err := deepCopy_v1_NodeCondition(in.Conditions[i], &out.Conditions[i], c); err != nil {
return err
}
}
} else {
out.Conditions = nil
}
if in.Addresses != nil {
out.Addresses = make([]NodeAddress, len(in.Addresses))
for i := range in.Addresses {
if err := deepCopy_v1_NodeAddress(in.Addresses[i], &out.Addresses[i], c); err != nil {
return err
}
}
} else {
out.Addresses = nil
}
if err := deepCopy_v1_NodeSystemInfo(in.NodeInfo, &out.NodeInfo, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_NodeSystemInfo(in NodeSystemInfo, out *NodeSystemInfo, c *conversion.Cloner) error {
out.MachineID = in.MachineID
out.SystemUUID = in.SystemUUID
out.BootID = in.BootID
out.KernelVersion = in.KernelVersion
out.OsImage = in.OsImage
out.ContainerRuntimeVersion = in.ContainerRuntimeVersion
out.KubeletVersion = in.KubeletVersion
out.KubeProxyVersion = in.KubeProxyVersion
return nil
}
func deepCopy_v1_ObjectFieldSelector(in ObjectFieldSelector, out *ObjectFieldSelector, c *conversion.Cloner) error {
out.APIVersion = in.APIVersion
out.FieldPath = in.FieldPath
return nil
}
func deepCopy_v1_ObjectMeta(in ObjectMeta, out *ObjectMeta, c *conversion.Cloner) error {
out.Name = in.Name
out.GenerateName = in.GenerateName
out.Namespace = in.Namespace
out.SelfLink = in.SelfLink
out.UID = in.UID
out.ResourceVersion = in.ResourceVersion
out.Generation = in.Generation
if err := deepCopy_util_Time(in.CreationTimestamp, &out.CreationTimestamp, c); err != nil {
return err
}
if in.DeletionTimestamp != nil {
out.DeletionTimestamp = new(util.Time)
if err := deepCopy_util_Time(*in.DeletionTimestamp, out.DeletionTimestamp, c); err != nil {
return err
}
} else {
out.DeletionTimestamp = nil
}
if in.DeletionGracePeriodSeconds != nil {
out.DeletionGracePeriodSeconds = new(int64)
*out.DeletionGracePeriodSeconds = *in.DeletionGracePeriodSeconds
} else {
out.DeletionGracePeriodSeconds = nil
}
if in.Labels != nil {
out.Labels = make(map[string]string)
for key, val := range in.Labels {
out.Labels[key] = val
}
} else {
out.Labels = nil
}
if in.Annotations != nil {
out.Annotations = make(map[string]string)
for key, val := range in.Annotations {
out.Annotations[key] = val
}
} else {
out.Annotations = nil
}
return nil
}
func deepCopy_v1_ObjectReference(in ObjectReference, out *ObjectReference, c *conversion.Cloner) error {
out.Kind = in.Kind
out.Namespace = in.Namespace
out.Name = in.Name
out.UID = in.UID
out.APIVersion = in.APIVersion
out.ResourceVersion = in.ResourceVersion
out.FieldPath = in.FieldPath
return nil
}
func deepCopy_v1_PersistentVolume(in PersistentVolume, out *PersistentVolume, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_PersistentVolumeSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
if err := deepCopy_v1_PersistentVolumeStatus(in.Status, &out.Status, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_PersistentVolumeClaim(in PersistentVolumeClaim, out *PersistentVolumeClaim, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_PersistentVolumeClaimSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
if err := deepCopy_v1_PersistentVolumeClaimStatus(in.Status, &out.Status, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_PersistentVolumeClaimList(in PersistentVolumeClaimList, out *PersistentVolumeClaimList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]PersistentVolumeClaim, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_PersistentVolumeClaim(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_PersistentVolumeClaimSpec(in PersistentVolumeClaimSpec, out *PersistentVolumeClaimSpec, c *conversion.Cloner) error {
if in.AccessModes != nil {
out.AccessModes = make([]PersistentVolumeAccessMode, len(in.AccessModes))
for i := range in.AccessModes {
out.AccessModes[i] = in.AccessModes[i]
}
} else {
out.AccessModes = nil
}
if err := deepCopy_v1_ResourceRequirements(in.Resources, &out.Resources, c); err != nil {
return err
}
out.VolumeName = in.VolumeName
return nil
}
func deepCopy_v1_PersistentVolumeClaimStatus(in PersistentVolumeClaimStatus, out *PersistentVolumeClaimStatus, c *conversion.Cloner) error {
out.Phase = in.Phase
if in.AccessModes != nil {
out.AccessModes = make([]PersistentVolumeAccessMode, len(in.AccessModes))
for i := range in.AccessModes {
out.AccessModes[i] = in.AccessModes[i]
}
} else {
out.AccessModes = nil
}
if in.Capacity != nil {
out.Capacity = make(ResourceList)
for key, val := range in.Capacity {
newVal := new(resource.Quantity)
if err := deepCopy_resource_Quantity(val, newVal, c); err != nil {
return err
}
out.Capacity[key] = *newVal
}
} else {
out.Capacity = nil
}
return nil
}
func deepCopy_v1_PersistentVolumeClaimVolumeSource(in PersistentVolumeClaimVolumeSource, out *PersistentVolumeClaimVolumeSource, c *conversion.Cloner) error {
out.ClaimName = in.ClaimName
out.ReadOnly = in.ReadOnly
return nil
}
func deepCopy_v1_PersistentVolumeList(in PersistentVolumeList, out *PersistentVolumeList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]PersistentVolume, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_PersistentVolume(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_PersistentVolumeSource(in PersistentVolumeSource, out *PersistentVolumeSource, c *conversion.Cloner) error {
if in.GCEPersistentDisk != nil {
out.GCEPersistentDisk = new(GCEPersistentDiskVolumeSource)
if err := deepCopy_v1_GCEPersistentDiskVolumeSource(*in.GCEPersistentDisk, out.GCEPersistentDisk, c); err != nil {
return err
}
} else {
out.GCEPersistentDisk = nil
}
if in.AWSElasticBlockStore != nil {
out.AWSElasticBlockStore = new(AWSElasticBlockStoreVolumeSource)
if err := deepCopy_v1_AWSElasticBlockStoreVolumeSource(*in.AWSElasticBlockStore, out.AWSElasticBlockStore, c); err != nil {
return err
}
} else {
out.AWSElasticBlockStore = nil
}
if in.HostPath != nil {
out.HostPath = new(HostPathVolumeSource)
if err := deepCopy_v1_HostPathVolumeSource(*in.HostPath, out.HostPath, c); err != nil {
return err
}
} else {
out.HostPath = nil
}
if in.Glusterfs != nil {
out.Glusterfs = new(GlusterfsVolumeSource)
if err := deepCopy_v1_GlusterfsVolumeSource(*in.Glusterfs, out.Glusterfs, c); err != nil {
return err
}
} else {
out.Glusterfs = nil
}
if in.NFS != nil {
out.NFS = new(NFSVolumeSource)
if err := deepCopy_v1_NFSVolumeSource(*in.NFS, out.NFS, c); err != nil {
return err
}
} else {
out.NFS = nil
}
if in.RBD != nil {
out.RBD = new(RBDVolumeSource)<|fim▁hole|> } else {
out.RBD = nil
}
if in.ISCSI != nil {
out.ISCSI = new(ISCSIVolumeSource)
if err := deepCopy_v1_ISCSIVolumeSource(*in.ISCSI, out.ISCSI, c); err != nil {
return err
}
} else {
out.ISCSI = nil
}
return nil
}
func deepCopy_v1_PersistentVolumeSpec(in PersistentVolumeSpec, out *PersistentVolumeSpec, c *conversion.Cloner) error {
if in.Capacity != nil {
out.Capacity = make(ResourceList)
for key, val := range in.Capacity {
newVal := new(resource.Quantity)
if err := deepCopy_resource_Quantity(val, newVal, c); err != nil {
return err
}
out.Capacity[key] = *newVal
}
} else {
out.Capacity = nil
}
if err := deepCopy_v1_PersistentVolumeSource(in.PersistentVolumeSource, &out.PersistentVolumeSource, c); err != nil {
return err
}
if in.AccessModes != nil {
out.AccessModes = make([]PersistentVolumeAccessMode, len(in.AccessModes))
for i := range in.AccessModes {
out.AccessModes[i] = in.AccessModes[i]
}
} else {
out.AccessModes = nil
}
if in.ClaimRef != nil {
out.ClaimRef = new(ObjectReference)
if err := deepCopy_v1_ObjectReference(*in.ClaimRef, out.ClaimRef, c); err != nil {
return err
}
} else {
out.ClaimRef = nil
}
out.PersistentVolumeReclaimPolicy = in.PersistentVolumeReclaimPolicy
return nil
}
func deepCopy_v1_PersistentVolumeStatus(in PersistentVolumeStatus, out *PersistentVolumeStatus, c *conversion.Cloner) error {
out.Phase = in.Phase
out.Message = in.Message
out.Reason = in.Reason
return nil
}
func deepCopy_v1_Pod(in Pod, out *Pod, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_PodSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
if err := deepCopy_v1_PodStatus(in.Status, &out.Status, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_PodAttachOptions(in PodAttachOptions, out *PodAttachOptions, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
out.Stdin = in.Stdin
out.Stdout = in.Stdout
out.Stderr = in.Stderr
out.TTY = in.TTY
out.Container = in.Container
return nil
}
func deepCopy_v1_PodCondition(in PodCondition, out *PodCondition, c *conversion.Cloner) error {
out.Type = in.Type
out.Status = in.Status
return nil
}
func deepCopy_v1_PodExecOptions(in PodExecOptions, out *PodExecOptions, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
out.Stdin = in.Stdin
out.Stdout = in.Stdout
out.Stderr = in.Stderr
out.TTY = in.TTY
out.Container = in.Container
if in.Command != nil {
out.Command = make([]string, len(in.Command))
for i := range in.Command {
out.Command[i] = in.Command[i]
}
} else {
out.Command = nil
}
return nil
}
func deepCopy_v1_PodList(in PodList, out *PodList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]Pod, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_Pod(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_PodLogOptions(in PodLogOptions, out *PodLogOptions, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
out.Container = in.Container
out.Follow = in.Follow
out.Previous = in.Previous
return nil
}
func deepCopy_v1_PodProxyOptions(in PodProxyOptions, out *PodProxyOptions, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
out.Path = in.Path
return nil
}
func deepCopy_v1_PodSpec(in PodSpec, out *PodSpec, c *conversion.Cloner) error {
if in.Volumes != nil {
out.Volumes = make([]Volume, len(in.Volumes))
for i := range in.Volumes {
if err := deepCopy_v1_Volume(in.Volumes[i], &out.Volumes[i], c); err != nil {
return err
}
}
} else {
out.Volumes = nil
}
if in.Containers != nil {
out.Containers = make([]Container, len(in.Containers))
for i := range in.Containers {
if err := deepCopy_v1_Container(in.Containers[i], &out.Containers[i], c); err != nil {
return err
}
}
} else {
out.Containers = nil
}
out.RestartPolicy = in.RestartPolicy
if in.TerminationGracePeriodSeconds != nil {
out.TerminationGracePeriodSeconds = new(int64)
*out.TerminationGracePeriodSeconds = *in.TerminationGracePeriodSeconds
} else {
out.TerminationGracePeriodSeconds = nil
}
if in.ActiveDeadlineSeconds != nil {
out.ActiveDeadlineSeconds = new(int64)
*out.ActiveDeadlineSeconds = *in.ActiveDeadlineSeconds
} else {
out.ActiveDeadlineSeconds = nil
}
out.DNSPolicy = in.DNSPolicy
if in.NodeSelector != nil {
out.NodeSelector = make(map[string]string)
for key, val := range in.NodeSelector {
out.NodeSelector[key] = val
}
} else {
out.NodeSelector = nil
}
out.ServiceAccountName = in.ServiceAccountName
out.DeprecatedServiceAccount = in.DeprecatedServiceAccount
out.NodeName = in.NodeName
out.HostNetwork = in.HostNetwork
if in.ImagePullSecrets != nil {
out.ImagePullSecrets = make([]LocalObjectReference, len(in.ImagePullSecrets))
for i := range in.ImagePullSecrets {
if err := deepCopy_v1_LocalObjectReference(in.ImagePullSecrets[i], &out.ImagePullSecrets[i], c); err != nil {
return err
}
}
} else {
out.ImagePullSecrets = nil
}
return nil
}
func deepCopy_v1_PodStatus(in PodStatus, out *PodStatus, c *conversion.Cloner) error {
out.Phase = in.Phase
if in.Conditions != nil {
out.Conditions = make([]PodCondition, len(in.Conditions))
for i := range in.Conditions {
if err := deepCopy_v1_PodCondition(in.Conditions[i], &out.Conditions[i], c); err != nil {
return err
}
}
} else {
out.Conditions = nil
}
out.Message = in.Message
out.Reason = in.Reason
out.HostIP = in.HostIP
out.PodIP = in.PodIP
if in.StartTime != nil {
out.StartTime = new(util.Time)
if err := deepCopy_util_Time(*in.StartTime, out.StartTime, c); err != nil {
return err
}
} else {
out.StartTime = nil
}
if in.ContainerStatuses != nil {
out.ContainerStatuses = make([]ContainerStatus, len(in.ContainerStatuses))
for i := range in.ContainerStatuses {
if err := deepCopy_v1_ContainerStatus(in.ContainerStatuses[i], &out.ContainerStatuses[i], c); err != nil {
return err
}
}
} else {
out.ContainerStatuses = nil
}
return nil
}
func deepCopy_v1_PodStatusResult(in PodStatusResult, out *PodStatusResult, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_PodStatus(in.Status, &out.Status, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_PodTemplate(in PodTemplate, out *PodTemplate, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_PodTemplateSpec(in.Template, &out.Template, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_PodTemplateList(in PodTemplateList, out *PodTemplateList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]PodTemplate, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_PodTemplate(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_PodTemplateSpec(in PodTemplateSpec, out *PodTemplateSpec, c *conversion.Cloner) error {
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_PodSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_Probe(in Probe, out *Probe, c *conversion.Cloner) error {
if err := deepCopy_v1_Handler(in.Handler, &out.Handler, c); err != nil {
return err
}
out.InitialDelaySeconds = in.InitialDelaySeconds
out.TimeoutSeconds = in.TimeoutSeconds
return nil
}
func deepCopy_v1_RBDVolumeSource(in RBDVolumeSource, out *RBDVolumeSource, c *conversion.Cloner) error {
if in.CephMonitors != nil {
out.CephMonitors = make([]string, len(in.CephMonitors))
for i := range in.CephMonitors {
out.CephMonitors[i] = in.CephMonitors[i]
}
} else {
out.CephMonitors = nil
}
out.RBDImage = in.RBDImage
out.FSType = in.FSType
out.RBDPool = in.RBDPool
out.RadosUser = in.RadosUser
out.Keyring = in.Keyring
if in.SecretRef != nil {
out.SecretRef = new(LocalObjectReference)
if err := deepCopy_v1_LocalObjectReference(*in.SecretRef, out.SecretRef, c); err != nil {
return err
}
} else {
out.SecretRef = nil
}
out.ReadOnly = in.ReadOnly
return nil
}
func deepCopy_v1_RangeAllocation(in RangeAllocation, out *RangeAllocation, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
out.Range = in.Range
if in.Data != nil {
out.Data = make([]uint8, len(in.Data))
for i := range in.Data {
out.Data[i] = in.Data[i]
}
} else {
out.Data = nil
}
return nil
}
func deepCopy_v1_ReplicationController(in ReplicationController, out *ReplicationController, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ReplicationControllerSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
if err := deepCopy_v1_ReplicationControllerStatus(in.Status, &out.Status, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_ReplicationControllerList(in ReplicationControllerList, out *ReplicationControllerList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]ReplicationController, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_ReplicationController(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_ReplicationControllerSpec(in ReplicationControllerSpec, out *ReplicationControllerSpec, c *conversion.Cloner) error {
if in.Replicas != nil {
out.Replicas = new(int)
*out.Replicas = *in.Replicas
} else {
out.Replicas = nil
}
if in.Selector != nil {
out.Selector = make(map[string]string)
for key, val := range in.Selector {
out.Selector[key] = val
}
} else {
out.Selector = nil
}
if in.Template != nil {
out.Template = new(PodTemplateSpec)
if err := deepCopy_v1_PodTemplateSpec(*in.Template, out.Template, c); err != nil {
return err
}
} else {
out.Template = nil
}
return nil
}
func deepCopy_v1_ReplicationControllerStatus(in ReplicationControllerStatus, out *ReplicationControllerStatus, c *conversion.Cloner) error {
out.Replicas = in.Replicas
out.ObservedGeneration = in.ObservedGeneration
return nil
}
func deepCopy_v1_ResourceQuota(in ResourceQuota, out *ResourceQuota, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ResourceQuotaSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
if err := deepCopy_v1_ResourceQuotaStatus(in.Status, &out.Status, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_ResourceQuotaList(in ResourceQuotaList, out *ResourceQuotaList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]ResourceQuota, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_ResourceQuota(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_ResourceQuotaSpec(in ResourceQuotaSpec, out *ResourceQuotaSpec, c *conversion.Cloner) error {
if in.Hard != nil {
out.Hard = make(ResourceList)
for key, val := range in.Hard {
newVal := new(resource.Quantity)
if err := deepCopy_resource_Quantity(val, newVal, c); err != nil {
return err
}
out.Hard[key] = *newVal
}
} else {
out.Hard = nil
}
return nil
}
func deepCopy_v1_ResourceQuotaStatus(in ResourceQuotaStatus, out *ResourceQuotaStatus, c *conversion.Cloner) error {
if in.Hard != nil {
out.Hard = make(ResourceList)
for key, val := range in.Hard {
newVal := new(resource.Quantity)
if err := deepCopy_resource_Quantity(val, newVal, c); err != nil {
return err
}
out.Hard[key] = *newVal
}
} else {
out.Hard = nil
}
if in.Used != nil {
out.Used = make(ResourceList)
for key, val := range in.Used {
newVal := new(resource.Quantity)
if err := deepCopy_resource_Quantity(val, newVal, c); err != nil {
return err
}
out.Used[key] = *newVal
}
} else {
out.Used = nil
}
return nil
}
func deepCopy_v1_ResourceRequirements(in ResourceRequirements, out *ResourceRequirements, c *conversion.Cloner) error {
if in.Limits != nil {
out.Limits = make(ResourceList)
for key, val := range in.Limits {
newVal := new(resource.Quantity)
if err := deepCopy_resource_Quantity(val, newVal, c); err != nil {
return err
}
out.Limits[key] = *newVal
}
} else {
out.Limits = nil
}
if in.Requests != nil {
out.Requests = make(ResourceList)
for key, val := range in.Requests {
newVal := new(resource.Quantity)
if err := deepCopy_resource_Quantity(val, newVal, c); err != nil {
return err
}
out.Requests[key] = *newVal
}
} else {
out.Requests = nil
}
return nil
}
func deepCopy_v1_SELinuxOptions(in SELinuxOptions, out *SELinuxOptions, c *conversion.Cloner) error {
out.User = in.User
out.Role = in.Role
out.Type = in.Type
out.Level = in.Level
return nil
}
func deepCopy_v1_Secret(in Secret, out *Secret, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if in.Data != nil {
out.Data = make(map[string][]uint8)
for key, val := range in.Data {
if newVal, err := c.DeepCopy(val); err != nil {
return err
} else {
out.Data[key] = newVal.([]uint8)
}
}
} else {
out.Data = nil
}
out.Type = in.Type
return nil
}
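// One asymmetry worth noting above: the []uint8 values inside Secret.Data
// fall back to the generic reflection-based c.DeepCopy because the
// generator emits no typed helper for raw byte slices, unlike the typed
// helpers used for every other nested type in this file.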
func deepCopy_v1_SecretList(in SecretList, out *SecretList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]Secret, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_Secret(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_SecretVolumeSource(in SecretVolumeSource, out *SecretVolumeSource, c *conversion.Cloner) error {
out.SecretName = in.SecretName
return nil
}
func deepCopy_v1_SecurityContext(in SecurityContext, out *SecurityContext, c *conversion.Cloner) error {
if in.Capabilities != nil {
out.Capabilities = new(Capabilities)
if err := deepCopy_v1_Capabilities(*in.Capabilities, out.Capabilities, c); err != nil {
return err
}
} else {
out.Capabilities = nil
}
if in.Privileged != nil {
out.Privileged = new(bool)
*out.Privileged = *in.Privileged
} else {
out.Privileged = nil
}
if in.SELinuxOptions != nil {
out.SELinuxOptions = new(SELinuxOptions)
if err := deepCopy_v1_SELinuxOptions(*in.SELinuxOptions, out.SELinuxOptions, c); err != nil {
return err
}
} else {
out.SELinuxOptions = nil
}
if in.RunAsUser != nil {
out.RunAsUser = new(int64)
*out.RunAsUser = *in.RunAsUser
} else {
out.RunAsUser = nil
}
out.RunAsNonRoot = in.RunAsNonRoot
return nil
}
func deepCopy_v1_SerializedReference(in SerializedReference, out *SerializedReference, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectReference(in.Reference, &out.Reference, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_Service(in Service, out *Service, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ServiceSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
if err := deepCopy_v1_ServiceStatus(in.Status, &out.Status, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_ServiceAccount(in ServiceAccount, out *ServiceAccount, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if in.Secrets != nil {
out.Secrets = make([]ObjectReference, len(in.Secrets))
for i := range in.Secrets {
if err := deepCopy_v1_ObjectReference(in.Secrets[i], &out.Secrets[i], c); err != nil {
return err
}
}
} else {
out.Secrets = nil
}
if in.ImagePullSecrets != nil {
out.ImagePullSecrets = make([]LocalObjectReference, len(in.ImagePullSecrets))
for i := range in.ImagePullSecrets {
if err := deepCopy_v1_LocalObjectReference(in.ImagePullSecrets[i], &out.ImagePullSecrets[i], c); err != nil {
return err
}
}
} else {
out.ImagePullSecrets = nil
}
return nil
}
func deepCopy_v1_ServiceAccountList(in ServiceAccountList, out *ServiceAccountList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]ServiceAccount, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_ServiceAccount(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_ServiceList(in ServiceList, out *ServiceList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]Service, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_Service(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_ServicePort(in ServicePort, out *ServicePort, c *conversion.Cloner) error {
out.Name = in.Name
out.Protocol = in.Protocol
out.Port = in.Port
if err := deepCopy_util_IntOrString(in.TargetPort, &out.TargetPort, c); err != nil {
return err
}
out.NodePort = in.NodePort
return nil
}
func deepCopy_v1_ServiceSpec(in ServiceSpec, out *ServiceSpec, c *conversion.Cloner) error {
if in.Ports != nil {
out.Ports = make([]ServicePort, len(in.Ports))
for i := range in.Ports {
if err := deepCopy_v1_ServicePort(in.Ports[i], &out.Ports[i], c); err != nil {
return err
}
}
} else {
out.Ports = nil
}
if in.Selector != nil {
out.Selector = make(map[string]string)
for key, val := range in.Selector {
out.Selector[key] = val
}
} else {
out.Selector = nil
}
out.ClusterIP = in.ClusterIP
out.Type = in.Type
if in.ExternalIPs != nil {
out.ExternalIPs = make([]string, len(in.ExternalIPs))
for i := range in.ExternalIPs {
out.ExternalIPs[i] = in.ExternalIPs[i]
}
} else {
out.ExternalIPs = nil
}
out.SessionAffinity = in.SessionAffinity
return nil
}
func deepCopy_v1_ServiceStatus(in ServiceStatus, out *ServiceStatus, c *conversion.Cloner) error {
if err := deepCopy_v1_LoadBalancerStatus(in.LoadBalancer, &out.LoadBalancer, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_Status(in Status, out *Status, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
out.Status = in.Status
out.Message = in.Message
out.Reason = in.Reason
if in.Details != nil {
out.Details = new(StatusDetails)
if err := deepCopy_v1_StatusDetails(*in.Details, out.Details, c); err != nil {
return err
}
} else {
out.Details = nil
}
out.Code = in.Code
return nil
}
func deepCopy_v1_StatusCause(in StatusCause, out *StatusCause, c *conversion.Cloner) error {
out.Type = in.Type
out.Message = in.Message
out.Field = in.Field
return nil
}
func deepCopy_v1_StatusDetails(in StatusDetails, out *StatusDetails, c *conversion.Cloner) error {
out.Name = in.Name
out.Kind = in.Kind
if in.Causes != nil {
out.Causes = make([]StatusCause, len(in.Causes))
for i := range in.Causes {
if err := deepCopy_v1_StatusCause(in.Causes[i], &out.Causes[i], c); err != nil {
return err
}
}
} else {
out.Causes = nil
}
out.RetryAfterSeconds = in.RetryAfterSeconds
return nil
}
func deepCopy_v1_TCPSocketAction(in TCPSocketAction, out *TCPSocketAction, c *conversion.Cloner) error {
if err := deepCopy_util_IntOrString(in.Port, &out.Port, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_ThirdPartyResource(in ThirdPartyResource, out *ThirdPartyResource, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
out.Description = in.Description
if in.Versions != nil {
out.Versions = make([]APIVersion, len(in.Versions))
for i := range in.Versions {
if err := deepCopy_v1_APIVersion(in.Versions[i], &out.Versions[i], c); err != nil {
return err
}
}
} else {
out.Versions = nil
}
return nil
}
func deepCopy_v1_ThirdPartyResourceData(in ThirdPartyResourceData, out *ThirdPartyResourceData, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if in.Data != nil {
out.Data = make([]uint8, len(in.Data))
for i := range in.Data {
out.Data[i] = in.Data[i]
}
} else {
out.Data = nil
}
return nil
}
func deepCopy_v1_ThirdPartyResourceList(in ThirdPartyResourceList, out *ThirdPartyResourceList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]ThirdPartyResource, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_ThirdPartyResource(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_TypeMeta(in TypeMeta, out *TypeMeta, c *conversion.Cloner) error {
out.Kind = in.Kind
out.APIVersion = in.APIVersion
return nil
}
func deepCopy_v1_Volume(in Volume, out *Volume, c *conversion.Cloner) error {
out.Name = in.Name
if err := deepCopy_v1_VolumeSource(in.VolumeSource, &out.VolumeSource, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_VolumeMount(in VolumeMount, out *VolumeMount, c *conversion.Cloner) error {
out.Name = in.Name
out.ReadOnly = in.ReadOnly
out.MountPath = in.MountPath
return nil
}
func deepCopy_v1_VolumeSource(in VolumeSource, out *VolumeSource, c *conversion.Cloner) error {
if in.HostPath != nil {
out.HostPath = new(HostPathVolumeSource)
if err := deepCopy_v1_HostPathVolumeSource(*in.HostPath, out.HostPath, c); err != nil {
return err
}
} else {
out.HostPath = nil
}
if in.EmptyDir != nil {
out.EmptyDir = new(EmptyDirVolumeSource)
if err := deepCopy_v1_EmptyDirVolumeSource(*in.EmptyDir, out.EmptyDir, c); err != nil {
return err
}
} else {
out.EmptyDir = nil
}
if in.GCEPersistentDisk != nil {
out.GCEPersistentDisk = new(GCEPersistentDiskVolumeSource)
if err := deepCopy_v1_GCEPersistentDiskVolumeSource(*in.GCEPersistentDisk, out.GCEPersistentDisk, c); err != nil {
return err
}
} else {
out.GCEPersistentDisk = nil
}
if in.AWSElasticBlockStore != nil {
out.AWSElasticBlockStore = new(AWSElasticBlockStoreVolumeSource)
if err := deepCopy_v1_AWSElasticBlockStoreVolumeSource(*in.AWSElasticBlockStore, out.AWSElasticBlockStore, c); err != nil {
return err
}
} else {
out.AWSElasticBlockStore = nil
}
if in.GitRepo != nil {
out.GitRepo = new(GitRepoVolumeSource)
if err := deepCopy_v1_GitRepoVolumeSource(*in.GitRepo, out.GitRepo, c); err != nil {
return err
}
} else {
out.GitRepo = nil
}
if in.Secret != nil {
out.Secret = new(SecretVolumeSource)
if err := deepCopy_v1_SecretVolumeSource(*in.Secret, out.Secret, c); err != nil {
return err
}
} else {
out.Secret = nil
}
if in.NFS != nil {
out.NFS = new(NFSVolumeSource)
if err := deepCopy_v1_NFSVolumeSource(*in.NFS, out.NFS, c); err != nil {
return err
}
} else {
out.NFS = nil
}
if in.ISCSI != nil {
out.ISCSI = new(ISCSIVolumeSource)
if err := deepCopy_v1_ISCSIVolumeSource(*in.ISCSI, out.ISCSI, c); err != nil {
return err
}
} else {
out.ISCSI = nil
}
if in.Glusterfs != nil {
out.Glusterfs = new(GlusterfsVolumeSource)
if err := deepCopy_v1_GlusterfsVolumeSource(*in.Glusterfs, out.Glusterfs, c); err != nil {
return err
}
} else {
out.Glusterfs = nil
}
if in.PersistentVolumeClaim != nil {
out.PersistentVolumeClaim = new(PersistentVolumeClaimVolumeSource)
if err := deepCopy_v1_PersistentVolumeClaimVolumeSource(*in.PersistentVolumeClaim, out.PersistentVolumeClaim, c); err != nil {
return err
}
} else {
out.PersistentVolumeClaim = nil
}
if in.RBD != nil {
out.RBD = new(RBDVolumeSource)
if err := deepCopy_v1_RBDVolumeSource(*in.RBD, out.RBD, c); err != nil {
return err
}
} else {
out.RBD = nil
}
return nil
}
func deepCopy_runtime_RawExtension(in runtime.RawExtension, out *runtime.RawExtension, c *conversion.Cloner) error {
if in.RawJSON != nil {
out.RawJSON = make([]uint8, len(in.RawJSON))
for i := range in.RawJSON {
out.RawJSON[i] = in.RawJSON[i]
}
} else {
out.RawJSON = nil
}
return nil
}
func deepCopy_util_IntOrString(in util.IntOrString, out *util.IntOrString, c *conversion.Cloner) error {
out.Kind = in.Kind
out.IntVal = in.IntVal
out.StrVal = in.StrVal
return nil
}
func deepCopy_util_Time(in util.Time, out *util.Time, c *conversion.Cloner) error {
if newVal, err := c.DeepCopy(in.Time); err != nil {
return err
} else {
out.Time = newVal.(time.Time)
}
return nil
}
func init() {
err := api.Scheme.AddGeneratedDeepCopyFuncs(
deepCopy_resource_Quantity,
deepCopy_v1_APIVersion,
deepCopy_v1_AWSElasticBlockStoreVolumeSource,
deepCopy_v1_Binding,
deepCopy_v1_Capabilities,
deepCopy_v1_ComponentCondition,
deepCopy_v1_ComponentStatus,
deepCopy_v1_ComponentStatusList,
deepCopy_v1_Container,
deepCopy_v1_ContainerPort,
deepCopy_v1_ContainerState,
deepCopy_v1_ContainerStateRunning,
deepCopy_v1_ContainerStateTerminated,
deepCopy_v1_ContainerStateWaiting,
deepCopy_v1_ContainerStatus,
deepCopy_v1_Daemon,
deepCopy_v1_DaemonList,
deepCopy_v1_DaemonSpec,
deepCopy_v1_DaemonStatus,
deepCopy_v1_DeleteOptions,
deepCopy_v1_EmptyDirVolumeSource,
deepCopy_v1_EndpointAddress,
deepCopy_v1_EndpointPort,
deepCopy_v1_EndpointSubset,
deepCopy_v1_Endpoints,
deepCopy_v1_EndpointsList,
deepCopy_v1_EnvVar,
deepCopy_v1_EnvVarSource,
deepCopy_v1_Event,
deepCopy_v1_EventList,
deepCopy_v1_EventSource,
deepCopy_v1_ExecAction,
deepCopy_v1_GCEPersistentDiskVolumeSource,
deepCopy_v1_GitRepoVolumeSource,
deepCopy_v1_GlusterfsVolumeSource,
deepCopy_v1_HTTPGetAction,
deepCopy_v1_Handler,
deepCopy_v1_HostPathVolumeSource,
deepCopy_v1_ISCSIVolumeSource,
deepCopy_v1_Lifecycle,
deepCopy_v1_LimitRange,
deepCopy_v1_LimitRangeItem,
deepCopy_v1_LimitRangeList,
deepCopy_v1_LimitRangeSpec,
deepCopy_v1_List,
deepCopy_v1_ListMeta,
deepCopy_v1_ListOptions,
deepCopy_v1_LoadBalancerIngress,
deepCopy_v1_LoadBalancerStatus,
deepCopy_v1_LocalObjectReference,
deepCopy_v1_NFSVolumeSource,
deepCopy_v1_Namespace,
deepCopy_v1_NamespaceList,
deepCopy_v1_NamespaceSpec,
deepCopy_v1_NamespaceStatus,
deepCopy_v1_Node,
deepCopy_v1_NodeAddress,
deepCopy_v1_NodeCondition,
deepCopy_v1_NodeList,
deepCopy_v1_NodeSpec,
deepCopy_v1_NodeStatus,
deepCopy_v1_NodeSystemInfo,
deepCopy_v1_ObjectFieldSelector,
deepCopy_v1_ObjectMeta,
deepCopy_v1_ObjectReference,
deepCopy_v1_PersistentVolume,
deepCopy_v1_PersistentVolumeClaim,
deepCopy_v1_PersistentVolumeClaimList,
deepCopy_v1_PersistentVolumeClaimSpec,
deepCopy_v1_PersistentVolumeClaimStatus,
deepCopy_v1_PersistentVolumeClaimVolumeSource,
deepCopy_v1_PersistentVolumeList,
deepCopy_v1_PersistentVolumeSource,
deepCopy_v1_PersistentVolumeSpec,
deepCopy_v1_PersistentVolumeStatus,
deepCopy_v1_Pod,
deepCopy_v1_PodAttachOptions,
deepCopy_v1_PodCondition,
deepCopy_v1_PodExecOptions,
deepCopy_v1_PodList,
deepCopy_v1_PodLogOptions,
deepCopy_v1_PodProxyOptions,
deepCopy_v1_PodSpec,
deepCopy_v1_PodStatus,
deepCopy_v1_PodStatusResult,
deepCopy_v1_PodTemplate,
deepCopy_v1_PodTemplateList,
deepCopy_v1_PodTemplateSpec,
deepCopy_v1_Probe,
deepCopy_v1_RBDVolumeSource,
deepCopy_v1_RangeAllocation,
deepCopy_v1_ReplicationController,
deepCopy_v1_ReplicationControllerList,
deepCopy_v1_ReplicationControllerSpec,
deepCopy_v1_ReplicationControllerStatus,
deepCopy_v1_ResourceQuota,
deepCopy_v1_ResourceQuotaList,
deepCopy_v1_ResourceQuotaSpec,
deepCopy_v1_ResourceQuotaStatus,
deepCopy_v1_ResourceRequirements,
deepCopy_v1_SELinuxOptions,
deepCopy_v1_Secret,
deepCopy_v1_SecretList,
deepCopy_v1_SecretVolumeSource,
deepCopy_v1_SecurityContext,
deepCopy_v1_SerializedReference,
deepCopy_v1_Service,
deepCopy_v1_ServiceAccount,
deepCopy_v1_ServiceAccountList,
deepCopy_v1_ServiceList,
deepCopy_v1_ServicePort,
deepCopy_v1_ServiceSpec,
deepCopy_v1_ServiceStatus,
deepCopy_v1_Status,
deepCopy_v1_StatusCause,
deepCopy_v1_StatusDetails,
deepCopy_v1_TCPSocketAction,
deepCopy_v1_ThirdPartyResource,
deepCopy_v1_ThirdPartyResourceData,
deepCopy_v1_ThirdPartyResourceList,
deepCopy_v1_TypeMeta,
deepCopy_v1_Volume,
deepCopy_v1_VolumeMount,
deepCopy_v1_VolumeSource,
deepCopy_runtime_RawExtension,
deepCopy_util_IntOrString,
deepCopy_util_Time,
)
if err != nil {
// if one of the deep copy functions is malformed, detect it immediately.
panic(err)
}
}<|fim▁end|> | if err := deepCopy_v1_RBDVolumeSource(*in.RBD, out.RBD, c); err != nil {
return err
} |
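The generated deep-copy functions above all follow one nil-preserving pattern: a destination map, slice, or pointer is allocated only when the source field is non-nil, so a round-trip copy keeps the distinction between nil and empty collections. A minimal Python sketch of that pattern, for illustration only (the class and field names here are ours, not Kubernetes API types):

def deep_copy_resource_list(src):
    # Mirror the generated Go code: keep None (nil) as None instead of
    # replacing it with an empty mapping.
    if src is None:
        return None
    return dict(src)  # per-entry copy; values here are immutable strings

class ResourceQuotaStatus(object):
    def __init__(self, hard=None, used=None):
        self.hard = hard
        self.used = used

    def deep_copy(self):
        return ResourceQuotaStatus(
            hard=deep_copy_resource_list(self.hard),
            used=deep_copy_resource_list(self.used),
        )

status = ResourceQuotaStatus(hard={"cpu": "4"})
out = status.deep_copy()
assert out.hard == {"cpu": "4"} and out.used is None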
<|file_name|>math.rs<|end_file_name|><|fim▁begin|>// Copyright 2021 TiKV Project Authors. Licensed under Apache-2.0.
use std::sync::{
atomic::{AtomicU32, Ordering},
Mutex,
};
struct MovingAvgU32Inner {
buffer: Vec<u32>,
current_index: usize,
sum: u32,
}
pub struct MovingAvgU32 {
protected: Mutex<MovingAvgU32Inner>,
cached_avg: AtomicU32,
}
impl MovingAvgU32 {
pub fn new(size: usize) -> Self {
MovingAvgU32 {
protected: Mutex::new(MovingAvgU32Inner {
buffer: vec![0; size],
current_index: 0,
sum: 0,
}),
cached_avg: AtomicU32::new(0),
}
}
pub fn add(&self, sample: u32) -> (u32, u32) {
let mut inner = self.protected.lock().unwrap();
let current_index = (inner.current_index + 1) % inner.buffer.len();
inner.current_index = current_index;
let old_avg = inner.sum / inner.buffer.len() as u32;
inner.sum = inner.sum + sample - inner.buffer[current_index];
inner.buffer[current_index] = sample;
let new_avg = inner.sum / inner.buffer.len() as u32;
self.cached_avg.store(new_avg, Ordering::Relaxed);
(old_avg, new_avg)
}
pub fn fetch(&self) -> u32 {
self.cached_avg.load(Ordering::Relaxed)
}
pub fn clear(&self) {
let mut inner = self.protected.lock().unwrap();
inner.buffer.fill(0);
inner.current_index = 0;
inner.sum = 0;
self.cached_avg.store(0, Ordering::Relaxed);
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_monotonic_sequence() {
let avg = MovingAvgU32::new(5);
for i in (0..100).rev() {
avg.add(i);
if 100 - i >= 5 {
assert_eq!(avg.fetch(), i + 2);
} else {
assert_eq!(avg.fetch(), ((i + 99) * (100 - i) / 10));
}
}
avg.clear();
for i in 0..100 {
avg.add(i);
if i >= 4 {
assert_eq!(avg.fetch(), i - 2);<|fim▁hole|> }
#[test]
fn test_random_sequence() {
use rand::Rng;
let mut rng = rand::thread_rng();
let avg = MovingAvgU32::new(105);
let mut external_sum = 0;
for _ in 0..100 {
let n: u32 = rng.gen_range(0..u32::MAX / 100);
external_sum += n;
avg.add(n);
assert_eq!(avg.fetch(), external_sum / 105);
}
}
}<|fim▁end|> | } else {
assert_eq!(avg.fetch(), (i * (i + 1) / 10));
}
} |
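The bookkeeping in MovingAvgU32::add (advance the ring index, subtract the evicted sample from the running sum, add the new one) is compact enough to restate in a few lines of Python; this is an illustrative sketch of the same algorithm, not the TiKV API:

class MovingAvg(object):
    def __init__(self, size):
        self.buffer = [0] * size
        self.index = 0
        self.total = 0

    def add(self, sample):
        # The slot after the current index holds the oldest sample;
        # evict it from the running sum and store the new sample there.
        self.index = (self.index + 1) % len(self.buffer)
        old_avg = self.total // len(self.buffer)
        self.total += sample - self.buffer[self.index]
        self.buffer[self.index] = sample
        return old_avg, self.total // len(self.buffer)

avg = MovingAvg(5)
for i in range(1, 6):
    avg.add(i)
assert avg.add(6) == (3, 4)  # window becomes 2..6, whose mean is 4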
<|file_name|>weighted.rs<|end_file_name|><|fim▁begin|>//! "Weighted" graph types
use std;
use std::ops::{Deref,DerefMut};
use smallvec::SmallVec;<|fim▁hole|>// Edge
/// Weighted edge type.
#[derive(Debug)]
pub struct Edge<T, I: Id> {
source: I,
target: I,
data: T
}
impl<T, I: Id> Clone for Edge<T, I>
where T: Clone
{
fn clone(&self) -> Self {
Edge{source: self.source, target: self.target, data: self.data.clone()}
}
}
impl<T, I: Id> interface::Edge for Edge<T, I> {
type NodeId = I;
#[inline]
fn endpoints(&self) -> (Self::NodeId, Self::NodeId) {
(self.source, self.target)
}
}
impl<T, I: Id> interface::DirectedEdge for Edge<T, I> {
#[inline]
fn source(&self) -> I { self.source }
#[inline]
fn target(&self) -> I { self.target }
}
impl<T, I: Id> interface::DirectedEdgeMut for Edge<T, I> {
fn rev(&mut self) {
::std::mem::swap(&mut self.source, &mut self.target);
}
}
impl<T, I: Id> Edge<T, I> {
/// Create an edge with the given source & target node indices and
/// weight data.
pub fn new(source: I, target: I, data: T) -> Self {
Edge{source: source, target: target, data: data}
}
/// Retrieve a reference to the edge's data (weight)
pub fn data(&self) -> &T {
&self.data
}
}
impl<T, I: Id> Deref for Edge<T, I> {
type Target = T;
fn deref(&self) -> &Self::Target {
&self.data
}
}
impl<T, I: Id> DerefMut for Edge<T, I> {
fn deref_mut(&mut self) -> &mut <Self as Deref>::Target {
&mut self.data
}
}
impl<T, I: Id, I2: Copy> From<(I2, I2)> for Edge<T, I>
where T: Default, I: From<I2> {
fn from(u: (I2, I2)) -> Self {
Self::new(I::from(u.0), I::from(u.1), Default::default())
}
}
impl<T, I: Id, I2: Copy> From<(I2, I2, T)> for Edge<T, I>
where I: From<I2> {
fn from(u: (I2, I2, T)) -> Self {
Self::new(I::from(u.0), I::from(u.1), u.2)
}
}
impl<'a, T, I: Id, I2: Copy> From<&'a (I2, I2)> for Edge<T, I>
where T: Default, I: From<I2> {
fn from(u: &'a (I2, I2)) -> Self {
Self::new(I::from(u.0), I::from(u.1), Default::default())
}
}
impl<'a, T, I: Id, I2: Copy> From<&'a (I2, I2, T)> for Edge<T, I>
where T: Clone, I: From<I2> {
fn from(u: &'a (I2, I2, T)) -> Self {
Self::new(I::from(u.0), I::from(u.1), u.2.clone())
}
}
// ----------------------------------------------------------------
// Node
/// Weighted node implementation.
///
/// A reference to the node's weight data can be obtained using the type's
/// `Deref` implementation.
///
/// ```rust
/// use cripes_core::util::graph::{EdgeId,WeightedNode};
///
/// # fn main() {
/// let n = WeightedNode::<_, EdgeId<u8>>::new(32);
/// assert_eq!(32, *n);
/// # }
/// ```
#[derive(Debug)]
pub struct Node<T, I: Id> {
incoming_edges: SmallVec<[I; 8]>,
outgoing_edges: SmallVec<[I; 8]>,
data: T
}
impl<T, I: Id> Node<T, I> {
/// Instantiate a node with the given data.
pub fn new(data: T) -> Self {
Node{incoming_edges: SmallVec::new(), outgoing_edges: SmallVec::new(), data: data}
}
/// Retrieve a reference to the nodes's data (weight)
pub fn data(&self) -> &T {
&self.data
}
}
impl<T, I: Id> Clone for Node<T, I>
where T: Clone
{
fn clone(&self) -> Self {
Node{incoming_edges: self.incoming_edges.clone(),
outgoing_edges: self.outgoing_edges.clone(),
data: self.data.clone()}
}
}
impl<T, I: Id> Deref for Node<T, I> {
type Target = T;
#[inline]
fn deref(&self) -> &Self::Target {
&self.data
}
}
impl<T, I: Id> DerefMut for Node<T, I> {
#[inline]
fn deref_mut(&mut self) -> &mut <Self as Deref>::Target {
&mut self.data
}
}
impl<T, I: Id> From<T> for Node<T, I> {
#[inline]
fn from(data: T) -> Self {
Self::new(data)
}
}
impl<T, I: Id> interface::Node for Node<T, I> {
type EdgeId = I;
fn edges(&self) -> std::iter::Chain<std::slice::Iter<Self::EdgeId>,std::slice::Iter<Self::EdgeId>> {
self.incoming_edges.iter().chain(self.outgoing_edges.iter())
}
}
impl<T, I: Id> interface::DirectedNode for Node<T, I> {
impl_basic_node!(I);
}
impl<T, I: Id> interface::DirectedNodeMut for Node<T, I> {
impl_basic_node_mut!(I);
}<|fim▁end|> |
use super::interface::{self, Id};
// ---------------------------------------------------------------- |
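The layout used above translates directly: an edge stores its endpoints as plain node ids plus an arbitrary payload, and a node keeps separate incoming and outgoing edge-id lists that edges() walks in one pass. A rough Python analogue (names are ours, not the crate's API):

class Edge(object):
    def __init__(self, source, target, data):
        self.source, self.target, self.data = source, target, data

    def rev(self):
        # Same idea as DirectedEdgeMut::rev: swap endpoints in place.
        self.source, self.target = self.target, self.source

class Node(object):
    def __init__(self, data):
        self.incoming_edges = []
        self.outgoing_edges = []
        self.data = data

    def edges(self):
        # interface::Node::edges chains both id lists together.
        return self.incoming_edges + self.outgoing_edges

e = Edge(0, 1, 2.5)
e.rev()
assert (e.source, e.target) == (1, 0)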
<|file_name|>triggerPhotons.py<|end_file_name|><|fim▁begin|>from matplotlib import rcParams, rc
import numpy as np
import sys
from fitFunctions import gaussian
import scipy.interpolate
import scipy.signal
from baselineIIR import IirFilter
import pickle
import smooth
# common setup for matplotlib
params = {'savefig.dpi': 300, # save figures to 300 dpi
'axes.labelsize': 14,
'text.fontsize': 14,
'legend.fontsize': 14,
'xtick.labelsize': 14,
'ytick.major.pad': 6,
'xtick.major.pad': 6,
'ytick.labelsize': 14}
# use of Sans Serif also in math mode
rc('text.latex', preamble='\usepackage{sfmath}')
rcParams.update(params)
import matplotlib.pyplot as plt
import numpy as np
import os
import struct
def calcThreshold(phase,nSigma=2.5,nSamples=5000):
'''
Calculate the threshold (in phase units) corresponding
    to a sigma threshold (note: this function looks a bit odd; it appears
    to define sigma as a one-sided lower 95% threshold)
'''
n,bins= np.histogram(phase[:nSamples],bins=100)
n = np.array(n,dtype='float32')/np.sum(n)
tot = np.zeros(len(bins))
for i in xrange(len(bins)):
tot[i] = np.sum(n[:i])
med = bins[np.abs(tot-0.5).argmin()]
thresh = bins[np.abs(tot-0.05).argmin()]
threshold = med-nSigma*abs(med-thresh)
return threshold
def sigmaTrigger(data,nSigmaTrig=7.,deadtime=10):
'''
Find photon pulses using a sigma trigger
INPUTS:
data - phase timestream (filtered or raw)
nSigmaTrig - threshold for photon detection, in units sigma from baseline
deadtime - trigger deadtime in ticks (us)
OUTPUTS:
Dictionary with keys:
peakIndices - indices of detected pulses in phase stream
peakHeights - heights of detected pulses (in same units as input data)
'''
data = np.array(data)
med = np.median(data)
trigMask = data > (med + np.std(data)*nSigmaTrig)
if np.sum(trigMask) > 0:
peakIndices = np.where(trigMask)[0]
i = 0
p = peakIndices[i]
while p < peakIndices[-1]:
peakIndices = peakIndices[np.logical_or(peakIndices-p > deadtime , peakIndices-p <= 0)]#apply deadtime
i+=1
if i < len(peakIndices):
p = peakIndices[i]
else:
p = peakIndices[-1]
else:
return {'peakIndices':np.array([]),'peakHeights':np.array([])}
peakHeights = data[peakIndices]
return {'peakIndices':peakIndices,'peakHeights':peakHeights}
def detectPulses(data,threshold=None,nSigmaThreshold=3.,deadtime=10,nNegDerivChecks=10,negDerivLenience=1,bNegativePulses = True):
#deadtime in ticks (us)
if bNegativePulses:
data = np.array(data)
else:
data = -np.array(data) #flip to negative pulses
if threshold is None:
threshold = np.median(data)-nSigmaThreshold*np.std(data)
derivative = np.diff(data)
peakHeights = []
t = 0
negDeriv = derivative <= 0
posDeriv = np.logical_not(negDeriv)
triggerBooleans = data[nNegDerivChecks:-2] < threshold
negDerivChecksSum = np.zeros(len(negDeriv[0:-nNegDerivChecks-1]))
for i in range(nNegDerivChecks):
negDerivChecksSum += negDeriv[i:i-nNegDerivChecks-1]
peakCondition0 = negDerivChecksSum >= nNegDerivChecks-negDerivLenience
peakCondition1 = np.logical_and(posDeriv[nNegDerivChecks:-1],posDeriv[nNegDerivChecks+1:])
peakCondition01 = np.logical_and(peakCondition0,peakCondition1)
peakBooleans = np.logical_and(triggerBooleans,peakCondition01)
try:
peakIndices = np.where(peakBooleans)[0]+nNegDerivChecks
i = 0
p = peakIndices[i]
while p < peakIndices[-1]:
peakIndices = peakIndices[np.logical_or(peakIndices-p > deadtime , peakIndices-p <= 0)]#apply deadtime
i+=1
if i < len(peakIndices):
p = peakIndices[i]
else:
p = peakIndices[-1]
except IndexError:
return {'peakIndices':np.array([]),'peakHeights':np.array([])}
if bNegativePulses:
peakHeights = data[peakIndices]
else:
peakHeights = -data[peakIndices] #flip back to positive sign
return {'peakIndices':peakIndices,'peakHeights':peakHeights}
def optimizeTrigCond(data, nPeaks, sigmaThreshList=[3.], nNegDerivChecksList=[10], negDerivLenienceList=[1], bNegativePulses=True):
minSigma = 1000
optSigmaThresh = 0
optNNegDerivChecks = 0
optNegDerivLenience = 0
optPeakDict = {'peakIndices':np.array([]), 'peakHeights':np.array([])}
for sigmaThresh in sigmaThreshList:
for nNegDerivChecks in nNegDerivChecksList:
for negDerivLenience in negDerivLenienceList:
peakDict = detectPulses(data, nSigmaThreshold=sigmaThresh, nNegDerivChecks=nNegDerivChecks, negDerivLenience=negDerivLenience, bNegativePulses=bNegativePulses)
if(len(peakDict['peakIndices'])>=nPeaks):
sigma = np.std(peakDict['peakHeights'])
if(sigma<minSigma):
minSigma = sigma
optSigmaThresh = sigmaThresh
optNNegDerivChecks = nNegDerivChecks
optNegDerivLenience = negDerivLenience
optPeakDict = peakDict
return optSigmaThresh, optNNegDerivChecks, optNegDerivLenience, minSigma, optPeakDict
def findSigmaThresh(data, initSigmaThresh=2., tailSlack=0., isPlot=False):
'''
Finds the optimal photon trigger threshold by cutting out the noise tail
in the pulse height histogram.
INPUTS:
data - filtered phase timestream data (positive pulses)<|fim▁hole|> isPlot - make peak height histograms if true
OUTPUTS:
threshold - trigger threshold in same units as data
sigmaThresh - trigger threshold in units sigma from median
'''
peakdict = sigmaTrigger(data, nSigmaTrig=initSigmaThresh)
peaksHist, peaksHistBins = np.histogram(peakdict['peakHeights'], bins='auto')
if(isPlot):
plt.plot(peaksHistBins[:-1], peaksHist)
plt.title('Unsmoothed Plot')
plt.show()
print 'peaksHistLen:', len(peaksHist)
peaksHist = smooth.smooth(peaksHist,(len(peaksHistBins)/20)*2+1)
print 'peaksHistSmoothLen:', len(peaksHist)
if(isPlot):
plt.plot(peaksHistBins[0:len(peaksHist)], peaksHist)
plt.title('smoothed plot')
plt.show()
minima=np.ones(len(peaksHist)) #keeps track of minima locations; element is 1 if minimum exists at that index
minimaCount = 1
#while there are multiple local minima, look for the deepest one
while(np.count_nonzero(minima)>1):
minima = np.logical_and(minima, np.logical_and((peaksHist<=np.roll(peaksHist,minimaCount)),(peaksHist<=np.roll(peaksHist,-minimaCount))))
#print 'minima array:', minima
minima[minimaCount-1]=0
minima[len(minima)-minimaCount]=0 #get rid of boundary effects
minimaCount += 1
thresholdInd = np.where(minima)[0][0]
threshold = peaksHistBins[thresholdInd]-tailSlack
sigmaThresh = (threshold-np.median(data))/np.std(data)
return threshold, sigmaThresh<|fim▁end|> | initSigmaThresh - sigma threshold to use when constructing initial
pulse height histogram
tailSlack - amount (in same units as data) to relax trigger threshold |
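A rough usage sketch for the triggers above (synthetic data; every parameter value below is invented for illustration): generate Gaussian baseline noise, inject a few large pulses, and check that sigmaTrigger recovers them.

import numpy as np

np.random.seed(0)
phase = np.random.normal(0., 1., 100000)   # baseline noise, sigma ~ 1
for start in (20000, 50000, 80000):        # three fake photon pulses
    phase[start:start + 30] += 15.

peakDict = sigmaTrigger(phase, nSigmaTrig=7., deadtime=10)
assert len(peakDict['peakIndices']) >= 3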
<|file_name|>trigger.py<|end_file_name|><|fim▁begin|>from abc import ABCMeta,abstractmethod
from my_hue import *
# Would dynamically choose a trigger based on trigger type
def trigger_factory(trigger_type):
return None
class Trigger(object):
__metaclass__ = ABCMeta
def __init__(self):
self.action()
@abstractmethod
def action(self):
pass
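# One plausible way to fill in trigger_factory above (our assumption, not
# part of the original): a registry keyed by trigger type. The registry is
# built inside the function, so it resolves the concrete classes defined
# further down in this module at call time.
def make_trigger(trigger_type, *args, **kwargs):
    registry = {'iclicker': IClickerTrigger}
    cls = registry.get(trigger_type)
    return cls(*args, **kwargs) if cls else None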
class IClickerTrigger(object):
def __init__(self, clicker_id, response_info, time_of_trigger, sequence_number):
super(IClickerTrigger, self).__init__()
self.clicker_id = clicker_id
self.response_info = response_info
self.time_of_trigger = time_of_trigger
self.sequence_number = sequence_number
def action(self):
print self.response_info<|fim▁hole|> pass<|fim▁end|> | button = 'a'
if button == 'a': |
<|file_name|>UserItem.java<|end_file_name|><|fim▁begin|>package org.iatoki.judgels.jerahmeel.user.item;
public final class UserItem {
private final String userJid;
private final String itemJid;
private final UserItemStatus status;<|fim▁hole|> public UserItem(String userJid, String itemJid, UserItemStatus status) {
this.userJid = userJid;
this.itemJid = itemJid;
this.status = status;
}
public String getUserJid() {
return userJid;
}
public String getItemJid() {
return itemJid;
}
public UserItemStatus getStatus() {
return status;
}
}<|fim▁end|> | |
<|file_name|>os-spawn-example-2.py<|end_file_name|><|fim▁begin|>'''
The spawn function can also be used to run a program in the background. The example below adds an optional mode parameter to the run function;
when set to os.P_NOWAIT, the script does not wait for the child program to finish; with the default os.P_WAIT, spawn waits for the child process to exit.
Other flag constants include os.P_OVERLAY, which makes spawn behave like exec, and os.P_DETACH, which runs the child process in the background, detached from the current console and keyboard focus.
'''
import os<|fim▁hole|> # find executable
mode = kw.get("mode", os.P_WAIT)
for path in string.split(os.environ["PATH"], os.pathsep):
file = os.path.join(path, program) + ".exe"
try:
return os.spawnv(mode, file, (file,) + args)
except os.error:
pass
raise os.error, "cannot find executable"
run("python", "hello.py", mode=os.P_NOWAIT)
print "goodbye"<|fim▁end|> | import string
def run(program, *args, **kw): |
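On modern Python the same foreground/background split is usually written with the subprocess module rather than os.spawnv; a rough equivalent of run (our sketch, not part of the original example):

import subprocess

def run_subprocess(program, *args, **kw):
    wait = kw.get("wait", True)   # plays the role of os.P_WAIT vs os.P_NOWAIT
    proc = subprocess.Popen([program] + list(args))
    if wait:
        return proc.wait()        # block until the child exits
    return proc.pid               # leave it running in the background

run_subprocess("python", "hello.py", wait=False)
print("goodbye")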
<|file_name|>ec2_ami_find.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2
#pylint: skip-file
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: ec2_ami_find
version_added: 2.0
short_description: Searches for AMIs to obtain the AMI ID and other information
description:
- Returns list of matching AMIs with AMI ID, along with other useful information
- Can search AMIs with different owners
- Can search by matching tag(s), by AMI name and/or other criteria
- Results can be sorted and sliced
author: Tom Bamford
notes:
- This module is not backwards compatible with the previous version of the ec2_search_ami module which worked only for Ubuntu AMIs listed on cloud-images.ubuntu.com.
- See the example below for a suggestion of how to search by distro/release.
options:
region:
description:
- The AWS region to use.
required: true
aliases: [ 'aws_region', 'ec2_region' ]
owner:
description:
- Search AMIs owned by the specified owner
- Can specify an AWS account ID, or one of the special IDs 'self', 'amazon' or 'aws-marketplace'
- If not specified, all EC2 AMIs in the specified region will be searched.
- You can include wildcards in many of the search options. An asterisk (*) matches zero or more characters, and a question mark (?) matches exactly one character. You can escape special characters using a backslash (\) before the character. For example, a value of \*amazon\?\\ searches for the literal string *amazon?\.
required: false
default: null
ami_id:
description:
- An AMI ID to match.
default: null
required: false
ami_tags:
description:
- A hash/dictionary of tags to match for the AMI.
default: null
required: false
architecture:
description:
- An architecture type to match (e.g. x86_64).
default: null
required: false
hypervisor:
description:
- A hypervisor type to match (e.g. xen).
default: null
required: false
is_public:
description:
- Whether or not the image(s) are public.
choices: ['yes', 'no']
default: null
required: false
name:
description:
- An AMI name to match.
default: null
required: false
platform:
description:
- Platform type to match.
default: null
required: false
sort:
description:
- Optional attribute which with to sort the results.
- If specifying 'tag', the 'tag_name' parameter is required.
choices: ['name', 'description', 'tag']
default: null
required: false
sort_tag:
description:
- Tag name with which to sort results.
- Required when specifying 'sort=tag'.
default: null
required: false
sort_order:
description:
- Order in which to sort results.
- Only used when the 'sort' parameter is specified.
choices: ['ascending', 'descending']
default: 'ascending'
required: false
sort_start:
description:
- Which result to start with (when sorting).
- Corresponds to Python slice notation.
default: null
required: false
sort_end:
description:
- Which result to end with (when sorting).
- Corresponds to Python slice notation.
default: null
required: false
state:
description:
- AMI state to match.
default: 'available'
required: false
virtualization_type:
description:
- Virtualization type to match (e.g. hvm).
default: null
required: false
no_result_action:
description:
- What to do when no results are found.
- "'success' reports success and returns an empty array"
- "'fail' causes the module to report failure"
choices: ['success', 'fail']
default: 'success'
required: false
requirements:
- boto
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Search for the AMI tagged "project:website"
- ec2_ami_find:
owner: self
ami_tags:
project: website
no_result_action: fail
register: ami_find
# Search for the latest Ubuntu 14.04 AMI
- ec2_ami_find:
name: "ubuntu/images/ebs/ubuntu-trusty-14.04-amd64-server-*"
owner: 099720109477
sort: name
sort_order: descending
sort_end: 1
register: ami_find
# Launch an EC2 instance
- ec2:
image: "{{ ami_find.results[0].ami_id }}"
instance_type: m4.medium
key_name: mykey
wait: yes
'''
try:
import boto.ec2
HAS_BOTO=True
except ImportError:
HAS_BOTO=False
import json
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
region = dict(required=True,
aliases = ['aws_region', 'ec2_region']),
owner = dict(required=False, default=None),
ami_id = dict(required=False),
ami_tags = dict(required=False, type='dict',
aliases = ['search_tags', 'image_tags']),
architecture = dict(required=False),
hypervisor = dict(required=False),
is_public = dict(required=False),
name = dict(required=False),
platform = dict(required=False),
sort = dict(required=False, default=None,
choices=['name', 'description', 'tag']),
sort_tag = dict(required=False),
sort_order = dict(required=False, default='ascending',
choices=['ascending', 'descending']),
sort_start = dict(required=False),
sort_end = dict(required=False),
state = dict(required=False, default='available'),
virtualization_type = dict(required=False),
no_result_action = dict(required=False, default='success',
choices = ['success', 'fail']),
)
)
module = AnsibleModule(
argument_spec=argument_spec,
)
if not HAS_BOTO:<|fim▁hole|> ami_id = module.params.get('ami_id')
ami_tags = module.params.get('ami_tags')
architecture = module.params.get('architecture')
hypervisor = module.params.get('hypervisor')
is_public = module.params.get('is_public')
name = module.params.get('name')
owner = module.params.get('owner')
platform = module.params.get('platform')
sort = module.params.get('sort')
sort_tag = module.params.get('sort_tag')
sort_order = module.params.get('sort_order')
sort_start = module.params.get('sort_start')
sort_end = module.params.get('sort_end')
state = module.params.get('state')
virtualization_type = module.params.get('virtualization_type')
no_result_action = module.params.get('no_result_action')
filter = {'state': state}
if ami_id:
filter['image_id'] = ami_id
if ami_tags:
for tag in ami_tags:
filter['tag:'+tag] = ami_tags[tag]
if architecture:
filter['architecture'] = architecture
if hypervisor:
filter['hypervisor'] = hypervisor
if is_public:
filter['is_public'] = is_public
if name:
filter['name'] = name
if platform:
filter['platform'] = platform
if virtualization_type:
filter['virtualization_type'] = virtualization_type
ec2 = ec2_connect(module)
images_result = ec2.get_all_images(owners=owner, filters=filter)
if no_result_action == 'fail' and len(images_result) == 0:
module.fail_json(msg="No AMIs matched the attributes: %s" % json.dumps(filter))
results = []
for image in images_result:
data = {
'ami_id': image.id,
'architecture': image.architecture,
'description': image.description,
'is_public': image.is_public,
'name': image.name,
'owner_id': image.owner_id,
'platform': image.platform,
'root_device_name': image.root_device_name,
'root_device_type': image.root_device_type,
'state': image.state,
'tags': image.tags,
'virtualization_type': image.virtualization_type,
}
if image.kernel_id:
data['kernel_id'] = image.kernel_id
if image.ramdisk_id:
data['ramdisk_id'] = image.ramdisk_id
results.append(data)
if sort == 'tag':
if not sort_tag:
module.fail_json(msg="'sort_tag' option must be given with 'sort=tag'")
results.sort(key=lambda e: e['tags'][sort_tag], reverse=(sort_order=='descending'))
elif sort:
results.sort(key=lambda e: e[sort], reverse=(sort_order=='descending'))
try:
if sort and sort_start and sort_end:
results = results[int(sort_start):int(sort_end)]
elif sort and sort_start:
results = results[int(sort_start):]
elif sort and sort_end:
results = results[:int(sort_end)]
except TypeError:
module.fail_json(msg="Please supply numeric values for sort_start and/or sort_end")
module.exit_json(results=results)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
main()<|fim▁end|> | module.fail_json(msg='boto required for this module, install via pip or your package manager')
|
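The sort_start/sort_end options above map directly onto Python slice notation applied to the sorted results; with hypothetical values, the descending name sort from the EXAMPLES section behaves like:

results = [{'name': 'ami-a'}, {'name': 'ami-b'}, {'name': 'ami-c'}]
results.sort(key=lambda e: e['name'], reverse=True)  # sort_order: descending
newest = results[:1]                                 # sort_end: 1
assert newest[0]['name'] == 'ami-c'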
<|file_name|>crazy.py<|end_file_name|><|fim▁begin|>import random
# CoRe
def turn(board, symbol):
while 1:
x = random.choice(range(8))
y = random.choice(range(8))<|fim▁hole|> if getboard(board,x,y) == '#': return (x,y)<|fim▁end|> | |
<|file_name|>ProspectiveLineItem.java<|end_file_name|><|fim▁begin|>// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* ProspectiveLineItem.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.admanager.axis.v202102;
/**
* Represents a prospective line item to be forecasted.
*/
public class ProspectiveLineItem implements java.io.Serializable {
/* The target of the forecast. If {@link LineItem#id} is null
* or no line item exists with that ID,
* then a forecast is computed for the subject, predicting
* what would happen if it were added
* to the network. If a line item already exists with
* {@link LineItem#id}, the forecast is
* computed for the subject, predicting what would happen
* if the existing line item's settings
* were modified to match the subject. */
private com.google.api.ads.admanager.axis.v202102.LineItem lineItem;
/* The target of the forecast if this prospective line item is
* a proposal line item.
*
* <p>If {@link ProposalLineItem#id} is null or no proposal
* line item exists with that ID,
* then a forecast is computed for the subject, predicting
* what would happen if it were added
* to the network. If a proposal line item already exists
* with {@link ProposalLineItem#id},
* the forecast is computed for the subject, predicting
* what would happen if the existing proposal
* line item's settings were modified to match the subject.
*
* <p>A proposal line item can optionally correspond
* to an order {@link LineItem}, in which case,
* by forecasting a proposal line item, the corresponding
* line item is implicitly ignored in the
* forecasting.
*
* <p>Either {@link #lineItem} or {@link #proposalLineItem}
* should be specified but not both. */
private com.google.api.ads.admanager.axis.v202102.ProposalLineItem proposalLineItem;
/* When set, the line item is assumed to be from this advertiser,
* and unified blocking rules will
* apply accordingly. If absent, line items without an
* existing order won't be subject to unified
* blocking rules. */
private java.lang.Long advertiserId;
public ProspectiveLineItem() {
}
public ProspectiveLineItem(
com.google.api.ads.admanager.axis.v202102.LineItem lineItem,
com.google.api.ads.admanager.axis.v202102.ProposalLineItem proposalLineItem,
java.lang.Long advertiserId) {
this.lineItem = lineItem;
this.proposalLineItem = proposalLineItem;
this.advertiserId = advertiserId;
}
@Override
public String toString() {
return com.google.common.base.MoreObjects.toStringHelper(this.getClass())
.omitNullValues()
.add("advertiserId", getAdvertiserId())
.add("lineItem", getLineItem())
.add("proposalLineItem", getProposalLineItem())
.toString();
}
/**
* Gets the lineItem value for this ProspectiveLineItem.
*
* @return lineItem * The target of the forecast. If {@link LineItem#id} is null
* or no line item exists with that ID,
* then a forecast is computed for the subject, predicting
* what would happen if it were added
* to the network. If a line item already exists with
* {@link LineItem#id}, the forecast is<|fim▁hole|> * if the existing line item's settings
* were modified to match the subject.
*/
public com.google.api.ads.admanager.axis.v202102.LineItem getLineItem() {
return lineItem;
}
/**
* Sets the lineItem value for this ProspectiveLineItem.
*
* @param lineItem * The target of the forecast. If {@link LineItem#id} is null
* or no line item exists with that ID,
* then a forecast is computed for the subject, predicting
* what would happen if it were added
* to the network. If a line item already exists with
* {@link LineItem#id}, the forecast is
* computed for the subject, predicting what would happen
* if the existing line item's settings
* were modified to match the subject.
*/
public void setLineItem(com.google.api.ads.admanager.axis.v202102.LineItem lineItem) {
this.lineItem = lineItem;
}
/**
* Gets the proposalLineItem value for this ProspectiveLineItem.
*
* @return proposalLineItem * The target of the forecast if this prospective line item is
* a proposal line item.
*
* <p>If {@link ProposalLineItem#id} is null or no proposal
* line item exists with that ID,
* then a forecast is computed for the subject, predicting
* what would happen if it were added
* to the network. If a proposal line item already exists
* with {@link ProposalLineItem#id},
* the forecast is computed for the subject, predicting
* what would happen if the existing proposal
* line item's settings were modified to match the subject.
*
* <p>A proposal line item can optionally correspond
* to an order {@link LineItem}, in which case,
* by forecasting a proposal line item, the corresponding
* line item is implicitly ignored in the
* forecasting.
*
* <p>Either {@link #lineItem} or {@link #proposalLineItem}
* should be specified but not both.
*/
public com.google.api.ads.admanager.axis.v202102.ProposalLineItem getProposalLineItem() {
return proposalLineItem;
}
/**
* Sets the proposalLineItem value for this ProspectiveLineItem.
*
* @param proposalLineItem * The target of the forecast if this prospective line item is
* a proposal line item.
*
* <p>If {@link ProposalLineItem#id} is null or no proposal
* line item exists with that ID,
* then a forecast is computed for the subject, predicting
* what would happen if it were added
* to the network. If a proposal line item already exists
* with {@link ProposalLineItem#id},
* the forecast is computed for the subject, predicting
* what would happen if the existing proposal
* line item's settings were modified to match the subject.
*
* <p>A proposal line item can optionally correspond
* to an order {@link LineItem}, in which case,
* by forecasting a proposal line item, the corresponding
* line item is implicitly ignored in the
* forecasting.
*
* <p>Either {@link #lineItem} or {@link #proposalLineItem}
* should be specified but not both.
*/
public void setProposalLineItem(com.google.api.ads.admanager.axis.v202102.ProposalLineItem proposalLineItem) {
this.proposalLineItem = proposalLineItem;
}
/**
* Gets the advertiserId value for this ProspectiveLineItem.
*
* @return advertiserId * When set, the line item is assumed to be from this advertiser,
* and unified blocking rules will
* apply accordingly. If absent, line items without an
* existing order won't be subject to unified
* blocking rules.
*/
public java.lang.Long getAdvertiserId() {
return advertiserId;
}
/**
* Sets the advertiserId value for this ProspectiveLineItem.
*
* @param advertiserId * When set, the line item is assumed to be from this advertiser,
* and unified blocking rules will
* apply accordingly. If absent, line items without an
* existing order won't be subject to unified
* blocking rules.
*/
public void setAdvertiserId(java.lang.Long advertiserId) {
this.advertiserId = advertiserId;
}
private java.lang.Object __equalsCalc = null;
public synchronized boolean equals(java.lang.Object obj) {
if (!(obj instanceof ProspectiveLineItem)) return false;
ProspectiveLineItem other = (ProspectiveLineItem) obj;
if (obj == null) return false;
if (this == obj) return true;
if (__equalsCalc != null) {
return (__equalsCalc == obj);
}
__equalsCalc = obj;
boolean _equals;
_equals = true &&
((this.lineItem==null && other.getLineItem()==null) ||
(this.lineItem!=null &&
this.lineItem.equals(other.getLineItem()))) &&
((this.proposalLineItem==null && other.getProposalLineItem()==null) ||
(this.proposalLineItem!=null &&
this.proposalLineItem.equals(other.getProposalLineItem()))) &&
((this.advertiserId==null && other.getAdvertiserId()==null) ||
(this.advertiserId!=null &&
this.advertiserId.equals(other.getAdvertiserId())));
__equalsCalc = null;
return _equals;
}
private boolean __hashCodeCalc = false;
public synchronized int hashCode() {
if (__hashCodeCalc) {
return 0;
}
__hashCodeCalc = true;
int _hashCode = 1;
if (getLineItem() != null) {
_hashCode += getLineItem().hashCode();
}
if (getProposalLineItem() != null) {
_hashCode += getProposalLineItem().hashCode();
}
if (getAdvertiserId() != null) {
_hashCode += getAdvertiserId().hashCode();
}
__hashCodeCalc = false;
return _hashCode;
}
// Type metadata
private static org.apache.axis.description.TypeDesc typeDesc =
new org.apache.axis.description.TypeDesc(ProspectiveLineItem.class, true);
static {
typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202102", "ProspectiveLineItem"));
org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
elemField.setFieldName("lineItem");
elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202102", "lineItem"));
elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202102", "LineItem"));
elemField.setMinOccurs(0);
elemField.setNillable(false);
typeDesc.addFieldDesc(elemField);
elemField = new org.apache.axis.description.ElementDesc();
elemField.setFieldName("proposalLineItem");
elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202102", "proposalLineItem"));
elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202102", "ProposalLineItem"));
elemField.setMinOccurs(0);
elemField.setNillable(false);
typeDesc.addFieldDesc(elemField);
elemField = new org.apache.axis.description.ElementDesc();
elemField.setFieldName("advertiserId");
elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202102", "advertiserId"));
elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long"));
elemField.setMinOccurs(0);
elemField.setNillable(false);
typeDesc.addFieldDesc(elemField);
}
/**
* Return type metadata object
*/
public static org.apache.axis.description.TypeDesc getTypeDesc() {
return typeDesc;
}
/**
* Get Custom Serializer
*/
public static org.apache.axis.encoding.Serializer getSerializer(
java.lang.String mechType,
java.lang.Class _javaType,
javax.xml.namespace.QName _xmlType) {
return
new org.apache.axis.encoding.ser.BeanSerializer(
_javaType, _xmlType, typeDesc);
}
/**
* Get Custom Deserializer
*/
public static org.apache.axis.encoding.Deserializer getDeserializer(
java.lang.String mechType,
java.lang.Class _javaType,
javax.xml.namespace.QName _xmlType) {
return
new org.apache.axis.encoding.ser.BeanDeserializer(
_javaType, _xmlType, typeDesc);
}
}<|fim▁end|> | * computed for the subject, predicting what would happen |
<|file_name|>transport-negotiation-request.component.ts<|end_file_name|><|fim▁begin|>/*
* Copyright 2020
* SRDC - Software Research & Development Consultancy; Ankara; Turkey
In collaboration with
* SRFG - Salzburg Research Forschungsgesellschaft mbH; Salzburg; Austria
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import { Component, Input, OnInit} from '@angular/core';
import { Location } from "@angular/common";
import { Router } from "@angular/router";
import { CookieService } from "ng2-cookies";
import { BPDataService } from "../bp-data-service";
import { CallStatus } from "../../../common/call-status";
import { RequestForQuotation } from "../../../catalogue/model/publish/request-for-quotation";
import { INCOTERMS, PAYMENT_MEANS, CURRENCIES } from "../../../catalogue/model/constants";
import { UBLModelUtils } from "../../../catalogue/model/ubl-model-utils";
import { copy } from "../../../common/utils";
import { PaymentTermsWrapper } from "../payment-terms-wrapper";
import { UserService } from "../../../user-mgmt/user.service";
import { CustomerParty } from "../../../catalogue/model/publish/customer-party";
import { SupplierParty } from "../../../catalogue/model/publish/supplier-party";
import { BPEService } from "../../bpe.service";
import { ThreadEventMetadata } from '../../../catalogue/model/publish/thread-event-metadata';
import { DiscountPriceWrapper } from "../../../common/discount-price-wrapper";
import { Text } from '../../../catalogue/model/publish/text';
import { TranslateService } from '@ngx-translate/core';
import { DocumentService } from "../document-service";
import {ValidationService} from '../../../common/validation/validators';
import {FormGroup} from '@angular/forms';
@Component({
selector: "transport-negotiation-request",
templateUrl: "./transport-negotiation-request.component.html"
})
export class TransportNegotiationRequestComponent implements OnInit {
rfq: RequestForQuotation;
selectedTab: string = "OVERVIEW";
rfqPrice: DiscountPriceWrapper;
rfqPaymentTerms: PaymentTermsWrapper;
updatingProcess: boolean = false;
callStatus: CallStatus = new CallStatus();
INCOTERMS: string[] = INCOTERMS;
PAYMENT_MEANS: string[] = PAYMENT_MEANS;
PAYMENT_TERMS: string[] = UBLModelUtils.getDefaultPaymentTermsAsStrings();
CURRENCIES: string[] = CURRENCIES;
deliverytermsOfBuyer = null;
// the copy of ThreadEventMetadata of the current business process
processMetadata: ThreadEventMetadata;
// this component is used for both transport and logistics service negotiation
// however, we need to know the type of service since some tabs are displayed only for transport services
@Input() isTransportService: boolean;
// save the default delivery period unit so that we can understand whether the delivery period is updated by the buyer or not
defaultDeliveryPeriodUnit: string = null;
// negotiation request form
negotiationRequestForm: FormGroup = new FormGroup({});
constructor(private bpDataService: BPDataService,
private bpeService: BPEService,
private documentService: DocumentService,
private cookieService: CookieService,
private userService: UserService,
private validationService: ValidationService,
private location: Location,
private translate: TranslateService,
private router: Router) {
}
ngOnInit() {
// Normally, this view is not displayed if the bpDataService.requestForQuotation is null.
// However, it should also be checked here also for the second and later iterations of the negotiation business process.
// In those cases, the negotiation component is not initialized again but only this component.
if (this.bpDataService.requestForQuotation == null) {
this.bpDataService.initRfqWithQuotation();
}
// get copy of ThreadEventMetadata of the current business process
this.processMetadata = this.bpDataService.bpActivityEvent.processMetadata;
this.userService.getSettingsForParty(this.cookieService.get("company_id")).then(res => {
this.deliverytermsOfBuyer = res.tradeDetails.deliveryTerms;
});
this.rfq = this.bpDataService.requestForQuotation;
// for logistics services except transport services, onyl Negotiation tab is available
if (!this.isTransportService) {
this.selectedTab = "NEGOTIATION";
}
this.validateRfq();
this.rfqPrice = new DiscountPriceWrapper(
this.rfq.requestForQuotationLine[0].lineItem.price,
this.rfq.requestForQuotationLine[0].lineItem.price,
this.bpDataService.getCatalogueLine().requiredItemLocationQuantity.applicableTaxCategory[0].percent);
//this.rfqPrice.quotationLinePriceWrapper = new ItemPriceWrapper(this.rfq.requestForQuotationLine[0].lineItem.price);
this.rfqPaymentTerms = new PaymentTermsWrapper(this.rfq.requestForQuotationLine[0].lineItem.paymentTerms);
if (this.processMetadata && this.processMetadata.isBeingUpdated) {
this.updatingProcess = true;
}
// set the default delivery period unit
this.defaultDeliveryPeriodUnit = this.rfq.requestForQuotationLine[0].lineItem.delivery[0].requestedDeliveryPeriod.durationMeasure.unitCode;
}
// be sure that rfq has all necessary fields to start a bp
validateRfq() {
// special terms
if (this.rfq.requestForQuotationLine[0].lineItem.deliveryTerms.specialTerms == null || this.rfq.requestForQuotationLine[0].lineItem.deliveryTerms.specialTerms.length == 0) {
this.rfq.requestForQuotationLine[0].lineItem.deliveryTerms.specialTerms.push(new Text(""));
}
}
isDisabled(): boolean {
return this.isWaitingForReply() || this.callStatus.fb_submitted;
}
isWaitingForReply(): boolean {
return !!this.processMetadata && !this.processMetadata.isBeingUpdated;
}
onSelectTab(event: any, id: any): void {
event.preventDefault();
this.selectedTab = id;
}
onBack(): void {
this.location.back();
}
// check whether the required fields for transport service details are filled or not
isTransportServiceDetailsValid() {
// no need to check transport service details for logistics services which are not transport services
if (!this.isTransportService) {
return true;
}
return this.negotiationRequestForm.valid;
}
onSendRequest(): void {
// send request for quotation
this.callStatus.submit();
let rfq: RequestForQuotation = copy(this.bpDataService.requestForQuotation);
let sellerId: string;
let sellerFederationId: string;
// final check on the rfq
if (this.bpDataService.modifiedCatalogueLines) {
sellerId = UBLModelUtils.getPartyId(this.bpDataService.modifiedCatalogueLines[0].goodsItem.item.manufacturerParty);
sellerFederationId = this.bpDataService.modifiedCatalogueLines[0].goodsItem.item.manufacturerParty.federationInstanceID;
}
else {
sellerId = UBLModelUtils.getPartyId(this.bpDataService.getCatalogueLine().goodsItem.item.manufacturerParty);
sellerFederationId = this.bpDataService.getCatalogueLine().goodsItem.item.manufacturerParty.federationInstanceID;
}
//first initialize the seller and buyer parties.
//once they are fetched continue with starting the ordering process
const buyerId: string = this.cookieService.get("company_id");
Promise.all([
this.userService.getParty(buyerId),
this.userService.getParty(sellerId, sellerFederationId)
])
.then(([buyerParty, sellerParty]) => {
rfq.buyerCustomerParty = new CustomerParty(buyerParty);
rfq.sellerSupplierParty = new SupplierParty(sellerParty);
return this.bpeService.startProcessWithDocument(rfq, sellerParty.federationInstanceID);
})
.then(() => {
this.callStatus.callback("Terms sent", true);
let tab = 'PURCHASES';
if (this.bpDataService.bpActivityEvent.userRole == "seller")
tab = "SALES";
this.router.navigate(['dashboard'], { queryParams: { tab: tab, ins: rfq.sellerSupplierParty.party.federationInstanceID } });
})
.catch(error => {
this.callStatus.error("Failed to send Terms", error);
});
}
onUpdateRequest(): void {
this.callStatus.submit();
let rfq: RequestForQuotation = copy(this.bpDataService.requestForQuotation);
this.bpeService.updateBusinessProcess(JSON.stringify(rfq), "REQUESTFORQUOTATION", this.processMetadata.processInstanceId, this.processMetadata.sellerFederationId)
.then(() => {
this.documentService.updateCachedDocument(rfq.id, rfq);
this.callStatus.callback("Terms updated", true);
                let tab = "PURCHASES";
                if (this.bpDataService.bpActivityEvent.userRole == "seller") {
                    tab = "SALES";
                }
this.router.navigate(['dashboard'], { queryParams: { tab: tab } });
})
.catch(error => {
this.callStatus.error("Failed to update Terms", error);
});
}
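    // a term counts as updated once its value deviates from the default initially shown to the buyer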
isTermUpdated(term: string): boolean {
switch (term) {
case "price":
return (this.rfqPrice.itemPrice.value != null && this.rfqPrice.itemPrice.value != 0) || this.rfqPrice.itemPrice.currency != this.CURRENCIES[0];
case "payment-means":
return this.PAYMENT_MEANS[0] != this.rfq.requestForQuotationLine[0].lineItem.paymentMeans.paymentMeansCode.value;
case "payment-terms":
return this.rfqPaymentTerms.getDefaultPaymentTerms() != this.rfqPaymentTerms.paymentTerm;
case "incoterms":
return this.rfq.requestForQuotationLine[0].lineItem.deliveryTerms.incoterms != null && this.rfq.requestForQuotationLine[0].lineItem.deliveryTerms.incoterms != "";<|fim▁hole|> return this.rfq.requestForQuotationLine[0].lineItem.deliveryTerms.specialTerms[0].value != null && this.rfq.requestForQuotationLine[0].lineItem.deliveryTerms.specialTerms[0].value != "";
case "delivery-period":
return this.rfq.requestForQuotationLine[0].lineItem.delivery[0].requestedDeliveryPeriod.durationMeasure.unitCode != this.defaultDeliveryPeriodUnit || this.rfq.requestForQuotationLine[0].lineItem.delivery[0].requestedDeliveryPeriod.durationMeasure.value != null;
case "pick-up":
return this.rfq.delivery.requestedDeliveryPeriod.startDate != null && this.rfq.delivery.requestedDeliveryPeriod.startDate != "";
case "drop-off":
return this.rfq.delivery.requestedDeliveryPeriod.endDate != null && this.rfq.delivery.requestedDeliveryPeriod.endDate != "";
case "notes":
return UBLModelUtils.areNotesOrFilesAttachedToDocument(this.rfq);
default:
return true;
}
}
getValidationError(): string {
return this.validationService.extractErrorMessage(this.negotiationRequestForm);
}
}<|fim▁end|> | case "special-terms": |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>#[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
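// NOTE (assumption): this looks like svd2rust output for a CAN filter bank register
// (F4R1 = filter bank 4, register 1, going by STM32-style naming); FB0..FB31 below
// are its 32 individual filter bits.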
impl super::F4R1 {
#[doc = r" Modifies the contents of the register"]
#[inline(always)]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
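        // read-modify-write: snapshot the register, let the closure edit a copy, then write it back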
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline(always)]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline(always)]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline(always)]
pub fn reset(&self) {
self.write(|w| w)
}
}
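// The FBnR readers and _FBnW writer proxies that follow all repeat a single
// one-bit pattern, differing only in their bit OFFSET (0..=31).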
#[doc = r" Value of the field"]
pub struct FB0R {
bits: bool,
}
impl FB0R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB1R {
bits: bool,
}
impl FB1R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB2R {
bits: bool,
}
impl FB2R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB3R {
bits: bool,
}
impl FB3R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB4R {
bits: bool,
}
impl FB4R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]<|fim▁hole|> }
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB5R {
bits: bool,
}
impl FB5R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB6R {
bits: bool,
}
impl FB6R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB7R {
bits: bool,
}
impl FB7R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB8R {
bits: bool,
}
impl FB8R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB9R {
bits: bool,
}
impl FB9R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB10R {
bits: bool,
}
impl FB10R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB11R {
bits: bool,
}
impl FB11R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB12R {
bits: bool,
}
impl FB12R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB13R {
bits: bool,
}
impl FB13R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB14R {
bits: bool,
}
impl FB14R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB15R {
bits: bool,
}
impl FB15R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB16R {
bits: bool,
}
impl FB16R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB17R {
bits: bool,
}
impl FB17R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB18R {
bits: bool,
}
impl FB18R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB19R {
bits: bool,
}
impl FB19R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB20R {
bits: bool,
}
impl FB20R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB21R {
bits: bool,
}
impl FB21R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB22R {
bits: bool,
}
impl FB22R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB23R {
bits: bool,
}
impl FB23R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB24R {
bits: bool,
}
impl FB24R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB25R {
bits: bool,
}
impl FB25R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB26R {
bits: bool,
}
impl FB26R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB27R {
bits: bool,
}
impl FB27R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB28R {
bits: bool,
}
impl FB28R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB29R {
bits: bool,
}
impl FB29R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB30R {
bits: bool,
}
impl FB30R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB31R {
bits: bool,
}
impl FB31R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Proxy"]
pub struct _FB0W<'a> {
w: &'a mut W,
}
impl<'a> _FB0W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB1W<'a> {
w: &'a mut W,
}
impl<'a> _FB1W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 1;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB2W<'a> {
w: &'a mut W,
}
impl<'a> _FB2W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 2;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB3W<'a> {
w: &'a mut W,
}
impl<'a> _FB3W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 3;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB4W<'a> {
w: &'a mut W,
}
impl<'a> _FB4W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 4;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB5W<'a> {
w: &'a mut W,
}
impl<'a> _FB5W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 5;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB6W<'a> {
w: &'a mut W,
}
impl<'a> _FB6W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 6;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB7W<'a> {
w: &'a mut W,
}
impl<'a> _FB7W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 7;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB8W<'a> {
w: &'a mut W,
}
impl<'a> _FB8W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 8;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB9W<'a> {
w: &'a mut W,
}
impl<'a> _FB9W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 9;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB10W<'a> {
w: &'a mut W,
}
impl<'a> _FB10W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 10;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB11W<'a> {
w: &'a mut W,
}
impl<'a> _FB11W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 11;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB12W<'a> {
w: &'a mut W,
}
impl<'a> _FB12W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 12;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB13W<'a> {
w: &'a mut W,
}
impl<'a> _FB13W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 13;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB14W<'a> {
w: &'a mut W,
}
impl<'a> _FB14W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 14;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB15W<'a> {
w: &'a mut W,
}
impl<'a> _FB15W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 15;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB16W<'a> {
w: &'a mut W,
}
impl<'a> _FB16W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 16;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB17W<'a> {
w: &'a mut W,
}
impl<'a> _FB17W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 17;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB18W<'a> {
w: &'a mut W,
}
impl<'a> _FB18W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 18;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB19W<'a> {
w: &'a mut W,
}
impl<'a> _FB19W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 19;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB20W<'a> {
w: &'a mut W,
}
impl<'a> _FB20W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 20;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB21W<'a> {
w: &'a mut W,
}
impl<'a> _FB21W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 21;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB22W<'a> {
w: &'a mut W,
}
impl<'a> _FB22W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 22;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB23W<'a> {
w: &'a mut W,
}
impl<'a> _FB23W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 23;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB24W<'a> {
w: &'a mut W,
}
impl<'a> _FB24W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 24;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB25W<'a> {
w: &'a mut W,
}
impl<'a> _FB25W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 25;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB26W<'a> {
w: &'a mut W,
}
impl<'a> _FB26W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 26;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB27W<'a> {
w: &'a mut W,
}
impl<'a> _FB27W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 27;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB28W<'a> {
w: &'a mut W,
}
impl<'a> _FB28W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 28;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB29W<'a> {
w: &'a mut W,
}
impl<'a> _FB29W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 29;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB30W<'a> {
w: &'a mut W,
}
impl<'a> _FB30W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 30;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB31W<'a> {
w: &'a mut W,
}
impl<'a> _FB31W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 31;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bit 0 - Filter bits"]
#[inline(always)]
pub fn fb0(&self) -> FB0R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB0R { bits }
}
#[doc = "Bit 1 - Filter bits"]
#[inline(always)]
pub fn fb1(&self) -> FB1R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 1;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB1R { bits }
}
#[doc = "Bit 2 - Filter bits"]
#[inline(always)]
pub fn fb2(&self) -> FB2R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 2;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB2R { bits }
}
#[doc = "Bit 3 - Filter bits"]
#[inline(always)]
pub fn fb3(&self) -> FB3R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 3;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB3R { bits }
}
#[doc = "Bit 4 - Filter bits"]
#[inline(always)]
pub fn fb4(&self) -> FB4R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 4;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB4R { bits }
}
#[doc = "Bit 5 - Filter bits"]
#[inline(always)]
pub fn fb5(&self) -> FB5R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 5;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB5R { bits }
}
#[doc = "Bit 6 - Filter bits"]
#[inline(always)]
pub fn fb6(&self) -> FB6R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 6;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB6R { bits }
}
#[doc = "Bit 7 - Filter bits"]
#[inline(always)]
pub fn fb7(&self) -> FB7R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 7;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB7R { bits }
}
#[doc = "Bit 8 - Filter bits"]
#[inline(always)]
pub fn fb8(&self) -> FB8R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 8;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB8R { bits }
}
#[doc = "Bit 9 - Filter bits"]
#[inline(always)]
pub fn fb9(&self) -> FB9R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 9;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB9R { bits }
}
#[doc = "Bit 10 - Filter bits"]
#[inline(always)]
pub fn fb10(&self) -> FB10R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 10;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB10R { bits }
}
#[doc = "Bit 11 - Filter bits"]
#[inline(always)]
pub fn fb11(&self) -> FB11R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 11;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB11R { bits }
}
#[doc = "Bit 12 - Filter bits"]
#[inline(always)]
pub fn fb12(&self) -> FB12R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 12;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB12R { bits }
}
#[doc = "Bit 13 - Filter bits"]
#[inline(always)]
pub fn fb13(&self) -> FB13R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 13;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB13R { bits }
}
#[doc = "Bit 14 - Filter bits"]
#[inline(always)]
pub fn fb14(&self) -> FB14R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 14;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB14R { bits }
}
#[doc = "Bit 15 - Filter bits"]
#[inline(always)]
pub fn fb15(&self) -> FB15R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 15;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB15R { bits }
}
#[doc = "Bit 16 - Filter bits"]
#[inline(always)]
pub fn fb16(&self) -> FB16R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 16;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB16R { bits }
}
#[doc = "Bit 17 - Filter bits"]
#[inline(always)]
pub fn fb17(&self) -> FB17R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 17;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB17R { bits }
}
#[doc = "Bit 18 - Filter bits"]
#[inline(always)]
pub fn fb18(&self) -> FB18R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 18;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB18R { bits }
}
#[doc = "Bit 19 - Filter bits"]
#[inline(always)]
pub fn fb19(&self) -> FB19R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 19;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB19R { bits }
}
#[doc = "Bit 20 - Filter bits"]
#[inline(always)]
pub fn fb20(&self) -> FB20R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 20;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB20R { bits }
}
#[doc = "Bit 21 - Filter bits"]
#[inline(always)]
pub fn fb21(&self) -> FB21R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 21;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB21R { bits }
}
#[doc = "Bit 22 - Filter bits"]
#[inline(always)]
pub fn fb22(&self) -> FB22R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 22;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB22R { bits }
}
#[doc = "Bit 23 - Filter bits"]
#[inline(always)]
pub fn fb23(&self) -> FB23R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 23;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB23R { bits }
}
#[doc = "Bit 24 - Filter bits"]
#[inline(always)]
pub fn fb24(&self) -> FB24R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 24;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB24R { bits }
}
#[doc = "Bit 25 - Filter bits"]
#[inline(always)]
pub fn fb25(&self) -> FB25R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 25;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB25R { bits }
}
#[doc = "Bit 26 - Filter bits"]
#[inline(always)]
pub fn fb26(&self) -> FB26R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 26;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB26R { bits }
}
#[doc = "Bit 27 - Filter bits"]
#[inline(always)]
pub fn fb27(&self) -> FB27R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 27;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB27R { bits }
}
#[doc = "Bit 28 - Filter bits"]
#[inline(always)]
pub fn fb28(&self) -> FB28R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 28;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB28R { bits }
}
#[doc = "Bit 29 - Filter bits"]
#[inline(always)]
pub fn fb29(&self) -> FB29R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 29;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB29R { bits }
}
#[doc = "Bit 30 - Filter bits"]
#[inline(always)]
pub fn fb30(&self) -> FB30R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 30;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB30R { bits }
}
#[doc = "Bit 31 - Filter bits"]
#[inline(always)]
pub fn fb31(&self) -> FB31R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 31;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB31R { bits }
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline(always)]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bit 0 - Filter bits"]
#[inline(always)]
pub fn fb0(&mut self) -> _FB0W {
_FB0W { w: self }
}
#[doc = "Bit 1 - Filter bits"]
#[inline(always)]
pub fn fb1(&mut self) -> _FB1W {
_FB1W { w: self }
}
#[doc = "Bit 2 - Filter bits"]
#[inline(always)]
pub fn fb2(&mut self) -> _FB2W {
_FB2W { w: self }
}
#[doc = "Bit 3 - Filter bits"]
#[inline(always)]
pub fn fb3(&mut self) -> _FB3W {
_FB3W { w: self }
}
#[doc = "Bit 4 - Filter bits"]
#[inline(always)]
pub fn fb4(&mut self) -> _FB4W {
_FB4W { w: self }
}
#[doc = "Bit 5 - Filter bits"]
#[inline(always)]
pub fn fb5(&mut self) -> _FB5W {
_FB5W { w: self }
}
#[doc = "Bit 6 - Filter bits"]
#[inline(always)]
pub fn fb6(&mut self) -> _FB6W {
_FB6W { w: self }
}
#[doc = "Bit 7 - Filter bits"]
#[inline(always)]
pub fn fb7(&mut self) -> _FB7W {
_FB7W { w: self }
}
#[doc = "Bit 8 - Filter bits"]
#[inline(always)]
pub fn fb8(&mut self) -> _FB8W {
_FB8W { w: self }
}
#[doc = "Bit 9 - Filter bits"]
#[inline(always)]
pub fn fb9(&mut self) -> _FB9W {
_FB9W { w: self }
}
#[doc = "Bit 10 - Filter bits"]
#[inline(always)]
pub fn fb10(&mut self) -> _FB10W {
_FB10W { w: self }
}
#[doc = "Bit 11 - Filter bits"]
#[inline(always)]
pub fn fb11(&mut self) -> _FB11W {
_FB11W { w: self }
}
#[doc = "Bit 12 - Filter bits"]
#[inline(always)]
pub fn fb12(&mut self) -> _FB12W {
_FB12W { w: self }
}
#[doc = "Bit 13 - Filter bits"]
#[inline(always)]
pub fn fb13(&mut self) -> _FB13W {
_FB13W { w: self }
}
#[doc = "Bit 14 - Filter bits"]
#[inline(always)]
pub fn fb14(&mut self) -> _FB14W {
_FB14W { w: self }
}
#[doc = "Bit 15 - Filter bits"]
#[inline(always)]
pub fn fb15(&mut self) -> _FB15W {
_FB15W { w: self }
}
#[doc = "Bit 16 - Filter bits"]
#[inline(always)]
pub fn fb16(&mut self) -> _FB16W {
_FB16W { w: self }
}
#[doc = "Bit 17 - Filter bits"]
#[inline(always)]
pub fn fb17(&mut self) -> _FB17W {
_FB17W { w: self }
}
#[doc = "Bit 18 - Filter bits"]
#[inline(always)]
pub fn fb18(&mut self) -> _FB18W {
_FB18W { w: self }
}
#[doc = "Bit 19 - Filter bits"]
#[inline(always)]
pub fn fb19(&mut self) -> _FB19W {
_FB19W { w: self }
}
#[doc = "Bit 20 - Filter bits"]
#[inline(always)]
pub fn fb20(&mut self) -> _FB20W {
_FB20W { w: self }
}
#[doc = "Bit 21 - Filter bits"]
#[inline(always)]
pub fn fb21(&mut self) -> _FB21W {
_FB21W { w: self }
}
#[doc = "Bit 22 - Filter bits"]
#[inline(always)]
pub fn fb22(&mut self) -> _FB22W {
_FB22W { w: self }
}
#[doc = "Bit 23 - Filter bits"]
#[inline(always)]
pub fn fb23(&mut self) -> _FB23W {
_FB23W { w: self }
}
#[doc = "Bit 24 - Filter bits"]
#[inline(always)]
pub fn fb24(&mut self) -> _FB24W {
_FB24W { w: self }
}
#[doc = "Bit 25 - Filter bits"]
#[inline(always)]
pub fn fb25(&mut self) -> _FB25W {
_FB25W { w: self }
}
#[doc = "Bit 26 - Filter bits"]
#[inline(always)]
pub fn fb26(&mut self) -> _FB26W {
_FB26W { w: self }
}
#[doc = "Bit 27 - Filter bits"]
#[inline(always)]
pub fn fb27(&mut self) -> _FB27W {
_FB27W { w: self }
}
#[doc = "Bit 28 - Filter bits"]
#[inline(always)]
pub fn fb28(&mut self) -> _FB28W {
_FB28W { w: self }
}
#[doc = "Bit 29 - Filter bits"]
#[inline(always)]
pub fn fb29(&mut self) -> _FB29W {
_FB29W { w: self }
}
#[doc = "Bit 30 - Filter bits"]
#[inline(always)]
pub fn fb30(&mut self) -> _FB30W {
_FB30W { w: self }
}
#[doc = "Bit 31 - Filter bits"]
#[inline(always)]
pub fn fb31(&mut self) -> _FB31W {
_FB31W { w: self }
}
}<|fim▁end|> | pub fn bit(&self) -> bool {
self.bits |
<|file_name|>SpringSoyViewBaseConfig.java<|end_file_name|><|fim▁begin|>package pl.matisoft.soy.config;
import com.google.template.soy.jssrc.SoyJsSrcOptions;
import com.google.template.soy.tofu.SoyTofuOptions;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.context.support.ServletContextResource;
import org.springframework.web.servlet.ViewResolver;
import pl.matisoft.soy.ContentNegotiator;
import pl.matisoft.soy.DefaultContentNegotiator;
import pl.matisoft.soy.SoyTemplateViewResolver;
import pl.matisoft.soy.bundle.DefaultSoyMsgBundleResolver;
import pl.matisoft.soy.bundle.SoyMsgBundleResolver;
import pl.matisoft.soy.compile.DefaultTofuCompiler;
import pl.matisoft.soy.compile.TofuCompiler;
import pl.matisoft.soy.data.DefaultToSoyDataConverter;
import pl.matisoft.soy.data.ToSoyDataConverter;
import pl.matisoft.soy.data.adjust.ModelAdjuster;
import pl.matisoft.soy.data.adjust.SpringModelAdjuster;
import pl.matisoft.soy.global.compile.CompileTimeGlobalModelResolver;
import pl.matisoft.soy.global.compile.EmptyCompileTimeGlobalModelResolver;
import pl.matisoft.soy.global.runtime.EmptyGlobalRuntimeModelResolver;
import pl.matisoft.soy.global.runtime.GlobalRuntimeModelResolver;
import pl.matisoft.soy.holder.CompiledTemplatesHolder;
import pl.matisoft.soy.holder.DefaultCompiledTemplatesHolder;
import pl.matisoft.soy.locale.LocaleProvider;
import pl.matisoft.soy.locale.SpringLocaleProvider;
import pl.matisoft.soy.render.DefaultTemplateRenderer;
import pl.matisoft.soy.render.TemplateRenderer;
import pl.matisoft.soy.template.DefaultTemplateFilesResolver;
import pl.matisoft.soy.template.TemplateFilesResolver;
import javax.inject.Inject;
import javax.servlet.ServletContext;
/**
 * Base Spring Java configuration that wires up the spring-soy-view beans:
 * template resolution, Tofu compilation, i18n message bundles and the
 * view resolver itself.
 *
 * Created with IntelliJ IDEA.
 * User: mati
 * Date: 12/11/2013
 * Time: 19:55
 */
@Configuration<|fim▁hole|> private boolean hotReloadMode;
@Value("${soy.templates.resolve.recursively:true}")
private boolean recursive;
@Value("${soy.templates.file.extension:soy}")
private String fileExtension;
@Value("${soy.templates.directory:/WEB-INF/templates}")
private String templatesPath;
@Value("${soy.i18n.xliff.path:xliffs/messages}")
private String messagesPath;
@Value("${soy.encoding:utf-8}")
private String encoding;
@Value("${soy.i18n.fallback.to.english:true}")
private boolean fallbackToEnglish;
@Value("${soy.preCompile.templates:false}")
private boolean preCompileTemplates;
@Value("${soy.indexView:index}")
private String indexView;
@Value("${soy.logical.prefix:soy:}")
private String logicalPrefix;
@Value("${soy.resolver.order:2147483647}")
private int order;
@Inject
private ServletContext servletContext;
@Bean
public LocaleProvider soyLocaleProvider() {
return new SpringLocaleProvider();
}
@Bean
public DefaultTemplateFilesResolver soyTemplateFilesResolver() throws Exception {
final DefaultTemplateFilesResolver defaultTemplateFilesResolver = new DefaultTemplateFilesResolver();
defaultTemplateFilesResolver.setHotReloadMode(hotReloadMode);
defaultTemplateFilesResolver.setRecursive(recursive);
defaultTemplateFilesResolver.setFilesExtension(fileExtension);
defaultTemplateFilesResolver.setTemplatesLocation(new ServletContextResource(servletContext, templatesPath));
return defaultTemplateFilesResolver;
}
@Bean
public CompileTimeGlobalModelResolver soyCompileTimeGlobalModelResolver() {
return new EmptyCompileTimeGlobalModelResolver();
}
@Bean
public ToSoyDataConverter soyToSoyDataConverter() {
return new DefaultToSoyDataConverter();
}
@Bean
public SoyJsSrcOptions soyJsSourceOptions() {
return new SoyJsSrcOptions();
}
@Bean
public SoyTofuOptions soyTofuOptions() {
final SoyTofuOptions soyTofuOptions = new SoyTofuOptions();
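        // caching is the inverse of hot reload: keep compiled tofu only when templates are not expected to change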
soyTofuOptions.setUseCaching(!hotReloadMode);
return soyTofuOptions;
}
@Bean
public TofuCompiler soyTofuCompiler(final CompileTimeGlobalModelResolver compileTimeGlobalModelResolver, final SoyJsSrcOptions soyJsSrcOptions, final SoyTofuOptions soyTofuOptions) {
final DefaultTofuCompiler defaultTofuCompiler = new DefaultTofuCompiler();
defaultTofuCompiler.setHotReloadMode(hotReloadMode);
defaultTofuCompiler.setCompileTimeGlobalModelResolver(compileTimeGlobalModelResolver);
defaultTofuCompiler.setSoyJsSrcOptions(soyJsSrcOptions);
defaultTofuCompiler.setSoyTofuOptions(soyTofuOptions);
return defaultTofuCompiler;
}
@Bean
public SoyMsgBundleResolver soyMsgBundleResolver() {
final DefaultSoyMsgBundleResolver defaultSoyMsgBundleResolver = new DefaultSoyMsgBundleResolver();
defaultSoyMsgBundleResolver.setHotReloadMode(hotReloadMode);
defaultSoyMsgBundleResolver.setMessagesPath(messagesPath);
defaultSoyMsgBundleResolver.setFallbackToEnglish(fallbackToEnglish);
return defaultSoyMsgBundleResolver;
}
@Bean
public CompiledTemplatesHolder soyTemplatesHolder(final TemplateFilesResolver templateFilesResolver, final TofuCompiler tofuCompiler) throws Exception {
final DefaultCompiledTemplatesHolder defaultCompiledTemplatesHolder = new DefaultCompiledTemplatesHolder();
defaultCompiledTemplatesHolder.setHotReloadMode(hotReloadMode);
defaultCompiledTemplatesHolder.setPreCompileTemplates(preCompileTemplates);
defaultCompiledTemplatesHolder.setTemplatesFileResolver(templateFilesResolver);
defaultCompiledTemplatesHolder.setTofuCompiler(tofuCompiler);
return defaultCompiledTemplatesHolder;
}
@Bean
public TemplateRenderer soyTemplateRenderer(final ToSoyDataConverter toSoyDataConverter) {
final DefaultTemplateRenderer defaultTemplateRenderer = new DefaultTemplateRenderer();
defaultTemplateRenderer.setHotReloadMode(hotReloadMode);
defaultTemplateRenderer.setToSoyDataConverter(toSoyDataConverter);
return defaultTemplateRenderer;
}
@Bean
public ModelAdjuster soySpringModelAdjuster() {
return new SpringModelAdjuster();
}
@Bean
public GlobalRuntimeModelResolver soyGlobalRuntimeModelResolver() {
return new EmptyGlobalRuntimeModelResolver();
}
@Bean
public ContentNegotiator contentNegotiator() {
return new DefaultContentNegotiator();
}
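    // the view resolver below is the integration point that ties all of the beans above together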
@Bean
public ViewResolver soyViewResolver(final CompiledTemplatesHolder compiledTemplatesHolder,
final ModelAdjuster modelAdjuster,
final TemplateRenderer templateRenderer,
final LocaleProvider localeProvider,
final GlobalRuntimeModelResolver globalRuntimeModelResolver,
final ContentNegotiator contentNegotiator,
final SoyMsgBundleResolver msgBundleResolver)
throws Exception {
final SoyTemplateViewResolver soyTemplateViewResolver = new SoyTemplateViewResolver();
soyTemplateViewResolver.setSoyMsgBundleResolver(msgBundleResolver);
soyTemplateViewResolver.setCompiledTemplatesHolder(compiledTemplatesHolder);
soyTemplateViewResolver.setEncoding(encoding);
soyTemplateViewResolver.setGlobalRuntimeModelResolver(globalRuntimeModelResolver);
soyTemplateViewResolver.setHotReloadMode(hotReloadMode);
soyTemplateViewResolver.setIndexView(indexView);
soyTemplateViewResolver.setLocaleProvider(localeProvider);
soyTemplateViewResolver.setModelAdjuster(modelAdjuster);
soyTemplateViewResolver.setTemplateRenderer(templateRenderer);
soyTemplateViewResolver.setPrefix(logicalPrefix);
soyTemplateViewResolver.setOrder(order);
soyTemplateViewResolver.setRedirectContextRelative(true);
soyTemplateViewResolver.setRedirectHttp10Compatible(true);
soyTemplateViewResolver.setContentNegotiator(contentNegotiator);
return soyTemplateViewResolver;
}
}<|fim▁end|> | public class SpringSoyViewBaseConfig {
@Value("${soy.hot.reload.mode:false}") |
<|file_name|>static-function-pointer-xc.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.<|fim▁hole|>
fn f(x: int) -> int { x }
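// aux::F is an immutable static fn pointer and aux::MutF a mutable one; judging by the
// assertions below, aux::f negates its argument while the local f is the identity.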
pub fn main() {
assert_eq!(aux::F(42), -42);
unsafe {
assert_eq!(aux::MutF(42), -42);
aux::MutF = f;
assert_eq!(aux::MutF(42), 42);
aux::MutF = aux::f;
assert_eq!(aux::MutF(42), -42);
}
}<|fim▁end|> |
// ignore-fast
// aux-build:static-function-pointer-aux.rs
extern crate aux = "static-function-pointer-aux"; |
<|file_name|>test_memcache.py<|end_file_name|><|fim▁begin|># Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from textwrap import dedent
import unittest
from eventlet.green import ssl
import mock
from six.moves.configparser import NoSectionError, NoOptionError
from swift.common.middleware import memcache
from swift.common.memcached import MemcacheRing
from swift.common.swob import Request
from swift.common.wsgi import loadapp
from test.unit import with_tempdir, patch_policies
class FakeApp(object):
def __call__(self, env, start_response):
return env
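# Test doubles for the config parser: ExcConfigParser proves that a read was
# attempted, EmptyConfigParser simulates a missing /etc/swift/memcache.conf.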
class ExcConfigParser(object):
def read(self, path):
raise Exception('read called with %r' % path)
class EmptyConfigParser(object):
def read(self, path):
return False
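# Builds a fake ConfigParser class exposing a single section with the given
# memcache options; passing 'error' for a value simulates a missing option.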
def get_config_parser(memcache_servers='1.2.3.4:5',
memcache_serialization_support='1',
memcache_max_connections='4',
section='memcache'):
_srvs = memcache_servers
_sers = memcache_serialization_support
_maxc = memcache_max_connections
_section = section
class SetConfigParser(object):
def items(self, section_name):
if section_name != section:
raise NoSectionError(section_name)
return {
'memcache_servers': memcache_servers,
'memcache_serialization_support':
memcache_serialization_support,
'memcache_max_connections': memcache_max_connections,
}
def read(self, path):
return True
def get(self, section, option):
if _section == section:
if option == 'memcache_servers':
if _srvs == 'error':
raise NoOptionError(option, section)
return _srvs
elif option == 'memcache_serialization_support':
if _sers == 'error':
raise NoOptionError(option, section)
return _sers
elif option in ('memcache_max_connections',
'max_connections'):
if _maxc == 'error':
raise NoOptionError(option, section)
return _maxc
else:
raise NoOptionError(option, section)
else:
                raise NoSectionError(section)
return SetConfigParser
def start_response(*args):
pass
class TestCacheMiddleware(unittest.TestCase):
def setUp(self):
self.app = memcache.MemcacheMiddleware(FakeApp(), {})
def test_cache_middleware(self):
req = Request.blank('/something', environ={'REQUEST_METHOD': 'GET'})
resp = self.app(req.environ, start_response)
self.assertTrue('swift.cache' in resp)
self.assertTrue(isinstance(resp['swift.cache'], MemcacheRing))
def test_conf_default_read(self):
with mock.patch.object(memcache, 'ConfigParser', ExcConfigParser):
for d in ({},
{'memcache_servers': '6.7.8.9:10'},
{'memcache_serialization_support': '0'},
{'memcache_max_connections': '30'},
{'memcache_servers': '6.7.8.9:10',
'memcache_serialization_support': '0'},
{'memcache_servers': '6.7.8.9:10',
'memcache_max_connections': '30'},
{'memcache_serialization_support': '0',
'memcache_max_connections': '30'}
):
with self.assertRaises(Exception) as catcher:
memcache.MemcacheMiddleware(FakeApp(), d)
self.assertEqual(
str(catcher.exception),
"read called with '/etc/swift/memcache.conf'")
def test_conf_set_no_read(self):
with mock.patch.object(memcache, 'ConfigParser', ExcConfigParser):
exc = None
try:
memcache.MemcacheMiddleware(
FakeApp(), {'memcache_servers': '1.2.3.4:5',
'memcache_serialization_support': '2',
'memcache_max_connections': '30'})
except Exception as err:
exc = err
self.assertIsNone(exc)
def test_conf_default(self):
with mock.patch.object(memcache, 'ConfigParser', EmptyConfigParser):
app = memcache.MemcacheMiddleware(FakeApp(), {})
self.assertEqual(app.memcache_servers, '127.0.0.1:11211')
self.assertEqual(app.memcache._allow_pickle, False)
self.assertEqual(app.memcache._allow_unpickle, False)
self.assertEqual(
app.memcache._client_cache['127.0.0.1:11211'].max_size, 2)
def test_conf_inline(self):
with mock.patch.object(memcache, 'ConfigParser', get_config_parser()):
app = memcache.MemcacheMiddleware(
FakeApp(),
{'memcache_servers': '6.7.8.9:10',
'memcache_serialization_support': '0',
'memcache_max_connections': '5'})
self.assertEqual(app.memcache_servers, '6.7.8.9:10')
self.assertEqual(app.memcache._allow_pickle, True)
self.assertEqual(app.memcache._allow_unpickle, True)
self.assertEqual(
app.memcache._client_cache['6.7.8.9:10'].max_size, 5)
def test_conf_inline_ratelimiting(self):
with mock.patch.object(memcache, 'ConfigParser', get_config_parser()):
app = memcache.MemcacheMiddleware(
FakeApp(),
{'error_suppression_limit': '5',
'error_suppression_interval': '2.5'})
self.assertEqual(app.memcache._error_limit_count, 5)
self.assertEqual(app.memcache._error_limit_time, 2.5)
self.assertEqual(app.memcache._error_limit_duration, 2.5)
def test_conf_inline_tls(self):
fake_context = mock.Mock()
with mock.patch.object(ssl, 'create_default_context',
return_value=fake_context):
with mock.patch.object(memcache, 'ConfigParser',
get_config_parser()):
memcache.MemcacheMiddleware(
FakeApp(),
{'tls_enabled': 'true',
'tls_cafile': 'cafile',
'tls_certfile': 'certfile',
'tls_keyfile': 'keyfile'})
ssl.create_default_context.assert_called_with(cafile='cafile')
fake_context.load_cert_chain.assert_called_with('certfile',
'keyfile')
def test_conf_extra_no_section(self):
with mock.patch.object(memcache, 'ConfigParser',
get_config_parser(section='foobar')):
app = memcache.MemcacheMiddleware(FakeApp(), {})
self.assertEqual(app.memcache_servers, '127.0.0.1:11211')
self.assertEqual(app.memcache._allow_pickle, False)
self.assertEqual(app.memcache._allow_unpickle, False)
self.assertEqual(
app.memcache._client_cache['127.0.0.1:11211'].max_size, 2)
def test_conf_extra_no_option(self):
replacement_parser = get_config_parser(
memcache_servers='error', memcache_serialization_support='error',
memcache_max_connections='error')
with mock.patch.object(memcache, 'ConfigParser', replacement_parser):
app = memcache.MemcacheMiddleware(FakeApp(), {})
self.assertEqual(app.memcache_servers, '127.0.0.1:11211')
self.assertEqual(app.memcache._allow_pickle, False)
self.assertEqual(app.memcache._allow_unpickle, False)
self.assertEqual(
app.memcache._client_cache['127.0.0.1:11211'].max_size, 2)
def test_conf_inline_other_max_conn(self):
with mock.patch.object(memcache, 'ConfigParser', get_config_parser()):
app = memcache.MemcacheMiddleware(
FakeApp(),
{'memcache_servers': '6.7.8.9:10',
'memcache_serialization_support': '0',
'max_connections': '5'})
self.assertEqual(app.memcache_servers, '6.7.8.9:10')
self.assertEqual(app.memcache._allow_pickle, True)
self.assertEqual(app.memcache._allow_unpickle, True)
self.assertEqual(
app.memcache._client_cache['6.7.8.9:10'].max_size, 5)
def test_conf_inline_bad_max_conn(self):
with mock.patch.object(memcache, 'ConfigParser', get_config_parser()):
app = memcache.MemcacheMiddleware(
FakeApp(),
{'memcache_servers': '6.7.8.9:10',
'memcache_serialization_support': '0',
'max_connections': 'bad42'})
self.assertEqual(app.memcache_servers, '6.7.8.9:10')
self.assertEqual(app.memcache._allow_pickle, True)
self.assertEqual(app.memcache._allow_unpickle, True)
self.assertEqual(
app.memcache._client_cache['6.7.8.9:10'].max_size, 4)
def test_conf_from_extra_conf(self):
with mock.patch.object(memcache, 'ConfigParser', get_config_parser()):
app = memcache.MemcacheMiddleware(FakeApp(), {})
self.assertEqual(app.memcache_servers, '1.2.3.4:5')
self.assertEqual(app.memcache._allow_pickle, False)
self.assertEqual(app.memcache._allow_unpickle, True)
self.assertEqual(
app.memcache._client_cache['1.2.3.4:5'].max_size, 4)
def test_conf_from_extra_conf_bad_max_conn(self):
with mock.patch.object(memcache, 'ConfigParser', get_config_parser(
memcache_max_connections='bad42')):
app = memcache.MemcacheMiddleware(FakeApp(), {})
self.assertEqual(app.memcache_servers, '1.2.3.4:5')
self.assertEqual(app.memcache._allow_pickle, False)
self.assertEqual(app.memcache._allow_unpickle, True)
self.assertEqual(
app.memcache._client_cache['1.2.3.4:5'].max_size, 2)
def test_conf_from_inline_and_maxc_from_extra_conf(self):
with mock.patch.object(memcache, 'ConfigParser', get_config_parser()):
app = memcache.MemcacheMiddleware(
FakeApp(),
{'memcache_servers': '6.7.8.9:10',
'memcache_serialization_support': '0'})
self.assertEqual(app.memcache_servers, '6.7.8.9:10')
self.assertEqual(app.memcache._allow_pickle, True)
self.assertEqual(app.memcache._allow_unpickle, True)
self.assertEqual(
app.memcache._client_cache['6.7.8.9:10'].max_size, 4)
def test_conf_from_inline_and_sers_from_extra_conf(self):
with mock.patch.object(memcache, 'ConfigParser', get_config_parser()):
app = memcache.MemcacheMiddleware(
FakeApp(),
{'memcache_servers': '6.7.8.9:10',
'memcache_max_connections': '42'})
self.assertEqual(app.memcache_servers, '6.7.8.9:10')
self.assertEqual(app.memcache._allow_pickle, False)
self.assertEqual(app.memcache._allow_unpickle, True)
self.assertEqual(
app.memcache._client_cache['6.7.8.9:10'].max_size, 42)
def test_filter_factory(self):
factory = memcache.filter_factory({'max_connections': '3'},
memcache_servers='10.10.10.10:10',
memcache_serialization_support='1')
thefilter = factory('myapp')
self.assertEqual(thefilter.app, 'myapp')
self.assertEqual(thefilter.memcache_servers, '10.10.10.10:10')
self.assertEqual(thefilter.memcache._allow_pickle, False)
self.assertEqual(thefilter.memcache._allow_unpickle, True)
self.assertEqual(
thefilter.memcache._client_cache['10.10.10.10:10'].max_size, 3)
@patch_policies
def _loadapp(self, proxy_config_path):
"""
Load a proxy from an app.conf to get the memcache_ring
:returns: the memcache_ring of the memcache middleware filter
"""
with mock.patch('swift.proxy.server.Ring'):
app = loadapp(proxy_config_path)
memcache_ring = None
while True:
memcache_ring = getattr(app, 'memcache', None)
if memcache_ring:
break
app = app.app
return memcache_ring
@with_tempdir
def test_real_config(self, tempdir):
config = """
[pipeline:main]
pipeline = cache proxy-server
[app:proxy-server]
use = egg:swift#proxy
[filter:cache]
use = egg:swift#memcache
"""
config_path = os.path.join(tempdir, 'test.conf')
with open(config_path, 'w') as f:
f.write(dedent(config))
memcache_ring = self._loadapp(config_path)
# only one server by default
self.assertEqual(list(memcache_ring._client_cache.keys()),
['127.0.0.1:11211'])
# extra options
self.assertEqual(memcache_ring._connect_timeout, 0.3)
self.assertEqual(memcache_ring._pool_timeout, 1.0)
# tries is limited to server count
self.assertEqual(memcache_ring._tries, 1)
self.assertEqual(memcache_ring._io_timeout, 2.0)
@with_tempdir
def test_real_config_with_options(self, tempdir):
config = """
[pipeline:main]
pipeline = cache proxy-server
[app:proxy-server]
use = egg:swift#proxy
[filter:cache]
use = egg:swift#memcache
memcache_servers = 10.0.0.1:11211,10.0.0.2:11211,10.0.0.3:11211,
10.0.0.4:11211
connect_timeout = 1.0
pool_timeout = 0.5
tries = 4
io_timeout = 1.0
tls_enabled = true
"""
config_path = os.path.join(tempdir, 'test.conf')
with open(config_path, 'w') as f:
f.write(dedent(config))
memcache_ring = self._loadapp(config_path)
self.assertEqual(sorted(memcache_ring._client_cache.keys()),
['10.0.0.%d:11211' % i for i in range(1, 5)])
# extra options
self.assertEqual(memcache_ring._connect_timeout, 1.0)
self.assertEqual(memcache_ring._pool_timeout, 0.5)
# tries is limited to server count
self.assertEqual(memcache_ring._tries, 4)
self.assertEqual(memcache_ring._io_timeout, 1.0)
self.assertEqual(memcache_ring._error_limit_count, 10)
self.assertEqual(memcache_ring._error_limit_time, 60)
self.assertEqual(memcache_ring._error_limit_duration, 60)
self.assertIsInstance(
list(memcache_ring._client_cache.values())[0]._tls_context,
ssl.SSLContext)
@with_tempdir
def test_real_memcache_config(self, tempdir):
proxy_config = """
[DEFAULT]
swift_dir = %s
[pipeline:main]
pipeline = cache proxy-server
[app:proxy-server]
use = egg:swift#proxy
[filter:cache]
use = egg:swift#memcache
connect_timeout = 1.0
""" % tempdir
proxy_config_path = os.path.join(tempdir, 'test.conf')<|fim▁hole|> [memcache]
memcache_servers = 10.0.0.1:11211,10.0.0.2:11211,10.0.0.3:11211,
10.0.0.4:11211
connect_timeout = 0.5
io_timeout = 1.0
error_suppression_limit = 0
error_suppression_interval = 1.5
"""
memcache_config_path = os.path.join(tempdir, 'memcache.conf')
with open(memcache_config_path, 'w') as f:
f.write(dedent(memcache_config))
memcache_ring = self._loadapp(proxy_config_path)
self.assertEqual(sorted(memcache_ring._client_cache.keys()),
['10.0.0.%d:11211' % i for i in range(1, 5)])
# proxy option takes precedence
self.assertEqual(memcache_ring._connect_timeout, 1.0)
# default tries are not limited by servers
self.assertEqual(memcache_ring._tries, 3)
# memcache conf options are defaults
self.assertEqual(memcache_ring._io_timeout, 1.0)
self.assertEqual(memcache_ring._error_limit_count, 0)
self.assertEqual(memcache_ring._error_limit_time, 1.5)
self.assertEqual(memcache_ring._error_limit_duration, 1.5)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | with open(proxy_config_path, 'w') as f:
f.write(dedent(proxy_config))
memcache_config = """ |
<|file_name|>0002_auto_20161003_1842.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-<|fim▁hole|>from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('groups', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='group',
name='shared',
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name='group',
name='user_name',
field=models.CharField(default=None, max_length=400, null=True),
),
]<|fim▁end|> | # Generated by Django 1.10.dev20160107235441 on 2016-10-03 18:42
from __future__ import unicode_literals
|
<|file_name|>cistern.pb.go<|end_file_name|><|fim▁begin|>// Code generated by protoc-gen-gogo.
// source: cistern.proto
// DO NOT EDIT!
/*
Package binlog is a generated protocol buffer package.
It is generated from these files:
cistern.proto
It has these top-level messages:
DumpBinlogReq
DumpBinlogResp
DumpDDLJobsReq
DumpDDLJobsResp
GetLatestCommitTSReq
GetLatestCommitTSResp
*/
package binlog
import (
"fmt"
proto "github.com/golang/protobuf/proto"
math "math"
io "io"
)
import (
context "golang.org/x/net/context"
grpc "google.golang.org/grpc"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
type DumpBinlogReq struct {
// beginCommitTS specifies the position from which to begin dumping binlogs.
// note that the dump actually starts from the binlog immediately after beginCommitTS;
// it should be zero in case of the first request.
BeginCommitTS int64 `protobuf:"varint,1,opt,name=beginCommitTS,proto3" json:"beginCommitTS,omitempty"`
}
func (m *DumpBinlogReq) Reset() { *m = DumpBinlogReq{} }
func (m *DumpBinlogReq) String() string { return proto.CompactTextString(m) }
func (*DumpBinlogReq) ProtoMessage() {}
func (*DumpBinlogReq) Descriptor() ([]byte, []int) { return fileDescriptorCistern, []int{0} }
type DumpBinlogResp struct {
// CommitTS specifies the commitTS of binlog
CommitTS int64 `protobuf:"varint,1,opt,name=commitTS,proto3" json:"commitTS,omitempty"`
// payloads is bytecodes encoded from binlog item
Payload []byte `protobuf:"bytes,2,opt,name=payload,proto3" json:"payload,omitempty"`
// ddljob is json bytes marshaled from corresponding ddljob struct if payload is a DDL type of binlog
Ddljob []byte `protobuf:"bytes,3,opt,name=ddljob,proto3" json:"ddljob,omitempty"`
}
func (m *DumpBinlogResp) Reset() { *m = DumpBinlogResp{} }
func (m *DumpBinlogResp) String() string { return proto.CompactTextString(m) }
func (*DumpBinlogResp) ProtoMessage() {}
func (*DumpBinlogResp) Descriptor() ([]byte, []int) { return fileDescriptorCistern, []int{1} }
type DumpDDLJobsReq struct {
// beginCommitTS is the start point from which drainer processes binlogs; DumpDDLJobs() returns
// all history DDL jobs before this position, then drainer will apply these DDL jobs
// in order of job ID to restore the whole schema info at that moment.
BeginCommitTS int64 `protobuf:"varint,1,opt,name=beginCommitTS,proto3" json:"beginCommitTS,omitempty"`
}
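// Editorial usage sketch (illustrative, relying only on names defined in
// this file); assumes a CisternClient `client` and a context `ctx` exist:
//
//	resp, err := client.DumpDDLJobs(ctx, &DumpDDLJobsReq{BeginCommitTS: ts})
//	if err == nil {
//		_ = resp.Ddljobs // JSON-encoded jobs, to be applied in job-ID order
//	}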
func (m *DumpDDLJobsReq) Reset() { *m = DumpDDLJobsReq{} }
func (m *DumpDDLJobsReq) String() string { return proto.CompactTextString(m) }
func (*DumpDDLJobsReq) ProtoMessage() {}
func (*DumpDDLJobsReq) Descriptor() ([]byte, []int) { return fileDescriptorCistern, []int{2} }
type DumpDDLJobsResp struct {
// ddljobs is an array of JSON encoded history DDL jobs
Ddljobs [][]byte `protobuf:"bytes,1,rep,name=ddljobs" json:"ddljobs,omitempty"`
}
func (m *DumpDDLJobsResp) Reset() { *m = DumpDDLJobsResp{} }
func (m *DumpDDLJobsResp) String() string { return proto.CompactTextString(m) }
func (*DumpDDLJobsResp) ProtoMessage() {}
func (*DumpDDLJobsResp) Descriptor() ([]byte, []int) { return fileDescriptorCistern, []int{3} }
type GetLatestCommitTSReq struct {
}
func (m *GetLatestCommitTSReq) Reset() { *m = GetLatestCommitTSReq{} }
func (m *GetLatestCommitTSReq) String() string { return proto.CompactTextString(m) }
func (*GetLatestCommitTSReq) ProtoMessage() {}
func (*GetLatestCommitTSReq) Descriptor() ([]byte, []int) { return fileDescriptorCistern, []int{4} }
type GetLatestCommitTSResp struct {
// commitTS specifies the Last binlog commitTS of the TiDB
CommitTS int64 `protobuf:"varint,1,opt,name=commitTS,proto3" json:"commitTS,omitempty"`
// isSynced specifies whether the all binlogs are consumed from pump
IsSynced bool `protobuf:"varint,2,opt,name=isSynced,proto3" json:"isSynced,omitempty"`
}
func (m *GetLatestCommitTSResp) Reset() { *m = GetLatestCommitTSResp{} }
func (m *GetLatestCommitTSResp) String() string { return proto.CompactTextString(m) }
func (*GetLatestCommitTSResp) ProtoMessage() {}
func (*GetLatestCommitTSResp) Descriptor() ([]byte, []int) { return fileDescriptorCistern, []int{5} }
func init() {
proto.RegisterType((*DumpBinlogReq)(nil), "binlog.DumpBinlogReq")
proto.RegisterType((*DumpBinlogResp)(nil), "binlog.DumpBinlogResp")
proto.RegisterType((*DumpDDLJobsReq)(nil), "binlog.DumpDDLJobsReq")
proto.RegisterType((*DumpDDLJobsResp)(nil), "binlog.DumpDDLJobsResp")
proto.RegisterType((*GetLatestCommitTSReq)(nil), "binlog.GetLatestCommitTSReq")
proto.RegisterType((*GetLatestCommitTSResp)(nil), "binlog.GetLatestCommitTSResp")
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConn
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion3
// Client API for Cistern service
type CisternClient interface {
// DumpBinlog dumps continuous binlog items in a stream from a given position
DumpBinlog(ctx context.Context, in *DumpBinlogReq, opts ...grpc.CallOption) (Cistern_DumpBinlogClient, error)
// DumpDDLJobs dumps all history DDL jobs before a specified commitTS
DumpDDLJobs(ctx context.Context, in *DumpDDLJobsReq, opts ...grpc.CallOption) (*DumpDDLJobsResp, error)
// GetLatestCommitTS returns the Last binlog commitTS of the TiDB
GetLatestCommitTS(ctx context.Context, in *GetLatestCommitTSReq, opts ...grpc.CallOption) (*GetLatestCommitTSResp, error)
}
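// Editorial usage sketch (illustrative; assumes a ready *grpc.ClientConn
// `conn` and a context `ctx`, error handling abbreviated):
//
//	client := NewCisternClient(conn)
//	stream, err := client.DumpBinlog(ctx, &DumpBinlogReq{BeginCommitTS: 0})
//	if err != nil {
//		return err
//	}
//	for {
//		item, err := stream.Recv()
//		if err != nil {
//			break // io.EOF marks the end of the stream
//		}
//		_ = item.Payload // process one encoded binlog item
//	}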
type cisternClient struct {
cc *grpc.ClientConn
}
func NewCisternClient(cc *grpc.ClientConn) CisternClient {
return &cisternClient{cc}
}
func (c *cisternClient) DumpBinlog(ctx context.Context, in *DumpBinlogReq, opts ...grpc.CallOption) (Cistern_DumpBinlogClient, error) {
stream, err := grpc.NewClientStream(ctx, &_Cistern_serviceDesc.Streams[0], c.cc, "/binlog.Cistern/DumpBinlog", opts...)
if err != nil {
return nil, err
}
x := &cisternDumpBinlogClient{stream}
if err := x.ClientStream.SendMsg(in); err != nil {
return nil, err
}
if err := x.ClientStream.CloseSend(); err != nil {
return nil, err
}
return x, nil
}
type Cistern_DumpBinlogClient interface {
Recv() (*DumpBinlogResp, error)
grpc.ClientStream
}
type cisternDumpBinlogClient struct {
grpc.ClientStream
}
func (x *cisternDumpBinlogClient) Recv() (*DumpBinlogResp, error) {
m := new(DumpBinlogResp)
if err := x.ClientStream.RecvMsg(m); err != nil {
return nil, err
}
return m, nil
}
func (c *cisternClient) DumpDDLJobs(ctx context.Context, in *DumpDDLJobsReq, opts ...grpc.CallOption) (*DumpDDLJobsResp, error) {
out := new(DumpDDLJobsResp)
err := grpc.Invoke(ctx, "/binlog.Cistern/DumpDDLJobs", in, out, c.cc, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *cisternClient) GetLatestCommitTS(ctx context.Context, in *GetLatestCommitTSReq, opts ...grpc.CallOption) (*GetLatestCommitTSResp, error) {
out := new(GetLatestCommitTSResp)
err := grpc.Invoke(ctx, "/binlog.Cistern/GetLatestCommitTS", in, out, c.cc, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// Server API for Cistern service
type CisternServer interface {
// DumpBinlog dumps continuous binlog items in a stream from a given position
DumpBinlog(*DumpBinlogReq, Cistern_DumpBinlogServer) error
// DumpDDLJobs dumps all history DDL jobs before a specified commitTS
DumpDDLJobs(context.Context, *DumpDDLJobsReq) (*DumpDDLJobsResp, error)
// GetLatestCommitTS returns the Last binlog commitTS of the TiDB
GetLatestCommitTS(context.Context, *GetLatestCommitTSReq) (*GetLatestCommitTSResp, error)
}
func RegisterCisternServer(s *grpc.Server, srv CisternServer) {
s.RegisterService(&_Cistern_serviceDesc, srv)
}
func _Cistern_DumpBinlog_Handler(srv interface{}, stream grpc.ServerStream) error {
m := new(DumpBinlogReq)
if err := stream.RecvMsg(m); err != nil {
return err
}
return srv.(CisternServer).DumpBinlog(m, &cisternDumpBinlogServer{stream})
}
type Cistern_DumpBinlogServer interface {
Send(*DumpBinlogResp) error
grpc.ServerStream
}
type cisternDumpBinlogServer struct {
grpc.ServerStream
}
func (x *cisternDumpBinlogServer) Send(m *DumpBinlogResp) error {
return x.ServerStream.SendMsg(m)
}
func _Cistern_DumpDDLJobs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(DumpDDLJobsReq)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(CisternServer).DumpDDLJobs(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/binlog.Cistern/DumpDDLJobs",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(CisternServer).DumpDDLJobs(ctx, req.(*DumpDDLJobsReq))
}
return interceptor(ctx, in, info, handler)
}
func _Cistern_GetLatestCommitTS_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(GetLatestCommitTSReq)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(CisternServer).GetLatestCommitTS(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/binlog.Cistern/GetLatestCommitTS",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(CisternServer).GetLatestCommitTS(ctx, req.(*GetLatestCommitTSReq))
}
return interceptor(ctx, in, info, handler)
}
var _Cistern_serviceDesc = grpc.ServiceDesc{
ServiceName: "binlog.Cistern",
HandlerType: (*CisternServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "DumpDDLJobs",
Handler: _Cistern_DumpDDLJobs_Handler,
},
{
MethodName: "GetLatestCommitTS",
Handler: _Cistern_GetLatestCommitTS_Handler,
},
},
Streams: []grpc.StreamDesc{
{
StreamName: "DumpBinlog",
Handler: _Cistern_DumpBinlog_Handler,
ServerStreams: true,
},
},
Metadata: fileDescriptorCistern,
}
func (m *DumpBinlogReq) Marshal() (data []byte, err error) {
size := m.Size()
data = make([]byte, size)
n, err := m.MarshalTo(data)
if err != nil {
return nil, err
}
return data[:n], nil
}
func (m *DumpBinlogReq) MarshalTo(data []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if m.BeginCommitTS != 0 {
data[i] = 0x8
i++
i = encodeVarintCistern(data, i, uint64(m.BeginCommitTS))
}
return i, nil
}
func (m *DumpBinlogResp) Marshal() (data []byte, err error) {
size := m.Size()
data = make([]byte, size)
n, err := m.MarshalTo(data)
if err != nil {
return nil, err
}
return data[:n], nil
}
func (m *DumpBinlogResp) MarshalTo(data []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if m.CommitTS != 0 {
data[i] = 0x8
i++
i = encodeVarintCistern(data, i, uint64(m.CommitTS))
}
if len(m.Payload) > 0 {
data[i] = 0x12
i++
i = encodeVarintCistern(data, i, uint64(len(m.Payload)))
i += copy(data[i:], m.Payload)
}
if len(m.Ddljob) > 0 {
data[i] = 0x1a
i++
i = encodeVarintCistern(data, i, uint64(len(m.Ddljob)))
i += copy(data[i:], m.Ddljob)
}
return i, nil
}
func (m *DumpDDLJobsReq) Marshal() (data []byte, err error) {
size := m.Size()
data = make([]byte, size)
n, err := m.MarshalTo(data)
if err != nil {
return nil, err
}
return data[:n], nil
}
func (m *DumpDDLJobsReq) MarshalTo(data []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if m.BeginCommitTS != 0 {
data[i] = 0x8
i++
i = encodeVarintCistern(data, i, uint64(m.BeginCommitTS))
}
return i, nil
}
func (m *DumpDDLJobsResp) Marshal() (data []byte, err error) {
size := m.Size()
data = make([]byte, size)
n, err := m.MarshalTo(data)
if err != nil {
return nil, err
}
return data[:n], nil
}
func (m *DumpDDLJobsResp) MarshalTo(data []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if len(m.Ddljobs) > 0 {
for _, b := range m.Ddljobs {
data[i] = 0xa
i++
i = encodeVarintCistern(data, i, uint64(len(b)))
i += copy(data[i:], b)
}
}
return i, nil
}
func (m *GetLatestCommitTSReq) Marshal() (data []byte, err error) {
size := m.Size()
data = make([]byte, size)
n, err := m.MarshalTo(data)
if err != nil {
return nil, err
}
return data[:n], nil
}
func (m *GetLatestCommitTSReq) MarshalTo(data []byte) (int, error) {
var i int
_ = i
var l int
_ = l
return i, nil
}
func (m *GetLatestCommitTSResp) Marshal() (data []byte, err error) {
size := m.Size()
data = make([]byte, size)
n, err := m.MarshalTo(data)
if err != nil {
return nil, err
}
return data[:n], nil
}
func (m *GetLatestCommitTSResp) MarshalTo(data []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if m.CommitTS != 0 {
data[i] = 0x8
i++
i = encodeVarintCistern(data, i, uint64(m.CommitTS))
}
if m.IsSynced {
data[i] = 0x10
i++
if m.IsSynced {
data[i] = 1
} else {
data[i] = 0
}
i++
}
return i, nil
}
func encodeFixed64Cistern(data []byte, offset int, v uint64) int {
data[offset] = uint8(v)
data[offset+1] = uint8(v >> 8)
data[offset+2] = uint8(v >> 16)
data[offset+3] = uint8(v >> 24)
data[offset+4] = uint8(v >> 32)
data[offset+5] = uint8(v >> 40)
data[offset+6] = uint8(v >> 48)
data[offset+7] = uint8(v >> 56)
return offset + 8
}
func encodeFixed32Cistern(data []byte, offset int, v uint32) int {
data[offset] = uint8(v)
data[offset+1] = uint8(v >> 8)
data[offset+2] = uint8(v >> 16)
data[offset+3] = uint8(v >> 24)
return offset + 4
}
func encodeVarintCistern(data []byte, offset int, v uint64) int {
for v >= 1<<7 {
data[offset] = uint8(v&0x7f | 0x80)
v >>= 7
offset++
}
data[offset] = uint8(v)
return offset + 1
}
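// Editorial worked example: encodeVarintCistern emits protobuf varints,
// 7 bits per byte with the high bit set on every byte except the last.
// For v = 300 (binary 1_0010_1100): low 7 bits 0x2C plus the continuation
// bit -> 0xAC, remaining bits 0b10 -> 0x02, so the encoding is [0xAC, 0x02].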
func (m *DumpBinlogReq) Size() (n int) {
var l int
_ = l
if m.BeginCommitTS != 0 {
n += 1 + sovCistern(uint64(m.BeginCommitTS))
}
return n
}
func (m *DumpBinlogResp) Size() (n int) {
var l int
_ = l
if m.CommitTS != 0 {
n += 1 + sovCistern(uint64(m.CommitTS))
}
l = len(m.Payload)
if l > 0 {
n += 1 + l + sovCistern(uint64(l))
}
l = len(m.Ddljob)
if l > 0 {
n += 1 + l + sovCistern(uint64(l))
}
return n
}
func (m *DumpDDLJobsReq) Size() (n int) {
var l int
_ = l
if m.BeginCommitTS != 0 {
n += 1 + sovCistern(uint64(m.BeginCommitTS))
}
return n
}
func (m *DumpDDLJobsResp) Size() (n int) {
var l int
_ = l
if len(m.Ddljobs) > 0 {
for _, b := range m.Ddljobs {
l = len(b)
n += 1 + l + sovCistern(uint64(l))
}
}
return n
}
func (m *GetLatestCommitTSReq) Size() (n int) {
var l int
_ = l
return n
}
func (m *GetLatestCommitTSResp) Size() (n int) {
var l int
_ = l
if m.CommitTS != 0 {
n += 1 + sovCistern(uint64(m.CommitTS))
}
if m.IsSynced {
n += 2
}
return n
}
func sovCistern(x uint64) (n int) {
for {
n++
x >>= 7
if x == 0 {
break
}
}
return n
}
func sozCistern(x uint64) (n int) {
return sovCistern(uint64((x << 1) ^ uint64((int64(x) >> 63))))
}
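// Editorial note: sovCistern returns how many bytes the varint encoding of
// x occupies (e.g. sovCistern(300) == 2). sozCistern zigzag-encodes first,
// so small negative values stay small: -1 -> 1, 1 -> 2, -2 -> 3.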
func (m *DumpBinlogReq) Unmarshal(data []byte) error {
l := len(data)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowCistern
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: DumpBinlogReq: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: DumpBinlogReq: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field BeginCommitTS", wireType)
}
m.BeginCommitTS = 0
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowCistern
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
m.BeginCommitTS |= (int64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
default:
iNdEx = preIndex
skippy, err := skipCistern(data[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthCistern
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func (m *DumpBinlogResp) Unmarshal(data []byte) error {
l := len(data)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowCistern
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: DumpBinlogResp: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: DumpBinlogResp: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field CommitTS", wireType)
}
m.CommitTS = 0
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowCistern
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
m.CommitTS |= (int64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
case 2:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Payload", wireType)
}
var byteLen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowCistern
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
byteLen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if byteLen < 0 {
return ErrInvalidLengthCistern
}
postIndex := iNdEx + byteLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.Payload = append(m.Payload[:0], data[iNdEx:postIndex]...)
if m.Payload == nil {
m.Payload = []byte{}
}
iNdEx = postIndex
case 3:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Ddljob", wireType)
}
var byteLen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowCistern
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
byteLen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if byteLen < 0 {
return ErrInvalidLengthCistern
}
postIndex := iNdEx + byteLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.Ddljob = append(m.Ddljob[:0], data[iNdEx:postIndex]...)
if m.Ddljob == nil {
m.Ddljob = []byte{}
}
iNdEx = postIndex
default:
iNdEx = preIndex
skippy, err := skipCistern(data[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthCistern
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func (m *DumpDDLJobsReq) Unmarshal(data []byte) error {
l := len(data)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowCistern
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: DumpDDLJobsReq: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: DumpDDLJobsReq: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field BeginCommitTS", wireType)
}
m.BeginCommitTS = 0
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowCistern
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
m.BeginCommitTS |= (int64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
default:
iNdEx = preIndex
skippy, err := skipCistern(data[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthCistern
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func (m *DumpDDLJobsResp) Unmarshal(data []byte) error {
l := len(data)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowCistern
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: DumpDDLJobsResp: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: DumpDDLJobsResp: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Ddljobs", wireType)
}
var byteLen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowCistern
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
byteLen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if byteLen < 0 {
return ErrInvalidLengthCistern
}
postIndex := iNdEx + byteLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.Ddljobs = append(m.Ddljobs, make([]byte, postIndex-iNdEx))
copy(m.Ddljobs[len(m.Ddljobs)-1], data[iNdEx:postIndex])
iNdEx = postIndex
default:
iNdEx = preIndex
skippy, err := skipCistern(data[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthCistern
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func (m *GetLatestCommitTSReq) Unmarshal(data []byte) error {
l := len(data)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowCistern
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: GetLatestCommitTSReq: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: GetLatestCommitTSReq: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
default:
iNdEx = preIndex
skippy, err := skipCistern(data[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthCistern
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func (m *GetLatestCommitTSResp) Unmarshal(data []byte) error {
l := len(data)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowCistern
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: GetLatestCommitTSResp: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: GetLatestCommitTSResp: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field CommitTS", wireType)
}
m.CommitTS = 0
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowCistern
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
m.CommitTS |= (int64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
case 2:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field IsSynced", wireType)
}
var v int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowCistern
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
v |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
m.IsSynced = bool(v != 0)
default:
iNdEx = preIndex
skippy, err := skipCistern(data[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthCistern
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func skipCistern(data []byte) (n int, err error) {
l := len(data)
iNdEx := 0
for iNdEx < l {
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowCistern
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
wireType := int(wire & 0x7)
switch wireType {
case 0:
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowCistern
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
iNdEx++
if data[iNdEx-1] < 0x80 {
break
}
}
return iNdEx, nil
case 1:
iNdEx += 8
return iNdEx, nil
case 2:
var length int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {<|fim▁hole|> }
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
length |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
iNdEx += length
if length < 0 {
return 0, ErrInvalidLengthCistern
}
return iNdEx, nil
case 3:
for {
var innerWire uint64
var start int = iNdEx
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowCistern
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := data[iNdEx]
iNdEx++
innerWire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
innerWireType := int(innerWire & 0x7)
if innerWireType == 4 {
break
}
next, err := skipCistern(data[start:])
if err != nil {
return 0, err
}
iNdEx = start + next
}
return iNdEx, nil
case 4:
return iNdEx, nil
case 5:
iNdEx += 4
return iNdEx, nil
default:
return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
}
}
panic("unreachable")
}
var (
ErrInvalidLengthCistern = fmt.Errorf("proto: negative length found during unmarshaling")
ErrIntOverflowCistern = fmt.Errorf("proto: integer overflow")
)
func init() { proto.RegisterFile("cistern.proto", fileDescriptorCistern) }
var fileDescriptorCistern = []byte{
// 324 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0xe2, 0xe2, 0x4d, 0xce, 0x2c, 0x2e,
0x49, 0x2d, 0xca, 0xd3, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x4b, 0xca, 0xcc, 0xcb, 0xc9,
0x4f, 0x97, 0x12, 0x49, 0xcf, 0x4f, 0xcf, 0x07, 0x0b, 0xe9, 0x83, 0x58, 0x10, 0x59, 0x25, 0x53,
0x2e, 0x5e, 0x97, 0xd2, 0xdc, 0x02, 0x27, 0xb0, 0x9a, 0xa0, 0xd4, 0x42, 0x21, 0x15, 0x2e, 0xde,
0xa4, 0xd4, 0xf4, 0xcc, 0x3c, 0xe7, 0xfc, 0xdc, 0xdc, 0xcc, 0x92, 0x90, 0x60, 0x09, 0x46, 0x05,
0x46, 0x0d, 0xe6, 0x20, 0x54, 0x41, 0xa5, 0x38, 0x2e, 0x3e, 0x64, 0x6d, 0xc5, 0x05, 0x42, 0x52,
0x5c, 0x1c, 0xc9, 0xa8, 0x5a, 0xe0, 0x7c, 0x21, 0x09, 0x2e, 0xf6, 0x82, 0xc4, 0xca, 0x9c, 0xfc,
0xc4, 0x14, 0x09, 0x26, 0x05, 0x46, 0x0d, 0x9e, 0x20, 0x18, 0x57, 0x48, 0x8c, 0x8b, 0x2d, 0x25,
0x25, 0x27, 0x2b, 0x3f, 0x49, 0x82, 0x19, 0x2c, 0x01, 0xe5, 0x29, 0x99, 0x41, 0xcc, 0x77, 0x71,
0xf1, 0xf1, 0xca, 0x4f, 0x2a, 0x26, 0xde, 0x5d, 0xda, 0x5c, 0xfc, 0x28, 0xfa, 0x8a, 0x0b, 0x40,
0x96, 0x43, 0x0c, 0x2d, 0x96, 0x60, 0x54, 0x60, 0x06, 0x59, 0x0e, 0xe5, 0x2a, 0x89, 0x71, 0x89,
0xb8, 0xa7, 0x96, 0xf8, 0x24, 0x96, 0xa4, 0x16, 0x97, 0xc0, 0x4c, 0x08, 0x4a, 0x2d, 0x54, 0xf2,
0xe7, 0x12, 0xc5, 0x22, 0x4e, 0xc0, 0x8f, 0x52, 0x5c, 0x1c, 0x99, 0xc5, 0xc1, 0x95, 0x79, 0xc9,
0xa9, 0x10, 0x4f, 0x72, 0x04, 0xc1, 0xf9, 0x46, 0x0f, 0x18, 0xb9, 0xd8, 0x9d, 0x21, 0x91, 0x22,
0x64, 0xcf, 0xc5, 0x85, 0x08, 0x39, 0x21, 0x51, 0x3d, 0x48, 0xec, 0xe8, 0xa1, 0x44, 0x82, 0x94,
0x18, 0x36, 0xe1, 0xe2, 0x02, 0x25, 0x06, 0x03, 0x46, 0x21, 0x07, 0x2e, 0x6e, 0x24, 0x2f, 0x0a,
0xa1, 0x28, 0x45, 0x84, 0x97, 0x94, 0x38, 0x56, 0x71, 0x90, 0x19, 0x42, 0x41, 0x5c, 0x82, 0x18,
0xfe, 0x13, 0x92, 0x81, 0xa9, 0xc7, 0x16, 0x24, 0x52, 0xb2, 0x78, 0x64, 0x41, 0x66, 0x3a, 0x09,
0x9c, 0x78, 0x24, 0xc7, 0x78, 0xe1, 0x91, 0x1c, 0xe3, 0x83, 0x47, 0x72, 0x8c, 0x33, 0x1e, 0xcb,
0x31, 0x24, 0xb1, 0x81, 0x13, 0x98, 0x31, 0x20, 0x00, 0x00, 0xff, 0xff, 0x28, 0xda, 0xbc, 0xd4,
0x8f, 0x02, 0x00, 0x00,
}<|fim▁end|> | return 0, ErrIntOverflowCistern |
<|file_name|>burst_plot.py<|end_file_name|><|fim▁begin|># encoding: utf-8
#
# FRETBursts - A single-molecule FRET burst analysis toolkit.
#
# Copyright (C) 2013-2016 The Regents of the University of California,
# Antonino Ingargiola <[email protected]>
#
"""
This module defines all the plotting functions for the
:class:`fretbursts.burstlib.Data` object.
The main plot function is `dplot()` that takes, as parameters, a `Data()`
object and a 1-ch-plot-function and creates a subplot for each channel.
The 1-ch plot functions are usually called through `dplot` but can also be
called directly to make a single channel plot.
The 1-ch plot functions names all start with the plot type (`timetrace`,
`ratetrace`, `hist` or `scatter`).
**Example 1** - Plot the timetrace for all ch::
dplot(d, timetrace, scroll=True)
**Example 2** - Plot a FRET histogram for each ch with a fit overlay::
dplot(d, hist_fret, show_model=True)
For more examples refer to
`FRETBursts notebooks <http://nbviewer.ipython.org/github/tritemio/FRETBursts_notebooks/tree/master/notebooks/>`_.
"""
from __future__ import division, print_function, absolute_import
from builtins import range
import warnings
from itertools import cycle
# Numeric imports
import numpy as np
from numpy import arange, r_
from matplotlib.mlab import normpdf
from scipy.stats import erlang
from scipy.interpolate import UnivariateSpline
# Graphics imports
import matplotlib.pyplot as plt
from matplotlib.pyplot import (plot, hist, xlabel, ylabel, grid, title, legend,
gca, gcf)
from matplotlib.patches import Rectangle, Ellipse
from matplotlib.collections import PatchCollection, PolyCollection
import seaborn as sns
# Local imports
from .ph_sel import Ph_sel
from . import burstlib as bl
from .phtools import phrates
from . import burstlib_ext as bext
from . import background as bg
from .utils.misc import HistData, _is_list_of_arrays
from .scroll_gui import ScrollingToolQT
from . import gui_selection as gs
##
# Globals
#
blue = '#0055d4'
green = '#2ca02c'
red = '#e74c3c' # '#E41A1C'
purple = '#9b59b6'
_ph_sel_color_dict = {Ph_sel('all'): blue, Ph_sel(Dex='Dem'): green,
Ph_sel(Dex='Aem'): red, Ph_sel(Aex='Aem'): purple,
Ph_sel(Aex='Dem'): 'c', }
_ph_sel_label_dict = {Ph_sel('all'): 'All-ph', Ph_sel(Dex='Dem'): 'DexDem',
Ph_sel(Dex='Aem'): 'DexAem', Ph_sel(Aex='Aem'): 'AexAem',
Ph_sel(Aex='Dem'): 'AexDem'}
# Global store for plot status
_plot_status = {}
# Global store for GUI handlers
gui_status = {'first_plot_in_figure': True}
##
# Utility functions
#
def _normalize_kwargs(kwargs, kind='patch'):
"""Convert matplotlib keywords from short to long form."""
if kwargs is None:
return {}
if kind == 'line2d':
long_names = dict(c='color', ls='linestyle', lw='linewidth',
mec='markeredgecolor', mew='markeredgewidth',
mfc='markerfacecolor', ms='markersize',)
elif kind == 'patch':
long_names = dict(c='color', ls='linestyle', lw='linewidth',
ec='edgecolor', fc='facecolor',)
for short_name in long_names:
if short_name in kwargs:
kwargs[long_names[short_name]] = kwargs.pop(short_name)
return kwargs
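# Editorial example: _normalize_kwargs({'lw': 2, 'c': 'k'}, kind='line2d')
# returns {'linewidth': 2, 'color': 'k'}; long-form names pass through as-is.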
def bsavefig(d, s):
"""Save current figure with name in `d`, appending the string `s`."""
plt.savefig(d.Name() + s)
##
# Multi-channel plot functions
#
def mch_plot_bg(d, **kwargs):
"""Plot background vs channel for DA, D and A photons."""
bg = d.bg_from(Ph_sel('all'))
bg_dd = d.bg_from(Ph_sel(Dex='Dem'))
bg_ad = d.bg_from(Ph_sel(Dex='Aem'))
plot(r_[1:d.nch+1], [b.mean()*1e-3 for b in bg], lw=2, color=blue,
label=' T', **kwargs)
plot(r_[1:d.nch+1], [b.mean()*1e-3 for b in bg_dd], color=green, lw=2,
label=' D', **kwargs)
plot(r_[1:d.nch+1], [b.mean()*1e-3 for b in bg_ad], color=red, lw=2,
label=' A', **kwargs)
xlabel("CH"); ylabel("kcps"); grid(True); legend(loc='best')
title(d.name)
def mch_plot_bg_ratio(d):
"""Plot ratio of A over D background vs channel."""
bg_dd = d.bg_from(Ph_sel(Dex='Dem'))
bg_ad = d.bg_from(Ph_sel(Dex='Aem'))
plot(r_[1:d.nch+1],
[ba.mean()/bd.mean() for bd, ba in zip(bg_dd, bg_ad)],
color=green, lw=2, label='A/D')
xlabel("CH"); ylabel("BG Ratio A/D"); grid(True)
title("BG Ratio A/D "+d.name)
def mch_plot_bsize(d):
"""Plot mean burst size vs channel."""
CH = np.arange(1, d.nch+1)
plot(CH, [b.mean() for b in d.nt], color=blue, lw=2, label=' T')
plot(CH, [b.mean() for b in d.nd], color=green, lw=2, label=' D')
plot(CH, [b.mean() for b in d.na], color=red, lw=2, label=' A')
xlabel("CH"); ylabel("Mean burst size")
grid(True)
legend(loc='best')
title(d.name)
##
# ALEX alternation period plots
#
def plot_alternation_hist(d, bins=None, ax=None, **kwargs):
"""Plot the ALEX alternation histogram for the variable `d`.
This function works both for us-ALEX and ns-ALEX data.
This function must be called on ALEX data **before** calling
:func:`fretbursts.loader.alex_apply_period`.
"""
assert d.alternated
if d.lifetime:
plot_alternation = plot_alternation_hist_nsalex
else:
plot_alternation = plot_alternation_hist_usalex
plot_alternation(d, bins=bins, ax=ax, **kwargs)
def plot_alternation_hist_usalex(d, bins=None, ax=None, ich=0,
hist_style={}, span_style={}):
"""Plot the us-ALEX alternation histogram for the variable `d`.
This function must be called on us-ALEX data **before** calling
:func:`fretbursts.loader.alex_apply_period`.
"""
if ax is None:
_, ax = plt.subplots()
if bins is None:
bins = 100
D_ON, A_ON = d._D_ON_multich[ich], d._A_ON_multich[ich]
d_ch, a_ch = d._det_donor_accept_multich[ich]
offset = d.get('offset', 0)
ph_times_t, det_t = d.ph_times_t[ich][:], d.det_t[ich][:]
period = d.alex_period
d_em_t = (det_t == d_ch)
hist_style_ = dict(bins=bins, histtype='step', lw=2, alpha=0.9, zorder=2)
hist_style_.update(hist_style)
span_style_ = dict(alpha=0.2, zorder=1)
span_style_.update(span_style)
D_label = 'Donor: %d-%d' % (D_ON[0], D_ON[1])
A_label = 'Accept: %d-%d' % (A_ON[0], A_ON[1])
ax.hist((ph_times_t[d_em_t] - offset) % period, color=green, label=D_label,
**hist_style_)
ax.hist((ph_times_t[~d_em_t] - offset) % period, color=red, label=A_label,
**hist_style_)
ax.set_xlabel('Timestamp MODULO Alternation period')
if D_ON[0] < D_ON[1]:
ax.axvspan(D_ON[0], D_ON[1], color=green, **span_style_)
else:
ax.axvspan(0, D_ON[1], color=green, **span_style_)
ax.axvspan(D_ON[0], period, color=green, **span_style_)
if A_ON[0] < A_ON[1]:
ax.axvspan(A_ON[0], A_ON[1], color=red, **span_style_)
else:
ax.axvspan(0, A_ON[1], color=red, **span_style_)
ax.axvspan(A_ON[0], period, color=red, **span_style_)
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5), frameon=False)
def plot_alternation_hist_nsalex(d, bins=None, ax=None, ich=0,
hist_style={}, span_style={}):
"""Plot the ns-ALEX alternation histogram for the variable `d`.
This function must be called on ns-ALEX data **before** calling
:func:`fretbursts.loader.alex_apply_period`.
"""
if ax is None:
_, ax = plt.subplots()
if bins is None:
bins = np.arange(d.nanotimes_params[ich]['tcspc_num_bins'])
D_ON_multi, A_ON_multi = d._D_ON_multich[ich], d._A_ON_multich[ich]
D_ON = [(D_ON_multi[i], D_ON_multi[i+1]) for i in range(0, len(D_ON_multi), 2)]
A_ON = [(A_ON_multi[i], A_ON_multi[i+1]) for i in range(0, len(A_ON_multi), 2)]
d_ch, a_ch = d._det_donor_accept_multich[ich]
hist_style_ = dict(bins=bins, histtype='step', lw=1.3, alpha=0.9, zorder=2)
hist_style_.update(hist_style)
span_style_ = dict(alpha=0.2, zorder=1)
span_style_.update(span_style)
D_label = 'Donor: '
for d_on in D_ON:
D_label += '%d-%d' % (d_on[0], d_on[1])
A_label = 'Accept: '
for a_on in A_ON:
A_label += '%d-%d' % (a_on[0], a_on[1])
nanotimes_d = d.nanotimes_t[ich][d.det_t[ich] == d_ch]
nanotimes_a = d.nanotimes_t[ich][d.det_t[ich] == a_ch]
ax.hist(nanotimes_d, label=D_label, color=green, **hist_style_)
ax.hist(nanotimes_a, label=A_label, color=red, **hist_style_)
ax.set_xlabel('Nanotime bin')
ax.set_yscale('log')
for d_on in D_ON:
ax.axvspan(d_on[0], d_on[1], color=green, **span_style_)
for a_on in A_ON:
ax.axvspan(a_on[0], a_on[1], color=red, **span_style_)
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5), frameon=False)
## - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
## Multi-channel plots
## - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
##
# Timetrace plots
#
def _burst_info(d, ich, burst_index):
burst = d.mburst[ich][burst_index]
params = dict(
b_index=burst_index,
start_ms=float(burst.start) * d.clk_p * 1e3,
width_ms=float(burst.width) * d.clk_p * 1e3,
nt=d.nt[ich][burst_index],
nd=d.nd[ich][burst_index],
na=d.na[ich][burst_index],
E=d.E[ich][burst_index])
msg = ("[{b_index}]: w={width_ms:4.2f} ms\n"
"size=(T{nt:3.0f}, D{nd:3.0f}, A{na:3.0f}")
if d.alternated:
msg += ", AA{naa:3.0f}"
params['naa'] = d.naa[ich][burst_index]
msg += ")\n E={E:4.2%}"
if d.alternated:
msg += " S={S:4.2%}"
params['S'] = d.S[ich][burst_index]
return msg.format(**params)
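# Editorial example of the string built above (values illustrative):
# "[12]: w=1.53 ms\nsize=(T 48, D 30, A 18)\n E=37.50%"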
def _plot_bursts(d, i, tmin_clk, tmax_clk, pmax=1e3, pmin=0, color="#999999",
ytext=20):
"""Highlights bursts in a timetrace plot."""
b = d.mburst[i]
if b.num_bursts == 0:
return
burst_mask = (tmin_clk < b.start) * (b.start < tmax_clk)
bs = b[burst_mask]
burst_indices = np.where(burst_mask)[0]
start = bs.start * d.clk_p
end = bs.stop * d.clk_p
R = []
width = end - start
ax = gca()
for b, bidx, s, w, sign, va in zip(bs, burst_indices, start, width,
cycle([-1, 1]),
cycle(['top', 'bottom'])):
r = Rectangle(xy=(s, pmin), height=pmax - pmin, width=w)
r.set_clip_box(ax.bbox)
r.set_zorder(0)
R.append(r)
ax.text(s, sign * ytext, _burst_info(d, i, bidx), fontsize=6, rotation=45,
horizontalalignment='center', va=va)
ax.add_artist(PatchCollection(R, lw=0, color=color))
def _plot_rate_th(d, i, F, ph_sel, invert=False, scale=1,
plot_style_={}, rate_th_style={}):
"""Plots background_rate*F as a function of time.
`plot_style_` is the style of a timetrace/ratetrace plot used as starting
style. Linestyle and label are changed. Finally, `rate_th_style` is
applied and can override any style property.
If rate_th_style_['label'] is 'auto' the label is generated from
plot_style_['label'] and F.
"""
if F is None:
F = d.F if 'F' in d else 6
rate_th_style_ = dict(plot_style_)
rate_th_style_.update(linestyle='--', label='auto')
rate_th_style_.update(_normalize_kwargs(rate_th_style, kind='line2d'))
if rate_th_style_['label'] == 'auto':
rate_th_style_['label'] = 'bg_rate*%d %s' % \
(F, plot_style_['label'])
x_rate = np.hstack(d.Ph_p[i]) * d.clk_p
y_rate = F * np.hstack([(rate, rate) for rate in d.bg_from(ph_sel)[i]])
y_rate *= scale
if invert:
y_rate *= -1
plot(x_rate, y_rate, **rate_th_style_)
def _gui_timetrace_burst_sel(d, fig, ax):
"""Add GUI burst selector via mouse click to the current plot."""
global gui_status
if gui_status['first_plot_in_figure']:
gui_status['burst_sel'] = gs.MultiAxPointSelection(fig, ax, d)
else:
gui_status['burst_sel'].ax_list.append(ax)
def _gui_timetrace_scroll(fig):
"""Add GUI to scroll a timetrace wi a slider."""
global gui_status
if gui_status['first_plot_in_figure']:
gui_status['scroll_gui'] = ScrollingToolQT(fig)
def timetrace_single(d, i=0, binwidth=1e-3, bins=None, tmin=0, tmax=200,
ph_sel=Ph_sel('all'), invert=False, bursts=False,
burst_picker=True, scroll=False, cache_bins=True,
plot_style=None, show_rate_th=True, F=None,
rate_th_style={}, set_ax_limits=True,
burst_color='#BBBBBB'):
"""Plot the timetrace (histogram) of timestamps for a photon selection.
See :func:`timetrace` to plot multiple photon selections (i.e.
Donor and Acceptor photons) in one step.
"""
if tmax is None or tmax < 0 or tmax > d.time_max:
tmax = d.time_max
def _get_cache():
return (timetrace_single.bins, timetrace_single.x,
timetrace_single.binwidth,
timetrace_single.tmin, timetrace_single.tmax)
def _set_cache(bins, x, binwidth, tmin, tmax):
cache = dict(bins=bins, x=x, binwidth=binwidth, tmin=tmin, tmax=tmax)
for name, value in cache.items():
setattr(timetrace_single, name, value)
def _del_cache():
names = ['bins', 'x', 'binwidth', 'tmin', 'tmax']
for name in names:
delattr(timetrace_single, name)
def _has_cache():
return hasattr(timetrace_single, 'bins')
def _has_cache_for(binwidth, tmin, tmax):
if _has_cache():
return (binwidth, tmin, tmax) == _get_cache()[2:]
return False
# If cache_bins is False delete any previously saved attribute
if not cache_bins and _has_cache():
_del_cache()
tmin_clk, tmax_clk = tmin / d.clk_p, tmax / d.clk_p
binwidth_clk = binwidth / d.clk_p
# If bins is not passed try to use the cached one
if bins is None:
if cache_bins and _has_cache_for(binwidth, tmin, tmax):
bins, x = timetrace_single.bins, timetrace_single.x
else:
bins = np.arange(tmin_clk, tmax_clk + 1, binwidth_clk)
x = bins[:-1] * d.clk_p + 0.5 * binwidth
if cache_bins:
_set_cache(bins, x, binwidth, tmin, tmax)
# Compute histogram
ph_times = d.get_ph_times(i, ph_sel=ph_sel)
timetrace, _ = np.histogram(ph_times, bins=bins)
if invert:
timetrace *= -1
# Plot bursts
if bursts:
_plot_bursts(d, i, tmin_clk, tmax_clk, pmax=500, pmin=-500,
color=burst_color)
# Plot timetrace
plot_style_ = dict(linestyle='-', linewidth=1.2, marker=None)
if ph_sel in _ph_sel_color_dict:
plot_style_['color'] = _ph_sel_color_dict[ph_sel]
plot_style_['label'] = _ph_sel_label_dict[ph_sel]
else:
plot_style_['label'] = str(ph_sel)
plot_style_.update(_normalize_kwargs(plot_style, kind='line2d'))
plot(x, timetrace, **plot_style_)
# Plot burst-search rate-threshold
if show_rate_th and 'bg' in d:
_plot_rate_th(d, i, F=F, ph_sel=ph_sel, invert=invert,
scale=binwidth, plot_style_=plot_style_,
rate_th_style=rate_th_style)
plt.xlabel('Time (s)')
plt.ylabel('# ph')
if burst_picker and 'mburst' in d:
_gui_timetrace_burst_sel(d, gcf(), gca())
if scroll:
_gui_timetrace_scroll(gcf())
if set_ax_limits:
plt.xlim(tmin, tmin + 1)
if not invert:
plt.ylim(ymax=100)
else:
plt.ylim(ymin=-100)
_plot_status['timetrace_single'] = {'autoscale': False}
def timetrace(d, i=0, binwidth=1e-3, bins=None, tmin=0, tmax=200,
bursts=False, burst_picker=True, scroll=False,
show_rate_th=True, F=None, rate_th_style={'label': None},
show_aa=True, legend=False, set_ax_limits=True,
burst_color='#BBBBBB', plot_style=None,
#dd_plot_style={}, ad_plot_style={}, aa_plot_style={}
):
"""Plot the timetraces (histogram) of photon timestamps.
Arguments:
d (Data object): the measurement's data to plot.
i (int): the channel to plot. Default 0.
binwidth (float): the bin width (seconds) of the timetrace histogram.
bins (array or None): If not None, defines the bin edges for the
timetrace (overriding `binwidth`). If None, `binwidth` is used
to generate uniform bins.
tmin, tmax (float): min and max time (seconds) to include in the
timetrace. Note that a long time range and a small `binwidth`
can require a significant amount of memory.
bursts (bool): if True, plot the burst start-stop times.
burst_picker (bool): if True, enable the ability to click on bursts
to obtain burst info. This function requires matplotlib's QT
backend.
scroll (bool): if True, activate a scrolling bar to quickly scroll
through the timetrace. This function requires matplotlib's QT
backend.
show_rate_th (bool): if True, plot the burst search threshold rate.
F (float): if `show_rate_th` is True, show a rate `F` times larger
than the background rate.
rate_th_style (dict): matplotlib style for the rate line.
show_aa (bool): if True (default), also plot a timetrace for the
AexAem photons. If False, plot timetraces only for DexDem and DexAem
streams.
legend (bool): whether to show the legend or not.
set_ax_limits (bool): if True, set the xlim to zoom on a small portion
of timetrace. If False, do not set the xlim, display the full
timetrace.
burst_color (string): string containing the HEX RGB color to use
to highlight the burst regions.
plot_style (dict): matplotlib's style for the timetrace lines.
"""
# Plot bursts
if bursts:
tmin_clk, tmax_clk = tmin / d.clk_p, tmax / d.clk_p
_plot_bursts(d, i, tmin_clk, tmax_clk, pmax=500, pmin=-500,
color=burst_color)
# Plot multiple timetraces
ph_sel_list = [Ph_sel(Dex='Dem'), Ph_sel(Dex='Aem')]
invert_list = [False, True]
burst_picker_list = [burst_picker, False]
scroll_list = [scroll, False]
if d.alternated and show_aa:
ph_sel_list.append(Ph_sel(Aex='Aem'))
invert_list.append(True)
burst_picker_list.append(False)
scroll_list.append(False)
for ix, (ph_sel, invert) in enumerate(zip(ph_sel_list, invert_list)):
if not bl.mask_empty(d.get_ph_mask(i, ph_sel=ph_sel)):
timetrace_single(
d, i, binwidth=binwidth, bins=bins, tmin=tmin,
tmax=tmax, ph_sel=ph_sel, invert=invert, bursts=False,
burst_picker=burst_picker_list[ix],
scroll=scroll_list[ix], cache_bins=True,
show_rate_th=show_rate_th, F=F,
rate_th_style=rate_th_style, set_ax_limits=set_ax_limits,
plot_style=plot_style)
if legend:
plt.legend(loc='best', fancybox=True)
def ratetrace_single(d, i=0, m=None, max_num_ph=1e6, tmin=0, tmax=200,
ph_sel=Ph_sel('all'), invert=False, bursts=False,
burst_picker=True, scroll=False, plot_style={},
show_rate_th=True, F=None, rate_th_style={},
set_ax_limits=True, burst_color='#BBBBBB'):
"""Plot the ratetrace of timestamps for a photon selection.
See :func:`ratetrace` to plot multiple photon selections (i.e.
Donor and Acceptor photons) in one step.
"""
if tmax is None or tmax < 0:
tmax = d.time_max
if m is None:
m = d.m if 'm' in d else 10
tmin_clk, tmax_clk = tmin / d.clk_p, tmax / d.clk_p
# Plot bursts
if bursts:
_plot_bursts(d, i, tmin_clk, tmax_clk, pmax=500, pmin=-500,
color=burst_color)
# Compute ratetrace
ph_times = d.get_ph_times(i, ph_sel=ph_sel)
iph1 = np.searchsorted(ph_times, tmin_clk)
iph2 = np.searchsorted(ph_times, tmax_clk)
if iph2 - iph1 > max_num_ph:
iph2 = int(iph1 + max_num_ph)  # keep an integer index for slicing
tmax = ph_times[iph2] * d.clk_p
warnings.warn(('Max number of photons reached in ratetrace_single().'
'\n tmax is reduced to %ds. To plot a wider '
'time range increase `max_num_ph`.') % tmax,
UserWarning)
ph_times = ph_times[iph1:iph2]
rates = 1e-3 * phrates.mtuple_rates(ph_times, m) / d.clk_p
if invert:
rates *= -1
times = phrates.mtuple_rates_t(ph_times, m) * d.clk_p
# Plot ratetrace
plot_style_ = dict(linestyle='-', linewidth=1.2, marker=None)
if ph_sel in _ph_sel_color_dict:
plot_style_['color'] = _ph_sel_color_dict[ph_sel]
plot_style_['label'] = _ph_sel_label_dict[ph_sel]
else:
# Fallback label so _plot_rate_th can always build its 'auto' label
plot_style_['label'] = str(ph_sel)
plot_style_.update(_normalize_kwargs(plot_style, kind='line2d'))
plot(times, rates, **plot_style_)
# Plot burst-search rate-threshold
if show_rate_th and 'bg' in d:
_plot_rate_th(d, i, F=F, scale=1e-3, ph_sel=ph_sel, invert=invert,
plot_style_=plot_style_, rate_th_style=rate_th_style)
plt.xlabel('Time (s)')
plt.ylabel('Rate (kcps)')
if burst_picker:
_gui_timetrace_burst_sel(d, gcf(), gca())
if scroll:
_gui_timetrace_scroll(gcf())
if set_ax_limits:
plt.xlim(tmin, tmin + 1)
if not invert:
plt.ylim(ymax=100)
else:
plt.ylim(ymin=-100)
_plot_status['ratetrace_single'] = {'autoscale': False}
def ratetrace(d, i=0, m=None, max_num_ph=1e6, tmin=0, tmax=200,
bursts=False, burst_picker=True, scroll=False,
show_rate_th=True, F=None, rate_th_style={'label': None},
show_aa=True, legend=False, set_ax_limits=True,
#dd_plot_style={}, ad_plot_style={}, aa_plot_style={}
burst_color='#BBBBBB'):
"""Plot the rate timetraces of photon timestamps.
Arguments:
d (Data object): the measurement's data to plot.
i (int): the channel to plot. Default 0.
max_num_ph (int): Clip the rate timetrace after the
max number of photons `max_num_ph` is reached.
tmin, tmax (float): min and max time (seconds) to include in the
timetrace. Note that a long time range can make this plot slow
and memory-hungry (see also `max_num_ph`).
bursts (bool): if True, plot the burst start-stop times.
burst_picker (bool): if True, enable the ability to click on bursts
to obtain burst info. This function requires matplotlib's QT
backend.
scroll (bool): if True, activate a scrolling bar to quickly scroll
through the timetrace. This function requires matplotlib's QT
backend.
show_rate_th (bool): if True, plot the burst search threshold rate.
F (float): if `show_rate_th` is True, show a rate `F` times larger
than the background rate.
rate_th_style (dict): matplotlib style for the rate line.
show_aa (bool): if True (default), also plot a timetrace for the
AexAem photons. If False, plot timetraces only for DexDem and DexAem
streams.
legend (bool): whether to show the legend or not.
set_ax_limits (bool): if True, set the xlim to zoom on a small portion
of timetrace. If False, do not set the xlim, display the full
timetrace.
burst_color (string): string containing the HEX RGB color to use
to highlight the burst regions.
"""
# Plot bursts
if bursts:
tmin_clk, tmax_clk = tmin / d.clk_p, tmax / d.clk_p
_plot_bursts(d, i, tmin_clk, tmax_clk, pmax=500, pmin=-500,
color=burst_color)
# Plot multiple timetraces
ph_sel_list = [Ph_sel(Dex='Dem'), Ph_sel(Dex='Aem')]
invert_list = [False, True]
burst_picker_list = [burst_picker, False]
scroll_list = [scroll, False]
if d.alternated and show_aa:
ph_sel_list.append(Ph_sel(Aex='Aem'))
invert_list.append(True)
burst_picker_list.append(False)
scroll_list.append(False)
for ix, (ph_sel, invert) in enumerate(zip(ph_sel_list, invert_list)):
if not bl.mask_empty(d.get_ph_mask(i, ph_sel=ph_sel)):
ratetrace_single(
d, i, m=m, max_num_ph=max_num_ph, tmin=tmin,
tmax=tmax, ph_sel=ph_sel, invert=invert, bursts=False,
burst_picker=burst_picker_list[ix],
scroll=scroll_list[ix],
show_rate_th=show_rate_th, F=F,
rate_th_style=rate_th_style, set_ax_limits=set_ax_limits)
if legend:
plt.legend(loc='best', fancybox=True)
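# Usage sketch for `ratetrace` (comment only, not executed on import).
# Assuming `d` is a FRETbursts Data object with background estimated and
# burst search performed, a mirrored D/A rate trace with burst marks is:
#
#     dplot(d, ratetrace, m=10, tmin=0, tmax=5, bursts=True)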
def sort_burst_sizes(sizes, levels=np.arange(1, 102, 20)):
"""Return a list of masks that split `sizes` in levels.
Used by timetrace_fret to select burst based on size groups.
"""
masks = []
for level1, level2 in zip(levels[:-1], levels[1:]):
masks.append((sizes >= level1)*(sizes < level2))
masks.append(sizes >= level2)
return masks
def timetrace_fret(d, i=0, gamma=1., **kwargs):
"""Timetrace of burst FRET vs time. Uses `plot`."""
b = d.mburst[i]
bsizes = bl.select_bursts.get_burst_size(d, ich=i, gamma=gamma)
style_kwargs = dict(marker='o', mew=0.5, color=blue, mec='grey',
alpha=0.4, ls='')
style_kwargs.update(**kwargs)
t, E = b.start*d.clk_p, d.E[i]
levels = sort_burst_sizes(bsizes)
for ilev, level in enumerate(levels):
plt.plot(t[level], E[level], ms=np.sqrt((ilev+1)*15),
**style_kwargs)
plt.plot(b.start*d.clk_p, d.E[i], '-k', alpha=0.1, lw=1)
xlabel('Time (s)'); ylabel('E')
    _gui_timetrace_burst_sel(d, gcf(), gca())
def timetrace_fret_scatter(d, i=0, gamma=1., **kwargs):
"""Timetrace of burst FRET vs time. Uses `scatter` (slow)."""
b = d.mburst[i]
    bsizes = d.burst_sizes_ich(ich=i, gamma=gamma)
style_kwargs = dict(s=bsizes, marker='o', alpha=0.5)
style_kwargs.update(**kwargs)
plt.scatter(b.start*d.clk_p, d.E[i], **style_kwargs)
xlabel('Time (s)'); ylabel('E')
def timetrace_bg(d, i=0, nolegend=False, ncol=2, plot_style={}, show_da=False):
"""Timetrace of background rates."""
bg = d.bg_from(Ph_sel('all'))
bg_dd = d.bg_from(Ph_sel(Dex='Dem'))
bg_ad = d.bg_from(Ph_sel(Dex='Aem'))
t = arange(bg[i].size) * d.bg_time_s
plot_style_ = dict(linewidth=2, marker='o', markersize=6)
plot_style_.update(_normalize_kwargs(plot_style, kind='line2d'))
label = "T: %d cps" % d.bg_mean[Ph_sel('all')][i]
plot(t, 1e-3 * bg[i], color='k', label=label, **plot_style_)
label = "DD: %d cps" % d.bg_mean[Ph_sel(Dex='Dem')][i]
plot(t, 1e-3 * bg_dd[i], color=green, label=label, **plot_style_)
label = "AD: %d cps" % d.bg_mean[Ph_sel(Dex='Aem')][i]
plot(t, 1e-3 * bg_ad[i], color=red, label=label, **plot_style_)
if d.alternated:
bg_aa = d.bg_from(Ph_sel(Aex='Aem'))
label = "AA: %d cps" % d.bg_mean[Ph_sel(Aex='Aem')][i]
plot(t, 1e-3 * bg_aa[i], label=label, color=purple, **plot_style_)
if show_da:
bg_da = d.bg_from(Ph_sel(Aex='Dem'))
label = "DA: %d cps" % d.bg_mean[Ph_sel(Aex='Dem')][i]
plot(t, 1e-3 * bg_da[i], label=label,
color=_ph_sel_color_dict[Ph_sel(Aex='Dem')], **plot_style_)
if not nolegend:
legend(loc='best', frameon=False, ncol=ncol)
plt.xlabel("Time (s)")
plt.ylabel("BG rate (kcps)")
plt.grid(True)
plt.ylim(ymin=0)
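# Usage sketch for `timetrace_bg` (comment only; assumes the background
# has been fitted, e.g. via `d.calc_bg(...)`, so that `d.bg_mean` exists):
#
#     dplot(d, timetrace_bg, show_da=d.alternated)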
def timetrace_b_rate(d, i=0):
"""Timetrace of bursts-per-second in each period."""
t = arange(d.bg[i].size)*d.bg_time_s
b_rate = r_[[(d.bp[i] == p).sum() for p in range(d.bp[i].max()+1)]]
    b_rate = b_rate / d.bg_time_s  # avoid in-place division of an int array
if t.size == b_rate.size+1:
t = t[:-1] # assuming last period without bursts
else:
assert t.size == b_rate.size
plot(t, b_rate, lw=2, label="CH%d" % (i+1))
legend(loc='best', fancybox=True, frameon=False, ncol=3)
xlabel("Time (s)"); ylabel("Burst per second"); grid(True)
plt.ylim(ymin=0)
def time_ph(d, i=0, num_ph=1e4, ph_istart=0):
"""Plot 'num_ph' ph starting at 'ph_istart' marking burst start/end.
TODO: Update to use the new matplotlib eventplot.
"""
b = d.mburst[i]
    SLICE = slice(ph_istart, ph_istart + int(num_ph))
ph_d = d.ph_times_m[i][SLICE][~d.A_em[i][SLICE]]
ph_a = d.ph_times_m[i][SLICE][d.A_em[i][SLICE]]
BSLICE = (b.stop < ph_a[-1])
start, end = b[BSLICE].start, b[BSLICE].stop
u = d.clk_p # time scale
plt.vlines(ph_d*u, 0, 1, color='k', alpha=0.02)
plt.vlines(ph_a*u, 0, 1, color='k', alpha=0.02)
plt.vlines(start*u, -0.5, 1.5, lw=3, color=green, alpha=0.5)
plt.vlines(end*u, -0.5, 1.5, lw=3, color=red, alpha=0.5)
xlabel("Time (s)")
##
# Histogram plots
#
def _bins_array(bins):
"""When `bins` is a 3-element sequence returns an array of bin edges.
Otherwise returns the `bins` unchaged.
"""
if np.size(bins) == 3:
bins = np.arange(*bins)
return bins
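# Note on the `bins` convention used by the histogram functions below:
# `bins` may be an explicit array of bin edges or a 3-element
# (start, stop, step) sequence, which `_bins_array` expands with np.arange.
# For example, bins=(0, 10, 0.025) becomes np.arange(0, 10, 0.025).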
def _hist_burst_taildist(data, bins, pdf, weights=None, yscale='log',
color=None, label=None, plot_style=None, vline=None):
hist = HistData(*np.histogram(data[~np.isnan(data)],
bins=_bins_array(bins), weights=weights))
ydata = hist.pdf if pdf else hist.counts
default_plot_style = dict(marker='o')
if plot_style is None:
plot_style = {}
if color is not None:
plot_style['color'] = color
if label is not None:
plot_style['label'] = label
default_plot_style.update(_normalize_kwargs(plot_style, kind='line2d'))
plt.plot(hist.bincenters, ydata, **default_plot_style)
if vline is not None:
plt.axvline(vline, ls='--')
plt.yscale(yscale)
if pdf:
plt.ylabel('PDF')
else:
plt.ylabel('# Bursts')
def hist_width(d, i=0, bins=(0, 10, 0.025), pdf=True, weights=None,
yscale='log', color=None, plot_style=None, vline=None):
"""Plot histogram of burst durations.
Parameters:
d (Data): Data object
i (int): channel index
bins (array or None): array of bin edges. If len(bins) == 3
            it is interpreted as (start, stop, step) values.
pdf (bool): if True, normalize the histogram to obtain a PDF.
color (string or tuple or None): matplotlib color used for the plot.
yscale (string): 'log' or 'linear', sets the plot y scale.
plot_style (dict): dict of matplotlib line style passed to `plot`.
vline (float): If not None, plot vertical line at the specified x
position.
"""
weights = weights[i] if weights is not None else None
burst_widths = d.mburst[i].width * d.clk_p * 1e3
_hist_burst_taildist(burst_widths, bins, pdf, weights=weights, vline=vline,
yscale=yscale, color=color, plot_style=plot_style)
plt.xlabel('Burst width (ms)')
plt.xlim(xmin=0)
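# Usage sketch for `hist_width` (comment only): burst-duration tail
# distributions, e.g. as PDFs on a log scale across all channels:
#
#     dplot(d, hist_width, bins=(0, 10, 0.025), pdf=True, yscale='log')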
def hist_brightness(d, i=0, bins=(0, 60, 1), pdf=True, weights=None,
                    yscale='log', gamma=1, add_naa=False, beta=1.,
                    donor_ref=True, ph_sel=None, naa_aexonly=False,
                    naa_comp=False, na_comp=False, label_prefix=None,
                    color=None, plot_style=None, vline=None):
"""Plot histogram of burst brightness, i.e. burst size / duration.
Parameters:
d (Data): Data object
i (int): channel index
bins (array or None): array of bin edges. If len(bins) == 3
            it is interpreted as (start, stop, step) values.
gamma, beta (floats): factors used to compute the corrected burst
size. See :meth:`fretbursts.burstlib.Data.burst_sizes_ich`.
add_naa (bool): if True, include `naa` to the total burst size.
donor_ref (bool): convention used for corrected burst size computation.
See :meth:`fretbursts.burstlib.Data.burst_sizes_ich` for details.
        ph_sel (Ph_sel or None): *PAX-only*. Photon selection used to
            compute the burst size.
            See :meth:`fretbursts.burstlib.Data.burst_sizes_pax_ich`.
        naa_aexonly (bool): *PAX-only*. If True, correct the `naa` term to
            include only A emission due to A excitation.
        naa_comp, na_comp (bool): *PAX-only*. If True, multiply the `naa`
            (respectively `na`) term by `(1 + Wa/Wd)`, where Wa and Wd are
            the D and A alternation durations.
label_prefix (string or None): a custom prefix for the legend label.
color (string or tuple or None): matplotlib color used for the plot.
pdf (bool): if True, normalize the histogram to obtain a PDF.
yscale (string): 'log' or 'linear', sets the plot y scale.
plot_style (dict): dict of matplotlib line style passed to `plot`.
vline (float): If not None, plot vertical line at the specified x
position.
"""
weights = weights[i] if weights is not None else None
if plot_style is None:
plot_style = {}
burst_widths = d.mburst[i].width * d.clk_p * 1e3
    sizes, label = _get_sizes_and_formula(
        d=d, ich=i, gamma=gamma, beta=beta, donor_ref=donor_ref,
        add_naa=add_naa, ph_sel=ph_sel, naa_aexonly=naa_aexonly,
        naa_comp=naa_comp, na_comp=na_comp)
brightness = sizes / burst_widths
label = '$(' + label[1:-1] + ') / w$'
if label_prefix is not None:
label = label_prefix + ' ' + label
# Use default label (with optional prefix) only if not explicitly
# specified in `plot_style`
if 'label' not in plot_style:
plot_style['label'] = label
_hist_burst_taildist(brightness, bins, pdf, weights=weights, vline=vline,
yscale=yscale, color=color, plot_style=plot_style)
plt.xlabel('Burst brightness (kHz)')
plt.legend(loc='best')
def _get_sizes_and_formula(d, ich, gamma, beta, donor_ref, add_naa,
ph_sel, naa_aexonly, naa_comp, na_comp):
label = ('${FD} + {FA}/\\gamma$'
if donor_ref else '$\\gamma {FD} + {FA}$')
kws = dict(gamma=gamma, beta=beta, donor_ref=donor_ref)
if 'PAX' in d.meas_type and ph_sel is not None:
kws_pax = dict(ph_sel=ph_sel, naa_aexonly=naa_aexonly,
naa_comp=naa_comp, na_comp=na_comp)
sizes = d.burst_sizes_pax_ich(ich=ich, **dict(kws, **kws_pax))
label = '$ %s $' % d._burst_sizes_pax_formula(**dict(kws, **kws_pax))
else:
sizes = d.burst_sizes_ich(ich=ich, add_naa=add_naa, **kws)
label = label.format(FD='n_d', FA='n_a')
if add_naa:
corr = '(\\gamma\\beta) ' if donor_ref else '\\beta '
label = label[:-1] + ' + n_{aa} / %s$' % corr
return sizes, label
def hist_size(d, i=0, which='all', bins=(0, 600, 4), pdf=False, weights=None,
yscale='log', gamma=1, beta=1, donor_ref=True, add_naa=False,
ph_sel=None, naa_aexonly=False, naa_comp=False, na_comp=False,
vline=None, label_prefix=None, legend=True, color=None,
plot_style=None):
"""Plot histogram of "burst sizes", according to different definitions.
Arguments:
d (Data): Data object
i (int): channel index
bins (array or None): array of bin edges. If len(bins) == 3
            it is interpreted as (start, stop, step) values.
which (string): what photons to include in "size". Valid values are
            'all', 'nd', 'na', 'naa', 'nar', 'nda'. When 'all', sizes are computed with
`d.burst_sizes()` (by default nd + na); 'nd', 'na', 'naa' get
counts from `d.nd`, `d.na`, `d.naa` (respectively Dex-Dem,
Dex-Aem, Aex-Aem).
gamma, beta (floats): factors used to compute the corrected burst
size. Ignored when `which` != 'all'.
See :meth:`fretbursts.burstlib.Data.burst_sizes_ich`.
add_naa (bool): if True, include `naa` to the total burst size.
donor_ref (bool): convention used for corrected burst size computation.
See :meth:`fretbursts.burstlib.Data.burst_sizes_ich` for details.
na_comp (bool): **[PAX-only]** If True, multiply the `na` term
by `(1 + Wa/Wd)`, where Wa and Wd are the D and A alternation
durations (typically Wa/Wd = 1).
naa_aexonly (bool): **[PAX-only]** if True, the `naa` term is
corrected to include only A emission due to A excitation.
If False, the `naa` term includes all the counts in DAexAem.
The `naa` term also depends on the `naa_comp` argument.
naa_comp (bool): **[PAX-only]** If True, multiply the `naa` term by
`(1 + Wa/Wd)` where Wa and Wd are the D and A alternation
durations (typically Wa/Wd = 1). The `naa` term also depends on
the `naa_aexonly` argument.
label_prefix (string or None): a custom prefix for the legend label.
color (string or tuple or None): matplotlib color used for the plot.
pdf (bool): if True, normalize the histogram to obtain a PDF.
yscale (string): 'log' or 'linear', sets the plot y scale.
legend (bool): if True add legend to plot
plot_style (dict): dict of matplotlib line style passed to `plot`.
vline (float): If not None, plot vertical line at the specified x
position.
See also:
- :meth:`fretbursts.burstlib.Data.burst_sizes_ich`.
- :meth:`fretbursts.burstlib.Data.burst_sizes_pax_ich`.
"""
weights = weights[i] if weights is not None else None
if plot_style is None:
plot_style = {}
which_dict = {'all': 'k', 'nd': green, 'na': red, 'naa': purple,
'nar': red, 'nda': 'C0'}
assert which in which_dict
if which == 'all':
sizes, label = _get_sizes_and_formula(
d=d, ich=i, gamma=gamma, beta=beta, donor_ref=donor_ref,
add_naa=add_naa, ph_sel=ph_sel, naa_aexonly=naa_aexonly,
naa_comp=naa_comp, na_comp=na_comp)
else:
sizes = d[which][i]
label = which
# Use default label (with optional prefix) only if not explicitly
# specified in `plot_style`
if 'label' not in plot_style:
if label_prefix is not None:
label = label_prefix + ' ' + label
plot_style['label'] = label
# Use default color only if not specified in `color` or `plot_style`
if color is None and 'color' not in plot_style:
plot_style['color'] = which_dict[which]
elif color is not None:
plot_style['color'] = color
_hist_burst_taildist(sizes, bins, pdf, weights=weights, yscale=yscale,
plot_style=plot_style, vline=vline)
plt.xlabel('Burst size')
if legend:
plt.legend(loc='upper right')
def hist_size_all(d, i=0, **kwargs):
"""Plot burst sizes for all the combinations of photons.
Calls :func:`hist_size` multiple times with different `which` parameters.
"""
fields = ['nd', 'na']
if d.ALEX:
fields.append('naa')
elif 'PAX' in d.meas_type:
fields += ['nda', 'naa']
for which in fields:
hist_size(d, i, which=which, **kwargs)
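# Usage sketch for `hist_size` / `hist_size_all` (comment only): overlay
# the size distribution of each photon stream on a log scale:
#
#     dplot(d, hist_size_all, bins=(0, 300, 4), yscale='log')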
def _fitted_E_plot(d, i=0, F=1, no_E=False, ax=None, show_model=True,
verbose=False, two_gauss_model=False, lw=2.5, color='k',
alpha=0.5, fillcolor=None):
"""Plot a fitted model overlay on a FRET histogram."""
if ax is None:
ax2 = gca()
else:
ax2 = plt.twinx(ax=ax)
ax2.grid(False)
if d.fit_E_curve and show_model:
x = r_[-0.2:1.21:0.002]
y = d.fit_E_model(x, d.fit_E_res[i, :])
scale = F*d.fit_E_model_F[i]
if two_gauss_model:
assert d.fit_E_res.shape[1] > 2
if d.fit_E_res.shape[1] == 5:
m1, s1, m2, s2, a1 = d.fit_E_res[i, :]
a2 = (1-a1)
elif d.fit_E_res.shape[1] == 6:
m1, s1, a1, m2, s2, a2 = d.fit_E_res[i, :]
y1 = a1*normpdf(x, m1, s1)
y2 = a2*normpdf(x, m2, s2)
ax2.plot(x, scale*y1, ls='--', lw=lw, alpha=alpha, color=color)
ax2.plot(x, scale*y2, ls='--', lw=lw, alpha=alpha, color=color)
if fillcolor is None:
ax2.plot(x, scale*y, lw=lw, alpha=alpha, color=color)
else:
ax2.fill_between(x, scale*y, lw=lw, alpha=alpha, edgecolor=color,
facecolor=fillcolor, zorder=10)
if verbose:<|fim▁hole|> xtext = 0.6 if d.E_fit[i] < 0.6 else 0.2
if d.nch > 1 and not no_E:
ax2.text(xtext, 0.81, "CH%d: $E_{fit} = %.3f$" % (i+1, d.E_fit[i]),
transform=gca().transAxes, fontsize=16,
bbox=dict(boxstyle='round', facecolor='#dedede', alpha=0.5))
def hist_burst_data(
d, i=0, data_name='E', ax=None, binwidth=0.03, bins=None,
vertical=False, pdf=False, hist_style='bar',
weights=None, gamma=1., add_naa=False, # weights args
show_fit_stats=False, show_fit_value=False, fit_from='kde',
show_kde=False, bandwidth=0.03, show_kde_peak=False, # kde args
show_model=False, show_model_peaks=True,
hist_bar_style=None, hist_plot_style=None, model_plot_style=None,
kde_plot_style=None, verbose=False):
"""Plot burst_data (i.e. E, S, etc...) histogram and KDE.
    This is a generic function to plot histograms for any burst data.
In particular this function is called by :func:`hist_fret` and
:func:`hist_S` to make E and S histograms respectively.
Histograms and KDE can be plotted on any `Data` variable after
burst search. To show a model, a model must be fitted first by calling
`d.E_fitter.fit_histogram()`. To show the KDE peaks position, they
must be computed first with `d.E_fitter.find_kde_max()`.
The arguments are shown below grouped in logical sections.
**Generic arguments**
Args:
data_name (string): name of the burst data (i.e. 'E' or 'S')
ax (None or matplotlib axis): optional axis instance to plot in.
vertical (bool): if True the x axis is oriented vertically.
verbose (bool): if False, suppress any printed output.
**Histogram arguments**: control the histogram appearance
Args:
hist_style (string): if 'bar' use a classical bar histogram,
otherwise do a normal line plot of bin counts vs bin centers
bins (None or array): if None the bins are computed according to
            `binwidth`. If not None, contains the array of bin edges
            and overrides `binwidth`.
binwidth (float): bin width for the histogram.
pdf (bool): if True, normalize the histogram to obtain a PDF.
hist_bar_style (dict): style dict for the histogram when
`hist_style == 'bar'`.
hist_plot_style (dict): style dict for the histogram when
`hist_style != 'bar'`.
**Model arguments**: control the model plot
Args:
show_model (bool): if True shows the model fitted to the histogram
model (lmfit.Model object or None): lmfit Model used for histogram
fitting. If None the histogram is not fitted.
show_model_peaks (bool): if True marks the position of model peaks
model_plot_style (dict): style dict for the model plot
**KDE arguments**: control the KDE plot
Args:
show_kde (bool): if True shows the KDE curve
show_kde_peak (bool): if True marks the position of the KDE peak
        bandwidth (float or None): bandwidth used to compute the KDE.
If None the KDE is not computed.
kde_plot_style (dict): style dict for the KDE curve
**Weights arguments** (weights are used to weight bursts according to
their size, affecting histograms and KDEs).
Args:
weights (string or None): kind of burst-size weights.
See :func:`fretbursts.fret_fit.get_weights`.
gamma (float): gamma factor passed to `get_weights()`.
add_naa (bool): if True adds `naa` to the burst size.
**Fit text arguments**: control how to print annotation with
fit information.
Args:
fit_from (string): determines how to obtain the fit value. If 'kde'
the fit value is the KDE peak. Otherwise it must be the name
of a model parameter that will be used as fit value.
show_fit_value (bool): if True annotate the plot with fit value.
show_fit_stats (bool): if True annotate the figure with mean fit
value and max deviation across the channels (for multi-spot).
"""
assert data_name in d
fitter_name = data_name + '_fitter'
if ax is None:
ax = gca()
ax.set_axisbelow(True)
pline = ax.axhline if vertical else ax.axvline
bar = ax.barh if vertical else ax.bar
xlabel, ylabel = ax.set_xlabel, ax.set_ylabel
xlim, ylim = ax.set_xlim, ax.set_ylim
if vertical:
xlabel, ylabel = ylabel, xlabel
xlim, ylim = ylim, xlim
weights_tuple = (weights, float(gamma), add_naa)
if not hasattr(d, fitter_name) or _is_list_of_arrays(weights) \
or getattr(d, data_name+'_weights') != weights_tuple:
if hasattr(d, fitter_name):
print(' - Overwriting the old %s object with the new weights.' %
fitter_name)
if verbose:
print(' Old weights:', getattr(d, data_name+'_weights'))
print(' New weights:', weights_tuple)
bext.bursts_fitter(d, burst_data=data_name, weights=weights,
gamma=gamma, add_naa=add_naa)
# fitter_name is only an attribute of Data, not a key in the dictionary
fitter = getattr(d, fitter_name)
fitter.histogram(binwidth=binwidth, bins=bins, verbose=verbose)
if pdf:
ylabel('PDF')
hist_vals = fitter.hist_pdf[i]
else:
ylabel('# Bursts')
hist_vals = fitter.hist_counts[i]
xlabel(data_name)
if data_name in ['E', 'S']:
xlim(-0.19, 1.19)
hist_bar_style_ = dict(facecolor='#74a9cf', edgecolor='k', alpha=1,
                           linewidth=0.15, label='%s Histogram' % data_name)
hist_bar_style_.update(**_normalize_kwargs(hist_bar_style))
hist_plot_style_ = dict(linestyle='-', marker='o', markersize=6,
                            linewidth=2, alpha=0.6,
                            label='%s Histogram' % data_name)
hist_plot_style_.update(_normalize_kwargs(hist_plot_style,
kind='line2d'))
if hist_style == 'bar':
bar(fitter.hist_bins[:-1], hist_vals, fitter.hist_binwidth,
align='edge', **hist_bar_style_)
else:
if vertical:
ax.plot(hist_vals, fitter.hist_axis, **hist_plot_style_)
else:
ax.plot(fitter.hist_axis, hist_vals, **hist_plot_style_)
if show_model or show_kde:
if pdf:
scale = 1
else:
scale = fitter.hist_binwidth * d.num_bursts[i]
if show_model:
model_plot_style_ = dict(color='k', alpha=0.8, label='Model')
model_plot_style_.update(_normalize_kwargs(model_plot_style,
kind='line2d'))
fit_res = fitter.fit_res[i]
x = fitter.x_axis
y = fit_res.model.eval(x=x, **fit_res.values)
xx, yy = (y, x) if vertical else (x, y)
ax.plot(xx, yy, **model_plot_style_)
if fit_res.model.components is not None:
for component in fit_res.model.components:
model_plot_style_.update(ls='--', label='Model component')
y = component.eval(x=x, **fit_res.values)
xx, yy = (y, x) if vertical else (x, y)
ax.plot(xx, yy, **model_plot_style_)
if show_model_peaks:
for param in fitter.params:
if param.endswith('center'):
pline(fitter.params[param][i], ls='--', color=red)
if show_kde:
x = fitter.x_axis
fitter.calc_kde(bandwidth=bandwidth)
kde_plot_style_ = dict(linewidth=1.5, color='k', alpha=0.8,
label='KDE')
kde_plot_style_.update(_normalize_kwargs(kde_plot_style,
kind='line2d'))
y = scale * fitter.kde[i](x)
xx, yy = (y, x) if vertical else (x, y)
ax.plot(xx, yy, **kde_plot_style_)
if show_kde_peak:
pline(fitter.kde_max_pos[i], ls='--', color='orange')
if show_fit_value or show_fit_stats:
if fit_from == 'kde':
fit_arr = fitter.kde_max_pos
else:
assert fit_from in fitter.params
fit_arr = fitter.params[fit_from]
if i == 0:
if show_fit_stats:
plt.figtext(0.4, 0.01, _get_fit_text_stats(fit_arr),
fontsize=16)
if show_fit_value:
_plot_fit_text_ch(fit_arr, i, ax=ax)
def hist_fret(
d, i=0, ax=None, binwidth=0.03, bins=None, pdf=True,
hist_style='bar',
weights=None, gamma=1., add_naa=False, # weights args
show_fit_stats=False, show_fit_value=False, fit_from='kde',
show_kde=False, bandwidth=0.03, show_kde_peak=False, # kde args
show_model=False, show_model_peaks=True,
hist_bar_style=None, hist_plot_style=None, model_plot_style=None,
kde_plot_style=None, verbose=False):
"""Plot FRET histogram and KDE.
The most used argument is `binwidth` that sets the histogram bin width.
For detailed documentation see :func:`hist_burst_data`.
"""
hist_burst_data(
d, i, data_name='E', ax=ax, binwidth=binwidth, bins=bins,
pdf=pdf, weights=weights, gamma=gamma, add_naa=add_naa,
hist_style=hist_style, show_fit_stats=show_fit_stats,
show_fit_value=show_fit_value, fit_from=fit_from,
show_kde=show_kde, bandwidth=bandwidth,
show_kde_peak=show_kde_peak, # kde args
show_model=show_model, show_model_peaks=show_model_peaks,
hist_bar_style=hist_bar_style, hist_plot_style=hist_plot_style,
model_plot_style=model_plot_style, kde_plot_style=kde_plot_style,
verbose=verbose)
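# Usage sketch for `hist_fret` (comment only). A typical workflow fits a
# model first and then overlays it; `mfit.factory_two_gaussians()` is an
# illustrative model factory, assuming the standard `fretbursts` imports:
#
#     bext.bursts_fitter(d, 'E', binwidth=0.03)
#     d.E_fitter.fit_histogram(mfit.factory_two_gaussians())
#     dplot(d, hist_fret, show_model=True)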
def hist_S(
d, i=0, ax=None, binwidth=0.03, bins=None, pdf=True,
hist_style='bar',
weights=None, gamma=1., add_naa=False, # weights args
show_fit_stats=False, show_fit_value=False, fit_from='kde',
show_kde=False, bandwidth=0.03, show_kde_peak=False, # kde args
show_model=False, show_model_peaks=True,
hist_bar_style=None, hist_plot_style=None, model_plot_style=None,
kde_plot_style=None, verbose=False):
"""Plot S histogram and KDE.
The most used argument is `binwidth` that sets the histogram bin width.
For detailed documentation see :func:`hist_burst_data`. """
hist_burst_data(
d, i, data_name='S', ax=ax, binwidth=binwidth, bins=bins,
pdf=pdf, weights=weights, gamma=gamma, add_naa=add_naa,
hist_style=hist_style, show_fit_stats=show_fit_stats,
show_fit_value=show_fit_value, fit_from=fit_from,
show_kde=show_kde, bandwidth=bandwidth,
show_kde_peak=show_kde_peak, # kde args
show_model=show_model, show_model_peaks=show_model_peaks,
hist_bar_style=hist_bar_style, hist_plot_style=hist_plot_style,
model_plot_style=model_plot_style, kde_plot_style=kde_plot_style,
verbose=verbose)
def _get_fit_text_stats(fit_arr, pylab=True):
"""Return a formatted string for mean E and max delta-E."""
delta = (fit_arr.max() - fit_arr.min())*100
fit_text = r'\langle{E}_{fit}\rangle = %.3f \qquad ' % fit_arr.mean()
fit_text += r'\Delta E_{fit} = %.2f \%%' % delta
if pylab: fit_text = r'$'+fit_text+r'$'
return fit_text
def _plot_fit_text_ch(
fit_arr, ich, fmt_str="CH%d: $E_{fit} = %.3f$", ax=None,
bbox=dict(boxstyle='round', facecolor='#dedede', alpha=0.5),
xtext_low=0.2, xtext_high=0.6, fontsize=16):
"""Plot a text box with ch and fit value."""
if ax is None: ax = gca()
xtext = xtext_high if fit_arr[ich] < xtext_high else xtext_low
ax.text(xtext, 0.81, fmt_str % (ich+1, fit_arr[ich]),
transform=ax.transAxes, fontsize=fontsize, bbox=bbox)
def hist2d_alex(d, i=0, vmin=2, vmax=0, binwidth=0.05, S_max_norm=0.8,
interp='bicubic', cmap='hot', under_color='white',
over_color='white', scatter=True, scatter_ms=3,
scatter_color='orange', scatter_alpha=0.2, gui_sel=False,
cbar_ax=None, grid_color='#D0D0D0'):
"""Plot 2-D E-S ALEX histogram with a scatterplot overlay.
"""
ax = plt.gca()
d._calc_alex_hist(binwidth)
ES_hist, E_bins, S_bins, S_ax = d.ES_hist[i], d.E_bins, d.S_bins, d.S_ax
colormap = plt.get_cmap(cmap)
# Heuristic for colormap range
if vmax <= vmin:
S_range = (S_ax < S_max_norm)
vmax = ES_hist[:, S_range].max()
if vmax <= vmin: vmax = 10*vmin
if scatter:
ax.plot(d.E[i], d.S[i], 'o', mew=0, ms=scatter_ms,
alpha=scatter_alpha, color=scatter_color)
im = ax.imshow(ES_hist[:, ::-1].T, interpolation=interp,
extent=(E_bins[0], E_bins[-1], S_bins[0], S_bins[-1]),
vmin=vmin, vmax=vmax, cmap=colormap)
im.cmap.set_under(under_color)
im.cmap.set_over(over_color)
if cbar_ax is None:
gcf().colorbar(im)
else:
        plt.colorbar(im, cax=cbar_ax)
ax.set_xlim(-0.2, 1.2)
ax.set_ylim(-0.2, 1.2)
ax.set_xlabel('E')
ax.set_ylabel('S')
ax.grid(color=grid_color)
if gui_sel:
# the selection object must be saved (otherwise will be destroyed)
hist2d_alex.gui_sel = gs.rectSelection(gcf(), gca())
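# Usage sketch for `hist2d_alex` (comment only; meaningful for ALEX data
# where both E and S are defined for each burst):
#
#     dplot(d, hist2d_alex, scatter=False, cmap='hot')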
def hexbin_alex(d, i=0, vmin=1, vmax=None, gridsize=80, cmap='Spectral_r',
E_name='E', S_name='S', **hexbin_kwargs):
"""Plot an hexbin 2D histogram for E-S.
"""
if d.num_bursts[i] < 1:
return
hexbin_kwargs_ = dict(edgecolor='none', linewidth=0.2, gridsize=gridsize,
cmap=cmap, extent=(-0.2, 1.2, -0.2, 1.2), mincnt=1)
if hexbin_kwargs is not None:
hexbin_kwargs_.update(_normalize_kwargs(hexbin_kwargs))
poly = plt.hexbin(d[E_name][i], d[S_name][i], **hexbin_kwargs_)
poly.set_clim(vmin, vmax)
plt.xlabel('E')
plt.ylabel('S')
def plot_ES_selection(ax, E1, E2, S1, S2, rect=True, **kwargs):
"""Plot an overlay ROI on top of an E-S plot (i.e. ALEX histogram).
    This function plots a rectangle and inscribed ellipse with x-axis limits
(E1, E2) and y-axis limits (S1, S2).
Note that, a dict with keys (E1, E2, S1, S2, rect) can be also passed to
:func:`fretbursts.select_bursts.ES` to apply a selection.
Parameters:
ax (matplotlib axis): the axis where the rectangle is plotted.
Typically you pass the axis of a previous E-S scatter plot
or histogram.
E1, E2, S1, S2 (floats): limits for E and S (X and Y axis respectively)
used to plot the rectangle.
        rect (bool): if True, the rectangle is highlighted and the ellipse
            is grey. The colors are swapped otherwise.
**kwargs: other keywords passed to both matplotlib's `Rectangle`
and `Ellipse`.
See also:
For selecting bursts according to (`E1`, `E2`, `S1`, `S2`, `rect`) see:
- :func:`fretbursts.select_bursts.ES`
"""
if rect:
rect_color, ellips_color = blue, 'gray'
else:
rect_color, ellips_color = 'gray', blue
patch_style = dict(fill=False, lw=1.5, alpha=0.5)
patch_style.update(**kwargs)
rect = Rectangle(xy=(E1, S1), height=(S2 - S1), width=(E2 - E1),
color=rect_color, **patch_style)
ellips = Ellipse(xy=(0.5*(E1 + E2), 0.5*(S1 + S2)), height=(S2 - S1),
width=(E2 - E1), color=ellips_color, **patch_style)
ax.add_patch(rect)
ax.add_patch(ellips)
return rect, ellips
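# Usage sketch for `plot_ES_selection` (comment only): the same dict can
# drive both the burst selection and the overlay (assuming the standard
# `select_bursts` import from fretbursts):
#
#     ES_sel = dict(E1=0.2, E2=0.8, S1=0.3, S2=0.7, rect=False)
#     d_sel = d.select_bursts(select_bursts.ES, **ES_sel)
#     plot_ES_selection(gca(), **ES_sel)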
def get_ES_range():
"""Get the range of ES histogram selected via GUI.
    Prints E1, E2, S1, S2 and returns a dict containing these values.
"""
sel = None
if hasattr(hist2d_alex.gui_sel, 'selection'):
sel = hist2d_alex.gui_sel.selection
print('E1={E1:.3}, E2={E2:.3}, S1={S1:.3}, S2={S2:.3}'.format(**sel))
return sel
def hist_interphoton_single(d, i=0, binwidth=1e-4, tmax=None, bins=None,
ph_sel=Ph_sel('all'), period=None,
yscale='log', xscale='linear', xunit='ms',
plot_style=None):
"""Plot histogram of interphoton delays for a single photon streams.
Arguments:
d (Data object): the input data.
i (int): the channel for which the plot must be done. Default is 0.
For single-spot data the only valid value is 0.
binwidth (float): histogram bin width in seconds.
tmax (float or None): max timestamp delay in the histogram (seconds).
            If None (default), uses the max timestamp delay in the stream.
If not None, the plotted histogram may be further trimmed to
the smallest delay with counts > 0 if this delay happens to be
smaller than `tmax`.
        bins (array or None): specifies the bin edges (in seconds). When
`bins` is not None then the arguments `binwidth` and `tmax`
are ignored. When `bins` is None, the bin edges are computed
from the `binwidth` and `tmax` arguments.
        ph_sel (Ph_sel object): photon stream for which the histogram is
            plotted.
period (int): the background period to use for plotting the histogram.
The background period is a time-slice of the measurement from which
            timestamps are taken. If `period` is None (default) the
            time window is the full measurement.
yscale (string): scale for the y-axis. Valid values include 'log' and
'linear'. Default 'log'.
xscale (string): scale for the x-axis. Valid values include 'log' and
'linear'. Default 'linear'.
xunit (string): unit used for the x-axis. Valid values are 's', 'ms',
'us', 'ns'. Default 'ms'.
plot_style (dict): keyword arguments to be passed to matplotlib's
`plot` function. Used to customize the plot style.
"""
unit_dict = {'s': 1, 'ms': 1e3, 'us': 1e6, 'ns': 1e9}
assert xunit in unit_dict
scalex = unit_dict[xunit]
# Compute interphoton delays
if period is None:
ph_times = d.get_ph_times(ich=i, ph_sel=ph_sel)
else:
ph_times = d.get_ph_times_period(ich=i, period=period, ph_sel=ph_sel)
delta_ph_t = np.diff(ph_times) * d.clk_p
if tmax is None:
tmax = delta_ph_t.max()
# Compute bin edges if not passed in
if bins is None:
# Shift by half clk_p to avoid "beatings" in the distribution
# due to floating point inaccuracies.
bins = np.arange(0, tmax + binwidth, binwidth) - 0.5 * d.clk_p
else:
warnings.warn('Using `bins` and ignoring `tmax` and `binwidth`.')
t_ax = bins[:-1] + 0.5 * binwidth
# Compute interphoton histogram
counts, _ = np.histogram(delta_ph_t, bins=bins)
# Max index with counts > 0
n_trim = np.trim_zeros(counts).size + 1
# Plot histograms
plot_style_ = dict(marker='o', markersize=5, linestyle='none', alpha=0.6)
if ph_sel in _ph_sel_color_dict:
plot_style_['color'] = _ph_sel_color_dict[ph_sel]
plot_style_['label'] = _ph_sel_label_dict[ph_sel]
plot_style_.update(_normalize_kwargs(plot_style, kind='line2d'))
plot(t_ax[:n_trim] * scalex, counts[:n_trim], **plot_style_)
if yscale == 'log':
gca().set_yscale(yscale)
plt.ylim(1)
_plot_status['hist_interphoton_single'] = {'autoscale': False}
if xscale == 'log':
        gca().set_xscale(xscale)
plt.xlim(0.5 * binwidth)
_plot_status['hist_interphoton_single'] = {'autoscale': False}
plt.xlabel('Inter-photon delays (%s)' % xunit.replace('us', 'μs'))
plt.ylabel('# Delays')
    # Return internal variables so that other functions can extend the plot
return dict(counts=counts, n_trim=n_trim, plot_style_=plot_style_,
t_ax=t_ax, scalex=scalex)
def hist_interphoton(d, i=0, binwidth=1e-4, tmax=None, bins=None, period=None,
yscale='log', xscale='linear', xunit='ms', plot_style=None,
show_da=False, legend=True):
"""Plot histogram of photon interval for different photon streams.
Arguments:
d (Data object): the input data.
i (int): the channel for which the plot must be done. Default is 0.
For single-spot data the only valid value is 0.
binwidth (float): histogram bin width in seconds.
tmax (float or None): max timestamp delay in the histogram (seconds).
            If None (default), uses the max timestamp delay in the stream.
If not None, the plotted histogram may be further trimmed to
the smallest delay with counts > 0 if this delay happens to be
smaller than `tmax`.
        bins (array or None): specifies the bin edges (in seconds). When
`bins` is not None then the arguments `binwidth` and `tmax`
are ignored. When `bins` is None, the bin edges are computed
from the `binwidth` and `tmax` arguments.
period (int): the background period to use for plotting the histogram.
The background period is a time-slice of the measurement from which
            timestamps are taken. If `period` is None (default) the
            time window is the full measurement.
yscale (string): scale for the y-axis. Valid values include 'log' and
'linear'. Default 'log'.
xscale (string): scale for the x-axis. Valid values include 'log' and
'linear'. Default 'linear'.
xunit (string): unit used for the x-axis. Valid values are 's', 'ms',
'us', 'ns'. Default 'ms'.
plot_style (dict): keyword arguments to be passed to matplotlib's
`plot` function. Used to customize the plot style.
show_da (bool): If False (default) do not plot the AexDem photon stream.
Ignored when the measurement is not ALEX.
legend (bool): If True (default) plot a legend.
"""
# Plot multiple timetraces
ph_sel_list = [Ph_sel('all'), Ph_sel(Dex='Dem'), Ph_sel(Dex='Aem')]
if d.alternated:
ph_sel_list.append(Ph_sel(Aex='Aem'))
if show_da:
ph_sel_list.append(Ph_sel(Aex='Dem'))
for ix, ph_sel in enumerate(ph_sel_list):
if not bl.mask_empty(d.get_ph_mask(i, ph_sel=ph_sel)):
hist_interphoton_single(d, i=i, binwidth=binwidth, tmax=tmax,
bins=bins, period=period, ph_sel=ph_sel,
yscale=yscale, xscale=xscale, xunit=xunit,
plot_style=plot_style)
if legend:
plt.legend(loc='best', fancybox=True)
if yscale == 'log' or xscale == 'log':
_plot_status['hist_interphoton'] = {'autoscale': False}
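# Usage sketch for `hist_interphoton` (comment only): interphoton delay
# distributions are a quick sanity check of the background level:
#
#     dplot(d, hist_interphoton, binwidth=1e-4, tmax=5e-3, xunit='ms')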
def hist_bg_single(d, i=0, binwidth=1e-4, tmax=0.01, bins=None,
ph_sel=Ph_sel('all'), period=0,
yscale='log', xscale='linear', xunit='ms', plot_style=None,
show_fit=True, fit_style=None, manual_rate=None):
"""Plot histogram of photon interval for a single photon streams.
Optionally plots the fitted background as an exponential curve.
Most arguments are described in :func:`hist_interphoton_single`.
In the following we document only the additional arguments.
Arguments:
show_fit (bool): If True shows the fitted background rate as an
exponential distribution.
manual_rate (float or None): When not None use this value as background
rate (ignoring the value saved in Data).
        fit_style (dict): arguments passed to matplotlib's `plot`
            for plotting the exponential curve.
For a description of all the other arguments see
:func:`hist_interphoton_single`.
"""
hist = hist_interphoton_single(d, i=i, binwidth=binwidth, tmax=tmax,
bins=bins, ph_sel=ph_sel, period=period,
yscale=yscale, xscale=xscale, xunit=xunit,
plot_style=None)
if show_fit or manual_rate is not None:
# Compute the fit function
if manual_rate is not None:
bg_rate = manual_rate
else:
bg_rate = d.bg_from(ph_sel)[i][period]
i_max = np.nonzero(hist['counts'] > 0)[0][-1]
tau_th = hist['t_ax'][i_max] / 3
i_tau_th = np.searchsorted(hist['t_ax'], tau_th)
counts_integral = hist['counts'][i_tau_th:].sum()
y_fit = np.exp(- hist['t_ax'] * bg_rate)
y_fit *= counts_integral / y_fit[i_tau_th:].sum()
# Plot
fit_style_ = dict(hist['plot_style_'])
fit_style_.update(linestyle='-', marker='', label='auto')
fit_style_.update(_normalize_kwargs(fit_style, kind='line2d'))
    if fit_style_['label'] == 'auto':
plt_label = hist['plot_style_'].get('label', None)
label = str(ph_sel) if plt_label is None else plt_label
fit_style_['label'] = '%s, %.2f kcps' % (label, bg_rate * 1e-3)
n_trim = hist['n_trim']
plot(hist['t_ax'][:n_trim] * hist['scalex'], y_fit[:n_trim],
**fit_style_)
def hist_bg(d, i=0, binwidth=1e-4, tmax=0.01, bins=None, period=0,
yscale='log', xscale='linear', xunit='ms', plot_style=None,
show_da=False, legend=True, show_fit=True, fit_style=None):
"""Plot histogram of photon interval for different photon streams.
Optionally plots the fitted background.
Most arguments are described in :func:`hist_interphoton`.
In the following we document only the additional arguments.
Arguments:
show_fit (bool): If True shows the fitted background rate as an
exponential distribution.
        fit_style (dict): arguments passed to matplotlib's `plot`
            for plotting the exponential curve.
For a description of all the other arguments see :func:`hist_interphoton`.
"""
# Plot multiple timetraces
ph_sel_list = [Ph_sel('all'), Ph_sel(Dex='Dem'), Ph_sel(Dex='Aem')]
if d.alternated:
ph_sel_list.append(Ph_sel(Aex='Aem'))
if show_da:
ph_sel_list.append(Ph_sel(Aex='Dem'))
for ix, ph_sel in enumerate(ph_sel_list):
if not bl.mask_empty(d.get_ph_mask(i, ph_sel=ph_sel)):
hist_bg_single(d, i=i, period=period, binwidth=binwidth,
bins=bins, tmax=tmax, ph_sel=ph_sel, xunit=xunit,
show_fit=show_fit, yscale=yscale, xscale=xscale,
plot_style=plot_style, fit_style=fit_style)
if legend:
plt.legend(loc='best', fancybox=True)
if yscale == 'log' or xscale == 'log':
_plot_status['hist_bg'] = {'autoscale': False}
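# Usage sketch for `hist_bg` (comment only): overlaying the exponential
# fit validates the estimated background rates for each stream and period:
#
#     dplot(d, hist_bg, period=0, show_fit=True)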
def hist_ph_delays(
d, i=0, time_min_s=0, time_max_s=30, bin_width_us=10, mask=None,
yscale='log', hfit_bin_ms=1, efit_tail_min_us=1000, **kwargs):
"""Histog. of ph delays and comparison with 3 BG fitting functions.
"""
ph = d.ph_times_m[i].copy()
if mask is not None: ph = ph[mask[i]]
ph = ph[(ph < time_max_s/d.clk_p)*(ph > time_min_s/d.clk_p)]
dph = np.diff(ph)*d.clk_p
H = hist(dph*1e6, bins=r_[0:1200:bin_width_us], histtype='step', **kwargs)
gca().set_yscale('log')
xlabel(u'Ph delay time (μs)'); ylabel("# Ph")
efun = lambda t, r: np.exp(-r*t)*r
re = bg.exp_fit(ph, tail_min_us=efit_tail_min_us)
rg = bg.exp_hist_fit(ph, tail_min_us=efit_tail_min_us, binw=hfit_bin_ms*1e3)
rc = bg.exp_cdf_fit(ph, tail_min_us=efit_tail_min_us)
t = r_[0:1200]*1e-6
F = 1 if 'normed' in kwargs else H[0].sum()*(bin_width_us)
    plot(t*1e6, 0.65*F*efun(t, rc)*1e-6, lw=3, alpha=0.5, color=purple,
         label="%d cps - Exp CDF (tail_min_us=%.2f)" % (rc, efit_tail_min_us))
    plot(t*1e6, 0.65*F*efun(t, re)*1e-6, lw=3, alpha=0.5, color=red,
         label="%d cps - Exp ML (tail_min_us=%.2f)" % (re, efit_tail_min_us))
    plot(t*1e6, 0.68*F*efun(t, rg)*1e-6, lw=3, alpha=0.5, color=green,
         label=u"%d cps - Hist (bin_ms=%d) [Δ=%d%%]" % (rg, hfit_bin_ms,
                                                        100*(rg-re)/re))
plt.legend(loc='best', fancybox=True)
def hist_mdelays(d, i=0, m=10, bins_s=(0, 10, 0.02), period=0,
hold=False, bg_ppf=0.01, ph_sel=Ph_sel('all'), spline=True,
s=1., bg_fit=True, bg_F=0.8):
"""Histogram of m-photons delays (all-ph vs in-burst ph).
"""
ax = gca()
if not hold:
#ax.clear()
for _ind in range(len(ax.lines)): ax.lines.pop()
results = bext.calc_mdelays_hist(
d=d, ich=i, m=m, period=period, bins_s=bins_s,
ph_sel=ph_sel, bursts=True, bg_fit=bg_fit, bg_F=bg_F)
bin_x, histog_y = results[:2]
bg_dist = results[2]
rate_ch_kcps = 1./bg_dist.kwds['scale'] # extract the rate
if bg_fit:
a, rate_kcps = results[3:5]
mdelays_hist_y = histog_y[0]
mdelays_b_hist_y = histog_y[1]
# Center of mass (COM)
binw = bins_s[2]
com = np.sum(bin_x*mdelays_hist_y)*binw
com_b = np.sum(bin_x*mdelays_b_hist_y)*binw
#print(com, com_b)
# Compute a spline smoothing of the PDF
mdelays_spline = UnivariateSpline(bin_x, mdelays_hist_y, s=s*com)
mdelays_b_spline = UnivariateSpline(bin_x, mdelays_b_hist_y, s=s*com_b)
mdelays_spline_y = mdelays_spline(bin_x)
mdelays_b_spline_y = mdelays_b_spline(bin_x)
if spline:
mdelays_pdf_y = mdelays_spline_y
mdelays_b_pdf_y = mdelays_b_spline_y
else:
mdelays_pdf_y = mdelays_hist_y
mdelays_b_pdf_y = mdelays_b_hist_y
# Thresholds and integrals
max_delay_th_P = bg_dist.ppf(bg_ppf)
max_delay_th_F = m/rate_ch_kcps/d.F
burst_domain = bin_x < max_delay_th_F
burst_integral = np.trapz(x=bin_x[burst_domain],
y=mdelays_hist_y[burst_domain])
title("I = %.1f %%" % (burst_integral*100), fontsize='small')
#text(0.8,0.8,"I = %.1f %%" % (integr*100), transform = gca().transAxes)
## MDelays plot
plot(bin_x, mdelays_pdf_y, lw=2, color=blue, alpha=0.5,
label="Delays dist.")
plot(bin_x, mdelays_b_pdf_y, lw=2, color=red, alpha=0.5,
label="Delays dist. (in burst)")
plt.axvline(max_delay_th_P, color='k',
label="BG ML dist. @ %.1f%%" % (bg_ppf*100))
plt.axvline(max_delay_th_F, color=purple,
label="BS threshold (F=%d)" % d.F)
## Bg distribution plots
bg_dist_y = bg_dist.pdf(bin_x)
ibin_x_bg_mean = np.abs(bin_x - bg_dist.mean()).argmin()
bg_dist_y *= mdelays_pdf_y[ibin_x_bg_mean]/bg_dist_y[ibin_x_bg_mean]
plot(bin_x, bg_dist_y, '--k', alpha=1.,
label='BG ML dist.')
plt.axvline(bg_dist.mean(), color='k', ls='--', label="BG mean")
if bg_fit:
bg_y = a*erlang.pdf(bin_x, a=m, scale=1./rate_kcps)
plot(bin_x, bg_y, '--k', alpha=1.)
plt.legend(ncol=2, frameon=False)
xlabel("Time (ms)")
def hist_mrates(d, i=0, m=10, bins=(0, 4000, 100), yscale='log', pdf=False,
dense=True, plot_style=None):
"""Histogram of m-photons rates. See also :func:`hist_mdelays`.
"""
ph = d.get_ph_times(ich=i)
if dense:
ph_mrates = 1.*m/((ph[m-1:]-ph[:ph.size-m+1])*d.clk_p*1e3)
else:
ph_mrates = 1.*m/(np.diff(ph[::m])*d.clk_p*1e3)
hist = HistData(*np.histogram(ph_mrates, bins=_bins_array(bins)))
ydata = hist.pdf if pdf else hist.counts
plot_style_ = dict(marker='o')
plot_style_.update(_normalize_kwargs(plot_style, kind='line2d'))
plot(hist.bincenters, ydata, **plot_style_)
gca().set_yscale(yscale)
xlabel("Rates (kcps)")
## Bursts stats
def hist_sbr(d, i=0, bins=(0, 30, 1), pdf=True, weights=None, color=None,
plot_style=None):
"""Histogram of per-burst Signal-to-Background Ratio (SBR).
"""
weights = weights[i] if weights is not None else None
if 'sbr' not in d:
d.calc_sbr()
_hist_burst_taildist(d.sbr[i], bins, pdf, weights=weights, color=color,
plot_style=plot_style)
plt.xlabel('SBR')
def hist_burst_phrate(d, i=0, bins=(0, 1000, 20), pdf=True, weights=None,
color=None, plot_style=None, vline=None):
"""Histogram of max photon rate in each burst.
"""
weights = weights[i] if weights is not None else None
if hasattr(d, '__array__'):
max_rate = d
else:
if 'max_rate' not in d:
d.calc_max_rate(m=10)
max_rate = d.max_rate
_hist_burst_taildist(max_rate[i] * 1e-3, bins, pdf, weights=weights,
color=color, plot_style=plot_style, vline=vline)
plt.xlabel('Peak rate (kcps)')
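# Usage sketch for `hist_burst_phrate` (comment only; peak rates are
# computed on the fly via `d.calc_max_rate(m=10)` when missing):
#
#     dplot(d, hist_burst_phrate, bins=(0, 1000, 20), pdf=True)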
def hist_burst_delays(d, i=0, bins=(0, 10, 0.2), pdf=False, weights=None,
color=None, plot_style=None):
"""Histogram of waiting times between bursts.
"""
weights = weights[i] if weights is not None else None
bdelays = np.diff(d.mburst[i].start*d.clk_p)
_hist_burst_taildist(bdelays, bins, pdf, weights=weights, color=color,
plot_style=plot_style)
plt.xlabel('Delays between bursts (s)')
## Burst internal "symmetry"
def hist_asymmetry(d, i=0, bin_max=2, binwidth=0.1, stat_func=np.median):
burst_asym = bext.asymmetry(d, ich=i, func=stat_func)
bins_pos = np.arange(0, bin_max+binwidth, binwidth)
bins = np.hstack([-bins_pos[1:][::-1], bins_pos])
    izero = (bins.size - 1) // 2  # integer index of the zero-centered bin
    assert izero == np.where(np.abs(bins) < 1e-8)[0]
counts, _ = np.histogram(burst_asym, bins=bins)
asym_counts_neg = counts[:izero] - counts[izero:][::-1]
asym_counts_pos = counts[izero:] - counts[:izero][::-1]
asym_counts = np.hstack([asym_counts_neg, asym_counts_pos])
plt.bar(bins[:-1], width=binwidth, height=counts, fc=blue, alpha=0.5)
plt.bar(bins[:-1], width=binwidth, height=asym_counts, fc=red,
alpha=0.5)
plt.grid(True)
plt.xlabel('Time (ms)')
plt.ylabel('# Bursts')
plt.legend(['{func}$(t_D)$ - {func}$(t_A)$'.format(func=stat_func.__name__),
'positive half - negative half'],
frameon=False, loc='best')
skew_abs = asym_counts_neg.sum()
skew_rel = 100.*skew_abs/counts.sum()
print('Skew: %d bursts, (%.1f %%)' % (skew_abs, skew_rel))
##
# Scatter plots
#
def scatter_width_size(d, i=0):
"""Scatterplot of burst width versus size."""
b = d.mburst[i]
plot(b.width*d.clk_p*1e3, d.nt[i], 'o', mew=0, ms=3, alpha=0.7,
color='blue')
t_ms = arange(0, 50)
plot(t_ms, ((d.m)/(d.T[i]))*t_ms*1e-3, '--', lw=2, color='k',
label='Slope = m/T = min. rate = %1.0f cps' % (d.m/d.T[i]))
plot(t_ms, d.bg_mean[Ph_sel('all')][i]*t_ms*1e-3, '--', lw=2, color=red,
label='Noise rate: BG*t')
xlabel('Burst width (ms)'); ylabel('Burst size (# ph.)')
plt.xlim(0, 10); plt.ylim(0, 300)
legend(frameon=False)
def scatter_rate_da(d, i=0):
"""Scatter of nd rate vs na rate (rates for each burst)."""
b = d.mburst[i]
Rate = lambda nX: nX[i]/b.width/d.clk_p*1e-3
plot(Rate(d.nd), Rate(d.na), 'o', mew=0, ms=3, alpha=0.1, color='blue')
xlabel('D burst rate (kcps)'); ylabel('A burst rate (kcps)')
plt.xlim(-20, 100); plt.ylim(-20, 100)
legend(frameon=False)
def scatter_fret_size(d, i=0, which='all', gamma=1, add_naa=False,
plot_style=None):
"""Scatterplot of FRET efficiency versus burst size.
"""
if which == 'all':
size = d.burst_sizes_ich(ich=i, gamma=gamma, add_naa=add_naa)
else:
assert which in d
size = d[which][i]
plot_style_ = dict(linestyle='', alpha=0.1, color=blue,
marker='o', markeredgewidth=0, markersize=3)
plot_style_.update(_normalize_kwargs(plot_style, kind='line2d'))
plot(d.E[i], size, **plot_style_)
xlabel("FRET Efficiency (E)")
ylabel("Corrected Burst size (#ph)")
def scatter_fret_nd_na(d, i=0, show_fit=False, no_text=False, gamma=1.,
**kwargs):
"""Scatterplot of FRET versus gamma-corrected burst size."""
default_kwargs = dict(mew=0, ms=3, alpha=0.3, color=blue)
default_kwargs.update(**kwargs)
plot(d.E[i], gamma*d.nd[i]+d.na[i], 'o', **default_kwargs)
xlabel("FRET Efficiency (E)")
ylabel("Burst size (#ph)")
if show_fit:
_fitted_E_plot(d, i, F=1., no_E=no_text, ax=gca())
if i == 0 and not no_text:
plt.figtext(0.4, 0.01, _get_fit_E_text(d), fontsize=14)
def scatter_fret_width(d, i=0):
"""Scatterplot of FRET versus burst width."""
b = d.mburst[i]
    plot(d.E[i], b.width * d.clk_p * 1e3, 'o', mew=0, ms=3, alpha=0.1,
         color="blue")
xlabel("FRET Efficiency (E)")
ylabel("Burst width (ms)")
def scatter_da(d, i=0, alpha=0.3):
"""Scatterplot of donor vs acceptor photons (nd, vs na) in each burst."""
plot(d.nd[i], d.na[i], 'o', mew=0, ms=3, alpha=alpha, color='blue')
xlabel('# donor ph.'); ylabel('# acceptor ph.')
plt.xlim(-5, 200); plt.ylim(-5, 120)
def scatter_naa_nt(d, i=0, alpha=0.5):
"""Scatterplot of nt versus naa."""
plot(d.nt[i], d.naa[i], 'o', mew=0, ms=3, alpha=alpha, color='blue')
plot(arange(200), color='k', lw=2)
xlabel('Total burst size (nd+na+naa)'); ylabel('Accept em-ex BS (naa)')
plt.xlim(-5, 200); plt.ylim(-5, 120)
def scatter_alex(d, i=0, **kwargs):
"""Scatterplot of E vs S. Keyword arguments passed to `plot`."""
plot_style = dict(mew=1, ms=4, mec='black', color='purple',
alpha=0.1)
plot_style = _normalize_kwargs(plot_style, 'line2d')
plot_style.update(_normalize_kwargs(kwargs))
plot(d.E[i], d.S[i], 'o', **plot_style)
xlabel("E"); ylabel('S')
plt.xlim(-0.2, 1.2); plt.ylim(-0.2, 1.2)
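# Usage sketch for the scatter plots above (comment only), e.g. a raw E-S
# scatter for ALEX measurements:
#
#     dplot(d, scatter_alex, alpha=0.2)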
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# High-level plot wrappers
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def _iter_plot(d, func, kwargs, iter_ch, nrows, ncols, figsize, AX,
sharex, sharey, suptitle, grid, scale, skip_ch=None,
title='out', title_ch=True, title_bg=True, title_nbursts=True,
title_kws=None, top=0.95, bottom=None, hspace=0.15, wspace=None,
left=0.08, right=0.96, xrotation=0):
if AX is None:
fig, AX = plt.subplots(nrows, ncols, figsize=figsize, sharex=sharex,
sharey=sharey, squeeze=False)
old_ax = False
else:
fig = AX[0, 0].figure
old_ax = True
if skip_ch is None:
skip_ch = []
for i, ich in enumerate(iter_ch):
ax = AX.ravel()[i]
ax.grid(grid)
plt.setp(ax.get_xticklabels(), rotation=xrotation)
if ich in skip_ch:
continue
b = d.mburst[ich] if 'mburst' in d else None
if suptitle and i == 0 and hasattr(d, 'status') and callable(d.status):
fig.suptitle(d.status())
if title: # no title if None of False
if title_kws is None:
title_kws = {}
s = ''
if title_ch:
s += '[%d]' % ich
if title_bg and 'bg_mean' in d:
s += (' BG=%.1fk' % (d.bg_mean[Ph_sel('all')][ich] * 1e-3))
if title_nbursts and b is not None:
s += (' #B=%d' % b.num_bursts)
if title is True or 'out' in title.lower():
ax.set_title(s, **title_kws)
else:
titley, va = 0.95, 'top'
if 'bottom' in str(title):
titley, va = 1 - titley, 'baseline'
titlex, ha = 0.95, 'right'
if 'left' in str(title):
titlex, ha = 1 - titlex, 'left'
ax.text(titlex, titley, s, transform=ax.transAxes, ha=ha, va=va,
**title_kws)
plt.sca(ax)
gui_status['first_plot_in_figure'] = (i == 0)
func(d, ich, **kwargs)
if ax.legend_ is not None:
ax.legend_.remove()
[a.set_xlabel('') for a in AX[:-1, :].ravel()]
[a.set_ylabel('') for a in AX[:, 1:].ravel()]
if sharex:
plt.setp([a.get_xticklabels() for a in AX[:-1, :].ravel()],
visible=False)
[a.set_xlabel('') for a in AX[:-1, :].ravel()]
if sharey:
if AX.shape[1] > 1:
plt.setp([a.get_yticklabels() for a in AX[:, 1]], visible=False)
if wspace is None:
wspace = 0.08
func_allows_autoscale = True
if func.__name__ in _plot_status:
func_allows_autoscale = _plot_status[func.__name__]['autoscale']
if scale and func_allows_autoscale:
ax.autoscale(enable=True, axis='y')
if not old_ax:
fig.subplots_adjust(hspace=hspace, wspace=wspace,
left=left, right=right, top=top, bottom=bottom)
return AX
def dplot_48ch(d, func, sharex=True, sharey=True, layout='horiz',
grid=True, figsize=None, AX=None, scale=True, skip_ch=None,
suptitle=True, title=True, title_ch=True, title_bg=True,
title_nbursts=True, title_kws=None, xrotation=0,
top=0.93, bottom=None, hspace=0.18, wspace=None, left=0.08,
right=0.96, dec=1, **kwargs):
"""Plot wrapper for 48-spot measurements. Use `dplot` instead."""
msg = "Wrong layout '%s'. Valid values: 'horiz', 'vert', '8x6'."
assert (layout.startswith('vert') or layout.startswith('horiz') or
layout == '8x6'), (msg % layout)
if dec > 1:
assert dec == 2 or dec == 4
assert layout.startswith('horiz')
global gui_status
ch_map = np.arange(48).reshape(4, 12)[::dec, ::dec]
iter_ch = ch_map.ravel()
if layout == '8x6':
nrows, ncols = 6, 8
if figsize is None:
figsize = (18, 6)
else:
nrows, ncols = 4 // dec, 12 // dec
if layout.startswith('vert'):
nrows, ncols = ncols, nrows
iter_ch = ch_map.T.ravel()
if figsize is None:
figsize = (1.5 * ncols + 2, 1.5 * nrows + 1)
if layout.startswith('vert'):
figsize = figsize[1], figsize[0]
return _iter_plot(d, func, kwargs, iter_ch, nrows, ncols, figsize, AX,
sharex, sharey, suptitle, grid, scale, skip_ch=skip_ch,
top=top, bottom=bottom, hspace=hspace, wspace=wspace,
left=left, right=right, xrotation=xrotation,
title=title, title_ch=title_ch, title_bg=title_bg,
title_nbursts=title_nbursts, title_kws=title_kws)
def dplot_16ch(d, func, sharex=True, sharey=True, ncols=8,
pgrid=True, figsize=None, AX=None, suptitle=True,
scale=True, skip_ch=None, top=0.93, bottom=None,
hspace=0.15, wspace=None, left=0.08, right=0.96, **kwargs):
"""Plot wrapper for 16-spot measurements. Use `dplot` instead."""
assert (ncols <= 16), '`ncols` needs to be <= 16.'
global gui_status
iter_ch = range(16)
nrows = int(np.ceil(d.nch / ncols))
if figsize is None:
subplotsize = (3, 3)
figsize = (subplotsize[0] * ncols, subplotsize[1] * nrows)
return _iter_plot(d, func, kwargs, iter_ch, nrows, ncols, figsize, AX,
                      sharex, sharey, suptitle, pgrid, scale, skip_ch=skip_ch,
top=top, bottom=bottom, hspace=hspace, wspace=wspace,
left=left, right=right)
def dplot_8ch(d, func, sharex=True, sharey=True,
pgrid=True, figsize=(12, 9), nosuptitle=False, AX=None,
scale=True, **kwargs):
"""Plot wrapper for 8-spot measurements. Use `dplot` instead."""
global gui_status
if AX is None:
fig, AX = plt.subplots(4, 2, figsize=figsize, sharex=sharex,
sharey=sharey)
fig.subplots_adjust(left=0.08, right=0.96, top=0.93, bottom=0.07,
wspace=0.05)
old_ax = False
else:
fig = AX[0, 0].figure
old_ax = True
for i in range(d.nch):
b = d.mburst[i] if 'mburst' in d else None
if (func not in [timetrace, ratetrace, timetrace_single,
ratetrace_single, hist_bg_single, hist_bg,
timetrace_bg]) and np.size(b) == 0:
continue
ax = AX.ravel()[i]
if i == 0 and not nosuptitle:
fig.suptitle(d.status())
s = u'[%d]' % (i+1)
if 'bg_mean' in d:
s += (' BG=%.1fk' % (d.bg_mean[Ph_sel('all')][i]*1e-3))
if 'T' in d:
s += (u', T=%dμs' % (d.T[i]*1e6))
if b is not None: s += (', #bu=%d' % b.num_bursts)
ax.set_title(s, fontsize=12)
ax.grid(pgrid)
plt.sca(ax)
gui_status['first_plot_in_figure'] = (i == 0)
func(d, i, **kwargs)
if i % 2 == 1: ax.yaxis.tick_right()
[a.set_xlabel('') for a in AX[:-1, :].ravel()]
[a.set_ylabel('') for a in AX[:, 1:].ravel()]
if sharex:
plt.setp([a.get_xticklabels() for a in AX[:-1, :].ravel()],
visible=False)
[a.set_xlabel('') for a in AX[:-1, :].ravel()]
if not old_ax: fig.subplots_adjust(hspace=0.15)
if sharey:
plt.setp([a.get_yticklabels() for a in AX[:, 1]], visible=False)
fig.subplots_adjust(wspace=0.08)
func_allows_autoscale = True
if func.__name__ in _plot_status:
func_allows_autoscale = _plot_status[func.__name__]['autoscale']
if scale and func_allows_autoscale:
ax.autoscale(enable=True, axis='y')
return AX
def dplot_1ch(d, func, pgrid=True, ax=None,
figsize=(9, 4.5), fignum=None, nosuptitle=False, **kwargs):
"""Plot wrapper for single-spot measurements. Use `dplot` instead."""
global gui_status
if ax is None:
fig = plt.figure(num=fignum, figsize=figsize)
ax = fig.add_subplot(111)
else:
fig = ax.figure
s = d.name
if 'bg_mean' in d:
s += (' BG=%.1fk' % (d.bg_mean[Ph_sel('all')][0] * 1e-3))
if 'T' in d:
s += (u', T=%dμs' % (d.T[0] * 1e6))
if 'mburst' in d:
s += (', #bu=%d' % d.num_bursts[0])
if not nosuptitle:
ax.set_title(s, fontsize=12)
ax.grid(pgrid)
plt.sca(ax)
gui_status['first_plot_in_figure'] = True
func(d, **kwargs)
return ax
def dplot(d, func, **kwargs):
"""Main plot wrapper for single and multi-spot measurements."""
if hasattr(d, '__array__'):
nch = d.shape[1]
else:
nch = d.nch
if nch == 1:
return dplot_1ch(d=d, func=func, **kwargs)
elif nch == 8:
return dplot_8ch(d=d, func=func, **kwargs)
elif nch == 16:
return dplot_16ch(d=d, func=func, **kwargs)
elif nch == 48:
return dplot_48ch(d=d, func=func, **kwargs)
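# Usage sketch for `dplot` (comment only): the wrapper dispatches on
# `d.nch` (1, 8, 16 or 48 spots) and returns the axis or array of axes
# for further customization:
#
#     ax = dplot(d, timetrace_bg)
#     ax = dplot(d, hist_fret, binwidth=0.02)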
##
# ALEX joint-plot using seaborn
#
def _alex_plot_style(g, colorbar=True):
"""Set plot style and colorbar for an ALEX joint plot.
"""
g.set_axis_labels(xlabel="E", ylabel="S")
g.ax_marg_x.grid(True)
g.ax_marg_y.grid(True)
g.ax_marg_x.set_xlabel('')
g.ax_marg_y.set_ylabel('')
plt.setp(g.ax_marg_y.get_xticklabels(), visible=True)
plt.setp(g.ax_marg_x.get_yticklabels(), visible=True)
g.ax_marg_x.locator_params(axis='y', tight=True, nbins=3)
g.ax_marg_y.locator_params(axis='x', tight=True, nbins=3)
if colorbar:
pos = g.ax_joint.get_position().get_points()
X, Y = pos[:, 0], pos[:, 1]
cax = plt.axes([1., Y[0], (X[1] - X[0]) * 0.045, Y[1] - Y[0]])
plt.colorbar(cax=cax)
def _hist_bursts_marg(arr, dx, i, E_name='E', S_name='S', **kwargs):
"""Wrapper to call hist_burst_data() from seaborn plot_marginals().
"""
vertical = kwargs.get('vertical', False)
data_name = S_name if vertical else E_name
hist_burst_data(dx, i=i, data_name=data_name, **kwargs)
def _alex_hexbin_vmax(patches, vmax_fret=True, Smax=0.8):
"""Return the max counts in the E-S hexbin histogram in `patches`.
When `vmax_fret` is True, returns the max count for S < Smax.
Otherwise returns the max count in all the histogram.
"""
counts = patches.get_array()
if vmax_fret:
offset = patches.get_offsets()
xoffset, yoffset = offset[:, 0], offset[:, 1]
mask = yoffset < Smax
vmax = counts[mask].max()
else:
vmax = counts.max()
return vmax
def alex_jointplot(d, i=0, gridsize=50, cmap='Spectral_r', kind='hex',
vmax_fret=True, vmin=1, vmax=None,
joint_kws=None, marginal_kws=None, marginal_color=10,
rightside_text=False, E_name='E', S_name='S'):
"""Plot an ALEX join plot: an E-S 2D histograms with marginal E and S.
This function plots a jointplot: an inner 2D E-S distribution plot
and the marginal distributions for E and S separately.
    By default, the inner plot is a hexbin plot, i.e. the bin shape is
hexagonal. Hexagonal bins reduce artifacts due to discretization.
The marginal plots are histograms with a KDE overlay.
Arguments:
d (Data object): the variable containing the bursts to plot
i (int): the channel number. Default 0.
        gridsize (int): the grid size for the 2D histogram (hexbin).
            An array of per-burst weights (of size `d.num_bursts[i]`) can
            be passed to matplotlib hexbin() as the `C` argument through
            `joint_kws`.
cmap (string): name of the colormap for the 2D histogram. In
addition to matplotlib colormaps, FRETbursts defines
these custom colormaps: 'alex_light', 'alex_dark' and 'alex_lv'.
            Default 'Spectral_r'.
kind (string): kind of plot for the 2-D distribution. Valid values:
'hex' for hexbin plots, 'kde' for kernel density estimation,
'scatter' for scatter plot.
vmax_fret (bool): if True, the colormap max value is equal to the
max bin counts in the FRET region (S < 0.8). If False the
colormap max is equal to the max bin counts.
vmin (int): min value in the histogram mapped by the colormap.
            Default 1, so the lowest colormap color represents bins with
            a single count.
vmax (int or None): max value in the histogram mapped by the colormap.
When None, vmax is computed automatically from the data and
            depends on the argument `vmax_fret`. Default `None`.
        joint_kws (dict): keyword arguments passed to the function that
            plots the inner 2-D distribution (i.e. matplotlib's scatter or
            hexbin, or seaborn's kdeplot) to customize the plot style.
marginal_kws (dict) : keyword arguments passed to the function
            :func:`hist_burst_data` used to plot the marginal distributions.
marginal_color (int or color): color to be used for the marginal
histograms. It can be an integer or any color accepted by
matplotlib. If integer, it represents a color in the colormap
`cmap` from 0 (lowest cmap color) to 99 (highest cmap color).
rightside_text (bool): when True, print the measurement name on
the right side of the figure. When False (default) no additional
text is printed.
E_name, S_name (string): name of the `Data` attribute to be used for
E and S. The default is 'E' and 'S' respectively. These arguments
            are used when adding your own custom E or S attributes to Data
using `Data.add`. In this case, you can specify the name of
these custom attributes so that they can be plotted as an E-S
histogram.
Returns:
A ``seaborn.JointGrid`` object that can be used for tweaking the plot.
.. seealso::
The `Seaborn documentation <https://seaborn.pydata.org/>`__
has more info on plot customization:
* https://seaborn.pydata.org/generated/seaborn.JointGrid.html
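Example:
    A minimal usage sketch (assuming `d` is an ALEX :class:`Data`
    object with burst data; the styling tweaks are illustrative)::

        g = alex_jointplot(d, i=0, kind='hex', gridsize=50)
        g.ax_joint.axhline(0.5, ls='--', color='gray')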
"""
g = sns.JointGrid(x=d[E_name][i], y=d[S_name][i], ratio=3, space=0.2,
xlim=(-0.2, 1.2), ylim=(-0.2, 1.2))
if isinstance(marginal_color, int):
histcolor = sns.color_palette(cmap, 100)[marginal_color]
else:
histcolor = marginal_color
marginal_kws_ = dict(
show_kde=True, bandwidth=0.03, binwidth=0.03,
hist_bar_style={'facecolor': histcolor, 'edgecolor': 'k',
'linewidth': 0.15, 'alpha': 1})
if marginal_kws is not None:
marginal_kws_.update(_normalize_kwargs(marginal_kws))
if kind == "scatter":
joint_kws_ = dict(s=40, color=histcolor, alpha=0.1, linewidths=0)
if joint_kws is not None:
joint_kws_.update(_normalize_kwargs(joint_kws))
jplot = g.plot_joint(plt.scatter, **joint_kws_)
elif kind.startswith('hex'):
joint_kws_ = dict(edgecolor='none', linewidth=0.2, gridsize=gridsize,
cmap=cmap, extent=(-0.2, 1.2, -0.2, 1.2), mincnt=1)
if joint_kws is not None:
joint_kws_.update(_normalize_kwargs(joint_kws))
jplot = g.plot_joint(plt.hexbin, **joint_kws_)
# Set the vmin and vmax values for the colormap
polyc = [c for c in jplot.ax_joint.get_children()
if isinstance(c, PolyCollection)][0]
if vmax is None:
vmax = _alex_hexbin_vmax(polyc, vmax_fret=vmax_fret)
polyc.set_clim(vmin, vmax)
elif kind.startswith("kde"):
joint_kws_ = dict(shade=True, shade_lowest=False, n_levels=30,
cmap=cmap, clip=(-0.4, 1.4), bw=0.03)
if joint_kws is not None:
joint_kws_.update(_normalize_kwargs(joint_kws))
jplot = g.plot_joint(sns.kdeplot, **joint_kws_)
g.ax_joint.set_xlim(-0.19, 1.19)
g.ax_joint.set_ylim(-0.19, 1.19)
g.plot_marginals(_hist_bursts_marg, dx=d, i=i, E_name=E_name, S_name=S_name,
**marginal_kws_)
g.annotate(lambda x, y: x.size, stat='# Bursts',
template='{stat}: {val}', frameon=False)
colorbar = kind.startswith('hex')
_alex_plot_style(g, colorbar=colorbar)
if rightside_text:
plt.text(1.15, 0.6, d.name, transform=g.fig.transFigure, fontsize=14,
bbox=dict(edgecolor='r', facecolor='none', lw=1.3, alpha=0.5))
return g
def _register_colormaps():
import matplotlib as mpl
import seaborn as sns
c = sns.color_palette('nipy_spectral', 64)[2:43]
cmap = mpl.colors.LinearSegmentedColormap.from_list('alex_lv', c)
cmap.set_under(alpha=0)
mpl.cm.register_cmap(name='alex_lv', cmap=cmap)
c = sns.color_palette('YlGnBu', 64)[16:]
cmap = mpl.colors.LinearSegmentedColormap.from_list('alex', c)
cmap.set_under(alpha=0)
mpl.cm.register_cmap(name='alex_light', cmap=cmap)
mpl.cm.register_cmap(name='YlGnBu_crop', cmap=cmap)
mpl.cm.register_cmap(name='alex_dark', cmap=mpl.cm.GnBu_r)
# Temporary hack to workaround issue
# https://github.com/mwaskom/seaborn/issues/855
mpl.cm.alex_light = mpl.cm.get_cmap('alex_light')
mpl.cm.alex_dark = mpl.cm.get_cmap('alex_dark')
# Register colormaps on import if not mocking
if not hasattr(sns, '_mock'):
_register_colormaps()<|fim▁end|> | print('Fit Integral:', np.trapz(scale*y, x))
ax2.axvline(d.E_fit[i], lw=3, color=red, ls='--', alpha=0.6) |
<|file_name|>file_upload_test.go<|end_file_name|><|fim▁begin|>package grequests
import "testing"
<|fim▁hole|>func TestErrorOpenFile(t *testing.T) {
fd, err := FileUploadFromDisk("file", "I am Not A File")
if err == nil {
t.Error("We are not getting an error back from our non existent file: ")
}
if fd != nil {
t.Error("We actually got back a pointer: ", fd)
}
}<|fim▁end|> | |
<|file_name|>broadcast.py<|end_file_name|><|fim▁begin|>#! /usr/bin/python3
"""
Broadcast a message, with or without a price.
Multiple messages per block are allowed. Bets are made on the 'timestamp'
field, and not the block index.
An address is a feed of broadcasts. Feeds may be locked with a broadcast whose
text field is identical to ‘lock’ (case insensitive). Bets on a feed reference
the address that is the source of the feed in an output which includes the
(latest) required fee.
Broadcasts without a price may not be used for betting. Broadcasts about events
with a small number of possible outcomes (e.g. sports games), should be
written, for example, such that a price of 1 XBJ means one outcome, 2 XBJ means
another, etc., which schema should be described in the 'text' field.
fee_fraction: .05 XBJ means 5%. It may be greater than 1, however; but
because it is stored as a four‐byte integer, it may not be greater than about
42.
"""
import struct
import decimal
D = decimal.Decimal
from fractions import Fraction
import logging
from . import (util, exceptions, config, worldcoin)
from . import (bet)
FORMAT = '>IdI'
LENGTH = 4 + 8 + 4
ID = 30
# NOTE: Pascal strings are used for storing texts for backwards‐compatibility.
def validate (db, source, timestamp, value, fee_fraction_int, text, block_index):
problems = []
if fee_fraction_int > 4294967295:
problems.append('fee fraction greater than 42.94967295')
if timestamp < 0: problems.append('negative timestamp')
if not source:
problems.append('null source address')
# Check previous broadcast in this feed.
cursor = db.cursor()
broadcasts = list(cursor.execute('''SELECT * FROM broadcasts WHERE (status = ? AND source = ?) ORDER BY tx_index ASC''', ('valid', source)))
cursor.close()
if broadcasts:
last_broadcast = broadcasts[-1]
if last_broadcast['locked']:
problems.append('locked feed')
elif timestamp <= last_broadcast['timestamp']:
problems.append('feed timestamps not monotonically increasing')
if not (block_index >= 317500 or config.TESTNET): # Protocol change.
if len(text) > 52:
problems.append('text too long')
return problems
def compose (db, source, timestamp, value, fee_fraction, text):
# Store the fee fraction as an integer.
fee_fraction_int = int(fee_fraction * 1e8)
problems = validate(db, source, timestamp, value, fee_fraction_int, text, util.last_block(db)['block_index'])
if problems: raise exceptions.BroadcastError(problems)
data = struct.pack(config.TXTYPE_FORMAT, ID)
if len(text) <= 52:
curr_format = FORMAT + '{}p'.format(len(text) + 1)
else:
curr_format = FORMAT + '{}s'.format(len(text))
data += struct.pack(curr_format, timestamp, value, fee_fraction_int,
text.encode('utf-8'))
return (source, [], data)
def parse (db, tx, message):
cursor = db.cursor()
# Unpack message.
try:
if len(message) - LENGTH <= 52:
curr_format = FORMAT + '{}p'.format(len(message) - LENGTH)
else:
curr_format = FORMAT + '{}s'.format(len(message) - LENGTH)
timestamp, value, fee_fraction_int, text = struct.unpack(curr_format, message)
try:
text = text.decode('utf-8')
except UnicodeDecodeError:
text = ''
status = 'valid'
except (struct.error) as e:
timestamp, value, fee_fraction_int, text = 0, None, 0, None
status = 'invalid: could not unpack'
if status == 'valid':
# For SQLite3
timestamp = min(timestamp, config.MAX_INT)
value = min(value, config.MAX_INT)
problems = validate(db, tx['source'], timestamp, value, fee_fraction_int, text, tx['block_index'])
if problems: status = 'invalid: ' + '; '.join(problems)
# Lock?
lock = False
if text and text.lower() == 'lock':
lock = True
timestamp, value, fee_fraction_int, text = 0, None, None, None
else:
lock = False
# Add parsed transaction to message-type–specific table.
bindings = {
'tx_index': tx['tx_index'],
'tx_hash': tx['tx_hash'],
'block_index': tx['block_index'],
'source': tx['source'],
'timestamp': timestamp,
'value': value,
'fee_fraction_int': fee_fraction_int,
'text': text,
'locked': lock,
'status': status,
}
sql='insert into broadcasts values(:tx_index, :tx_hash, :block_index, :source, :timestamp, :value, :fee_fraction_int, :text, :locked, :status)'
cursor.execute(sql, bindings)
# Negative values (default to ignore).
if value == None or value < 0:<|fim▁hole|> if value == -2:
cursor.execute('''SELECT * FROM bets \
WHERE (status = ? AND feed_address = ?)''',
('open', tx['source']))
for i in list(cursor):
bet.cancel_bet(db, i, 'dropped', tx['block_index'])
# Cancel Pending Bet Matches?
if value == -3:
cursor.execute('''SELECT * FROM bet_matches \
WHERE (status = ? AND feed_address = ?)''',
('pending', tx['source']))
for bet_match in list(cursor):
bet.cancel_bet_match(db, bet_match, 'dropped', tx['block_index'])
cursor.close()
return
# Handle bet matches that use this feed.
cursor.execute('''SELECT * FROM bet_matches \
WHERE (status=? AND feed_address=?)
ORDER BY tx1_index ASC, tx0_index ASC''',
('pending', tx['source']))
for bet_match in cursor.fetchall():
broadcast_bet_match_cursor = db.cursor()
bet_match_id = bet_match['tx0_hash'] + bet_match['tx1_hash']
bet_match_status = None
# Calculate total funds held in escrow and total fee to be paid if
# the bet match is settled. Escrow less fee is amount to be paid back
# to betters.
total_escrow = bet_match['forward_quantity'] + bet_match['backward_quantity']
fee_fraction = fee_fraction_int / config.UNIT
fee = int(fee_fraction * total_escrow) # Truncate.
escrow_less_fee = total_escrow - fee
# Get known bet match type IDs.
cfd_type_id = util.BET_TYPE_ID['BullCFD'] + util.BET_TYPE_ID['BearCFD']
equal_type_id = util.BET_TYPE_ID['Equal'] + util.BET_TYPE_ID['NotEqual']
# Get the bet match type ID of this bet match.
bet_match_type_id = bet_match['tx0_bet_type'] + bet_match['tx1_bet_type']
# Contract for difference, with determinate settlement date.
if bet_match_type_id == cfd_type_id:
# Recognise tx0, tx1 as the bull, bear (in the right direction).
if bet_match['tx0_bet_type'] < bet_match['tx1_bet_type']:
bull_address = bet_match['tx0_address']
bear_address = bet_match['tx1_address']
bull_escrow = bet_match['forward_quantity']
bear_escrow = bet_match['backward_quantity']
else:
bull_address = bet_match['tx1_address']
bear_address = bet_match['tx0_address']
bull_escrow = bet_match['backward_quantity']
bear_escrow = bet_match['forward_quantity']
leverage = Fraction(bet_match['leverage'], 5040)
initial_value = bet_match['initial_value']
bear_credit = bear_escrow - (value - initial_value) * leverage * config.UNIT
bull_credit = escrow_less_fee - bear_credit
bear_credit = round(bear_credit)
bull_credit = round(bull_credit)
# Liquidate, as necessary.
if bull_credit >= escrow_less_fee or bull_credit <= 0:
if bull_credit >= escrow_less_fee:
bull_credit = escrow_less_fee
bear_credit = 0
bet_match_status = 'settled: liquidated for bull'
util.credit(db, tx['block_index'], bull_address, config.XBJ, bull_credit, action='bet {}'.format(bet_match_status), event=tx['tx_hash'])
elif bull_credit <= 0:
bull_credit = 0
bear_credit = escrow_less_fee
bet_match_status = 'settled: liquidated for bear'
util.credit(db, tx['block_index'], bear_address, config.XBJ, bear_credit, action='bet {}'.format(bet_match_status), event=tx['tx_hash'])
# Pay fee to feed.
util.credit(db, tx['block_index'], bet_match['feed_address'], config.XBJ, fee, action='feed fee', event=tx['tx_hash'])
# For logging purposes.
bindings = {
'bet_match_id': bet_match_id,
'bet_match_type_id': bet_match_type_id,
'block_index': tx['block_index'],
'settled': False,
'bull_credit': bull_credit,
'bear_credit': bear_credit,
'winner': None,
'escrow_less_fee': None,
'fee': fee
}
sql='insert into bet_match_resolutions values(:bet_match_id, :bet_match_type_id, :block_index, :settled, :bull_credit, :bear_credit, :winner, :escrow_less_fee, :fee)'
cursor.execute(sql, bindings)
# Settle (if not liquidated).
elif timestamp >= bet_match['deadline']:
bet_match_status = 'settled'
util.credit(db, tx['block_index'], bull_address, config.XBJ, bull_credit, action='bet {}'.format(bet_match_status), event=tx['tx_hash'])
util.credit(db, tx['block_index'], bear_address, config.XBJ, bear_credit, action='bet {}'.format(bet_match_status), event=tx['tx_hash'])
# Pay fee to feed.
util.credit(db, tx['block_index'], bet_match['feed_address'], config.XBJ, fee, action='feed fee', event=tx['tx_hash'])
# For logging purposes.
bindings = {
'bet_match_id': bet_match_id,
'bet_match_type_id': bet_match_type_id,
'block_index': tx['block_index'],
'settled': True,
'bull_credit': bull_credit,
'bear_credit': bear_credit,
'winner': None,
'escrow_less_fee': None,
'fee': fee
}
sql='insert into bet_match_resolutions values(:bet_match_id, :bet_match_type_id, :block_index, :settled, :bull_credit, :bear_credit, :winner, :escrow_less_fee, :fee)'
cursor.execute(sql, bindings)
# Equal[/NotEqual] bet.
elif bet_match_type_id == equal_type_id and timestamp >= bet_match['deadline']:
# Recognise tx0, tx1 as the bull, bear (in the right direction).
if bet_match['tx0_bet_type'] < bet_match['tx1_bet_type']:
equal_address = bet_match['tx0_address']
notequal_address = bet_match['tx1_address']
else:
equal_address = bet_match['tx1_address']
notequal_address = bet_match['tx0_address']
# Decide who won, and credit appropriately.
if value == bet_match['target_value']:
winner = 'Equal'
bet_match_status = 'settled: for equal'
util.credit(db, tx['block_index'], equal_address, config.XBJ, escrow_less_fee, action='bet {}'.format(bet_match_status), event=tx['tx_hash'])
else:
winner = 'NotEqual'
bet_match_status = 'settled: for notequal'
util.credit(db, tx['block_index'], notequal_address, config.XBJ, escrow_less_fee, action='bet {}'.format(bet_match_status), event=tx['tx_hash'])
# Pay fee to feed.
util.credit(db, tx['block_index'], bet_match['feed_address'], config.XBJ, fee, action='feed fee', event=tx['tx_hash'])
# For logging purposes.
bindings = {
'bet_match_id': bet_match_id,
'bet_match_type_id': bet_match_type_id,
'block_index': tx['block_index'],
'settled': None,
'bull_credit': None,
'bear_credit': None,
'winner': winner,
'escrow_less_fee': escrow_less_fee,
'fee': fee
}
sql='insert into bet_match_resolutions values(:bet_match_id, :bet_match_type_id, :block_index, :settled, :bull_credit, :bear_credit, :winner, :escrow_less_fee, :fee)'
cursor.execute(sql, bindings)
# Update the bet match’s status.
if bet_match_status:
bindings = {
'status': bet_match_status,
'bet_match_id': bet_match['tx0_hash'] + bet_match['tx1_hash']
}
sql='update bet_matches set status = :status where id = :bet_match_id'
cursor.execute(sql, bindings)
util.message(db, tx['block_index'], 'update', 'bet_matches', bindings)
broadcast_bet_match_cursor.close()
cursor.close()
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4<|fim▁end|> | # Cancel Open Bets? |
<|file_name|>Mon_ToBeads_3.py<|end_file_name|><|fim▁begin|>import sys
# where RobotControl.py, etc lives
sys.path.append('/home/pi/Desktop/ADL/YeastRobot/PythonLibrary')
from RobotControl import *
#################################
### Define Deck Layout
#################################
deck="""\
DW96P DW96P DW96W DW96W BLANK
DW96P DW96P DW96W DW96W BLANK
DW96P DW96P DW96W DW96W BLANK
BLANK BLANK BLANK BLANK BLANK
"""
# 2 3 4 5 6
# note the 1st user defined column is "2" not zero or one, since tips are at 0 & 1
# This takes ~36m to run in total
##################################
# Assume there is a Pellet in each well
OffsetDict={0: 'UL', 1: 'UR', 2: 'LL', 3: 'LR'}
# read in deck, etc
DefineDeck(deck)
printDeck()<|fim▁hole|>for row in [0,1,2]:
for offset in [0,1,2,3]:
#get tips
CurrentTipPosition = retrieveTips(CurrentTipPosition)
extraSeatTips()
adjusted_depth = 94 + row
#aspirate 300 ul + 250 ul of Tween20 (C2) -> discard to DW96W at C4 (x2)
position(row,2,position = OffsetDict[offset])
aspirate(300,depth=adjusted_depth - 4,speed=50, mix=0)
position(row,4, position = OffsetDict[offset])
dispense(300, depth=adjusted_depth - 18, speed=50)
position(row,2,position = OffsetDict[offset])
aspirate(250,depth=adjusted_depth + 2,speed=50, mix=0)
position(row,4, position = OffsetDict[offset])
dispense(250, depth=adjusted_depth - 28, speed=50)
# pick up 2 * 200ul of SDS from C5, add to C2
position(row,5,position = OffsetDict[offset])
aspirate(200,depth=adjusted_depth + 2,speed=50, mix=0)
position(row,2,position = OffsetDict[offset])
dispense(200, depth=adjusted_depth + 3, speed=100)
position(row,5,position = OffsetDict[offset])
aspirate(200,depth=adjusted_depth + 2,speed=50, mix=0)
position(row,2,position = OffsetDict[offset])
dispense(200, depth=adjusted_depth - 2, speed=100)
# initial mix
position(row,2,position = OffsetDict[offset])
mix(300,adjusted_depth - 4,100,5)
# 2 * 200 being careful of beads preloaded in 96 well plate
# from DW96 to DW96 loaded with beads
position(row,2,position = OffsetDict[offset])
aspirate(200, depth=adjusted_depth + 1,speed=50, mix=0)
position(row,3,position = OffsetDict[offset])
dispense(200, depth=adjusted_depth - 25, speed=50)
position(row,2,position = OffsetDict[offset])
mix(300,adjusted_depth + 5,100,5)
position(row,2,position = OffsetDict[offset])
aspirate(200, depth=adjusted_depth + 6,speed=50, mix=0)
position(row,3,position = OffsetDict[offset])
dispense(200, depth=adjusted_depth - 39, speed=50)
#disposeTips()
manualDisposeTips()
position(0,0)
ShutDownRobot()
quit()<|fim▁end|> | InitializeRobot()
CurrentTipPosition = 1
|
<|file_name|>lint-stability.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:lint_stability.rs
// aux-build:inherited_stability.rs
// aux-build:stability_cfg1.rs
// aux-build:stability_cfg2.rs
#![deny(deprecated)]
#![allow(dead_code)]
#![feature(staged_api)]
#![staged_api]
#[macro_use]
extern crate lint_stability;
mod cross_crate {
extern crate stability_cfg1;
extern crate stability_cfg2; //~ ERROR use of unstable library feature
use lint_stability::*;
fn test() {
type Foo = MethodTester;
let foo = MethodTester;
deprecated(); //~ ERROR use of deprecated item
foo.method_deprecated(); //~ ERROR use of deprecated item
Foo::method_deprecated(&foo); //~ ERROR use of deprecated item
<Foo>::method_deprecated(&foo); //~ ERROR use of deprecated item
foo.trait_deprecated(); //~ ERROR use of deprecated item
Trait::trait_deprecated(&foo); //~ ERROR use of deprecated item
<Foo>::trait_deprecated(&foo); //~ ERROR use of deprecated item
<Foo as Trait>::trait_deprecated(&foo); //~ ERROR use of deprecated item
deprecated_text(); //~ ERROR use of deprecated item: text
foo.method_deprecated_text(); //~ ERROR use of deprecated item: text
Foo::method_deprecated_text(&foo); //~ ERROR use of deprecated item: text
<Foo>::method_deprecated_text(&foo); //~ ERROR use of deprecated item: text
foo.trait_deprecated_text(); //~ ERROR use of deprecated item: text
Trait::trait_deprecated_text(&foo); //~ ERROR use of deprecated item: text
<Foo>::trait_deprecated_text(&foo); //~ ERROR use of deprecated item: text
<Foo as Trait>::trait_deprecated_text(&foo); //~ ERROR use of deprecated item: text
deprecated_unstable(); //~ ERROR use of deprecated item
//~^ ERROR use of unstable library feature
foo.method_deprecated_unstable(); //~ ERROR use of deprecated item
//~^ ERROR use of unstable library feature
Foo::method_deprecated_unstable(&foo); //~ ERROR use of deprecated item
//~^ ERROR use of unstable library feature
<Foo>::method_deprecated_unstable(&foo); //~ ERROR use of deprecated item
//~^ ERROR use of unstable library feature
foo.trait_deprecated_unstable(); //~ ERROR use of deprecated item
//~^ ERROR use of unstable library feature
Trait::trait_deprecated_unstable(&foo); //~ ERROR use of deprecated item
//~^ ERROR use of unstable library feature
<Foo>::trait_deprecated_unstable(&foo); //~ ERROR use of deprecated item
//~^ ERROR use of unstable library feature
<Foo as Trait>::trait_deprecated_unstable(&foo); //~ ERROR use of deprecated item
//~^ ERROR use of unstable library feature
deprecated_unstable_text(); //~ ERROR use of deprecated item: text
//~^ ERROR use of unstable library feature
foo.method_deprecated_unstable_text(); //~ ERROR use of deprecated item: text
//~^ ERROR use of unstable library feature
Foo::method_deprecated_unstable_text(&foo); //~ ERROR use of deprecated item: text
//~^ ERROR use of unstable library feature
<Foo>::method_deprecated_unstable_text(&foo); //~ ERROR use of deprecated item: text
//~^ ERROR use of unstable library feature
foo.trait_deprecated_unstable_text(); //~ ERROR use of deprecated item: text
//~^ ERROR use of unstable library feature
Trait::trait_deprecated_unstable_text(&foo); //~ ERROR use of deprecated item: text
//~^ ERROR use of unstable library feature
<Foo>::trait_deprecated_unstable_text(&foo); //~ ERROR use of deprecated item: text
//~^ ERROR use of unstable library feature
<Foo as Trait>::trait_deprecated_unstable_text(&foo); //~ ERROR use of deprecated item: text
//~^ ERROR use of unstable library feature
unstable(); //~ ERROR use of unstable library feature
foo.method_unstable(); //~ ERROR use of unstable library feature
Foo::method_unstable(&foo); //~ ERROR use of unstable library feature
<Foo>::method_unstable(&foo); //~ ERROR use of unstable library feature
foo.trait_unstable(); //~ ERROR use of unstable library feature
Trait::trait_unstable(&foo); //~ ERROR use of unstable library feature
<Foo>::trait_unstable(&foo); //~ ERROR use of unstable library feature
<Foo as Trait>::trait_unstable(&foo); //~ ERROR use of unstable library feature
unstable_text();
//~^ ERROR use of unstable library feature 'test_feature': text
foo.method_unstable_text();
//~^ ERROR use of unstable library feature 'test_feature': text
Foo::method_unstable_text(&foo);
//~^ ERROR use of unstable library feature 'test_feature': text
<Foo>::method_unstable_text(&foo);
//~^ ERROR use of unstable library feature 'test_feature': text
foo.trait_unstable_text();
//~^ ERROR use of unstable library feature 'test_feature': text
Trait::trait_unstable_text(&foo);
//~^ ERROR use of unstable library feature 'test_feature': text
<Foo>::trait_unstable_text(&foo);
//~^ ERROR use of unstable library feature 'test_feature': text
<Foo as Trait>::trait_unstable_text(&foo);
//~^ ERROR use of unstable library feature 'test_feature': text
stable();
foo.method_stable();
Foo::method_stable(&foo);
<Foo>::method_stable(&foo);
foo.trait_stable();
Trait::trait_stable(&foo);
<Foo>::trait_stable(&foo);
<Foo as Trait>::trait_stable(&foo);
stable_text();
foo.method_stable_text();
Foo::method_stable_text(&foo);
<Foo>::method_stable_text(&foo);
foo.trait_stable_text();
Trait::trait_stable_text(&foo);
<Foo>::trait_stable_text(&foo);
<Foo as Trait>::trait_stable_text(&foo);
let _ = DeprecatedStruct { i: 0 }; //~ ERROR use of deprecated item
let _ = DeprecatedUnstableStruct {
//~^ ERROR use of deprecated item
//~^^ ERROR use of unstable library feature
i: 0 //~ ERROR use of deprecated item
};
let _ = UnstableStruct { i: 0 }; //~ ERROR use of unstable library feature
let _ = StableStruct { i: 0 };
let _ = DeprecatedUnitStruct; //~ ERROR use of deprecated item
let _ = DeprecatedUnstableUnitStruct; //~ ERROR use of deprecated item
//~^ ERROR use of unstable library feature
let _ = UnstableUnitStruct; //~ ERROR use of unstable library feature
let _ = StableUnitStruct;
let _ = Enum::DeprecatedVariant; //~ ERROR use of deprecated item
let _ = Enum::DeprecatedUnstableVariant; //~ ERROR use of deprecated item
//~^ ERROR use of unstable library feature
let _ = Enum::UnstableVariant; //~ ERROR use of unstable library feature
let _ = Enum::StableVariant;
let _ = DeprecatedTupleStruct (1); //~ ERROR use of deprecated item
let _ = DeprecatedUnstableTupleStruct (1); //~ ERROR use of deprecated item
//~^ ERROR use of unstable library feature
let _ = UnstableTupleStruct (1); //~ ERROR use of unstable library feature
let _ = StableTupleStruct (1);
// At the moment, the lint checker only checks stability
// in the arguments of macros.
// Eventually, we will want to lint the contents of the
// macro in the module *defining* it. Also, stability levels
// on macros themselves are not yet linted.
macro_test_arg!(deprecated_text()); //~ ERROR use of deprecated item: text
macro_test_arg!(deprecated_unstable_text()); //~ ERROR use of deprecated item: text
//~^ ERROR use of unstable library feature
macro_test_arg!(macro_test_arg!(deprecated_text())); //~ ERROR use of deprecated item: text
}
fn test_method_param<Foo: Trait>(foo: Foo) {
foo.trait_deprecated(); //~ ERROR use of deprecated item
Trait::trait_deprecated(&foo); //~ ERROR use of deprecated item
<Foo>::trait_deprecated(&foo); //~ ERROR use of deprecated item
<Foo as Trait>::trait_deprecated(&foo); //~ ERROR use of deprecated item
foo.trait_deprecated_text(); //~ ERROR use of deprecated item: text
Trait::trait_deprecated_text(&foo); //~ ERROR use of deprecated item: text
<Foo>::trait_deprecated_text(&foo); //~ ERROR use of deprecated item: text
<Foo as Trait>::trait_deprecated_text(&foo); //~ ERROR use of deprecated item: text
foo.trait_deprecated_unstable(); //~ ERROR use of deprecated item
//~^ ERROR use of unstable library feature
Trait::trait_deprecated_unstable(&foo); //~ ERROR use of deprecated item
//~^ ERROR use of unstable library feature
<Foo>::trait_deprecated_unstable(&foo); //~ ERROR use of deprecated item
//~^ ERROR use of unstable library feature
<Foo as Trait>::trait_deprecated_unstable(&foo); //~ ERROR use of deprecated item
//~^ ERROR use of unstable library feature
foo.trait_deprecated_unstable_text(); //~ ERROR use of deprecated item: text
//~^ ERROR use of unstable library feature
Trait::trait_deprecated_unstable_text(&foo); //~ ERROR use of deprecated item: text
//~^ ERROR use of unstable library feature
<Foo>::trait_deprecated_unstable_text(&foo); //~ ERROR use of deprecated item: text
//~^ ERROR use of unstable library feature
<Foo as Trait>::trait_deprecated_unstable_text(&foo); //~ ERROR use of deprecated item: text
//~^ ERROR use of unstable library feature
foo.trait_unstable(); //~ ERROR use of unstable library feature
Trait::trait_unstable(&foo); //~ ERROR use of unstable library feature
<Foo>::trait_unstable(&foo); //~ ERROR use of unstable library feature
<Foo as Trait>::trait_unstable(&foo); //~ ERROR use of unstable library feature
foo.trait_unstable_text();
//~^ ERROR use of unstable library feature 'test_feature': text
Trait::trait_unstable_text(&foo);
//~^ ERROR use of unstable library feature 'test_feature': text
<Foo>::trait_unstable_text(&foo);
//~^ ERROR use of unstable library feature 'test_feature': text
<Foo as Trait>::trait_unstable_text(&foo);
//~^ ERROR use of unstable library feature 'test_feature': text
foo.trait_stable();
Trait::trait_stable(&foo);
<Foo>::trait_stable(&foo);
<Foo as Trait>::trait_stable(&foo);
}
fn test_method_object(foo: &Trait) {
foo.trait_deprecated(); //~ ERROR use of deprecated item
foo.trait_deprecated_text(); //~ ERROR use of deprecated item: text
foo.trait_deprecated_unstable(); //~ ERROR use of deprecated item
//~^ ERROR use of unstable library feature
foo.trait_deprecated_unstable_text(); //~ ERROR use of deprecated item: text
//~^ ERROR use of unstable library feature
foo.trait_unstable(); //~ ERROR use of unstable library feature
foo.trait_unstable_text();
//~^ ERROR use of unstable library feature 'test_feature': text
foo.trait_stable();
}
struct S;
impl UnstableTrait for S { } //~ ERROR use of unstable library feature
trait LocalTrait : UnstableTrait { } //~ ERROR use of unstable library feature
impl Trait for S {
fn trait_stable(&self) {}
fn trait_unstable(&self) {} //~ ERROR use of unstable library feature
}
}
mod inheritance {
extern crate inherited_stability; //~ ERROR use of unstable library feature
use self::inherited_stability::*; //~ ERROR use of unstable library feature
fn test_inheritance() {
unstable(); //~ ERROR use of unstable library feature
stable();
stable_mod::unstable(); //~ ERROR use of unstable library feature
stable_mod::stable();
unstable_mod::deprecated(); //~ ERROR use of deprecated item
unstable_mod::unstable(); //~ ERROR use of unstable library feature
let _ = Unstable::UnstableVariant; //~ ERROR use of unstable library feature
let _ = Unstable::StableVariant;
let x: usize = 0;
x.unstable(); //~ ERROR use of unstable library feature
x.stable();
}
}
mod this_crate {
#[unstable(feature = "test_feature", issue = "0")]
#[deprecated(since = "1.0.0")]
pub fn deprecated() {}
#[unstable(feature = "test_feature", issue = "0")]
#[deprecated(since = "1.0.0", reason = "text")]
pub fn deprecated_text() {}
#[unstable(feature = "test_feature", issue = "0")]
pub fn unstable() {}
#[unstable(feature = "test_feature", reason = "text", issue = "0")]
pub fn unstable_text() {}
#[stable(feature = "rust1", since = "1.0.0")]
pub fn stable() {}
#[stable(feature = "rust1", since = "1.0.0", reason = "text")]
pub fn stable_text() {}
#[stable(feature = "rust1", since = "1.0.0")]
pub struct MethodTester;
impl MethodTester {
#[unstable(feature = "test_feature", issue = "0")]
#[deprecated(since = "1.0.0")]
pub fn method_deprecated(&self) {}
#[unstable(feature = "test_feature", issue = "0")]
#[deprecated(since = "1.0.0", reason = "text")]
pub fn method_deprecated_text(&self) {}
#[unstable(feature = "test_feature", issue = "0")]
pub fn method_unstable(&self) {}
#[unstable(feature = "test_feature", reason = "text", issue = "0")]
pub fn method_unstable_text(&self) {}
#[stable(feature = "rust1", since = "1.0.0")]
pub fn method_stable(&self) {}
#[stable(feature = "rust1", since = "1.0.0", reason = "text")]
pub fn method_stable_text(&self) {}
}
pub trait Trait {
#[unstable(feature = "test_feature", issue = "0")]
#[deprecated(since = "1.0.0")]
fn trait_deprecated(&self) {}
#[unstable(feature = "test_feature", issue = "0")]
#[deprecated(since = "1.0.0", reason = "text")]
fn trait_deprecated_text(&self) {}
#[unstable(feature = "test_feature", issue = "0")]
fn trait_unstable(&self) {}
#[unstable(feature = "test_feature", reason = "text", issue = "0")]
fn trait_unstable_text(&self) {}
#[stable(feature = "rust1", since = "1.0.0")]
fn trait_stable(&self) {}
#[stable(feature = "rust1", since = "1.0.0", reason = "text")]
fn trait_stable_text(&self) {}
}
impl Trait for MethodTester {}
#[unstable(feature = "test_feature", issue = "0")]
#[deprecated(since = "1.0.0")]
pub struct DeprecatedStruct {
#[stable(feature = "test_feature", since = "1.0.0")] i: isize
}
#[unstable(feature = "test_feature", issue = "0")]
pub struct UnstableStruct {
#[stable(feature = "test_feature", since = "1.0.0")] i: isize
}
#[stable(feature = "rust1", since = "1.0.0")]
pub struct StableStruct {
#[stable(feature = "test_feature", since = "1.0.0")] i: isize
}
#[unstable(feature = "test_feature", issue = "0")]
#[deprecated(since = "1.0.0")]
pub struct DeprecatedUnitStruct;
#[unstable(feature = "test_feature", issue = "0")]
pub struct UnstableUnitStruct;
#[stable(feature = "rust1", since = "1.0.0")]
pub struct StableUnitStruct;
pub enum Enum {
#[unstable(feature = "test_feature", issue = "0")]
#[deprecated(since = "1.0.0")]
DeprecatedVariant,
#[unstable(feature = "test_feature", issue = "0")]
UnstableVariant,
#[stable(feature = "rust1", since = "1.0.0")]
StableVariant,
}
#[unstable(feature = "test_feature", issue = "0")]
#[deprecated(since = "1.0.0")]
pub struct DeprecatedTupleStruct(isize);
#[unstable(feature = "test_feature", issue = "0")]
pub struct UnstableTupleStruct(isize);
#[stable(feature = "rust1", since = "1.0.0")]
pub struct StableTupleStruct(isize);
fn test() {
// Only the deprecated cases of the following should generate
// errors, because other stability attributes now have meaning
// only *across* crates, not within a single crate.
type Foo = MethodTester;
let foo = MethodTester;
deprecated(); //~ ERROR use of deprecated item
foo.method_deprecated(); //~ ERROR use of deprecated item
Foo::method_deprecated(&foo); //~ ERROR use of deprecated item
<Foo>::method_deprecated(&foo); //~ ERROR use of deprecated item
foo.trait_deprecated(); //~ ERROR use of deprecated item
Trait::trait_deprecated(&foo); //~ ERROR use of deprecated item
<Foo>::trait_deprecated(&foo); //~ ERROR use of deprecated item
<Foo as Trait>::trait_deprecated(&foo); //~ ERROR use of deprecated item
deprecated_text(); //~ ERROR use of deprecated item: text
foo.method_deprecated_text(); //~ ERROR use of deprecated item: text
Foo::method_deprecated_text(&foo); //~ ERROR use of deprecated item: text
<Foo>::method_deprecated_text(&foo); //~ ERROR use of deprecated item: text
foo.trait_deprecated_text(); //~ ERROR use of deprecated item: text
Trait::trait_deprecated_text(&foo); //~ ERROR use of deprecated item: text
<Foo>::trait_deprecated_text(&foo); //~ ERROR use of deprecated item: text
<Foo as Trait>::trait_deprecated_text(&foo); //~ ERROR use of deprecated item: text
unstable();
foo.method_unstable();
Foo::method_unstable(&foo);
<Foo>::method_unstable(&foo);
foo.trait_unstable();
Trait::trait_unstable(&foo);
<Foo>::trait_unstable(&foo);
<Foo as Trait>::trait_unstable(&foo);
unstable_text();
foo.method_unstable_text();
Foo::method_unstable_text(&foo);
<Foo>::method_unstable_text(&foo);
foo.trait_unstable_text();
Trait::trait_unstable_text(&foo);
<Foo>::trait_unstable_text(&foo);
<Foo as Trait>::trait_unstable_text(&foo);<|fim▁hole|> <Foo>::method_stable(&foo);
foo.trait_stable();
Trait::trait_stable(&foo);
<Foo>::trait_stable(&foo);
<Foo as Trait>::trait_stable(&foo);
stable_text();
foo.method_stable_text();
Foo::method_stable_text(&foo);
<Foo>::method_stable_text(&foo);
foo.trait_stable_text();
Trait::trait_stable_text(&foo);
<Foo>::trait_stable_text(&foo);
<Foo as Trait>::trait_stable_text(&foo);
let _ = DeprecatedStruct {
//~^ ERROR use of deprecated item
i: 0 //~ ERROR use of deprecated item
};
let _ = UnstableStruct { i: 0 };
let _ = StableStruct { i: 0 };
let _ = DeprecatedUnitStruct; //~ ERROR use of deprecated item
let _ = UnstableUnitStruct;
let _ = StableUnitStruct;
let _ = Enum::DeprecatedVariant; //~ ERROR use of deprecated item
let _ = Enum::UnstableVariant;
let _ = Enum::StableVariant;
let _ = DeprecatedTupleStruct (1); //~ ERROR use of deprecated item
let _ = UnstableTupleStruct (1);
let _ = StableTupleStruct (1);
}
fn test_method_param<Foo: Trait>(foo: Foo) {
foo.trait_deprecated(); //~ ERROR use of deprecated item
Trait::trait_deprecated(&foo); //~ ERROR use of deprecated item
<Foo>::trait_deprecated(&foo); //~ ERROR use of deprecated item
<Foo as Trait>::trait_deprecated(&foo); //~ ERROR use of deprecated item
foo.trait_deprecated_text(); //~ ERROR use of deprecated item: text
Trait::trait_deprecated_text(&foo); //~ ERROR use of deprecated item: text
<Foo>::trait_deprecated_text(&foo); //~ ERROR use of deprecated item: text
<Foo as Trait>::trait_deprecated_text(&foo); //~ ERROR use of deprecated item: text
foo.trait_unstable();
Trait::trait_unstable(&foo);
<Foo>::trait_unstable(&foo);
<Foo as Trait>::trait_unstable(&foo);
foo.trait_unstable_text();
Trait::trait_unstable_text(&foo);
<Foo>::trait_unstable_text(&foo);
<Foo as Trait>::trait_unstable_text(&foo);
foo.trait_stable();
Trait::trait_stable(&foo);
<Foo>::trait_stable(&foo);
<Foo as Trait>::trait_stable(&foo);
}
fn test_method_object(foo: &Trait) {
foo.trait_deprecated(); //~ ERROR use of deprecated item
foo.trait_deprecated_text(); //~ ERROR use of deprecated item: text
foo.trait_unstable();
foo.trait_unstable_text();
foo.trait_stable();
}
#[unstable(feature = "test_feature", issue = "0")]
#[deprecated(since = "1.0.0")]
fn test_fn_body() {
fn fn_in_body() {}
fn_in_body();
}
impl MethodTester {
#[unstable(feature = "test_feature", issue = "0")]
#[deprecated(since = "1.0.0")]
fn test_method_body(&self) {
fn fn_in_body() {}
fn_in_body();
}
}
#[unstable(feature = "test_feature", issue = "0")]
#[deprecated(since = "1.0.0")]
pub trait DeprecatedTrait {
fn dummy(&self) { }
}
struct S;
impl DeprecatedTrait for S { } //~ ERROR use of deprecated item
trait LocalTrait : DeprecatedTrait { } //~ ERROR use of deprecated item
}
fn main() {}<|fim▁end|> |
stable();
foo.method_stable();
Foo::method_stable(&foo); |
<|file_name|>multi_cycle_lr.py<|end_file_name|><|fim▁begin|># ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2020, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import copy
from torch.optim.lr_scheduler import OneCycleLR
from nupic.research.frameworks.vernon.experiment_utils import create_lr_scheduler
class MultiCycleLR:
"""
Composes a sequence of OneCycleLR regimes, allowing different configurations
for each cycle. This infers args like total_batches, epochs, and also the
div_factor for subsequent cycles.
"""
def setup_experiment(self, config):
"""
:param config:
- multi_cycle_lr_args: A list of (epoch, dict) pairs.
The dicts don't need to include epoch
counts, this is inferred from the config.
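Example (a sketch; the epoch boundaries and OneCycleLR
settings are hypothetical)::

    config["multi_cycle_lr_args"] = [
        (0, dict(max_lr=6.0, pct_start=0.3,
                 final_div_factor=1000)),
        (35, dict(max_lr=1.0, pct_start=0.1,
                  final_div_factor=1000)),
    ]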
"""
config = copy.deepcopy(config)
ignored_class = config.pop("lr_scheduler_class", None)
ignored_args = config.pop("lr_scheduler_args", None)
config["lr_scheduler_step_every_batch"] = True
super().setup_experiment(config)
if ignored_class is not None and ignored_class != OneCycleLR:
self.logger.warning("Ignoring lr_scheduler_class, using OneCycleLR")
if ignored_args is not None and len(ignored_args) > 0:
self.logger.warning("Ignoring lr_scheduler_args, using "
"multi_cycle_lr_args")
# Insert epoch counts and div_factors
improved_args = {}
multi_cycle_lr_args = sorted(config["multi_cycle_lr_args"],
key=lambda x: x[0])
for i, (start_epoch, cycle_config) in enumerate(multi_cycle_lr_args):
if i + 1 < len(multi_cycle_lr_args):
end_epoch = multi_cycle_lr_args[i + 1][0]
else:
end_epoch = config["epochs"]
cycle_config = copy.deepcopy(cycle_config)
cycle_config["epochs"] = end_epoch - start_epoch
# Default behavior: no sudden change in learning rate between
# cycles.
if "div_factor" not in cycle_config and i > 0:
prev_cycle_config = multi_cycle_lr_args[i - 1][1]
if "final_div_factor" in prev_cycle_config:
cycle_config["div_factor"] = \
prev_cycle_config["final_div_factor"]
improved_args[start_epoch] = cycle_config
self.multi_cycle_args_by_epoch = improved_args
self.logger.info("MultiCycleLR regime: "
f"{self.multi_cycle_args_by_epoch}")
# Set it immediately, rather than waiting for the pre_epoch, in case a
# restore is occurring.
args = self.multi_cycle_args_by_epoch[0]
self.lr_scheduler = create_lr_scheduler(
optimizer=self.optimizer,
lr_scheduler_class=OneCycleLR,
lr_scheduler_args=args,
steps_per_epoch=self.total_batches)
def pre_epoch(self):
super().pre_epoch()
if self.current_epoch != 0 and \
self.current_epoch in self.multi_cycle_args_by_epoch:
args = self.multi_cycle_args_by_epoch[self.current_epoch]
self.lr_scheduler = create_lr_scheduler(
optimizer=self.optimizer,
lr_scheduler_class=OneCycleLR,
lr_scheduler_args=args,
steps_per_epoch=self.total_batches)
@classmethod
def get_execution_order(cls):
eo = super().get_execution_order()
eo["setup_experiment"].insert(<|fim▁hole|> eo["setup_experiment"].append("MultiCycleLR: Initialize")
eo["pre_epoch"].append("MultiCycleLR: Maybe initialize lr_scheduler")
return eo<|fim▁end|> | 0, "MultiCycleLR: Prevent LR scheduler from being constructed") |
<|file_name|>importotp.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# 2017-01-23 Cornelius Kölbel <[email protected]>
# Avoid XML bombs
# 2016-07-17 Cornelius Kölbel <[email protected]>
# Add GPG encrpyted import
# 2016-01-16 Cornelius Kölbel <[email protected]>
# Add PSKC import with pre shared key
# 2015-05-28 Cornelius Kölbel <[email protected]>
# Add PSKC import
# 2014-12-11 Cornelius Kölbel <[email protected]>
# code cleanup during flask migration
# 2014-10-27 Cornelius Kölbel <[email protected]>
# add parsePSKCdata
# 2014-05-08 Cornelius Kölbel
# License: AGPLv3
# contact: http://www.privacyidea.org
#
# Copyright (C) 2010 - 2014 LSE Leading Security Experts GmbH
# License: AGPLv3
# contact: http://www.linotp.org
# http://www.lsexperts.de
# [email protected]
#
# This code is free software; you can redistribute it and/or
# modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE
# License as published by the Free Software Foundation; either
# version 3 of the License, or any later version.
#
# This code is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU AFFERO GENERAL PUBLIC LICENSE for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
'''This file is part of the privacyidea service
It is used for importing SafeNet (formerly Aladdin)
XML files that hold the OTP secrets for eToken PASS.
'''
import defusedxml.ElementTree as etree
import re
import binascii
import base64
from privacyidea.lib.utils import modhex_decode
from privacyidea.lib.utils import modhex_encode
from privacyidea.lib.log import log_with
from privacyidea.lib.crypto import aes_decrypt
from Crypto.Cipher import AES
from bs4 import BeautifulSoup
import traceback
from passlib.utils.pbkdf2 import pbkdf2
from privacyidea.lib.utils import to_utf8
import gnupg
import logging
log = logging.getLogger(__name__)
def _create_static_password(key_hex):
'''
According to yubikey manual 5.5.5 the static-ticket is the same
algorithm with no moving factors.
The msg_hex that is encoded with the AES key is
'000000000000ffffffffffffffff0f2e'
'''
msg_hex = "000000000000ffffffffffffffff0f2e"
msg_bin = binascii.unhexlify(msg_hex)
aes = AES.new(binascii.unhexlify(key_hex), AES.MODE_ECB)
password_bin = aes.encrypt(msg_bin)
password = modhex_encode(password_bin)
return password
class ImportException(Exception):
def __init__(self, description):
self.description = description
def __str__(self):
return ('{0!s}'.format(self.description))
def getTagName(elem):
match = re.match("^({.*?})(.*)$", elem.tag)
if match:
return match.group(2)
else:
return elem.tag
@log_with(log)
def parseOATHcsv(csv):
'''
(#653)
This function parses CSV data for OATH tokens.
The file format is
serial, key, [hotp,totp], [6,8], [30|60],
serial, key, ocra, [ocra-suite]
It imports sha1 hotp or totp tokens.
It can also import ocra tokens.
The default token type is hotp.
If totp is set, the default time step is 30 seconds.
If ocra is set, an ocra-suite is required; otherwise the default
ocra-suite is used.
It returns a dictionary:
{
serial: { 'type' : xxxx,
'otpkey' : xxxx,
'timeStep' : xxxx,
'otplen' : xxx,
'ocrasuite' : xxx }
}
'''
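# Illustrative rows in this format (serials and hex keys are made up):
#   tok001, 3132333435363738393031323334353637383930, hotp, 6
#   tok002, 3132333435363738393031323334353637383930, totp, 6, 30
#   tok003, 3132333435363738393031323334353637383930, ocra, OCRA-1:HOTP-SHA1-6:QN08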
TOKENS = {}
csv_array = csv.split('\n')
log.debug("the file contains {0:d} tokens.".format(len(csv_array)))
for line in csv_array:
l = line.split(',')
serial = ""
key = ""
ttype = "hotp"
seconds = 30
otplen = 6
hashlib = "sha1"
ocrasuite = ""
serial = l[0].strip()
# check for empty line
if len(serial) > 0 and not serial.startswith('#'):
if len(l) >= 2:
key = l[1].strip()
if len(key) == 32:
hashlib = "sha256"
else:
log.error("the line {0!s} did not contain a hotp key".format(line))
continue
# ttype
if len(l) >= 3:
ttype = l[2].strip().lower()
# otplen or ocrasuite
if len(l) >= 4:
if ttype != "ocra":
otplen = int(l[3].strip())
elif ttype == "ocra":
ocrasuite = l[3].strip()
# timeStep
if len(l) >= 5:
seconds = int(l[4].strip())
log.debug("read the line |{0!s}|{1!s}|{2!s}|{3:d} {4!s}|{5:d}|".format(serial, key, ttype, otplen, ocrasuite, seconds))
TOKENS[serial] = {'type': ttype,
'otpkey': key,
'timeStep': seconds,
'otplen': otplen,
'hashlib': hashlib,
'ocrasuite': ocrasuite
}
return TOKENS
@log_with(log)
def parseYubicoCSV(csv):
'''
This function reads the CSV data as created by the Yubico personalization
GUI.
Traditional Format:
Yubico OTP,12/11/2013 11:10,1,vvgutbiedkvi,
ab86c04de6a3,d26a7c0f85fdda28bd816e406342b214,,,0,0,0,0,0,0,0,0,0,0
OATH-HOTP,11.12.13 18:55,1,cccccccccccc,,
916821d3a138bf855e70069605559a206ba854cd,,,0,0,0,6,0,0,0,0,0,0
Static Password,11.12.13 19:08,1,,d5a3d50327dc,
0e8e37b0e38b314a56748c030f58d21d,,,0,0,0,0,0,0,0,0,0,0
Yubico Format:
# OATH mode
508326,,0,69cfb9202438ca68964ec3244bfa4843d073a43b,,2013-12-12T08:41:07,
1382042,,0,bf7efc1c8b6f23604930a9ce693bdd6c3265be00,,2013-12-12T08:41:17,
# Yubico mode
508326,cccccccccccc,83cebdfb7b93,a47c5bf9c152202f577be6721c0113af,,
2013-12-12T08:43:17,
# static mode
508326,,,9e2fd386224a7f77e9b5aee775464033,,2013-12-12T08:44:34,
column 0: serial
column 1: public ID in yubico mode
column 2: private ID in yubico mode, 0 in OATH mode, blank in static mode
column 3: AES key
BUMMER: The Yubico Format does not contain the information
which slot of the token was written.<|fim▁hole|> returned dictionary needs the token serial as a key.
It returns a dictionary with the new tokens to be created:
{
serial: { 'type' : yubico,
'otpkey' : xxxx,
'otplen' : xxx,
'description' : xxx
}
}
'''
TOKENS = {}
csv_array = csv.split('\n')
log.debug("the file contains {0:d} tokens.".format(len(csv_array)))
for line in csv_array:
l = line.split(',')
serial = ""
key = ""
otplen = 32
public_id = ""
slot = ""
if len(l) >= 6:
first_column = l[0].strip()
if first_column.lower() in ["yubico otp",
"oath-hotp",
"static password"]:
# traditional format
typ = l[0].strip()
slot = l[2].strip()
public_id = l[3].strip()
key = l[5].strip()
if public_id == "":
# Usually a "static password" does not have a public ID!
# So we would bail out here for static passwords.
log.warning("No public ID in line {0!r}".format(line))
continue
serial_int = int(binascii.hexlify(modhex_decode(public_id)),
16)
if typ.lower() == "yubico otp":
ttype = "yubikey"
otplen = 32 + len(public_id)
serial = "UBAM{0:08d}_{1!s}".format(serial_int, slot)
TOKENS[serial] = {'type': ttype,
'otpkey': key,
'otplen': otplen,
'description': public_id
}
elif typ.lower() == "oath-hotp":
'''
WARNING: this does not work out at the moment, since the
Yubico GUI either
1. creates a serial in the CSV, but then the serial is
always prefixed! We can not authenticate with this!
2. if it does not prefix the serial there is no serial in
the CSV! We can not import and assign the token!
'''
ttype = "hotp"
otplen = 6
serial = "UBOM{0:08d}_{1!s}".format(serial_int, slot)
TOKENS[serial] = {'type': ttype,
'otpkey': key,
'otplen': otplen,
'description': public_id
}
else:
log.warning("at the moment we do only support Yubico OTP"
" and HOTP: %r" % line)
continue
elif first_column.isdigit():
# first column is a number, (serial number), so we are
# in the yubico format
serial = first_column
# the yubico format does not specify a slot
slot = "X"
key = l[3].strip()
if l[2].strip() == "0":
# HOTP
typ = "hotp"
serial = "UBOM{0!s}_{1!s}".format(serial, slot)
otplen = 6
elif l[2].strip() == "":
# Static
typ = "pw"
serial = "UBSM{0!s}_{1!s}".format(serial, slot)
key = _create_static_password(key)
otplen = len(key)
log.warning("We can not enroll a static mode, since we do"
" not know the private identify and so we do"
" not know the static password.")
continue
else:
# Yubico
typ = "yubikey"
serial = "UBAM{0!s}_{1!s}".format(serial, slot)
public_id = l[1].strip()
otplen = 32 + len(public_id)
TOKENS[serial] = {'type': typ,
'otpkey': key,
'otplen': otplen,
'description': public_id
}
else:
log.warning("the line {0!r} did not contain a enough values".format(line))
continue
return TOKENS
@log_with(log)
def parseSafeNetXML(xml):
"""
This function parses XML data of an Aladdin/SafeNet XML
file for eToken PASS.
It returns a dictionary of
serial : { otpkey , counter, type }
"""
TOKENS = {}
elem_tokencontainer = etree.fromstring(xml)
if getTagName(elem_tokencontainer) != "Tokens":
raise ImportException("No toplevel element Tokens")
for elem_token in list(elem_tokencontainer):
SERIAL = None
COUNTER = None
HMAC = None
DESCRIPTION = None
if getTagName(elem_token) == "Token":
SERIAL = elem_token.get("serial")
log.debug("Found token with serial {0!s}".format(SERIAL))
for elem_tdata in list(elem_token):
tag = getTagName(elem_tdata)
if "ProductName" == tag:
DESCRIPTION = elem_tdata.text
log.debug("The Token with the serial %s has the "
"productname %s" % (SERIAL, DESCRIPTION))
if "Applications" == tag:
for elem_apps in elem_tdata:
if getTagName(elem_apps) == "Application":
for elem_app in elem_apps:
tag = getTagName(elem_app)
if "Seed" == tag:
HMAC = elem_app.text
if "MovingFactor" == tag:
COUNTER = elem_app.text
if not SERIAL:
log.error("Found token without a serial")
else:
if HMAC:
hashlib = "sha1"
if len(HMAC) == 64:
hashlib = "sha256"
TOKENS[SERIAL] = {'otpkey': HMAC,
'counter': COUNTER,
'type': 'hotp',
'hashlib': hashlib
}
else:
log.error("Found token {0!s} without a element 'Seed'".format(
SERIAL))
return TOKENS
def strip_prefix_from_soup(xml_soup):
"""
We strip prefixes from the XML tags.
<pskc:encryption>
</pskc:encryption>
results in:
<encryption>
</encryption>
:param xml_soup: Beautiful Soup XML with tags with prefixes
:type xml_soup: Beautiful Soup object
:return: Beautiful Soup without prefixes in the tags
"""
# strip the prefixes from the tags!
for tag in xml_soup.findAll():
if tag.name.find(":") >= 1:
prefix, name = tag.name.split(":")
tag.name = name
return xml_soup
def derive_key(xml, password):
"""
Derive the encryption key from the password with the parameters given
in the XML soup.
:param xml: The XML
:param password: the password
:return: The derived key, hexlified
"""
if not password:
raise ImportException("The XML KeyContainer specifies a derived "
"encryption key, but no password given!")
keymeth= xml.keycontainer.encryptionkey.derivedkey.keyderivationmethod
derivation_algo = keymeth["algorithm"].split("#")[-1]
if derivation_algo.lower() != "pbkdf2":
raise ImportException("We only support PBKDF2 as Key derivation "
"function!")
salt = keymeth.find("salt").text.strip()
keylength = keymeth.find("keylength").text.strip()
rounds = keymeth.find("iterationcount").text.strip()
r = pbkdf2(to_utf8(password), base64.b64decode(salt), int(rounds),
int(keylength))
return binascii.hexlify(r)
@log_with(log)
def parsePSKCdata(xml_data,
preshared_key_hex=None,
password=None,
do_checkserial=False):
"""
This function parses XML data of a PSKC file (RFC 6030).
It can read
* AES-128-CBC encrypted (preshared_key_hex) data
* password based encrypted data
* plain text data
:param xml_data: The XML data
:type xml_data: basestring
:param preshared_key_hex: The preshared key, hexlified
:param password: The password that encrypted the keys
:param do_checkserial: Check if the serial numbers conform to the OATH
specification (not yet implemented)
:return: a dictionary of token dictionaries
{ serial : { otpkey , counter, .... }}
"""
tokens = {}
#xml = BeautifulSoup(xml_data, "lxml")
xml = strip_prefix_from_soup(BeautifulSoup(xml_data, "lxml"))
if xml.keycontainer.encryptionkey and \
xml.keycontainer.encryptionkey.derivedkey:
# If we have a password we also need a tag EncryptionKey in the
# KeyContainer
preshared_key_hex = derive_key(xml, password)
key_packages = xml.keycontainer.findAll("keypackage")
for key_package in key_packages:
token = {}
key = key_package.key
try:
token["description"] = key_package.deviceinfo.manufacturer.string
except Exception as exx:
log.debug("Can not get manufacturer string {0!s}".format(exx))
serial = key["id"]
try:
serial = key_package.deviceinfo.serialno.string
except Exception as exx:
log.debug("Can not get serial string from device info {0!s}".format(exx))
algo = key["algorithm"]
token["type"] = algo[-4:].lower()
parameters = key.algorithmparameters
token["otplen"] = parameters.responseformat["length"] or 6
try:
if key.data.secret.plainvalue:
secret = key.data.secret.plainvalue.string
token["otpkey"] = binascii.hexlify(base64.b64decode(secret))
elif key.data.secret.encryptedvalue:
encryptionmethod = key.data.secret.encryptedvalue.encryptionmethod
enc_algorithm = encryptionmethod["algorithm"].split("#")[-1]
if enc_algorithm.lower() != "aes128-cbc":
raise ImportException("We only import PSKC files with "
"AES128-CBC.")
enc_data = key.data.secret.encryptedvalue.ciphervalue.text
enc_data = base64.b64decode(enc_data.strip())
enc_iv = enc_data[:16]
enc_cipher = enc_data[16:]
secret = aes_decrypt(binascii.unhexlify(preshared_key_hex),
enc_iv, enc_cipher)
token["otpkey"] = binascii.hexlify(secret)
except Exception as exx:
log.error("Failed to import tokendata: {0!s}".format(exx))
log.debug(traceback.format_exc())
raise ImportException("Failed to import tokendata. Wrong "
"encryption key? %s" % exx)
if token["type"] == "hotp" and key.data.counter:
token["counter"] = key.data.counter.text.strip()
elif token["type"] == "totp" and key.data.timeinterval:
token["timeStep"] = key.data.timeinterval.text.strip()
tokens[serial] = token
return tokens
class GPGImport(object):
"""
This class is used to decrypt GPG encrypted import files.
The decrypt method returns the unencrypted files.
Create the keypair like this:
GNUPGHOME=/etc/privacyidea/gpg gpg --gen-key
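Example usage (a sketch; paths and key material are illustrative)::

    gpg_import = GPGImport({"PI_GNUPG_HOME": "/etc/privacyidea/gpg"})
    public_keys = gpg_import.get_publickeys()
    plaintext = gpg_import.decrypt(encrypted_import_data)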
"""
def __init__(self, config=None):
self.config = config or {}
self.gnupg_home = self.config.get("PI_GNUPG_HOME",
"/etc/privacyidea/gpg")
self.gpg = gnupg.GPG(gnupghome=self.gnupg_home)
self.private_keys = self.gpg.list_keys(True)
def get_publickeys(self):
"""
This returns the public GPG key to be displayed in the Import Dialog.
The administrator can send this public key to his token vendor and
the token vendor can use this public key to encrypt the token import
file.
:return: a dictionary of public keys with fingerprint
"""
public_keys = {}
keys = self.gpg.list_keys(secret=True)
for key in keys:
ascii_armored_public_key = self.gpg.export_keys(key.get("keyid"))
public_keys[key.get("keyid")] = {"armor": ascii_armored_public_key,
"fingerprint": key.get(
"fingerprint")}
return public_keys
def decrypt(self, input_data):
"""
Decrypts the input data with one of the private keys
:param input_data: the GPG encrypted input data
:return: the decrypted data
"""
decrypted = self.gpg.decrypt(message=input_data)
if not decrypted.ok:
log.error(u"Decrpytion failed: {0!s}. {1!s}".format(
decrypted.status, decrypted.stderr))
raise Exception(decrypted.stderr)
return decrypted.data<|fim▁end|> |
If now public ID or serial is given, we can not import the token, as the |
<|file_name|>pages.js<|end_file_name|><|fim▁begin|>'use strict';
var React = require('react');
var PureRenderMixin = require('react-addons-pure-render-mixin');
var SvgIcon = require('../../svg-icon');
var SocialPages = React.createClass({
displayName: 'SocialPages',
mixins: [PureRenderMixin],
render: function render() {
return React.createElement(
SvgIcon,
this.props,
React.createElement('path', { d: 'M3 5v6h5L7 7l4 1V3H5c-1.1 0-2 .9-2 2zm5 8H3v6c0 1.1.9 2 2 2h6v-5l-4 1 1-4zm9 4l-4-1v5h6c1.1 0 2-.9 2-2v-6h-5l1 4zm2-14h-6v5l4-1-1 4h5V5c0-1.1-.9-2-2-2z' })
);
}
});<|fim▁hole|>
module.exports = SocialPages;<|fim▁end|> | |
<|file_name|>MovementGenerator.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2005-2011 MaNGOS <http://getmangos.com/>
* Copyright (C) 2009-2011 MaNGOSZero <https://github.com/mangos/zero>
* Copyright (C) 2011-2016 Nostalrius <https://nostalrius.org>
* Copyright (C) 2016-2017 Elysium Project <https://github.com/elysium-project>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*<|fim▁hole|> * along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "MovementGenerator.h"
#include "Unit.h"
MovementGenerator::~MovementGenerator()
{
}
bool MovementGenerator::IsActive(Unit& u)
{
// When movement generator list modified from Update movegen object erase delayed,
// so pointer still valid and be used for check
return !u.GetMotionMaster()->empty() && u.GetMotionMaster()->top() == this;
}<|fim▁end|> | * You should have received a copy of the GNU General Public License |
<|file_name|>app.py<|end_file_name|><|fim▁begin|>import os
from flask import Flask, url_for, request, render_template, jsonify, send_file
from werkzeug.utils import secure_filename
import deepchem as dc
import subprocess
from shutil import copyfile
import csv
import rdkit
from rdkit import Chem
from rdkit.Chem import AllChem
from rdkit.Chem import Draw
STATIC_DIR = os.path.join(os.path.dirname(__file__), 'static/')
DEEPCHEM_GUI = Flask('deepchem-gui', static_folder=STATIC_DIR,
static_url_path='/static',
template_folder=os.path.join(STATIC_DIR, 'deepchem-gui',
'templates')
)
UPLOAD_DIR = os.path.join(STATIC_DIR, "data/")
if not os.path.isdir(UPLOAD_DIR):<|fim▁hole|>@DEEPCHEM_GUI.route('/')
def webapp():
return render_template('webapp.html')
# download protein and ligand files
@DEEPCHEM_GUI.route('/upload', methods=['POST'])
def upload():
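    """Handle the upload form. The workflow is chosen from the files present
    in the request: protein plus ligand structure files are docked pairwise,
    a SMILES CSV gets 2D depictions rendered, and a SMARTS CSV has its
    reaction applied and the reactants and product rendered."""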
if request.method == 'POST':
proteins = request.files.getlist('proteins')
ligands = request.files.getlist('ligands')
smiles = request.files.getlist('smiles')
smarts = request.files.getlist('smarts')
if proteins and ligands:
protein_fns = []
ligand_fns = []
for protein in proteins:
protein_fn = os.path.join(
UPLOAD_DIR,
secure_filename(protein.filename)
)
protein.save(protein_fn)
protein_fns.append(protein_fn)
for ligand in ligands:
ligand_fn = os.path.join(
UPLOAD_DIR,
secure_filename(ligand.filename)
)
ligand.save(ligand_fn)
ligand_fns.append(ligand_fn)
docking_result = dock(protein_fns, ligand_fns)
print(docking_result)
for i in range(len(protein_fns)):
for j in range(len(ligand_fns)):
protein_fn = docking_result[i][j]["protein"]
new_protein_fn = protein_fn.split("/")[-1]
copyfile(protein_fn, os.path.join(
UPLOAD_DIR, new_protein_fn))
docking_result[i][j]["protein"] = url_for(
'static', filename="data/" + new_protein_fn)
ligand_fn = docking_result[i][j]["ligand"]
new_ligand_fn = ligand_fn.split("/")[-1]
copyfile(ligand_fn,
os.path.join(UPLOAD_DIR, new_ligand_fn))
docking_result[i][j]["ligand"] = url_for(
'static', filename="data/" + new_ligand_fn)
return jsonify(docking_result)
elif smiles:
smiles = smiles[0]
smiles_fn = os.path.join(
UPLOAD_DIR,
secure_filename(smiles.filename)
)
smiles.save(smiles_fn)
            with open(smiles_fn, 'r') as csvfile:
                data = list(csv.reader(csvfile, delimiter=','))
data = render_smiles(data)
return jsonify(data)
elif smarts:
smarts = smarts[0]
smarts_fn = os.path.join(
UPLOAD_DIR,
secure_filename(smarts.filename)
)
smarts.save(smarts_fn)
            with open(smarts_fn, 'r') as csvfile:
                data = list(csv.reader(csvfile, delimiter=','))
data = render_smarts(data)
return jsonify(data)
else:
return jsonify(error_msg="Invalid file transfer.")
else:
raise NotImplementedError
def render_smiles(data):
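    """Draw a 2D depiction for each SMILES string in the CSV rows (row 0 is
    the header) and append the image URL as a new column."""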
    smiles_col_idx = data[0].index("SMILES")
for i, row in enumerate(data):
if i==0:
data[i].append("SMILES IMG")
continue
try:
smiles_str = data[i][smiles_col_idx]
smiles = Chem.MolFromSmiles(smiles_str)
AllChem.Compute2DCoords(smiles)
smiles_fn = 'smiles_%d.png' % i
smiles_img = os.path.join(UPLOAD_DIR, smiles_fn)
Draw.MolToFile(smiles, smiles_img)
data[i].append(url_for('static', filename='data/' + smiles_fn))
except Exception as e:
print(e)
data[i].append("Invalid")
return data
def render_smarts(data):
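    """Apply each row's SMARTS reaction to its two reactant SMILES, keep the
    first product, and append the product SMILES plus image URLs for both
    reactants and the product as new columns."""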
    smarts_col_idx = data[0].index("SMARTS")
    smiles_col_idx_1 = data[0].index("SMILES_1")
    smiles_col_idx_2 = data[0].index("SMILES_2")
for i, row in enumerate(data):
if i==0:
data[i].append("PRODUCT")
data[i].append("SMILES_1 IMG")
data[i].append("SMILES_2 IMG")
data[i].append("PRODUCT IMG")
continue
try:
smarts_str = data[i][smarts_col_idx]
smiles_str_1 = data[i][smiles_col_idx_1]
smiles_str_2 = data[i][smiles_col_idx_2]
rxn = AllChem.ReactionFromSmarts(smarts_str)
ps = rxn.RunReactants((Chem.MolFromSmiles(smiles_str_1), Chem.MolFromSmiles(smiles_str_2)))
product = ps[0][0]
product_str = Chem.MolToSmiles(product)
data[i].append(product_str)
AllChem.Compute2DCoords(product)
product_fn = 'product_%d.png' % i
product_img = os.path.join(UPLOAD_DIR, product_fn)
Draw.MolToFile(product, product_img)
smiles_1 = Chem.MolFromSmiles(smiles_str_1)
AllChem.Compute2DCoords(smiles_1)
smiles_1_fn = 'smiles_1_%d.png' % i
smiles_1_img = os.path.join(UPLOAD_DIR, smiles_1_fn)
Draw.MolToFile(smiles_1, smiles_1_img)
smiles_2 = Chem.MolFromSmiles(smiles_str_2)
AllChem.Compute2DCoords(smiles_2)
smiles_2_fn = 'smiles_2_%d.png' % i
smiles_2_img = os.path.join(UPLOAD_DIR, smiles_2_fn)
Draw.MolToFile(smiles_2, smiles_2_img)
data[i].append(url_for('static', filename='data/' + product_fn))
data[i].append(url_for('static', filename='data/' + smiles_1_fn))
data[i].append(url_for('static', filename='data/' + smiles_2_fn))
except Exception as e:
print(e)
data[i].append("Invalid")
data[i].append("Invalid")
data[i].append("Invalid")
return data
def dock(protein_fns, ligand_fns):
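    """Dock every ligand against every protein and return a
    len(protein_fns) x len(ligand_fns) matrix of dicts holding the score and
    the paths of the docked structures."""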
docking_result = [[{} for j in range(len(ligand_fns))]
for i in range(len(protein_fns))]
for i in range(len(protein_fns)):
for j in range(len(ligand_fns)):
protein_fn = protein_fns[i]
ligand_fn = ligand_fns[j]
print("Docking: %s to %s" % (ligand_fn, protein_fn))
docker = dc.dock.VinaGridDNNDocker(
exhaustiveness=1, detect_pockets=False)
(score, (protein_docked, ligand_docked)
) = docker.dock(protein_fn, ligand_fn)
print("Scores: %f" % (score))
print("Docked protein: %s" % (protein_docked))
print("Docked ligand: %s" % (ligand_docked))
ligand_docked_fn = ligand_docked.replace(".pdbqt", "")
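            # stripqt.sh (from the GUI's scripts directory) evidently strips
            # the PDBQT-specific columns so the docked ligand can be served
            # as a plain .pdb file, whose path is built below.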
subprocess.call("csh %s %s" % (os.path.join(STATIC_DIR, 'deepchem-gui', 'scripts', 'stripqt.sh'),
ligand_docked_fn), shell=True)
ligand_docked_pdb = ligand_docked_fn + ".pdb"
docking_result[i][j] = {'score': score[
0], 'protein': protein_docked, 'ligand': ligand_docked_pdb}
return docking_result<|fim▁end|> | os.mkdir(UPLOAD_DIR)
print("Created data directory")
# serve ngl webapp clone