prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from . uuid64 import *<|fim▁end|> | |
<|file_name|>interface.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""The parsers and plugins interface classes."""
import abc
import os
from plaso.lib import errors
class BaseFileEntryFilter(object):
"""File entry filter interface."""
# pylint: disable=redundant-returns-doc
@abc.abstractmethod
def Match(self, file_entry):
"""Determines if a file entry matches the filter.
Args:
file_entry (dfvfs.FileEntry): a file entry.
Returns:
bool: True if the file entry matches the filter.
"""
class FileNameFileEntryFilter(BaseFileEntryFilter):
"""File name file entry filter."""
def __init__(self, filename):
"""Initializes a file entry filter.
Args:
filename (str): name of the file.
"""
super(FileNameFileEntryFilter, self).__init__()
self._filename = filename.lower()
def Match(self, file_entry):
"""Determines if a file entry matches the filter.
Args:
file_entry (dfvfs.FileEntry): a file entry.
Returns:
bool: True if the file entry matches the filter.
"""
if not file_entry:
return False
filename = file_entry.name.lower()
return filename == self._filename
class BaseParser(object):
"""The parser interface."""
# The name of the parser. This is the name that is used in the registration
# and used for parser/plugin selection, so this needs to be concise and unique
# for all plugins/parsers, such as 'Chrome', 'Safari' or 'UserAssist'.
NAME = 'base_parser'
# Data format supported by the parser plugin. This information is used by
# the parser manager to generate parser and plugin information.
DATA_FORMAT = ''
# List of filters that should match for the parser to be applied.
FILTERS = frozenset()
# Every derived parser class that implements plugins should define
# its own _plugin_classes dict:
# _plugin_classes = {}
# We deliberately don't define it here to make sure the plugins of
# different parser classes don't end up in the same dict.
_plugin_classes = None
def __init__(self):
"""Initializes a parser.
By default all plugins will be enabled. To only enable specific plugins
use the EnablePlugins method and pass it a list of strings containing
the names of the plugins to enable.
The default plugin, named "{self.NAME:s}_default", if it exists,
is always enabled and cannot be disabled.
"""
super(BaseParser, self).__init__()
self._default_plugin = None
self._plugins = None
self.EnablePlugins([])
@classmethod
def DeregisterPlugin(cls, plugin_class):
"""Deregisters a plugin class.
The plugin classes are identified based on their lower case name.
Args:
plugin_class (type): class of the plugin.
Raises:
KeyError: if plugin class is not set for the corresponding name.
"""
plugin_name = plugin_class.NAME.lower()
if plugin_name not in cls._plugin_classes:
raise KeyError(
'Plugin class not set for name: {0:s}.'.format(
plugin_class.NAME))
del cls._plugin_classes[plugin_name]
def EnablePlugins(self, plugin_includes):
"""Enables parser plugins.
Args:
plugin_includes (list[str]): names of the plugins to enable, where None
or an empty list represents all plugins. Note the default plugin, if
it exists, is always enabled and cannot be disabled.
"""
self._plugins = []
if not self._plugin_classes:
return
default_plugin_name = '{0:s}_default'.format(self.NAME)
for plugin_name, plugin_class in self._plugin_classes.items():
if plugin_name == default_plugin_name:
self._default_plugin = plugin_class()
continue
if plugin_includes and plugin_name not in plugin_includes:
continue
plugin_object = plugin_class()
self._plugins.append(plugin_object)
# TODO: move this to a filter.
# pylint: disable=redundant-returns-doc
@classmethod
def GetFormatSpecification(cls):
"""Retrieves the format specification.
Returns:
FormatSpecification: a format specification or None if not available.
"""
return
@classmethod
def GetPluginObjectByName(cls, plugin_name):
"""Retrieves a specific plugin object by its name.
Args:
plugin_name (str): name of the plugin.
Returns:
BasePlugin: a plugin object or None if not available.
"""
plugin_class = cls._plugin_classes.get(plugin_name, None)
if plugin_class:
return plugin_class()
return None
@classmethod
def GetPlugins(cls):
"""Retrieves the registered plugins.
Yields:
tuple[str, type]: name and class of the plugin.
"""
for plugin_name, plugin_class in cls._plugin_classes.items():
yield plugin_name, plugin_class
@classmethod
def RegisterPlugin(cls, plugin_class):
"""Registers a plugin class.
The plugin classes are identified based on their lower case name.
Args:
plugin_class (type): class of the plugin.
Raises:
KeyError: if plugin class is already set for the corresponding name.
"""
plugin_name = plugin_class.NAME.lower()
if plugin_name in cls._plugin_classes:
raise KeyError((
'Plugin class already set for name: {0:s}.').format(
plugin_class.NAME))
cls._plugin_classes[plugin_name] = plugin_class
@classmethod
def RegisterPlugins(cls, plugin_classes):
"""Registers plugin classes.
Args:
plugin_classes (list[type]): classes of plugins.
Raises:
KeyError: if plugin class is already set for the corresponding name.
"""
for plugin_class in plugin_classes:
cls.RegisterPlugin(plugin_class)
@classmethod
def SupportsPlugins(cls):
"""Determines if a parser supports plugins.
Returns:
bool: True if the parser supports plugins.
"""
return cls._plugin_classes is not None
class FileEntryParser(BaseParser):
"""The file entry parser interface."""
def Parse(self, parser_mediator):
"""Parsers the file entry and extracts event objects.
Args:
parser_mediator (ParserMediator): a parser mediator.
Raises:
UnableToParseFile: when the file cannot be parsed.
"""
file_entry = parser_mediator.GetFileEntry()
if not file_entry:
raise errors.UnableToParseFile('Invalid file entry')
parser_mediator.AppendToParserChain(self)
try:<|fim▁hole|> @abc.abstractmethod
def ParseFileEntry(self, parser_mediator, file_entry):
"""Parses a file entry.
Args:
parser_mediator (ParserMediator): a parser mediator.
file_entry (dfvfs.FileEntry): a file entry to parse.
Raises:
UnableToParseFile: when the file cannot be parsed.
"""
class FileObjectParser(BaseParser):
"""The file-like object parser interface."""
# The initial file offset. Set this value to None if no initial
# file offset seek needs to be performed.
_INITIAL_FILE_OFFSET = 0
def Parse(self, parser_mediator, file_object):
"""Parses a single file-like object.
Args:
parser_mediator (ParserMediator): a parser mediator.
file_object (dvfvs.FileIO): a file-like object to parse.
Raises:
UnableToParseFile: when the file cannot be parsed.
"""
if not file_object:
raise errors.UnableToParseFile('Invalid file object')
if self._INITIAL_FILE_OFFSET is not None:
file_object.seek(self._INITIAL_FILE_OFFSET, os.SEEK_SET)
parser_mediator.AppendToParserChain(self)
try:
self.ParseFileObject(parser_mediator, file_object)
finally:
parser_mediator.PopFromParserChain()
@abc.abstractmethod
def ParseFileObject(self, parser_mediator, file_object):
"""Parses a file-like object.
Args:
parser_mediator (ParserMediator): a parser mediator.
file_object (dvfvs.FileIO): a file-like object to parse.
Raises:
UnableToParseFile: when the file cannot be parsed.
"""<|fim▁end|> | self.ParseFileEntry(parser_mediator, file_entry)
finally:
parser_mediator.PopFromParserChain()
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#![allow(dead_code)]
#![allow(unreachable_code)]
#![allow(unused_imports)]
#![allow(unused_variables)]
#![deny(unsafe_code)]
#[macro_use]
extern crate serde;
use style::properties::ComputedValues;
use style::values::computed::{Length, LengthOrAuto};
use style::Zero;
pub mod context;
pub mod data;
mod dom_traversal;
mod element_data;
mod flow;
mod fragments;
mod geom;
mod opaque_node;
mod positioned;
pub mod query;
mod replaced;
mod style_ext;
pub mod traversal;
pub mod wrapper;
<|fim▁hole|>
use crate::dom_traversal::{Contents, NodeExt};
use crate::flow::{BlockFormattingContext, FlowChildren};
use crate::geom::flow_relative::Vec2;
use crate::positioned::AbsolutelyPositionedFragment;
use crate::replaced::ReplacedContent;
use crate::style_ext::{ComputedValuesExt, Direction, Position, WritingMode};
use servo_arc::Arc;
use std::convert::TryInto;
use style::context::SharedStyleContext;
use style::values::specified::box_::DisplayInside;
/// https://drafts.csswg.org/css-display/#independent-formatting-context
#[derive(Debug)]
enum IndependentFormattingContext {
Flow(BlockFormattingContext),
// Not called FC in specs, but behaves close enough
Replaced(ReplacedContent),
// Other layout modes go here
}
enum NonReplacedIFC<'a> {
Flow(&'a BlockFormattingContext),
}
impl IndependentFormattingContext {
fn construct<'dom, 'style>(
context: &SharedStyleContext<'style>,
style: &Arc<ComputedValues>,
display_inside: DisplayInside,
contents: Contents<impl NodeExt<'dom>>,
) -> Self {
match contents.try_into() {
Ok(non_replaced) => match display_inside {
DisplayInside::Flow | DisplayInside::FlowRoot => {
IndependentFormattingContext::Flow(BlockFormattingContext::construct(
context,
style,
non_replaced,
))
},
DisplayInside::None | DisplayInside::Contents => panic!(":("),
},
Err(replaced) => IndependentFormattingContext::Replaced(replaced),
}
}
fn as_replaced(&self) -> Result<&ReplacedContent, NonReplacedIFC> {
match self {
IndependentFormattingContext::Replaced(r) => Ok(r),
IndependentFormattingContext::Flow(f) => Err(NonReplacedIFC::Flow(f)),
}
}
fn layout<'a>(
&'a self,
containing_block: &ContainingBlock,
tree_rank: usize,
absolutely_positioned_fragments: &mut Vec<AbsolutelyPositionedFragment<'a>>,
) -> FlowChildren {
match self.as_replaced() {
Ok(replaced) => match *replaced {},
Err(ifc) => ifc.layout(containing_block, tree_rank, absolutely_positioned_fragments),
}
}
}
impl<'a> NonReplacedIFC<'a> {
fn layout(
&self,
containing_block: &ContainingBlock,
tree_rank: usize,
absolutely_positioned_fragments: &mut Vec<AbsolutelyPositionedFragment<'a>>,
) -> FlowChildren {
match self {
NonReplacedIFC::Flow(bfc) => {
bfc.layout(containing_block, tree_rank, absolutely_positioned_fragments)
},
}
}
}
struct ContainingBlock {
inline_size: Length,
block_size: LengthOrAuto,
mode: (WritingMode, Direction),
}
struct DefiniteContainingBlock {
size: Vec2<Length>,
mode: (WritingMode, Direction),
}
/// https://drafts.csswg.org/css2/visuren.html#relative-positioning
fn relative_adjustement(
style: &ComputedValues,
inline_size: Length,
block_size: LengthOrAuto,
) -> Vec2<Length> {
if style.get_box().position != Position::Relative {
return Vec2::zero();
}
fn adjust(start: LengthOrAuto, end: LengthOrAuto) -> Length {
match (start, end) {
(LengthOrAuto::Auto, LengthOrAuto::Auto) => Length::zero(),
(LengthOrAuto::Auto, LengthOrAuto::LengthPercentage(end)) => -end,
(LengthOrAuto::LengthPercentage(start), _) => start,
}
}
let block_size = block_size.auto_is(Length::zero);
let box_offsets = style.box_offsets().map_inline_and_block_axes(
|v| v.percentage_relative_to(inline_size),
|v| v.percentage_relative_to(block_size),
);
Vec2 {
inline: adjust(box_offsets.inline_start, box_offsets.inline_end),
block: adjust(box_offsets.block_start, box_offsets.block_end),
}
}
// FIXME: use std::mem::take when it’s stable.
// https://github.com/rust-lang/rust/issues/61129
fn take<T>(x: &mut T) -> T
where
T: Default,
{
std::mem::replace(x, Default::default())
}<|fim▁end|> | pub use flow::BoxTreeRoot; |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os
from setuptools import find_packages, setup
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.rst'), encoding='utf-8') as fp:
README = fp.read()
with open(os.path.join(here, 'VERSION')) as version_file:
VERSION = version_file.read().strip()
excluded_packages = ["docs", "tests", "tests.*"]
if not os.environ.get('READTHEDOCS', False):
excluded_packages += ["faker.sphinx", "faker.sphinx.*"]
# this module can be zip-safe if the zipimporter implements iter_modules or if
# pkgutil.iter_importer_modules has registered a dispatch for the zipimporter.
try:
import pkgutil
import zipimport
zip_safe = hasattr(zipimport.zipimporter, "iter_modules") or \
zipimport.zipimporter in pkgutil.iter_importer_modules.registry.keys()
except AttributeError:
zip_safe = False
setup(
name='Faker',
version=VERSION,
description="Faker is a Python package that generates fake data for you.",
long_description=README,
entry_points={
'console_scripts': ['faker=faker.cli:execute_from_command_line'],
'pytest11': ['faker = faker.contrib.pytest.plugin'],
},
classifiers=[<|fim▁hole|> 'Environment :: Console',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Testing',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
],
keywords='faker fixtures data test mock generator',
author='joke2k',
author_email='[email protected]',
url='https://github.com/joke2k/faker',
license='MIT License',
packages=find_packages(exclude=excluded_packages),
platforms=["any"],
zip_safe=zip_safe,
python_requires=">=3.4",
install_requires=[
"python-dateutil>=2.4",
"text-unidecode==1.3",
],
)<|fim▁end|> | # See https://pypi.org/pypi?%3Aaction=list_classifiers
'Development Status :: 5 - Production/Stable', |
<|file_name|>navleft.js<|end_file_name|><|fim▁begin|>/*分页JS*/
var rsss = false;
$(function () {
$(".leftNav_side").css("min-height", $(".leftNav_side").height());
$(window).resize(function () {
$(".leftNav_side").height($(window).height());
}).trigger("resize");//左侧菜单高度自适应,但是保留内容最小高度
//切换左导航一级菜单
$(".Nav_lvl dt").click(function () {
$(this).parent().siblings().find("dd").hide();
$(this).siblings().slideDown(300);
});
//切换左导航二级菜单
$(".Nav_lvl dd").click(function () {
$(".Nav_lvl dd").removeClass();
$(this).addClass("Nav_lvl_dd_on");
});
//切换顶部导航
$(".topNav_ul li").click(function () {
$(this).addClass("topNav_li_on").siblings().removeClass();
});
if(Number($("[name='totalCount']").val()) > 0){
var pages = [],
totalPage = Number($("[name='totalPage']").val()),
totalCount = Number($("[name='totalCount']").val()),
currentPage = Number($("[name='pageNum']").val())==0 ? 1 :Number($("[name='pageNum']").val());
pages[pages.length] = ' <th colspan="100"><i>当前第'+currentPage+'页/共'+totalPage+'页</i><i>共'+totalCount+'条记录</i>';
if (currentPage == 1) {
pages[pages.length] = ' <span>首页</span><span>上一页</span>';
}
else {
pages[pages.length] = ' <a class="first" href="#">首页</a><a class="prev" href="#">上一页</a>';
}
if (currentPage < 5) {
for (var i = 1; i <= (totalPage > 10 ? 10 : totalPage); i++) {
if (currentPage == i)
pages[pages.length] = '<span class="sel">' + i + '</span>';
else
pages[pages.length] = '<a href="#">' + i + '</a>';
}
}
else if (currentPage >= totalPage - 5)
for (var i = totalPage - 9; i <= totalPage; i++) {
if (currentPage == i)
pages[pages.length] = '<span class="sel">' + i + '</span>';
else
pages[pages.length] = '<a href="#">' + i + '</a>';
}
else {
for (var i = currentPage - 5; i <= currentPage + 4; i++) {
if (currentPage == i)
pages[pages.length] = '<span class="sel">' + i + '</span>';
else
pages[pages.length] = '<a href="#">' + i + '</a>';
}
}
if (currentPage < totalPage) {
<|fim▁hole|> }
else {
pages[pages.length] = '<span>下一页</span><span>尾页</span>';
}
pages[pages.length] = '<input type="text" name="page" value="'+currentPage+'"/>';
pages[pages.length] = '<input type="button" value="跳转" class="btn_violet" />';
$(".pager").html(pages.join("")).find("a:not(.next):not(.prev)").click(function(){
$("[name='currentPage']").val($(this).text());
$("#pagerForm").submit();
});
$(".pager").find("a.first").click(function(){
num = 1;
$("[name='currentPage']").val(num);
$("#pagerForm").submit();
});
$(".pager").find("a.prev").click(function(){
num = Number($("[name='currentPage']").val()) - 1 < 0 ? 0 :Number($("[name='currentPage']").val()) - 1;
$("[name='currentPage']").val(num);
$("#pagerForm").submit();
});
$(".pager").find("a.next").click(function(){
$("[name='currentPage']").val(Number($("[name='currentPage']").val()) + 1);
$("#pagerForm").submit();
});
$(".pager").find("a.last").click(function(){
num = Number($("[name='totalPage']").val());
$("[name='currentPage']").val(num);
$("#pagerForm").submit();
});
$(".pager").find("input.btn_violet").click(function(){
num = Number($("[name='page']").val());
if(num > totalPage){
num = totalPage;
} else if(num < 1){
num = 1;
}
$("[name='currentPage']").val(num);
$("#pagerForm").submit();
});
}
});<|fim▁end|> | pages[pages.length] = '<a class="next" href="#">下一页</a><a class="last" href="#">尾页</a>';
|
<|file_name|>probe.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use super::MethodError;
use super::ItemIndex;
use super::{CandidateSource,ImplSource,TraitSource};
use super::suggest;
use check;
use check::{FnCtxt, NoPreference, UnresolvedTypeAction};
use middle::fast_reject;
use middle::subst;
use middle::subst::Subst;
use middle::traits;
use middle::ty::{self, RegionEscape, Ty, ToPolyTraitRef};
use middle::ty_fold::TypeFoldable;
use middle::infer;
use middle::infer::InferCtxt;
use syntax::ast;
use syntax::codemap::{Span, DUMMY_SP};
use std::collections::HashSet;
use std::mem;
use std::rc::Rc;
use util::ppaux::Repr;
use self::CandidateKind::*;
pub use self::PickKind::*;
struct ProbeContext<'a, 'tcx:'a> {
fcx: &'a FnCtxt<'a, 'tcx>,
span: Span,
mode: Mode,
item_name: ast::Name,
steps: Rc<Vec<CandidateStep<'tcx>>>,
opt_simplified_steps: Option<Vec<fast_reject::SimplifiedType>>,
inherent_candidates: Vec<Candidate<'tcx>>,
extension_candidates: Vec<Candidate<'tcx>>,
impl_dups: HashSet<ast::DefId>,
static_candidates: Vec<CandidateSource>,
}
struct CandidateStep<'tcx> {
self_ty: Ty<'tcx>,
autoderefs: usize,
unsize: bool
}
struct Candidate<'tcx> {
xform_self_ty: Ty<'tcx>,
item: ty::ImplOrTraitItem<'tcx>,
kind: CandidateKind<'tcx>,
}
enum CandidateKind<'tcx> {
InherentImplCandidate(/* Impl */ ast::DefId, subst::Substs<'tcx>),
ObjectCandidate(/* Trait */ ast::DefId, /* method_num */ usize, /* vtable index */ usize),
ExtensionImplCandidate(/* Impl */ ast::DefId, ty::TraitRef<'tcx>,
subst::Substs<'tcx>, ItemIndex),
ClosureCandidate(/* Trait */ ast::DefId, ItemIndex),
WhereClauseCandidate(ty::PolyTraitRef<'tcx>, ItemIndex),
ProjectionCandidate(ast::DefId, ItemIndex),
}
pub struct Pick<'tcx> {
pub item: ty::ImplOrTraitItem<'tcx>,
pub kind: PickKind<'tcx>,
// Indicates that the source expression should be autoderef'd N times
//
// A = expr | *expr | **expr | ...
pub autoderefs: usize,
// Indicates that an autoref is applied after the optional autoderefs
//
// B = A | &A | &mut A
pub autoref: Option<ast::Mutability>,
// Indicates that the source expression should be "unsized" to a
// target type. This should probably eventually go away in favor
// of just coercing method receivers.
//
// C = B | unsize(B)
pub unsize: Option<Ty<'tcx>>,
}
#[derive(Clone,Debug)]
pub enum PickKind<'tcx> {
InherentImplPick(/* Impl */ ast::DefId),
ObjectPick(/* Trait */ ast::DefId, /* method_num */ usize, /* real_index */ usize),
ExtensionImplPick(/* Impl */ ast::DefId, ItemIndex),
TraitPick(/* Trait */ ast::DefId, ItemIndex),
WhereClausePick(/* Trait */ ty::PolyTraitRef<'tcx>, ItemIndex),
}
pub type PickResult<'tcx> = Result<Pick<'tcx>, MethodError>;
#[derive(PartialEq, Eq, Copy, Clone, Debug)]
pub enum Mode {
// An expression of the form `receiver.method_name(...)`.
// Autoderefs are performed on `receiver`, lookup is done based on the
// `self` argument of the method, and static methods aren't considered.
MethodCall,
// An expression of the form `Type::item` or `<T>::item`.
// No autoderefs are performed, lookup is done based on the type each
// implementation is for, and static methods are included.
Path
}
pub fn probe<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
span: Span,
mode: Mode,
item_name: ast::Name,
self_ty: Ty<'tcx>,
scope_expr_id: ast::NodeId)
-> PickResult<'tcx>
{
debug!("probe(self_ty={}, item_name={}, scope_expr_id={})",
self_ty.repr(fcx.tcx()),
item_name,
scope_expr_id);
// FIXME(#18741) -- right now, creating the steps involves evaluating the
// `*` operator, which registers obligations that then escape into
// the global fulfillment context and thus has global
// side-effects. This is a bit of a pain to refactor. So just let
// it ride, although it's really not great, and in fact could I
// think cause spurious errors. Really though this part should
// take place in the `fcx.infcx().probe` below.
let steps = if mode == Mode::MethodCall {
match create_steps(fcx, span, self_ty) {
Some(steps) => steps,
None => return Err(MethodError::NoMatch(Vec::new(), Vec::new(), mode)),
}
} else {
vec![CandidateStep {
self_ty: self_ty,
autoderefs: 0,
unsize: false
}]
};
// Create a list of simplified self types, if we can.
let mut simplified_steps = Vec::new();
for step in &steps {
match fast_reject::simplify_type(fcx.tcx(), step.self_ty, true) {
None => { break; }
Some(simplified_type) => { simplified_steps.push(simplified_type); }
}
}
let opt_simplified_steps =
if simplified_steps.len() < steps.len() {
None // failed to convert at least one of the steps
} else {
Some(simplified_steps)
};
debug!("ProbeContext: steps for self_ty={} are {}",
self_ty.repr(fcx.tcx()),
steps.repr(fcx.tcx()));
// this creates one big transaction so that all type variables etc
// that we create during the probe process are removed later
fcx.infcx().probe(|_| {
let mut probe_cx = ProbeContext::new(fcx,
span,
mode,
item_name,
steps,
opt_simplified_steps);
probe_cx.assemble_inherent_candidates();
try!(probe_cx.assemble_extension_candidates_for_traits_in_scope(scope_expr_id));
probe_cx.pick()
})
}
fn create_steps<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
span: Span,
self_ty: Ty<'tcx>)
-> Option<Vec<CandidateStep<'tcx>>> {
let mut steps = Vec::new();
let (final_ty, dereferences, _) = check::autoderef(fcx,
span,
self_ty,
None,
UnresolvedTypeAction::Error,
NoPreference,
|t, d| {
steps.push(CandidateStep {
self_ty: t,
autoderefs: d,
unsize: false
});
None::<()> // keep iterating until we can't anymore
});
match final_ty.sty {
ty::ty_vec(elem_ty, Some(_)) => {
let slice_ty = ty::mk_vec(fcx.tcx(), elem_ty, None);
steps.push(CandidateStep {
self_ty: slice_ty,
autoderefs: dereferences,
unsize: true
});
}
ty::ty_err => return None,
_ => (),
}
Some(steps)
}
impl<'a,'tcx> ProbeContext<'a,'tcx> {
fn new(fcx: &'a FnCtxt<'a,'tcx>,
span: Span,
mode: Mode,
item_name: ast::Name,
steps: Vec<CandidateStep<'tcx>>,
opt_simplified_steps: Option<Vec<fast_reject::SimplifiedType>>)
-> ProbeContext<'a,'tcx>
{
ProbeContext {
fcx: fcx,
span: span,
mode: mode,
item_name: item_name,
inherent_candidates: Vec::new(),
extension_candidates: Vec::new(),
impl_dups: HashSet::new(),
steps: Rc::new(steps),
opt_simplified_steps: opt_simplified_steps,
static_candidates: Vec::new(),
}
}
fn reset(&mut self) {
self.inherent_candidates.clear();
self.extension_candidates.clear();
self.impl_dups.clear();
self.static_candidates.clear();
}
fn tcx(&self) -> &'a ty::ctxt<'tcx> {
self.fcx.tcx()
}
fn infcx(&self) -> &'a InferCtxt<'a, 'tcx> {
self.fcx.infcx()
}
///////////////////////////////////////////////////////////////////////////
// CANDIDATE ASSEMBLY
fn assemble_inherent_candidates(&mut self) {
let steps = self.steps.clone();
for step in &*steps {
self.assemble_probe(step.self_ty);
}
}
fn assemble_probe(&mut self, self_ty: Ty<'tcx>) {
debug!("assemble_probe: self_ty={}",
self_ty.repr(self.tcx()));
match self_ty.sty {
ty::ty_trait(box ref data) => {
self.assemble_inherent_candidates_from_object(self_ty, data);
self.assemble_inherent_impl_candidates_for_type(data.principal_def_id());
}
ty::ty_enum(did, _) |
ty::ty_struct(did, _) |
ty::ty_closure(did, _) => {
self.assemble_inherent_impl_candidates_for_type(did);
}
ty::ty_uniq(_) => {
if let Some(box_did) = self.tcx().lang_items.owned_box() {
self.assemble_inherent_impl_candidates_for_type(box_did);
}
}
ty::ty_param(p) => {
self.assemble_inherent_candidates_from_param(self_ty, p);
}
ty::ty_char => {
let lang_def_id = self.tcx().lang_items.char_impl();
self.assemble_inherent_impl_for_primitive(lang_def_id);
}
ty::ty_str => {
let lang_def_id = self.tcx().lang_items.str_impl();
self.assemble_inherent_impl_for_primitive(lang_def_id);
}
ty::ty_vec(_, None) => {
let lang_def_id = self.tcx().lang_items.slice_impl();
self.assemble_inherent_impl_for_primitive(lang_def_id);
}
ty::ty_ptr(ty::mt { ty: _, mutbl: ast::MutImmutable }) => {
let lang_def_id = self.tcx().lang_items.const_ptr_impl();
self.assemble_inherent_impl_for_primitive(lang_def_id);
}
ty::ty_ptr(ty::mt { ty: _, mutbl: ast::MutMutable }) => {
let lang_def_id = self.tcx().lang_items.mut_ptr_impl();
self.assemble_inherent_impl_for_primitive(lang_def_id);
}
ty::ty_int(ast::TyI8) => {
let lang_def_id = self.tcx().lang_items.i8_impl();
self.assemble_inherent_impl_for_primitive(lang_def_id);
}
ty::ty_int(ast::TyI16) => {
let lang_def_id = self.tcx().lang_items.i16_impl();
self.assemble_inherent_impl_for_primitive(lang_def_id);
}
ty::ty_int(ast::TyI32) => {
let lang_def_id = self.tcx().lang_items.i32_impl();
self.assemble_inherent_impl_for_primitive(lang_def_id);
}
ty::ty_int(ast::TyI64) => {
let lang_def_id = self.tcx().lang_items.i64_impl();
self.assemble_inherent_impl_for_primitive(lang_def_id);
}
ty::ty_int(ast::TyIs) => {
let lang_def_id = self.tcx().lang_items.isize_impl();
self.assemble_inherent_impl_for_primitive(lang_def_id);
}
ty::ty_uint(ast::TyU8) => {
let lang_def_id = self.tcx().lang_items.u8_impl();
self.assemble_inherent_impl_for_primitive(lang_def_id);
}
ty::ty_uint(ast::TyU16) => {
let lang_def_id = self.tcx().lang_items.u16_impl();
self.assemble_inherent_impl_for_primitive(lang_def_id);
}
ty::ty_uint(ast::TyU32) => {
let lang_def_id = self.tcx().lang_items.u32_impl();
self.assemble_inherent_impl_for_primitive(lang_def_id);
}
ty::ty_uint(ast::TyU64) => {
let lang_def_id = self.tcx().lang_items.u64_impl();
self.assemble_inherent_impl_for_primitive(lang_def_id);
}
ty::ty_uint(ast::TyUs) => {
let lang_def_id = self.tcx().lang_items.usize_impl();
self.assemble_inherent_impl_for_primitive(lang_def_id);
}
ty::ty_float(ast::TyF32) => {
let lang_def_id = self.tcx().lang_items.f32_impl();
self.assemble_inherent_impl_for_primitive(lang_def_id);
}
ty::ty_float(ast::TyF64) => {
let lang_def_id = self.tcx().lang_items.f64_impl();
self.assemble_inherent_impl_for_primitive(lang_def_id);
}
_ => {
}
}
}
fn assemble_inherent_impl_for_primitive(&mut self, lang_def_id: Option<ast::DefId>) {
if let Some(impl_def_id) = lang_def_id {
ty::populate_implementations_for_primitive_if_necessary(self.tcx(), impl_def_id);
self.assemble_inherent_impl_probe(impl_def_id);
}
}
fn assemble_inherent_impl_candidates_for_type(&mut self, def_id: ast::DefId) {
// Read the inherent implementation candidates for this type from the
// metadata if necessary.
ty::populate_inherent_implementations_for_type_if_necessary(self.tcx(), def_id);
if let Some(impl_infos) = self.tcx().inherent_impls.borrow().get(&def_id) {
for &impl_def_id in &***impl_infos {
self.assemble_inherent_impl_probe(impl_def_id);
}
}
}
fn assemble_inherent_impl_probe(&mut self, impl_def_id: ast::DefId) {
if !self.impl_dups.insert(impl_def_id) {
return; // already visited
}
debug!("assemble_inherent_impl_probe {:?}", impl_def_id);
let item = match impl_item(self.tcx(), impl_def_id, self.item_name) {
Some(m) => m,
None => { return; } // No method with correct name on this impl
};
if !self.has_applicable_self(&item) {
// No receiver declared. Not a candidate.
return self.record_static_candidate(ImplSource(impl_def_id));
}
let (impl_ty, impl_substs) = self.impl_ty_and_substs(impl_def_id);
let impl_ty = self.fcx.instantiate_type_scheme(self.span, &impl_substs, &impl_ty);
// Determine the receiver type that the method itself expects.
let xform_self_ty =
self.xform_self_ty(&item, impl_ty, &impl_substs);
self.inherent_candidates.push(Candidate {
xform_self_ty: xform_self_ty,
item: item,
kind: InherentImplCandidate(impl_def_id, impl_substs)
});
}
fn assemble_inherent_candidates_from_object(&mut self,
self_ty: Ty<'tcx>,
data: &ty::TyTrait<'tcx>) {
debug!("assemble_inherent_candidates_from_object(self_ty={})",
self_ty.repr(self.tcx()));
let tcx = self.tcx();
// It is illegal to invoke a method on a trait instance that
// refers to the `Self` type. An error will be reported by
// `enforce_object_limitations()` if the method refers to the
// `Self` type anywhere other than the receiver. Here, we use
// a substitution that replaces `Self` with the object type
// itself. Hence, a `&self` method will wind up with an
// argument type like `&Trait`.
let trait_ref = data.principal_trait_ref_with_self_ty(self.tcx(), self_ty);
self.elaborate_bounds(&[trait_ref.clone()], |this, new_trait_ref, item, item_num| {
let new_trait_ref = this.erase_late_bound_regions(&new_trait_ref);
let vtable_index =
traits::get_vtable_index_of_object_method(tcx,
trait_ref.clone(),
new_trait_ref.def_id,
item_num);
let xform_self_ty = this.xform_self_ty(&item,
new_trait_ref.self_ty(),
new_trait_ref.substs);
this.inherent_candidates.push(Candidate {
xform_self_ty: xform_self_ty,
item: item,
kind: ObjectCandidate(new_trait_ref.def_id, item_num, vtable_index)
});
});
}
    /// Assembles inherent candidates for a receiver whose type is the type
    /// parameter `param_ty`: every caller where-clause bound `T: Trait` whose
    /// self type is exactly this parameter contributes its items as
    /// `WhereClauseCandidate`s.
    fn assemble_inherent_candidates_from_param(&mut self,
                                               _rcvr_ty: Ty<'tcx>,
                                               param_ty: ty::ParamTy) {
        // FIXME -- Do we want to commit to this behavior for param bounds?
        // Keep only trait predicates whose self type is this exact parameter;
        // the remaining predicate kinds cannot supply methods.
        let bounds: Vec<_> =
            self.fcx.inh.param_env.caller_bounds
            .iter()
            .filter_map(|predicate| {
                match *predicate {
                    ty::Predicate::Trait(ref trait_predicate) => {
                        match trait_predicate.0.trait_ref.self_ty().sty {
                            ty::ty_param(ref p) if *p == param_ty => {
                                Some(trait_predicate.to_poly_trait_ref())
                            }
                            _ => None
                        }
                    }
                    ty::Predicate::Equate(..) |
                    ty::Predicate::Projection(..) |
                    ty::Predicate::RegionOutlives(..) |
                    ty::Predicate::TypeOutlives(..) => {
                        None
                    }
                }
            })
            .collect();
        // Walk the bounds (and their supertraits) looking for an item with
        // the name being probed; each hit becomes a candidate.
        self.elaborate_bounds(&bounds, |this, poly_trait_ref, item, item_num| {
            let trait_ref =
                this.erase_late_bound_regions(&poly_trait_ref);
            let xform_self_ty =
                this.xform_self_ty(&item,
                                   trait_ref.self_ty(),
                                   trait_ref.substs);
            if let Some(ref m) = item.as_opt_method() {
                debug!("found match: trait_ref={} substs={} m={}",
                       trait_ref.repr(this.tcx()),
                       trait_ref.substs.repr(this.tcx()),
                       m.repr(this.tcx()));
                // Sanity: the substitutions must cover exactly the method's
                // type/self spaces (checked in both type and region spaces).
                assert_eq!(m.generics.types.get_slice(subst::TypeSpace).len(),
                           trait_ref.substs.types.get_slice(subst::TypeSpace).len());
                assert_eq!(m.generics.regions.get_slice(subst::TypeSpace).len(),
                           trait_ref.substs.regions().get_slice(subst::TypeSpace).len());
                assert_eq!(m.generics.types.get_slice(subst::SelfSpace).len(),
                           trait_ref.substs.types.get_slice(subst::SelfSpace).len());
                assert_eq!(m.generics.regions.get_slice(subst::SelfSpace).len(),
                           trait_ref.substs.regions().get_slice(subst::SelfSpace).len());
            }
            // Because this trait derives from a where-clause, it
            // should not contain any inference variables or other
            // artifacts. This means it is safe to put into the
            // `WhereClauseCandidate` and (eventually) into the
            // `WhereClausePick`.
            assert!(trait_ref.substs.types.iter().all(|&t| !ty::type_needs_infer(t)));
            this.inherent_candidates.push(Candidate {
                xform_self_ty: xform_self_ty,
                item: item,
                kind: WhereClauseCandidate(poly_trait_ref, item_num)
            });
        });
    }
    // Do a search through a list of bounds, using a callback to actually
    // create the candidates. The bounds are elaborated transitively, so
    // supertrait bounds are searched as well; items whose self type is not
    // applicable are recorded as static candidates for diagnostics.
    fn elaborate_bounds<F>(
        &mut self,
        bounds: &[ty::PolyTraitRef<'tcx>],
        mut mk_cand: F,
    ) where
        F: for<'b> FnMut(
            &mut ProbeContext<'b, 'tcx>,
            ty::PolyTraitRef<'tcx>,
            ty::ImplOrTraitItem<'tcx>,
            usize,
        ),
    {
        debug!("elaborate_bounds(bounds={})", bounds.repr(self.tcx()));
        let tcx = self.tcx();
        for bound_trait_ref in traits::transitive_bounds(tcx, bounds) {
            // Does this (super)trait define an item with the probed name?
            let (pos, item) = match trait_item(tcx,
                                               bound_trait_ref.def_id(),
                                               self.item_name) {
                Some(v) => v,
                None => { continue; }
            };
            if !self.has_applicable_self(&item) {
                self.record_static_candidate(TraitSource(bound_trait_ref.def_id()));
            } else {
                mk_cand(self, bound_trait_ref, item, pos);
            }
        }
    }
fn assemble_extension_candidates_for_traits_in_scope(&mut self,
expr_id: ast::NodeId)
-> Result<(),MethodError>
{
let mut duplicates = HashSet::new();
let opt_applicable_traits = self.fcx.ccx.trait_map.get(&expr_id);
if let Some(applicable_traits) = opt_applicable_traits {
for &trait_did in applicable_traits {
if duplicates.insert(trait_did) {
try!(self.assemble_extension_candidates_for_trait(trait_did));
}
}
}
Ok(())
}
fn assemble_extension_candidates_for_all_traits(&mut self) -> Result<(),MethodError> {
let mut duplicates = HashSet::new();
for trait_info in suggest::all_traits(self.fcx.ccx) {
if duplicates.insert(trait_info.def_id) {
try!(self.assemble_extension_candidates_for_trait(trait_info.def_id));
}<|fim▁hole|> }
Ok(())
}
    /// Assembles all extension candidates that a single trait can contribute:
    /// candidates from its impls, from closure "synthetic impls" (for the
    /// `Fn*` traits), from projections, and from caller where-clauses.
    /// Returns early if the trait has no item with the probed name.
    fn assemble_extension_candidates_for_trait(&mut self,
                                               trait_def_id: ast::DefId)
                                               -> Result<(),MethodError>
    {
        debug!("assemble_extension_candidates_for_trait(trait_def_id={})",
               trait_def_id.repr(self.tcx()));
        // Check whether `trait_def_id` defines a method with suitable name:
        let trait_items =
            ty::trait_items(self.tcx(), trait_def_id);
        let matching_index =
            trait_items.iter()
                       .position(|item| item.name() == self.item_name);
        let matching_index = match matching_index {
            Some(i) => i,
            None => { return Ok(()); }
        };
        let ref item = (&*trait_items)[matching_index];
        // Check whether `trait_def_id` defines a method with suitable name:
        if !self.has_applicable_self(item) {
            // Record it for diagnostics (e.g. "static method exists").
            debug!("method has inapplicable self");
            self.record_static_candidate(TraitSource(trait_def_id));
            return Ok(());
        }
        // Gather candidates from every source this trait offers.
        self.assemble_extension_candidates_for_trait_impls(trait_def_id,
                                                           item.clone(),
                                                           matching_index);
        try!(self.assemble_closure_candidates(trait_def_id,
                                              item.clone(),
                                              matching_index));
        self.assemble_projection_candidates(trait_def_id,
                                            item.clone(),
                                            matching_index);
        self.assemble_where_clause_candidates(trait_def_id,
                                              item.clone(),
                                              matching_index);
        Ok(())
    }
    /// Creates one `ExtensionImplCandidate` for every impl of
    /// `trait_def_id` that is not fast-rejected against the receiver steps.
    fn assemble_extension_candidates_for_trait_impls(&mut self,
                                                     trait_def_id: ast::DefId,
                                                     item: ty::ImplOrTraitItem<'tcx>,
                                                     item_index: usize)
    {
        let trait_def = ty::lookup_trait_def(self.tcx(), trait_def_id);
        // FIXME(arielb1): can we use for_each_relevant_impl here?
        trait_def.for_each_impl(self.tcx(), |impl_def_id| {
            debug!("assemble_extension_candidates_for_trait_impl: trait_def_id={} impl_def_id={}",
                   trait_def_id.repr(self.tcx()),
                   impl_def_id.repr(self.tcx()));
            // Cheap simplified-type comparison before doing any substitution.
            if !self.impl_can_possibly_match(impl_def_id) {
                return;
            }
            let (_, impl_substs) = self.impl_ty_and_substs(impl_def_id);
            debug!("impl_substs={}", impl_substs.repr(self.tcx()));
            let impl_trait_ref =
                ty::impl_trait_ref(self.tcx(), impl_def_id)
                .unwrap() // we know this is a trait impl
                .subst(self.tcx(), &impl_substs);
            debug!("impl_trait_ref={}", impl_trait_ref.repr(self.tcx()));
            // Determine the receiver type that the method itself expects.
            let xform_self_ty =
                self.xform_self_ty(&item,
                                   impl_trait_ref.self_ty(),
                                   impl_trait_ref.substs);
            debug!("xform_self_ty={}", xform_self_ty.repr(self.tcx()));
            self.extension_candidates.push(Candidate {
                xform_self_ty: xform_self_ty,
                item: item.clone(),
                kind: ExtensionImplCandidate(impl_def_id, impl_trait_ref, impl_substs, item_index)
            });
        });
    }
fn impl_can_possibly_match(&self, impl_def_id: ast::DefId) -> bool {
let simplified_steps = match self.opt_simplified_steps {
Some(ref simplified_steps) => simplified_steps,
None => { return true; }
};
let impl_type = ty::lookup_item_type(self.tcx(), impl_def_id);
let impl_simplified_type =
match fast_reject::simplify_type(self.tcx(), impl_type.ty, false) {
Some(simplified_type) => simplified_type,
None => { return true; }
};
simplified_steps.contains(&impl_simplified_type)
}
    /// For the `Fn`/`FnMut`/`FnOnce` lang traits only: synthesizes
    /// `ClosureCandidate`s for any closure types among the receiver steps.
    /// Errors with `ClosureAmbiguity` if a closure's kind is not yet known.
    fn assemble_closure_candidates(&mut self,
                                   trait_def_id: ast::DefId,
                                   item: ty::ImplOrTraitItem<'tcx>,
                                   item_index: usize)
                                   -> Result<(),MethodError>
    {
        // Check if this is one of the Fn,FnMut,FnOnce traits.
        let tcx = self.tcx();
        let kind = if Some(trait_def_id) == tcx.lang_items.fn_trait() {
            ty::FnClosureKind
        } else if Some(trait_def_id) == tcx.lang_items.fn_mut_trait() {
            ty::FnMutClosureKind
        } else if Some(trait_def_id) == tcx.lang_items.fn_once_trait() {
            ty::FnOnceClosureKind
        } else {
            return Ok(());
        };
        // Check if there is an unboxed-closure self-type in the list of receivers.
        // If so, add "synthetic impls".
        let steps = self.steps.clone();
        for step in &*steps {
            let closure_def_id = match step.self_ty.sty {
                ty::ty_closure(a, _) => a,
                _ => continue,
            };
            // The closure's inferred kind (Fn/FnMut/FnOnce) may not be
            // known yet; without it we cannot decide applicability.
            let closure_kinds = self.fcx.inh.closure_kinds.borrow();
            let closure_kind = match closure_kinds.get(&closure_def_id) {
                Some(&k) => k,
                None => {
                    return Err(MethodError::ClosureAmbiguity(trait_def_id));
                }
            };
            // this closure doesn't implement the right kind of `Fn` trait
            if !closure_kind.extends(kind) {
                continue;
            }
            // create some substitutions for the argument/return type;
            // for the purposes of our method lookup, we only take
            // receiver type into account, so we can just substitute
            // fresh types here to use during substitution and subtyping.
            let trait_def = ty::lookup_trait_def(self.tcx(), trait_def_id);
            let substs = self.infcx().fresh_substs_for_trait(self.span,
                                                             &trait_def.generics,
                                                             step.self_ty);
            let xform_self_ty = self.xform_self_ty(&item,
                                                   step.self_ty,
                                                   &substs);
            self.inherent_candidates.push(Candidate {
                xform_self_ty: xform_self_ty,
                item: item.clone(),
                kind: ClosureCandidate(trait_def_id, item_index)
            });
        }
        Ok(())
    }
    /// For receiver steps whose type is a projection (`<A as B>::C`),
    /// elaborates the predicates of the projection's trait and adds a
    /// `ProjectionCandidate` for each bound on `trait_def_id` whose self
    /// type can be equated with the step's type.
    fn assemble_projection_candidates(&mut self,
                                      trait_def_id: ast::DefId,
                                      item: ty::ImplOrTraitItem<'tcx>,
                                      item_index: usize)
    {
        debug!("assemble_projection_candidates(\
               trait_def_id={}, \
               item={}, \
               item_index={})",
               trait_def_id.repr(self.tcx()),
               item.repr(self.tcx()),
               item_index);
        for step in &*self.steps {
            debug!("assemble_projection_candidates: step={}",
                   step.repr(self.tcx()));
            // Only projection self-types are of interest here.
            let projection_trait_ref = match step.self_ty.sty {
                ty::ty_projection(ref data) => &data.trait_ref,
                _ => continue,
            };
            debug!("assemble_projection_candidates: projection_trait_ref={}",
                   projection_trait_ref.repr(self.tcx()));
            // Instantiate the trait's predicates with the projection's
            // substitutions and elaborate them (including supertraits).
            let trait_predicates = ty::lookup_predicates(self.tcx(),
                                                         projection_trait_ref.def_id);
            let bounds = trait_predicates.instantiate(self.tcx(), projection_trait_ref.substs);
            let predicates = bounds.predicates.into_vec();
            debug!("assemble_projection_candidates: predicates={}",
                   predicates.repr(self.tcx()));
            for poly_bound in
                traits::elaborate_predicates(self.tcx(), predicates)
                .filter_map(|p| p.to_opt_poly_trait_ref())
                .filter(|b| b.def_id() == trait_def_id)
            {
                let bound = self.erase_late_bound_regions(&poly_bound);
                debug!("assemble_projection_candidates: projection_trait_ref={} bound={}",
                       projection_trait_ref.repr(self.tcx()),
                       bound.repr(self.tcx()));
                if self.infcx().can_equate(&step.self_ty, &bound.self_ty()).is_ok() {
                    let xform_self_ty = self.xform_self_ty(&item,
                                                           bound.self_ty(),
                                                           bound.substs);
                    debug!("assemble_projection_candidates: bound={} xform_self_ty={}",
                           bound.repr(self.tcx()),
                           xform_self_ty.repr(self.tcx()));
                    self.extension_candidates.push(Candidate {
                        xform_self_ty: xform_self_ty,
                        item: item.clone(),
                        kind: ProjectionCandidate(trait_def_id, item_index)
                    });
                }
            }
        }
    }
    /// Adds a `WhereClauseCandidate` for every caller where-clause bound
    /// (after elaboration) that references `trait_def_id`.
    fn assemble_where_clause_candidates(&mut self,
                                        trait_def_id: ast::DefId,
                                        item: ty::ImplOrTraitItem<'tcx>,
                                        item_index: usize)
    {
        debug!("assemble_where_clause_candidates(trait_def_id={})",
               trait_def_id.repr(self.tcx()));
        let caller_predicates = self.fcx.inh.param_env.caller_bounds.clone();
        for poly_bound in traits::elaborate_predicates(self.tcx(), caller_predicates)
                          .filter_map(|p| p.to_opt_poly_trait_ref())
                          .filter(|b| b.def_id() == trait_def_id)
        {
            // Regions are erased during probing; see erase_late_bound_regions.
            let bound = self.erase_late_bound_regions(&poly_bound);
            let xform_self_ty = self.xform_self_ty(&item,
                                                   bound.self_ty(),
                                                   bound.substs);
            debug!("assemble_where_clause_candidates: bound={} xform_self_ty={}",
                   bound.repr(self.tcx()),
                   xform_self_ty.repr(self.tcx()));
            self.extension_candidates.push(Candidate {
                xform_self_ty: xform_self_ty,
                item: item.clone(),
                kind: WhereClauseCandidate(poly_bound, item_index)
            });
        }
    }
///////////////////////////////////////////////////////////////////////////
// THE ACTUAL SEARCH
    /// Runs the actual search. On failure, re-runs the probe against *all*
    /// traits purely to compute better diagnostics: which static candidates
    /// existed and which out-of-scope traits would have matched.
    fn pick(mut self) -> PickResult<'tcx> {
        match self.pick_core() {
            Some(r) => return r,
            None => {}
        }
        // Preserve the statics seen so far; reset() clears probe state.
        let static_candidates = mem::replace(&mut self.static_candidates, vec![]);
        // things failed, so lets look at all traits, for diagnostic purposes now:
        self.reset();
        let span = self.span;
        let tcx = self.tcx();
        try!(self.assemble_extension_candidates_for_all_traits());
        // Whatever matches now must come from a trait that was not in scope.
        let out_of_scope_traits = match self.pick_core() {
            Some(Ok(p)) => vec![p.item.container().id()],
            Some(Err(MethodError::Ambiguity(v))) => v.into_iter().map(|source| {
                match source {
                    TraitSource(id) => id,
                    ImplSource(impl_id) => {
                        match ty::trait_id_of_impl(tcx, impl_id) {
                            Some(id) => id,
                            None =>
                                tcx.sess.span_bug(span,
                                                  "found inherent method when looking at traits")
                        }
                    }
                }
            }).collect(),
            Some(Err(MethodError::NoMatch(_, others, _))) => {
                assert!(others.is_empty());
                vec![]
            }
            Some(Err(MethodError::ClosureAmbiguity(..))) => {
                // this error only occurs when assembling candidates
                tcx.sess.span_bug(span, "encountered ClosureAmbiguity from pick_core");
            }
            None => vec![],
        };
        Err(MethodError::NoMatch(static_candidates, out_of_scope_traits, self.mode))
    }
fn pick_core(&mut self) -> Option<PickResult<'tcx>> {
let steps = self.steps.clone();
// find the first step that works
steps.iter().filter_map(|step| self.pick_step(step)).next()
}
fn pick_step(&mut self, step: &CandidateStep<'tcx>) -> Option<PickResult<'tcx>> {
debug!("pick_step: step={}", step.repr(self.tcx()));
if ty::type_is_error(step.self_ty) {
return None;
}
match self.pick_by_value_method(step) {
Some(result) => return Some(result),
None => {}
}
self.pick_autorefd_method(step)
}
    /// Attempts a by-value pick at this step; skipped for unsize steps.
    fn pick_by_value_method(&mut self,
                            step: &CandidateStep<'tcx>)
                            -> Option<PickResult<'tcx>>
    {
        /*!
         * For each type `T` in the step list, this attempts to find a
         * method where the (transformed) self type is exactly `T`. We
         * do however do one transformation on the adjustment: if we
         * are passing a region pointer in, we will potentially
         * *reborrow* it to a shorter lifetime. This allows us to
         * transparently pass `&mut` pointers, in particular, without
         * consuming them for their entire lifetime.
         */
        if step.unsize {
            return None;
        }
        self.pick_method(step.self_ty).map(|r| r.map(|mut pick| {
            pick.autoderefs = step.autoderefs;
            // Insert a `&*` or `&mut *` if this is a reference type:
            if let ty::ty_rptr(_, mt) = step.self_ty.sty {
                pick.autoderefs += 1;
                pick.autoref = Some(mt.mutbl);
            }
            pick
        }))
    }
    /// Attempts a pick after autoref'ing the receiver, trying `&T` before
    /// `&mut T`. Records autoderef/autoref/unsize info on the pick.
    fn pick_autorefd_method(&mut self,
                            step: &CandidateStep<'tcx>)
                            -> Option<PickResult<'tcx>>
    {
        let tcx = self.tcx();
        // In general, during probing we erase regions. See
        // `impl_self_ty()` for an explanation.
        let region = tcx.mk_region(ty::ReStatic);
        // Search through mutabilities in order to find one where pick works:
        [ast::MutImmutable, ast::MutMutable].iter().filter_map(|&m| {
            let autoref_ty = ty::mk_rptr(tcx, region, ty::mt {
                ty: step.self_ty,
                mutbl: m
            });
            self.pick_method(autoref_ty).map(|r| r.map(|mut pick| {
                pick.autoderefs = step.autoderefs;
                pick.autoref = Some(m);
                pick.unsize = if step.unsize {
                    Some(step.self_ty)
                } else {
                    None
                };
                pick
            }))
        }).nth(0)
    }
    /// Searches the candidate lists for `self_ty`; inherent candidates
    /// always take priority over extension candidates.
    fn pick_method(&mut self, self_ty: Ty<'tcx>) -> Option<PickResult<'tcx>> {
        debug!("pick_method(self_ty={})", self.infcx().ty_to_string(self_ty));
        debug!("searching inherent candidates");
        match self.consider_candidates(self_ty, &self.inherent_candidates) {
            None => {}
            Some(pick) => {
                return Some(pick);
            }
        }
        debug!("searching extension candidates");
        self.consider_candidates(self_ty, &self.extension_candidates)
    }
    /// Filters `probes` down to the applicable ones for `self_ty` and turns
    /// them into a single pick, an ambiguity error, or `None`.
    fn consider_candidates(&self,
                           self_ty: Ty<'tcx>,
                           probes: &[Candidate<'tcx>])
                           -> Option<PickResult<'tcx>> {
        let mut applicable_candidates: Vec<_> =
            probes.iter()
                  .filter(|&probe| self.consider_probe(self_ty, probe))
                  .collect();
        debug!("applicable_candidates: {}", applicable_candidates.repr(self.tcx()));
        // First chance: multiple impls of the same trait can be collapsed
        // into one deferred trait pick (see collapse_candidates_to_trait_pick).
        if applicable_candidates.len() > 1 {
            match self.collapse_candidates_to_trait_pick(&applicable_candidates[..]) {
                Some(pick) => { return Some(Ok(pick)); }
                None => { }
            }
        }
        // Still more than one after collapsing: report a true ambiguity.
        if applicable_candidates.len() > 1 {
            let sources = probes.iter().map(|p| p.to_source()).collect();
            return Some(Err(MethodError::Ambiguity(sources)));
        }
        applicable_candidates.pop().map(|probe| {
            let pick = probe.to_unadjusted_pick();
            Ok(pick)
        })
    }
    /// Decides whether a single candidate is applicable to `self_ty`:
    /// the self types must relate, and for impl candidates the impl's
    /// where-clauses must at least plausibly hold. Runs inside an
    /// inference probe so nothing is committed.
    fn consider_probe(&self, self_ty: Ty<'tcx>, probe: &Candidate<'tcx>) -> bool {
        debug!("consider_probe: self_ty={} probe={}",
               self_ty.repr(self.tcx()),
               probe.repr(self.tcx()));
        self.infcx().probe(|_| {
            // First check that the self type can be related.
            match self.make_sub_ty(self_ty, probe.xform_self_ty) {
                Ok(()) => { }
                Err(_) => {
                    debug!("--> cannot relate self-types");
                    return false;
                }
            }
            // If so, impls may carry other conditions (e.g., where
            // clauses) that must be considered. Make sure that those
            // match as well (or at least may match, sometimes we
            // don't have enough information to fully evaluate).
            match probe.kind {
                InherentImplCandidate(impl_def_id, ref substs) |
                ExtensionImplCandidate(impl_def_id, _, ref substs, _) => {
                    let selcx = &mut traits::SelectionContext::new(self.infcx(), self.fcx);
                    let cause = traits::ObligationCause::misc(self.span, self.fcx.body_id);
                    // Check whether the impl imposes obligations we have to worry about.
                    let impl_bounds = ty::lookup_predicates(self.tcx(), impl_def_id);
                    let impl_bounds = impl_bounds.instantiate(self.tcx(), substs);
                    let traits::Normalized { value: impl_bounds,
                                             obligations: norm_obligations } =
                        traits::normalize(selcx, cause.clone(), &impl_bounds);
                    // Convert the bounds into obligations.
                    let obligations =
                        traits::predicates_for_generics(self.tcx(),
                                                        cause.clone(),
                                                        &impl_bounds);
                    debug!("impl_obligations={}", obligations.repr(self.tcx()));
                    // Evaluate those obligations to see if they might possibly hold.
                    obligations.all(|o| selcx.evaluate_obligation(o)) &&
                        norm_obligations.iter().all(|o| selcx.evaluate_obligation(o))
                }
                ProjectionCandidate(..) |
                ObjectCandidate(..) |
                ClosureCandidate(..) |
                WhereClauseCandidate(..) => {
                    // These have no additional conditions to check.
                    true
                }
            }
        })
    }
    /// Sometimes we get in a situation where we have multiple probes that are all impls of the
    /// same trait, but we don't know which impl to use. In this case, since in all cases the
    /// external interface of the method can be determined from the trait, it's ok not to decide.
    /// We can basically just collapse all of the probes for various impls into one where-clause
    /// probe. This will result in a pending obligation so when more type-info is available we can
    /// make the final decision.
    ///
    /// Example (`src/test/run-pass/method-two-trait-defer-resolution-1.rs`):
    ///
    /// ```
    /// trait Foo { ... }
    /// impl Foo for Vec<int> { ... }
    /// impl Foo for Vec<usize> { ... }
    /// ```
    ///
    /// Now imagine the receiver is `Vec<_>`. It doesn't really matter at this time which impl we
    /// use, so it's ok to just commit to "using the method from the trait Foo".
    fn collapse_candidates_to_trait_pick(&self,
                                         probes: &[&Candidate<'tcx>])
                                         -> Option<Pick<'tcx>> {
        // Do all probes correspond to the same trait?
        let trait_data = match probes[0].to_trait_data() {
            Some(data) => data,
            None => return None,
        };
        if probes[1..].iter().any(|p| p.to_trait_data() != Some(trait_data)) {
            return None;
        }
        // If so, just use this trait and call it a day.
        let (trait_def_id, item_num) = trait_data;
        let item = probes[0].item.clone();
        // No adjustments here: the caller applies autoderef/autoref later.
        Some(Pick {
            item: item,
            kind: TraitPick(trait_def_id, item_num),
            autoderefs: 0,
            autoref: None,
            unsize: None
        })
    }
///////////////////////////////////////////////////////////////////////////
// MISCELLANY
    /// Checks `sub <: sup` in the inference context (no actual span info;
    /// probing uses a dummy span).
    fn make_sub_ty(&self, sub: Ty<'tcx>, sup: Ty<'tcx>) -> infer::UnitResult<'tcx> {
        self.infcx().sub_types(false, infer::Misc(DUMMY_SP), sub, sup)
    }
    /// Whether the item can be called with the current probe mode:
    /// instance methods only in method-call mode, statics/consts only in
    /// path mode.
    fn has_applicable_self(&self, item: &ty::ImplOrTraitItem) -> bool {
        // "fast track" -- check for usage of sugar
        match *item {
            ty::ImplOrTraitItem::MethodTraitItem(ref method) =>
                match method.explicit_self {
                    ty::StaticExplicitSelfCategory => self.mode == Mode::Path,
                    ty::ByValueExplicitSelfCategory |
                    ty::ByReferenceExplicitSelfCategory(..) |
                    ty::ByBoxExplicitSelfCategory => true,
                },
            ty::ImplOrTraitItem::ConstTraitItem(..) => self.mode == Mode::Path,
            _ => false,
        }
        // FIXME -- check for types that deref to `Self`,
        // like `Rc<Self>` and so on.
        //
        // Note also that the current code will break if this type
        // includes any of the type parameters defined on the method
        // -- but this could be overcome.
    }
    /// Remembers a source that only offered a static (non-`self`) item,
    /// for use in "maybe you meant the static method" diagnostics.
    fn record_static_candidate(&mut self, source: CandidateSource) {
        self.static_candidates.push(source);
    }
fn xform_self_ty(&self,
item: &ty::ImplOrTraitItem<'tcx>,
impl_ty: Ty<'tcx>,
substs: &subst::Substs<'tcx>)
-> Ty<'tcx>
{
match item.as_opt_method() {
Some(ref method) => self.xform_method_self_ty(method, impl_ty,
substs),
None => impl_ty,
}
}
    /// Instantiates a method's declared self type with `substs`, creating
    /// fresh type variables (and `'static` regions) for any method-level
    /// generics that the provided substitutions do not yet cover.
    fn xform_method_self_ty(&self,
                            method: &Rc<ty::Method<'tcx>>,
                            impl_ty: Ty<'tcx>,
                            substs: &subst::Substs<'tcx>)
                            -> Ty<'tcx>
    {
        debug!("xform_self_ty(impl_ty={}, self_ty={}, substs={})",
               impl_ty.repr(self.tcx()),
               method.fty.sig.0.inputs.get(0).repr(self.tcx()),
               substs.repr(self.tcx()));
        assert!(!substs.has_escaping_regions());
        // It is possible for type parameters or early-bound lifetimes
        // to appear in the signature of `self`. The substitutions we
        // are given do not include type/lifetime parameters for the
        // method yet. So create fresh variables here for those too,
        // if there are any.
        assert_eq!(substs.types.len(subst::FnSpace), 0);
        assert_eq!(substs.regions().len(subst::FnSpace), 0);
        // In path mode there is no receiver expression to transform.
        if self.mode == Mode::Path {
            return impl_ty;
        }
        // `placeholder` outlives the `substs` reborrow below.
        let placeholder;
        let mut substs = substs;
        if
            !method.generics.types.is_empty_in(subst::FnSpace) ||
            !method.generics.regions.is_empty_in(subst::FnSpace)
        {
            let method_types =
                self.infcx().next_ty_vars(
                    method.generics.types.len(subst::FnSpace));
            // In general, during probe we erase regions. See
            // `impl_self_ty()` for an explanation.
            let method_regions =
                method.generics.regions.get_slice(subst::FnSpace)
                .iter()
                .map(|_| ty::ReStatic)
                .collect();
            placeholder = (*substs).clone().with_method(method_types, method_regions);
            substs = &placeholder;
        }
        // Erase any late-bound regions from the method and substitute
        // in the values from the substitution.
        let xform_self_ty = method.fty.sig.input(0);
        let xform_self_ty = self.erase_late_bound_regions(&xform_self_ty);
        let xform_self_ty = xform_self_ty.subst(self.tcx(), substs);
        xform_self_ty
    }
/// Get the type of an impl and generate substitutions with placeholders.
    /// Get the type of an impl and generate substitutions with placeholders:
    /// fresh type variables for type params and `'static` for regions.
    fn impl_ty_and_substs(&self,
                          impl_def_id: ast::DefId)
                          -> (Ty<'tcx>, subst::Substs<'tcx>)
    {
        let impl_pty = ty::lookup_item_type(self.tcx(), impl_def_id);
        let type_vars =
            impl_pty.generics.types.map(
                |_| self.infcx().next_ty_var());
        let region_placeholders =
            impl_pty.generics.regions.map(
                |_| ty::ReStatic); // see erase_late_bound_regions() for an expl of why 'static
        let substs = subst::Substs::new(type_vars, region_placeholders);
        (impl_pty.ty, substs)
    }
    /// Replace late-bound-regions bound by `value` with `'static` using
    /// `ty::erase_late_bound_regions`.
    ///
    /// This is only a reasonable thing to do during the *probe* phase, not the *confirm* phase, of
    /// method matching. It is reasonable during the probe phase because we don't consider region
    /// relationships at all. Therefore, we can just replace all the region variables with 'static
    /// rather than creating fresh region variables. This is nice for two reasons:
    ///
    /// 1. Because the numbers of the region variables would otherwise be fairly unique to this
    ///    particular method call, it winds up creating fewer types overall, which helps for memory
    ///    usage. (Admittedly, this is a rather small effect, though measureable.)
    ///
    /// 2. It makes it easier to deal with higher-ranked trait bounds, because we can replace any
    ///    late-bound regions with 'static. Otherwise, if we were going to replace late-bound
    ///    regions with actual region variables as is proper, we'd have to ensure that the same
    ///    region got replaced with the same variable, which requires a bit more coordination
    ///    and/or tracking the substitution and
    ///    so forth.
    fn erase_late_bound_regions<T>(&self, value: &ty::Binder<T>) -> T
        where T : TypeFoldable<'tcx> + Repr<'tcx>
    {
        ty::erase_late_bound_regions(self.tcx(), value)
    }
}
}
/// Looks up the item named `item_name` among the items of `impl_def_id`,
/// if any. Panics if the impl is not present in the `impl_items` table.
fn impl_item<'tcx>(tcx: &ty::ctxt<'tcx>,
                   impl_def_id: ast::DefId,
                   item_name: ast::Name)
                   -> Option<ty::ImplOrTraitItem<'tcx>>
{
    let impl_items = tcx.impl_items.borrow();
    let impl_items = impl_items.get(&impl_def_id).unwrap();
    for &did in impl_items.iter() {
        let candidate = ty::impl_or_trait_item(tcx, did.def_id());
        if candidate.name() == item_name {
            return Some(candidate);
        }
    }
    None
}
/// Find item with name `item_name` defined in `trait_def_id` and return it,
/// along with its index (or `None`, if no such item).
fn trait_item<'tcx>(tcx: &ty::ctxt<'tcx>,
                    trait_def_id: ast::DefId,
                    item_name: ast::Name)
                    -> Option<(usize, ty::ImplOrTraitItem<'tcx>)>
{
    let trait_items = ty::trait_items(tcx, trait_def_id);
    debug!("trait_method; items: {:?}", trait_items);
    // The index returned here is the item's position within the trait,
    // which callers store in candidates as `item_num`/`item_index`.
    trait_items
        .iter()
        .enumerate()
        .find(|&(_, ref item)| item.name() == item_name)
        .map(|(num, ref item)| (num, (*item).clone()))
}
impl<'tcx> Candidate<'tcx> {
    /// Converts this candidate into a `Pick` with no autoderef/autoref
    /// adjustments; the caller fills those in afterwards.
    fn to_unadjusted_pick(&self) -> Pick<'tcx> {
        Pick {
            item: self.item.clone(),
            kind: match self.kind {
                InherentImplCandidate(def_id, _) => {
                    InherentImplPick(def_id)
                }
                ObjectCandidate(def_id, item_num, real_index) => {
                    ObjectPick(def_id, item_num, real_index)
                }
                ExtensionImplCandidate(def_id, _, _, index) => {
                    ExtensionImplPick(def_id, index)
                }
                ClosureCandidate(trait_def_id, index) => {
                    TraitPick(trait_def_id, index)
                }
                WhereClauseCandidate(ref trait_ref, index) => {
                    // Only trait derived from where-clauses should
                    // appear here, so they should not contain any
                    // inference variables or other artifacts. This
                    // means they are safe to put into the
                    // `WhereClausePick`.
                    assert!(trait_ref.substs().types.iter().all(|&t| !ty::type_needs_infer(t)));
                    WhereClausePick((*trait_ref).clone(), index)
                }
                ProjectionCandidate(def_id, index) => {
                    TraitPick(def_id, index)
                }
            },
            autoderefs: 0,
            autoref: None,
            unsize: None
        }
    }
    /// The impl or trait this candidate originated from, for diagnostics.
    fn to_source(&self) -> CandidateSource {
        match self.kind {
            InherentImplCandidate(def_id, _) => ImplSource(def_id),
            ObjectCandidate(def_id, _, _) => TraitSource(def_id),
            ExtensionImplCandidate(def_id, _, _, _) => ImplSource(def_id),
            ClosureCandidate(trait_def_id, _) => TraitSource(trait_def_id),
            WhereClauseCandidate(ref trait_ref, _) => TraitSource(trait_ref.def_id()),
            ProjectionCandidate(trait_def_id, _) => TraitSource(trait_def_id),
        }
    }
    /// The `(trait, item index)` pair for trait-derived candidates, or
    /// `None` for inherent impls. Used by
    /// `collapse_candidates_to_trait_pick` to detect same-trait groups.
    fn to_trait_data(&self) -> Option<(ast::DefId, ItemIndex)> {
        match self.kind {
            InherentImplCandidate(..) => {
                None
            }
            ObjectCandidate(trait_def_id, item_num, _) => {
                Some((trait_def_id, item_num))
            }
            ClosureCandidate(trait_def_id, item_num) => {
                Some((trait_def_id, item_num))
            }
            ExtensionImplCandidate(_, ref trait_ref, _, item_num) => {
                Some((trait_ref.def_id, item_num))
            }
            WhereClauseCandidate(ref trait_ref, item_num) => {
                Some((trait_ref.def_id(), item_num))
            }
            ProjectionCandidate(trait_def_id, item_num) => {
                Some((trait_def_id, item_num))
            }
        }
    }
}
/// Debug pretty-printing for `Candidate`.
impl<'tcx> Repr<'tcx> for Candidate<'tcx> {
    fn repr(&self, tcx: &ty::ctxt<'tcx>) -> String {
        format!("Candidate(xform_self_ty={}, kind={})",
                self.xform_self_ty.repr(tcx),
                self.kind.repr(tcx))
    }
}
/// Debug pretty-printing for `CandidateKind`, one arm per variant.
impl<'tcx> Repr<'tcx> for CandidateKind<'tcx> {
    fn repr(&self, tcx: &ty::ctxt<'tcx>) -> String {
        match *self {
            InherentImplCandidate(ref a, ref b) =>
                format!("InherentImplCandidate({},{})", a.repr(tcx), b.repr(tcx)),
            ObjectCandidate(a, b, c) =>
                format!("ObjectCandidate({},{},{})", a.repr(tcx), b, c),
            ExtensionImplCandidate(ref a, ref b, ref c, ref d) =>
                format!("ExtensionImplCandidate({},{},{},{})", a.repr(tcx), b.repr(tcx),
                        c.repr(tcx), d),
            ClosureCandidate(ref a, ref b) =>
                format!("ClosureCandidate({},{})", a.repr(tcx), b),
            WhereClauseCandidate(ref a, ref b) =>
                format!("WhereClauseCandidate({},{})", a.repr(tcx), b),
            ProjectionCandidate(ref a, ref b) =>
                format!("ProjectionCandidate({},{})", a.repr(tcx), b),
        }
    }
}
/// Debug pretty-printing for `CandidateStep`.
impl<'tcx> Repr<'tcx> for CandidateStep<'tcx> {
    fn repr(&self, tcx: &ty::ctxt<'tcx>) -> String {
        format!("CandidateStep({}, autoderefs={}, unsize={})",
                self.self_ty.repr(tcx),
                self.autoderefs,
                self.unsize)
    }
}
/// Debug pretty-printing for `PickKind`; the type context is unused, the
/// value is formatted with `{:?}` directly.
impl<'tcx> Repr<'tcx> for PickKind<'tcx> {
    fn repr(&self, _tcx: &ty::ctxt) -> String {
        format!("{:?}", self)
    }
}
impl<'tcx> Repr<'tcx> for Pick<'tcx> {
fn repr(&self, tcx: &ty::ctxt<'tcx>) -> String {
format!("Pick(item={}, autoderefs={},
autoref={}, unsize={}, kind={:?})",
self.item.repr(tcx),
self.autoderefs,
self.autoref.repr(tcx),
self.unsize.repr(tcx),
self.kind)
}
}<|fim▁end|> | |
/*jshint expr:true */
describe("Services: Core System Messages", function() {

  // Load the module under test so the injector can resolve
  // getCoreSystemMessages below. This line replaces a stray
  // fill-in-the-middle placeholder token found here.
  // TODO(review): confirm the module name against the service source.
  beforeEach(module("risevision.core.systemmessages"));

  beforeEach(module(function ($provide) {
    //stub services
    $provide.service("$q", function() {return Q;});
    $provide.value("userState", {
      isRiseVisionUser: function () {return true; }
    });
  }));

  it("should exist", function() {
    inject(function(getCoreSystemMessages) {
      expect(getCoreSystemMessages).be.defined;
    });
  });
});
# Copyright (C) 2007 LibreSoft
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Authors :
# Carlos Garcia Campos <[email protected]>
import re
import time
import datetime
from Parser import Parser
from Repository import Commit, Action, Person
from utils import printout, printdbg
class GitParser(Parser):
    class GitCommit:
        """A parsed commit plus its parent revision hashes."""
        def __init__(self, commit, parents):
            self.commit = commit
            self.parents = parents
            # Pending svn tag carried over from the GNOME svn-to-git
            # migration; set and cleared while parsing log lines.
            self.svn_tag = None
        def is_my_child(self, git_commit):
            # True when this commit's revision appears among git_commit's parents.
            return git_commit.parents and self.commit.revision in git_commit.parents
    class GitBranch:
        """A branch being tracked during log parsing: its type, name and tail."""
        # Branch kinds: remote-tracking, local, or the git stash.
        (REMOTE, LOCAL, STASH) = range(3)
        def __init__(self, type, name, tail):
            self.type = type
            self.name = name
            self.set_tail(tail)
        def is_my_parent(self, git_commit):
            # True when git_commit is a direct parent of this branch's tail.
            return git_commit.is_my_child(self.tail)
        def is_stash(self):
            return self.type == self.STASH
        def set_tail(self, tail):
            # The tail is the oldest commit seen so far on this branch;
            # tag that commit with the branch name as a side effect.
            self.tail = tail
            self.tail.commit.branch = self.name
patterns = {}
# commit 801c1b2511957ea99308bf0733e695cc78cd4a31 481e028bba471b788bddc53e0b925256c4585295
patterns['commit'] = re.compile("^commit[ \t]+([^ ]+)( ([^\(]+))?( \((.*)\))?$")
# Author: Santiago Duenas <[email protected]>
patterns['author'] = re.compile("^Author:[ \t]+(.*)[ \t]+<(.*)>$")
# AuthorDate: Wed Apr 16 18:44:59 2014 +0200
patterns['author_date'] = re.compile(
"^AuthorDate: (.* [0-9]+ [0-9]+:[0-9]+:[0-9]+ [0-9][0-9][0-9][0-9]) ([+-][0-9][0-9][0-9][0-9])$")
# Commit: Santiago Duenas <[email protected]>
patterns['committer'] = re.compile("^Commit:[ \t]+(.*)[ \t]+<(.*)>$")
# CommitDate: Wed Apr 16 18:44:59 2014 +0200
patterns['date'] = re.compile(
"^CommitDate: (.* [0-9]+ [0-9]+:[0-9]+:[0-9]+ [0-9][0-9][0-9][0-9]) ([+-][0-9][0-9][0-9][0-9])$")
patterns['file'] = re.compile("^([MAD]+)[ \t]+(.*)$")
patterns['file-moved'] = re.compile("^([RC])[0-9]+[ \t]+(.*)[ \t]+(.*)$")
patterns['branch'] = re.compile("refs/remotes/origin/([^,]*)")
patterns['local-branch'] = re.compile("refs/heads/([^,]*)")
patterns['tag'] = re.compile("tag: refs/tags/([^,]*)")
patterns['stash'] = re.compile("refs/stash")
patterns['ignore'] = [re.compile("^Merge: .*$")]
patterns['svn-tag'] = re.compile("^svn path=/tags/(.*)/?; revision=([0-9]+)$")
    def __init__(self):
        """Initialize the parser with empty branch-tracking state."""
        Parser.__init__(self)
        # Whether the repository is hosted on git.gnome.org (set later by
        # set_repository); enables the svn-tag workaround.
        self.is_gnome = None
        # Parser context
        self.commit = None
        self.branch = None
        self.branches = []
def set_repository(self, repo, uri):
Parser.set_repository(self, repo, uri)
self.is_gnome = re.search("^[a-z]+://(.*@)?git\.gnome\.org/.*$", repo.get_uri()) is not None
    def flush(self):
        """Emit the still-pending tail commit, if any, and drop branch state."""
        if self.branches:
            self.handler.commit(self.branch.tail.commit)
        # NOTE(review): branches is reset to None (not []), so the parser
        # cannot be reused after flush without reinitialization.
        self.branch = None
        self.branches = None
def _parse_line(self, line):
if line is None or line == '':
return
# Ignore
for patt in self.patterns['ignore']:
if patt.match(line):
return
# Commit
match = self.patterns['commit'].match(line)
if match:
if self.commit is not None and self.branch is not None:
if self.branch.tail.svn_tag is None: # Skip commits on svn tags
self.handler.commit(self.branch.tail.commit)
self.commit = Commit()
self.commit.revision = match.group(1)
parents = match.group(3)
if parents:
parents = parents.split()
self.commit.parents = parents
git_commit = self.GitCommit(self.commit, parents)
decorate = match.group(5)
branch = None
if decorate:
# Remote branch
m = re.search(self.patterns['branch'], decorate)
if m:
branch = self.GitBranch(self.GitBranch.REMOTE, m.group(1), git_commit)
printdbg("Branch '%s' head at acommit %s", (branch.name, self.commit.revision))
else:
# Local Branch
m = re.search(self.patterns['local-branch'], decorate)
if m:
branch = self.GitBranch(self.GitBranch.LOCAL, m.group(1), git_commit)
printdbg("Commit %s on local branch '%s'", (self.commit.revision, branch.name))
# If local branch was merged we just ignore this decoration
if self.branch and self.branch.is_my_parent(git_commit):
printdbg("Local branch '%s' was merged", (branch.name,))
branch = None
else:
# Stash
m = re.search(self.patterns['stash'], decorate)
if m:
branch = self.GitBranch(self.GitBranch.STASH, "stash", git_commit)
printdbg("Commit %s on stash", (self.commit.revision,))
# Tag
m = re.search(self.patterns['tag'], decorate)
if m:
self.commit.tags = [m.group(1)]
printdbg("Commit %s tagged as '%s'", (self.commit.revision, self.commit.tags[0]))
if not branch and not self.branch:
branch = self.GitBranch(self.GitBranch.LOCAL, "(no-branch)", git_commit)
printdbg("Commit %s on unknown local branch '%s'", (self.commit.revision, branch.name))
# This part of code looks wired at first time so here is a small description what it does:
#
# * self.branch is the branch to which the last inspected commit belonged to
# * branch is the branch of the current parsed commit
#
# This check is only to find branches which are fully merged into a already analyzed branch
#
# For more detailed information see https://github.com/MetricsGrimoire/CVSAnalY/issues/64
if branch is not None and self.branch is not None:
# Detect empty branches.
# Ideally, the head of a branch can't have children.
# When this happens is because the branch is empty, so we just ignore such branch.
if self.branch.is_my_parent(git_commit):
printout(
"Info: Branch '%s' will be ignored, because it was already merged in an active one.",
(branch.name,)
)
branch = None
if len(self.branches) >= 2:
# If current commit is the start point of a new branch
# we have to look at all the current branches since
# we haven't inserted the new branch yet.
# If not, look at all other branches excluding the current one
for i, b in enumerate(self.branches):
if i == 0 and branch is None:
continue
if b.is_my_parent(git_commit):
# We assume current branch is always the last one
# AFAIK there's no way to make sure this is right
printdbg("Start point of branch '%s' at commit %s",
(self.branches[0].name, self.commit.revision))
self.branches.pop(0)
self.branch = b
if self.branch and self.branch.tail.svn_tag is not None and self.branch.is_my_parent(git_commit):
# There's a pending tag in previous commit
pending_tag = self.branch.tail.svn_tag
printdbg("Move pending tag '%s' from previous commit %s to current %s", (pending_tag,
self.branch.tail.commit.revision,
self.commit.revision))
if self.commit.tags and pending_tag not in self.commit.tags:
self.commit.tags.append(pending_tag)
else:
self.commit.tags = [pending_tag]
self.branch.tail.svn_tag = None
if branch is not None:
self.branch = branch
# Insert master always at the end
if branch.name == 'master':
self.branches.append(self.branch)
else:
self.branches.insert(0, self.branch)
else:
if self.branch is not None:
self.branch.set_tail(git_commit)
return
# Committer
match = self.patterns['committer'].match(line)
if match:
self.commit.committer = Person()
self.commit.committer.name = match.group(1)
self.commit.committer.email = match.group(2)
self.handler.committer(self.commit.committer)
return
# Author
match = self.patterns['author'].match(line)
if match:
self.commit.author = Person()
self.commit.author.name = match.group(1)
self.commit.author.email = match.group(2)
self.handler.author(self.commit.author)
return
# Commit date
match = self.patterns['date'].match(line)
if match:
self.commit.date = datetime.datetime(
*(time.strptime(match.group(1).strip(" "), "%a %b %d %H:%M:%S %Y")[0:6]))
# datetime.datetime.strptime not supported by Python2.4
#self.commit.date = datetime.datetime.strptime (match.group (1).strip (" "), "%a %b %d %H:%M:%S %Y")
# match.group(2) represents the timezone. E.g. -0300, +0200, +0430 (Afghanistan)
# This string will be parsed to int and recalculated into seconds (60 * 60)
self.commit.date_tz = (((int(match.group(2))) * 60 * 60) / 100)
return
# Author date
match = self.patterns['author_date'].match(line)
if match:
self.commit.author_date = datetime.datetime(
*(time.strptime(match.group(1).strip(" "), "%a %b %d %H:%M:%S %Y")[0:6]))
# datetime.datetime.strptime not supported by Python2.4
#self.commit.author_date = datetime.datetime.strptime (match.group (1).strip (" "), "%a %b %d %H:%M:%S %Y")
# match.group(2) represents the timezone. E.g. -0300, +0200, +0430 (Afghanistan)
# This string will be parsed to int and recalculated into seconds (60 * 60)
self.commit.author_date_tz = (((int(match.group(2))) * 60 * 60) / 100)
return
# File
match = self.patterns['file'].match(line)
if match:
action = Action()
type = match.group(1)
if len(type) > 1:
# merge actions
if 'M' in type:
type = 'M'
else:
# ignore merge actions without 'M'
return
action.type = type
action.f1 = match.group(2)
self.commit.actions.append(action)
self.handler.file(action.f1)
return
# File moved/copied
match = self.patterns['file-moved'].match(line)
if match:
action = Action()
type = match.group(1)
if type == 'R':
action.type = 'V'
else:
action.type = type
action.f1 = match.group(3)
action.f2 = match.group(2)
action.rev = self.commit.revision
self.commit.actions.append(action)
self.handler.file(action.f1)
return
# This is a workaround for a bug in the GNOME Git migration
# There are commits on tags not correctly detected like this one:
# http://git.gnome.org/cgit/evolution/commit/?id=b8e52acac2b9fc5414a7795a73c74f7ee4eeb71f
# We want to ignore commits on tags since it doesn't make any sense in Git
if self.is_gnome:
match = self.patterns['svn-tag'].match(line.strip())
if match:
printout("Warning: detected a commit on a svn tag: %s", (match.group(0),))
tag = match.group(1)
if self.commit.tags and tag in self.commit.tags:
# The commit will be ignored, so move the tag
# to the next (previous in history) commit
self.branch.tail.svn_tag = tag
# Message
self.commit.message += line + '\n'
assert True, "Not match for line %s" % (line)<|fim▁end|> | |
<|file_name|>tokeniser_tests.py<|end_file_name|><|fim▁begin|>from hamcrest import assert_that, contains, has_properties
from mammoth.styles.parser.tokeniser import tokenise
def test_unknown_tokens_are_tokenised():
    """Characters with no recognised category become "unknown" tokens."""
    expected = is_token("unknown", "~")
    assert_tokens("~", expected)


def test_empty_string_is_tokenised_to_end_of_file_token():
    """Tokenising "" yields nothing but the implicit end-of-file token."""
    assert_tokens("")


def test_whitespace_is_tokenised():
    """A run of spaces and tabs collapses into one "whitespace" token."""
    blanks = " \t\t "
    assert_tokens(blanks, is_token("whitespace", blanks))


def test_identifiers_are_tokenised():
    """A bare word is read as a single "identifier" token."""
    word = "Overture"
    assert_tokens(word, is_token("identifier", word))


def test_escape_sequences_in_identifiers_are_tokenised():
    """Backslash escapes are kept verbatim inside identifier tokens."""
    escaped = r"\:"
    assert_tokens(escaped, is_token("identifier", escaped))


def test_integers_are_tokenised():
    """A run of digits is read as a single "integer" token."""
    assert_tokens("123", is_token("integer", "123"))


def test_strings_are_tokenised():
    """Single-quoted text is read as one "string" token, quotes included."""
    quoted = "'Tristan'"
    assert_tokens(quoted, is_token("string", quoted))


def test_escape_sequences_in_strings_are_tokenised():
    """Escaped quotes do not terminate a string token."""
    quoted = r"'Tristan\''"
    assert_tokens(quoted, is_token("string", quoted))


def test_unterminated_strings_are_tokenised():
    """A string missing its closing quote becomes an "unterminated string" token."""
    unclosed = "'Tristan"
    assert_tokens(unclosed, is_token("unterminated string", unclosed))
def test_arrows_are_tokenised():
    """Consecutive "=>" arrows are split into separate "symbol" tokens."""
    arrow = is_token("symbol", "=>")
    assert_tokens("=>=>", arrow, arrow)


def test_dots_are_tokenised():
    """A "." is read as a "symbol" token."""
    assert_tokens(".", is_token("symbol", "."))


def test_colons_are_tokenised():
    """Each ":" is its own "symbol" token."""
    colon = is_token("symbol", ":")
    assert_tokens("::", colon, colon)


def test_greater_thans_are_tokenised():
    """Each ">" is its own "symbol" token."""
    gt = is_token("symbol", ">")
    assert_tokens(">>", gt, gt)


def test_equals_are_tokenised():
    """Each "=" is its own "symbol" token (not merged into "==")."""
    eq = is_token("symbol", "=")
    assert_tokens("==", eq, eq)


def test_open_parens_are_tokenised():
    """Each "(" is its own "symbol" token."""
    paren = is_token("symbol", "(")
    assert_tokens("((", paren, paren)


def test_close_parens_are_tokenised():
    """Each ")" is its own "symbol" token."""
    paren = is_token("symbol", ")")
    assert_tokens("))", paren, paren)


def test_open_square_brackets_are_tokenised():
    """Each "[" is its own "symbol" token."""
    bracket = is_token("symbol", "[")
    assert_tokens("[[", bracket, bracket)


def test_choices_are_tokenised():
    """Each "|" is its own "symbol" token."""
    pipe = is_token("symbol", "|")
    assert_tokens("||", pipe, pipe)


def test_bangs_are_tokenised():
    """Each "!" is its own "symbol" token."""
    bang = is_token("symbol", "!")
    assert_tokens("!!", bang, bang)
def test_can_tokenise_multiple_tokens():
    """A phrase tokenises into identifiers separated by whitespace tokens."""
    space = is_token("whitespace", " ")
    assert_tokens(
        "The Magic Position",
        is_token("identifier", "The"),
        space,
        is_token("identifier", "Magic"),
        space,
        is_token("identifier", "Position"),
    )
def assert_tokens(string, *expected):
    """Tokenise *string* and assert the stream matches *expected* matchers,
    followed by the implicit end-of-file token the tokeniser always emits."""
    matchers = list(expected) + [is_token("end", "")]
    assert_that(tokenise(string), contains(*matchers))
def is_token(token_type, value):
    """Return a hamcrest matcher for a token with the given type and value."""
    return has_properties(type=token_type, value=value)
def test_close_square_brackets_are_tokenised():
    """Each "]" is its own "symbol" token."""
    bracket = is_token("symbol", "]")
    assert_tokens("]]", bracket, bracket)
<|file_name|>series.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals, division, absolute_import
import copy
import datetime
from math import ceil
from flask import jsonify
from flask import request
from flask_restplus import inputs
from sqlalchemy.orm.exc import NoResultFound
from flexget.api import api, APIResource, ApiClient
from flexget.event import fire_event
from flexget.plugin import PluginError
from flexget.plugins.filter import series
# All /series endpoints are registered under this flask-restplus namespace.
series_api = api.namespace('series', description='Flexget Series operations')
# Standard error payload ({'status': ..., 'message': ...}) used by every
# error response in this namespace.
default_error_schema = {
    'type': 'object',
    'properties': {
        'status': {'type': 'string'},
        'message': {'type': 'string'}
    }
}
default_error_schema = api.schema('default_error_schema', default_error_schema)
# Empty-object schema for responses that carry no body (e.g. DELETE -> {}).
empty_response = api.schema('empty', {'type': 'object'})
# A show's "begin" episode: the first accepted episode (see POST /series/).
begin_object = {
    'type': 'object',
    'properties': {
        'episode_id': {'type': 'integer'},
        'episode_identifier': {'type': 'string'}
    }
}
# Serialised release, as produced by get_release_details().
# NOTE(review): 'release_downloaded' is declared as a string here, but
# get_release_details() emits Release.downloaded unchanged -- confirm the
# intended type.
release_object = {
    'type': 'object',
    'properties': {
        'release_id': {'type': 'integer'},
        'release_title': {'type': 'string'},
        'release_downloaded': {'type': 'string'},
        'release_quality': {'type': 'string'},
        'release_proper_count': {'type': 'integer'},
        'release_first_seen': {'type': 'string', 'format': 'date-time'},
        'release_episode_id': {'type': 'integer'}
    }
}
# Response schema for a single release of an episode.
release_schema = {
    'type': 'object',
    'properties': {
        'show': {'type': 'string'},
        'show_id': {'type': 'integer'},
        'episode_id': {'type': 'integer'},
        'release': release_object
    }
}
release_schema = api.schema('release_schema', release_schema)
# Response schema for the list of releases of an episode.
release_list_schema = {
    'type': 'object',
    'properties': {
        'releases': {
            'type': 'array',
            'items': release_object
        },
        'number_of_releases': {'type': 'integer'},
        'episode_id': {'type': 'integer'},
        'show_id': {'type': 'integer'}
    }
}
release_list_schema = api.schema('release_list_schema', release_list_schema)
# Summary of the most recently downloaded episode of a show.
# NOTE(review): this schema declares 'downloaded_releases', but
# get_series_details() builds the dict with a 'last_downloaded_release'
# key instead -- confirm which one the schema should document.
latest_object = {
    'type': 'object',
    'properties': {
        'episode_id': {'type': 'integer'},
        'episode_identifier': {'type': 'string'},
        'episode_age': {'type': 'string'},
        'number_of_episodes_behind': {'type': 'integer'},
        'downloaded_releases': {
            'type': 'array',
            'items': release_object
        }
    }
}
# Serialised episode, as produced by get_episode_details().
episode_object = {
    'type': 'object',
    'properties': {
        "episode_first_seen": {'type': 'string', 'format': 'date-time'},
        "episode_id": {'type': 'string'},
        "episode_identified_by": {'type': 'string'},
        "episode_identifier": {'type': 'string'},
        "episode_premiere_type": {'type': 'string'},
        "episode_number": {'type': 'string'},
        "episode_season": {'type': 'string'},
        "episode_series_id": {'type': 'string'},
        "episode_number_of_releases": {'type': 'integer'}
    }
}
# Serialised show summary, as produced by get_series_details().
show_object = {
    'type': 'object',
    'properties': {
        'show_id': {'type': 'integer'},
        'show_name': {'type': 'string'},
        'alternate_names': {'type': 'array', 'items': {'type': 'string'}},
        'begin_episode': begin_object,
        'latest_downloaded_episode': latest_object,
        'in_tasks': {'type': 'array', 'items': {'type': 'string'}}
    }
}
# Paginated response schema for GET /series/.
series_list_schema = {
    'type': 'object',
    'properties': {
        'shows': {
            'type': 'array',
            'items': show_object
        },
        'total_number_of_shows': {'type': 'integer'},
        'page_size': {'type': 'integer'},
        'total_number_of_pages': {'type': 'integer'},
        'page': {'type': 'integer'}
    }
}
series_list_schema = api.schema('list_series', series_list_schema)
# Paginated response schema for GET /series/<id>/episodes.
episode_list_schema = {
    'type': 'object',
    'properties': {
        'episodes': {
            'type': 'array',
            'items': episode_object
        },
        'number_of_episodes': {'type': 'integer'},
        'total_number_of_episodes': {'type': 'integer'},
        'page': {'type': 'integer'},
        'total_number_of_pages': {'type': 'integer'},
        'show_id': {'type': 'integer'},
        'show': {'type': 'string'}
    }
}
episode_list_schema = api.schema('episode_list', episode_list_schema)
# Response schema for a single episode of a show.
episode_schema = {
    'type': 'object',
    'properties': {
        'episode': episode_object,
        'show_id': {'type': 'integer'},
        'show': {'type': 'string'}
    }
}
episode_schema = api.schema('episode_item', episode_schema)
# Request schema for editing an existing show: a new begin episode,
# new alternate names, or both must be supplied.
series_edit_object = {
    'type': 'object',
    'properties': {
        'episode_identifier': {'type': 'string'},
        'alternate_names': {'type': 'array', 'items': {'type': 'string'}}
    },
    'anyOf': [
        {'required': ['episode_identifier']},
        {'required': ['alternate_names']}
    ],
    # BUG FIX: this key was previously spelled 'additionalProperties:' (stray
    # colon inside the string), so the JSON-Schema keyword was silently
    # ignored and extra request properties were never rejected.
    'additionalProperties': False
}
series_edit_schema = api.schema('series_edit_schema', series_edit_object)
# Request schema for creating a show: the edit schema plus a mandatory
# series_name; the anyOf constraint is dropped since both extra fields
# become optional on creation.
series_input_object = copy.deepcopy(series_edit_object)
series_input_object['properties']['series_name'] = {'type': 'string'}
del series_input_object['anyOf']
series_input_object['required'] = ['series_name']
series_input_schema = api.schema('series_input_schema', series_input_object)
# NOTE: this deliberately rebinds release_object/episode_object for the
# "show details" output shape; the earlier schemas above already captured
# references to the previous dicts, so they are unaffected.
release_object = {
    'type': 'object',
    'properties': {
        'quality': {'type': 'string'},
        'title': {'type': 'string'},
        'proper_count': {'type': 'integer'},
        'downloaded': {'type': 'boolean'}
    }
}
# Episode shape used inside show_details_schema.
episode_object = {
    'type': 'object',
    'properties': {
        'identifier': {'type': 'string'},
        'identifier_type': {'type': 'string'},
        'download_age': {'type': 'string'},
        'releases': {
            'type': 'array',
            'items': release_object}
    }
}
# Full show details: the show summary plus all of its episodes.
show_details_schema = {
    'type': 'object',
    'properties': {
        'episodes': {
            'type': 'array',
            'items': episode_object
        },
        'show': show_object
    }
}
# Plain (non-paginated) list of shows, used by the search endpoint.
shows_schema = {
    'type': 'object',
    'properties': {
        'shows': {
            'type': 'array',
            'items': show_object
        },
        'number_of_shows': {'type': 'integer'}
    }
}
def get_release_details(release):
    """Map a release ORM object to its JSON-serialisable dict form."""
    return {
        'release_id': release.id,
        'release_title': release.title,
        'release_downloaded': release.downloaded,
        'release_quality': release.quality.name,
        'release_proper_count': release.proper_count,
        'release_first_seen': release.first_seen,
        'release_episode_id': release.episode_id,
    }
def get_episode_details(episode):
    """Map an episode ORM object to its JSON-serialisable dict form."""
    return {
        'episode_id': episode.id,
        'episode_identifier': episode.identifier,
        'episode_season': episode.season,
        'episode_identified_by': episode.identified_by,
        'episode_number': episode.number,
        'episode_series_id': episode.series_id,
        'episode_first_seen': episode.first_seen,
        'episode_premiere_type': episode.is_premiere,
        'episode_number_of_releases': len(episode.releases),
    }
def get_series_details(show):
    """Build the JSON-serialisable summary dict for a single show."""
    latest_ep = series.get_latest_release(show)
    begin_ep = show.begin
    # Begin episode block: both fields are None when no begin point is set.
    begin = {
        'episode_id': begin_ep.id if begin_ep else None,
        'episode_identifier': begin_ep.identifier if begin_ep else None,
    }
    if latest_ep:
        # Newest downloaded release first.
        ordered = sorted(latest_ep.downloaded_releases,
                         key=lambda release: release.first_seen if release.downloaded else None,
                         reverse=True)
        latest = {
            'episode_id': latest_ep.id,
            'episode_identifier': latest_ep.identifier,
            'episode_age': latest_ep.age,
            'number_of_episodes_behind': series.new_eps_after(latest_ep),
            'last_downloaded_release': get_release_details(ordered[0]),
        }
    else:
        # No download yet: every field of the "latest" block is None.
        latest = {
            'episode_id': None,
            'episode_identifier': None,
            'episode_age': None,
            'number_of_episodes_behind': None,
            'last_downloaded_release': None,
        }
    return {
        'show_id': show.id,
        'show_name': show.name,
        'alternate_names': [n.alt_name for n in show.alternate_names],
        'begin_episode': begin,
        'latest_downloaded_episode': latest,
        'in_tasks': [_show.name for _show in show.in_tasks],
    }
show_details_schema = api.schema('show_details', show_details_schema)
shows_schema = api.schema('list_of_shows', shows_schema)
# Query-string arguments accepted by GET /series/.
series_list_parser = api.parser()
series_list_parser.add_argument('in_config', choices=('configured', 'unconfigured', 'all'), default='configured',
                                help="Filter list if shows are currently in configuration.")
series_list_parser.add_argument('premieres', type=inputs.boolean, default=False,
                                help="Filter by downloaded premieres only.")
series_list_parser.add_argument('status', choices=('new', 'stale'), help="Filter by status")
series_list_parser.add_argument('days', type=int,
                                help="Filter status by number of days.")
series_list_parser.add_argument('page', type=int, default=1, help='Page number. Default is 1')
series_list_parser.add_argument('page_size', type=int, default=10, help='Shows per page. Max is 100.')
series_list_parser.add_argument('sort_by', choices=('show_name', 'episodes_behind_latest', 'last_download_date'),
                                default='show_name',
                                help="Sort response by attribute.")
series_list_parser.add_argument('order', choices=('desc', 'asc'), default='desc', help="Sorting order.")
series_list_parser.add_argument('lookup', choices=('tvdb', 'tvmaze'), action='append',
                                help="Get lookup result for every show by sending another request to lookup API")
# Shared doc string for endpoints that accept an episode identifier.
ep_identifier_doc = "'episode_identifier' should be one of SxxExx, integer or date formatted such as 2012-12-12"
@series_api.route('/')
class SeriesListAPI(APIResource):
    @api.response(404, 'Page does not exist', default_error_schema)
    @api.response(200, 'Series list retrieved successfully', series_list_schema)
    @api.doc(parser=series_list_parser, description="Get a list of Flexget's shows in DB")
    def get(self, session=None):
        """ List existing shows """
        args = series_list_parser.parse_args()
        page = args['page']
        page_size = args['page_size']
        lookup = args.get('lookup')
        # Cap page size to keep responses (and DB load) bounded.
        if page_size > 100:
            page_size = 100
        sort_by = args['sort_by']
        # sorted() takes a boolean reverse flag: 'desc' means reversed.
        order = args['order'] == 'desc'
        start = page_size * (page - 1)
        stop = start + page_size
        kwargs = {
            'configured': args.get('in_config'),
            'premieres': args.get('premieres'),
            'status': args.get('status'),
            'days': args.get('days'),
            'start': start,
            'stop': stop,
            'session': session
        }
        num_of_shows = series.get_series_summary(count=True, **kwargs)
        raw_series_list = series.get_series_summary(**kwargs)
        converted_series_list = [get_series_details(show) for show in raw_series_list]
        # Sort the already-paginated page by the requested attribute.
        sorted_show_list = []
        if sort_by == 'show_name':
            sorted_show_list = sorted(converted_series_list, key=lambda show: show['show_name'], reverse=order)
        elif sort_by == 'episodes_behind_latest':
            sorted_show_list = sorted(converted_series_list,
                                      key=lambda show: show['latest_downloaded_episode']['number_of_episodes_behind'],
                                      reverse=order)
        elif sort_by == 'last_download_date':
            # Shows with no download yet sort as if downloaded at the epoch.
            sorted_show_list = sorted(converted_series_list,
                                      key=lambda show: show['latest_downloaded_episode']['last_downloaded_release'][
                                          'release_first_seen'] if show['latest_downloaded_episode'][
                                          'last_downloaded_release'] else datetime.datetime(1970, 1, 1),
                                      reverse=order)
        pages = int(ceil(num_of_shows / float(page_size)))
        if page > pages and pages != 0:
            return {'error': 'page %s does not exist' % page}, 404
        number_of_shows = min(page_size, num_of_shows)
        response = {
            'shows': sorted_show_list,
            'page_size': number_of_shows,
            'total_number_of_shows': num_of_shows,
            'page': page,
            'total_number_of_pages': pages
        }
        if lookup:
            # Enrich each show with the result of the requested lookup API(s).
            api_client = ApiClient()
            for endpoint in lookup:
                base_url = '/%s/series/' % endpoint
                # BUG FIX: previously used response['shows'].index(show) inside
                # the loop, which is O(n^2) and returns the first equal dict
                # rather than the current one; enumerate gives the right slot.
                for pos, show in enumerate(response['shows']):
                    response['shows'][pos].setdefault('lookup', {})
                    url = base_url + show['show_name'] + '/'
                    result = api_client.get_endpoint(url)
                    response['shows'][pos]['lookup'].update({endpoint: result})
        return jsonify(response)

    @api.response(200, 'Adding series and setting first accepted episode to ep_id', show_details_schema)
    @api.response(500, 'Show already exists', default_error_schema)
    @api.response(501, 'Episode Identifier format is incorrect', default_error_schema)
    @api.response(502, 'Alternate name already exist for a different show', default_error_schema)
    @api.validate(series_input_schema, description=ep_identifier_doc)
    def post(self, session):
        """ Create a new show and set its first accepted episode and/or alternate names """
        data = request.json
        series_name = data.get('series_name')
        normalized_name = series.normalize_series_name(series_name)
        # Refuse to create a duplicate of an existing show.
        matches = series.shows_by_exact_name(normalized_name, session=session)
        if matches:
            return {'status': 'error',
                    'message': 'Show `%s` already exist in DB' % series_name
                    }, 500
        show = series.Series()
        show.name = series_name
        session.add(show)
        ep_id = data.get('episode_identifier')
        alt_names = data.get('alternate_names')
        # Both fields are optional; apply whichever was supplied.
        if ep_id:
            try:
                series.set_series_begin(show, ep_id)
            except ValueError as e:
                return {'status': 'error',
                        'message': e.args[0]
                        }, 501
        if alt_names:
            try:
                series.set_alt_names(alt_names, show, session)
            except PluginError as e:
                # Alternate name already registered to a different show.
                return {'status': 'error',
                        'message': e.value
                        }, 502
        return jsonify(get_series_details(show))
@series_api.route('/search/<string:name>')
@api.doc(description='Searches for a show in the DB via its name. Returns a list of matching shows.')
class SeriesGetShowsAPI(APIResource):
    @api.response(200, 'Show list retrieved successfully', shows_schema)
    @api.doc(params={'name': 'Name of the show(s) to search'})
    def get(self, name, session):
        """ List of shows matching lookup name """
        normalized = series.normalize_series_name(name)
        shows = [get_series_details(match)
                 for match in series.shows_by_name(normalized, session=session)]
        return jsonify({
            'shows': shows,
            'number_of_shows': len(shows)
        })
@series_api.route('/<int:show_id>')
@api.doc(params={'show_id': 'ID of the show'})
class SeriesShowAPI(APIResource):
@api.response(404, 'Show ID not found', default_error_schema)
@api.response(200, 'Show information retrieved successfully', show_details_schema)
@api.doc(description='Get a specific show using its ID')
def get(self, show_id, session):
""" Get show details by ID """
try:
show = series.show_by_id(show_id, session=session)
except NoResultFound:
return {'status': 'error',
'message': 'Show with ID %s not found' % show_id
}, 404
show = get_series_details(show)
return jsonify(show)
@api.response(200, 'Removed series from DB', empty_response)
@api.response(404, 'Show ID not found', default_error_schema)
@api.doc(description='Delete a specific show using its ID')
def delete(self, show_id, session):
""" Remove series from DB """
try:
show = series.show_by_id(show_id, session=session)
except NoResultFound:
return {'status': 'error',
'message': 'Show with ID %s not found' % show_id
}, 404
name = show.name
try:
series.forget_series(name)
except ValueError as e:
return {'status': 'error',
'message': e.args[0]
}, 404
return {}
@api.response(200, 'Episodes for series will be accepted starting with ep_id', show_details_schema)
@api.response(404, 'Show ID not found', default_error_schema)
@api.response(501, 'Episode Identifier format is incorrect', default_error_schema)
@api.response(502, 'Alternate name already exist for a different show', default_error_schema)
@api.validate(series_edit_schema, description=ep_identifier_doc)
@api.doc(description='Set a begin episode or alternate names using a show ID. Note that alternate names override '
'the existing names (if name does not belong to a different show).')
def put(self, show_id, session):
""" Set the initial episode of an existing show """
try:
show = series.show_by_id(show_id, session=session)
except NoResultFound:
return {'status': 'error',
'message': 'Show with ID %s not found' % show_id
}, 404
data = request.json
ep_id = data.get('episode_identifier')
alt_names = data.get('alternate_names')
if ep_id:
try:
series.set_series_begin(show, ep_id)
except ValueError as e:
return {'status': 'error',
'message': e.args[0]
}, 501
if alt_names:
try:
series.set_alt_names(alt_names, show, session)
except PluginError as e:
return {'status': 'error',<|fim▁hole|> return jsonify(get_series_details(show))
# Query-string arguments accepted by GET /series/<id>/episodes.
episode_parser = api.parser()
episode_parser.add_argument('page', type=int, default=1, help='Page number. Default is 1')
episode_parser.add_argument('page_size', type=int, default=10, help='Shows per page. Max is 100.')
episode_parser.add_argument('order', choices=('desc', 'asc'), default='desc', help="Sorting order.")
@api.response(404, 'Show ID not found', default_error_schema)
@series_api.route('/<int:show_id>/episodes')
@api.doc(params={'show_id': 'ID of the show'})
class SeriesEpisodesAPI(APIResource):
    @api.response(200, 'Episodes retrieved successfully for show', episode_list_schema)
    @api.response(405, 'Page does not exists', model=default_error_schema)
    @api.doc(description='Get all show episodes via its ID', parser=episode_parser)
    def get(self, show_id, session):
        """ Get episodes by show ID """
        args = episode_parser.parse_args()
        page = args['page']
        page_size = args['page_size']
        # Cap page size to keep responses bounded.
        if page_size > 100:
            page_size = 100
        # sorted order flag: 'desc' -> descending.
        order = args['order'] == 'desc'
        start = page_size * (page - 1)
        stop = start + page_size
        kwargs = {
            'start': start,
            'stop': stop,
            'descending': order,
            'session': session
        }
        try:
            show = series.show_by_id(show_id, session=session)
        except NoResultFound:
            return {'status': 'error',
                    'message': 'Show with ID %s not found' % show_id
                    }, 404
        count = series.show_episodes(show, count=True, session=session)
        episodes = [get_episode_details(episode) for episode in series.show_episodes(show, **kwargs)]
        pages = int(ceil(count / float(page_size)))
        if page > pages and pages != 0:
            # BUG FIX: the message used '%' on a string with no placeholder
            # ('page does not exist' % show_id), which raised TypeError at
            # runtime instead of returning this error response.
            # NOTE(review): the decorator documents 405 for this case but the
            # code returns 500 -- kept as-is to avoid changing the API.
            return {'status': 'error',
                    'message': 'page %s does not exist' % page
                    }, 500
        return jsonify({'show': show.name,
                        'show_id': show_id,
                        'number_of_episodes': len(episodes),
                        'episodes': episodes,
                        'total_number_of_episodes': count,
                        'page': page,
                        'total_number_of_pages': pages})

    @api.response(500, 'Error when trying to forget episode', default_error_schema)
    @api.response(200, 'Successfully forgotten all episodes from show', empty_response)
    @api.doc(description='Delete all show episodes via its ID. Deleting an episode will mark it as wanted again')
    def delete(self, show_id, session):
        """ Forgets all episodes of a show"""
        try:
            show = series.show_by_id(show_id, session=session)
        except NoResultFound:
            return {'status': 'error',
                    'message': 'Show with ID %s not found' % show_id
                    }, 404
        # Forgetting an episode clears its download history so it becomes
        # wanted again.
        for episode in show.episodes:
            try:
                series.forget_episodes_by_id(show.id, episode.id)
            except ValueError as e:
                return {'status': 'error',
                        'message': e.args[0]
                        }, 500
        return {}
# Query-string arguments for episode/release DELETE endpoints.
delete_parser = api.parser()
delete_parser.add_argument('delete_seen', type=inputs.boolean, default=False,
                           help="Enabling this will delete all the related releases from seen entries list as well, "
                                "enabling to re-download them")
@api.response(404, 'Show ID not found', default_error_schema)
@api.response(414, 'Episode ID not found', default_error_schema)
@api.response(400, 'Episode with ep_ids does not belong to show with show_id', default_error_schema)
@series_api.route('/<int:show_id>/episodes/<int:ep_id>')
@api.doc(params={'show_id': 'ID of the show', 'ep_id': 'Episode ID'})
class SeriesEpisodeAPI(APIResource):
    """Operations on a single episode of a show.

    Every handler validates, in order: the show exists (404), the episode
    exists (414), and the episode belongs to the show (400).
    """

    @api.response(200, 'Episode retrieved successfully for show', episode_schema)
    @api.doc(description='Get a specific episode via its ID and show ID')
    def get(self, show_id, ep_id, session):
        """ Get episode by show ID and episode ID"""
        try:
            show = series.show_by_id(show_id, session=session)
        except NoResultFound:
            return {'status': 'error',
                    'message': 'Show with ID %s not found' % show_id
                    }, 404
        try:
            episode = series.episode_by_id(ep_id, session)
        except NoResultFound:
            return {'status': 'error',
                    'message': 'Episode with ID %s not found' % ep_id
                    }, 414
        if not series.episode_in_show(show_id, ep_id):
            return {'status': 'error',
                    'message': 'Episode with id %s does not belong to show %s' % (ep_id, show_id)}, 400
        return jsonify({
            'show': show.name,
            'show_id': show_id,
            'episode': get_episode_details(episode)
        })

    @api.response(200, 'Episode successfully forgotten for show', empty_response)
    @api.doc(description='Delete a specific episode via its ID and show ID. Deleting an episode will mark it as '
                         'wanted again',
             parser=delete_parser)
    def delete(self, show_id, ep_id, session):
        """ Forgets episode by show ID and episode ID """
        try:
            show = series.show_by_id(show_id, session=session)
        except NoResultFound:
            return {'status': 'error',
                    'message': 'Show with ID %s not found' % show_id
                    }, 404
        try:
            episode = series.episode_by_id(ep_id, session)
        except NoResultFound:
            return {'status': 'error',
                    'message': 'Episode with ID %s not found' % ep_id
                    }, 414
        if not series.episode_in_show(show_id, ep_id):
            return {'status': 'error',
                    'message': 'Episode with id %s does not belong to show %s' % (ep_id, show_id)}, 400
        args = delete_parser.parse_args()
        # Optionally purge the matching "seen" entries so the releases can be
        # re-downloaded.
        if args.get('delete_seen'):
            for release in episode.releases:
                fire_event('forget', release.title)
        series.forget_episodes_by_id(show_id, ep_id)
        return {}
# Query-string arguments for the release list endpoints; the delete variant
# adds the delete_seen flag on top of the shared 'downloaded' filter.
release_list_parser = api.parser()
release_list_parser.add_argument('downloaded', type=inputs.boolean, help='Filter between release status')
release_delete_parser = release_list_parser.copy()
release_delete_parser.add_argument('delete_seen', type=inputs.boolean, default=False,
                                   help="Enabling this will delete all the related releases from seen entries list as well, "
                                        "enabling to re-download them")
@api.response(404, 'Show ID not found', default_error_schema)
@api.response(414, 'Episode ID not found', default_error_schema)
@api.response(400, 'Episode with ep_ids does not belong to show with show_id', default_error_schema)
@series_api.route('/<int:show_id>/episodes/<int:ep_id>/releases')
@api.doc(params={'show_id': 'ID of the show', 'ep_id': 'Episode ID'},
         description='Releases are any seen entries that match the episode. ')
class SeriesReleasesAPI(APIResource):
    @api.response(200, 'Releases retrieved successfully for episode', release_list_schema)
    @api.doc(description='Get all matching releases for a specific episode of a specific show.',
             parser=release_list_parser)
    def get(self, show_id, ep_id, session):
        """ Get all episodes releases by show ID and episode ID """
        try:
            show = series.show_by_id(show_id, session=session)
        except NoResultFound:
            return {'status': 'error',
                    'message': 'Show with ID %s not found' % show_id
                    }, 404
        try:
            episode = series.episode_by_id(ep_id, session)
        except NoResultFound:
            return {'status': 'error',
                    'message': 'Episode with ID %s not found' % ep_id
                    }, 414
        if not series.episode_in_show(show_id, ep_id):
            return {'status': 'error',
                    'message': 'Episode with id %s does not belong to show %s' % (ep_id, show_id)}, 400
        args = release_list_parser.parse_args()
        # None means "no filter"; True/False filter by download status.
        downloaded = args.get('downloaded')
        release_items = []
        for release in episode.releases:
            # BUG FIX: the old condition ended in `... or not downloaded`,
            # which also matched every release when the filter was False, so
            # ?downloaded=false returned all releases instead of only the
            # un-downloaded ones.
            if downloaded is None or bool(release.downloaded) == downloaded:
                release_items.append(get_release_details(release))
        return jsonify({
            'releases': release_items,
            'number_of_releases': len(release_items),
            'episode_id': ep_id,
            'show_id': show_id
        })

    @api.response(200, 'Successfully deleted all releases for episode', empty_response)
    @api.doc(description='Delete all releases for a specific episode of a specific show.',
             parser=release_delete_parser)
    def delete(self, show_id, ep_id, session):
        """ Deletes all episodes releases by show ID and episode ID """
        try:
            show = series.show_by_id(show_id, session=session)
        except NoResultFound:
            return {'status': 'error',
                    'message': 'Show with ID %s not found' % show_id
                    }, 404
        try:
            episode = series.episode_by_id(ep_id, session)
        except NoResultFound:
            return {'status': 'error',
                    'message': 'Episode with ID %s not found' % ep_id
                    }, 414
        if not series.episode_in_show(show_id, ep_id):
            return {'status': 'error',
                    'message': 'Episode with id %s does not belong to show %s' % (ep_id, show_id)}, 400
        args = release_delete_parser.parse_args()
        # None means "no filter"; True/False filter by download status.
        downloaded = args.get('downloaded')
        release_items = []
        for release in episode.releases:
            # BUG FIX: same broken `... or not downloaded` filter as in get()
            # -- ?downloaded=false used to delete every release.
            if downloaded is None or bool(release.downloaded) == downloaded:
                release_items.append(release)
                # Optionally purge the matching "seen" entry so the release
                # can be re-downloaded.
                if args.get('delete_seen'):
                    fire_event('forget', release.title)
        for release in release_items:
            series.delete_release_by_id(release.id)
        return {}

    @api.response(200, 'Successfully reset all downloaded releases for episode', empty_response)
    @api.doc(description='Resets all of the downloaded releases of an episode, clearing the quality to be downloaded '
                         'again,')
    def put(self, show_id, ep_id, session):
        """ Marks all downloaded releases as not downloaded """
        try:
            show = series.show_by_id(show_id, session=session)
        except NoResultFound:
            return {'status': 'error',
                    'message': 'Show with ID %s not found' % show_id
                    }, 404
        try:
            episode = series.episode_by_id(ep_id, session)
        except NoResultFound:
            return {'status': 'error',
                    'message': 'Episode with ID %s not found' % ep_id
                    }, 414
        if not series.episode_in_show(show_id, ep_id):
            return {'status': 'error',
                    'message': 'Episode with id %s does not belong to show %s' % (ep_id, show_id)}, 400
        for release in episode.releases:
            if release.downloaded:
                release.downloaded = False
        return {}
@api.response(404, 'Show ID not found', default_error_schema)
@api.response(414, 'Episode ID not found', default_error_schema)
@api.response(424, 'Release ID not found', default_error_schema)
@api.response(400, 'Episode with ep_id does not belong to show with show_id', default_error_schema)
@api.response(410, 'Release with rel_id does not belong to episode with ep_id', default_error_schema)
@series_api.route('/<int:show_id>/episodes/<int:ep_id>/releases/<int:rel_id>/')
@api.doc(params={'show_id': 'ID of the show', 'ep_id': 'Episode ID', 'rel_id': 'Release ID'})
class SeriesReleaseAPI(APIResource):
    @api.response(200, 'Release retrieved successfully for episode', release_schema)
    @api.doc(description='Get a specific downloaded release for a specific episode of a specific show')
    def get(self, show_id, ep_id, rel_id, session):
        ''' Get episode release by show ID, episode ID and release ID '''
        # Validation order determines which error wins: show (404), episode
        # (414), release (424), then the two membership checks (400, 410).
        try:
            show = series.show_by_id(show_id, session=session)
        except NoResultFound:
            return {'status': 'error',
                    'message': 'Show with ID %s not found' % show_id
                    }, 404
        # The episode is fetched only to confirm it exists; the response is
        # built from the show and the release.
        try:
            episode = series.episode_by_id(ep_id, session)
        except NoResultFound:
            return {'status': 'error',
                    'message': 'Episode with ID %s not found' % ep_id
                    }, 414
        try:
            release = series.release_by_id(rel_id, session)
        except NoResultFound:
            return {'status': 'error',
                    'message': 'Release with ID %s not found' % rel_id
                    }, 424
        if not series.episode_in_show(show_id, ep_id):
            return {'status': 'error',
                    'message': 'Episode with id %s does not belong to show %s' % (ep_id, show_id)}, 400
        if not series.release_in_episode(ep_id, rel_id):
            return {'status': 'error',
                    'message': 'Release id %s does not belong to episode %s' % (rel_id, ep_id)}, 410
        return jsonify({
            'show': show.name,
            'show_id': show_id,
            'episode_id': ep_id,
            'release': get_release_details(release)
        })
    @api.response(200, 'Release successfully deleted', empty_response)
    @api.doc(description='Delete a specific releases for a specific episode of a specific show.',
             parser=delete_parser)
    def delete(self, show_id, ep_id, rel_id, session):
        ''' Delete episode release by show ID, episode ID and release ID '''
        # Same validation chain as get(): show (404), episode (414),
        # release (424), then membership checks (400, 410).
        try:
            show = series.show_by_id(show_id, session=session)
        except NoResultFound:
            return {'status': 'error',
                    'message': 'Show with ID %s not found' % show_id
                    }, 404
        try:
            episode = series.episode_by_id(ep_id, session)
        except NoResultFound:
            return {'status': 'error',
                    'message': 'Episode with ID %s not found' % ep_id
                    }, 414
        try:
            release = series.release_by_id(rel_id, session)
        except NoResultFound:
            return {'status': 'error',
                    'message': 'Release with ID %s not found' % rel_id
                    }, 424
        if not series.episode_in_show(show_id, ep_id):
            return {'status': 'error',
                    'message': 'Episode with id %s does not belong to show %s' % (ep_id, show_id)}, 400
        if not series.release_in_episode(ep_id, rel_id):
            return {'status': 'error',
                    'message': 'Release with id %s does not belong to episode %s' % (rel_id, ep_id)}, 410
        args = delete_parser.parse_args()
        # Optionally purge the matching "seen" entry so the release can be
        # re-downloaded.
        if args.get('delete_seen'):
            fire_event('forget', release.title)
        series.delete_release_by_id(rel_id)
        return {}
@api.response(200, 'Successfully reset downloaded release status', empty_response)
@api.response(500, 'Release is not marked as downloaded', default_error_schema)
@api.doc(description='Resets the downloaded release status, clearing the quality to be downloaded again')
def put(self, show_id, ep_id, rel_id, session):
""" Resets a downloaded release status """
try:
show = series.show_by_id(show_id, session=session)
except NoResultFound:
return {'status': 'error',
'message': 'Show with ID %s not found' % show_id
}, 404
try:
episode = series.episode_by_id(ep_id, session)
except NoResultFound:
return {'status': 'error',
'message': 'Episode with ID %s not found' % ep_id
}, 414
try:
release = series.release_by_id(rel_id, session)
except NoResultFound:
return {'status': 'error',
'message': 'Release with ID %s not found' % rel_id
}, 424
if not series.episode_in_show(show_id, ep_id):
return {'status': 'error',
'message': 'Episode with id %s does not belong to show %s' % (ep_id, show_id)}, 400
if not series.release_in_episode(ep_id, rel_id):
return {'status': 'error',
'message': 'Release with id %s does not belong to episode %s' % (rel_id, ep_id)}, 410
if not release.downloaded:
return {'status': 'error',
'message': 'Release with id %s is not set as downloaded' % rel_id}, 500
release.downloaded = False
return {}<|fim▁end|> | 'message': e.value
}, 502
|
<|file_name|>interface.go<|end_file_name|><|fim▁begin|>// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT.
// Package networkmanageriface provides an interface to enable mocking the AWS Network Manager service client
// for testing your code.
//
// It is important to note that this interface will have breaking changes
// when the service model is updated and adds new API operations, paginators,
// and waiters.
package networkmanageriface
import (
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/request"
"github.com/aws/aws-sdk-go/service/networkmanager"
)
// NetworkManagerAPI provides an interface to enable mocking the
// networkmanager.NetworkManager service client's API operation,
// paginators, and waiters. This make unit testing your code that calls out
// to the SDK's service client's calls easier.
//
// The best way to use this interface is so the SDK's service client's calls
// can be stubbed out for unit testing your code with the SDK without needing
// to inject custom request handlers into the SDK's request pipeline.
//
// // myFunc uses an SDK service client to make a request to
// // AWS Network Manager.
// func myFunc(svc networkmanageriface.NetworkManagerAPI) bool {
// // Make svc.AssociateCustomerGateway request
// }
//
// func main() {
// sess := session.New()
// svc := networkmanager.New(sess)
//
// myFunc(svc)
// }
//
// In your _test.go file:
//    // Define a mock struct to be used in your unit tests of myFunc.
//    type mockNetworkManagerClient struct {
//        networkmanageriface.NetworkManagerAPI
//    }
// func (m *mockNetworkManagerClient) AssociateCustomerGateway(input *networkmanager.AssociateCustomerGatewayInput) (*networkmanager.AssociateCustomerGatewayOutput, error) {
// // mock response/functionality
// }
//
// func TestMyFunc(t *testing.T) {
// // Setup Test
// mockSvc := &mockNetworkManagerClient{}
//
// myfunc(mockSvc)
//
// // Verify myFunc's functionality
// }
//
// It is important to note that this interface will have breaking changes
// when the service model is updated and adds new API operations, paginators,
// and waiters. Its suggested to use the pattern above for testing, or using
// tooling to generate mocks to satisfy the interfaces.
type NetworkManagerAPI interface {
	// Every API operation is exposed in three forms: a blocking call, a
	// context-aware variant (WithContext), and a *Request constructor that
	// returns the underlying request.Request for custom invocation.
	// Paginated list operations additionally expose Pages /
	// PagesWithContext helpers that iterate all result pages.

	// Association operations attach customer gateways, links and transit
	// gateway Connect peers to resources in a global network.
	AssociateCustomerGateway(*networkmanager.AssociateCustomerGatewayInput) (*networkmanager.AssociateCustomerGatewayOutput, error)
	AssociateCustomerGatewayWithContext(aws.Context, *networkmanager.AssociateCustomerGatewayInput, ...request.Option) (*networkmanager.AssociateCustomerGatewayOutput, error)
	AssociateCustomerGatewayRequest(*networkmanager.AssociateCustomerGatewayInput) (*request.Request, *networkmanager.AssociateCustomerGatewayOutput)

	AssociateLink(*networkmanager.AssociateLinkInput) (*networkmanager.AssociateLinkOutput, error)
	AssociateLinkWithContext(aws.Context, *networkmanager.AssociateLinkInput, ...request.Option) (*networkmanager.AssociateLinkOutput, error)
	AssociateLinkRequest(*networkmanager.AssociateLinkInput) (*request.Request, *networkmanager.AssociateLinkOutput)

	AssociateTransitGatewayConnectPeer(*networkmanager.AssociateTransitGatewayConnectPeerInput) (*networkmanager.AssociateTransitGatewayConnectPeerOutput, error)
	AssociateTransitGatewayConnectPeerWithContext(aws.Context, *networkmanager.AssociateTransitGatewayConnectPeerInput, ...request.Option) (*networkmanager.AssociateTransitGatewayConnectPeerOutput, error)
	AssociateTransitGatewayConnectPeerRequest(*networkmanager.AssociateTransitGatewayConnectPeerInput) (*request.Request, *networkmanager.AssociateTransitGatewayConnectPeerOutput)

	// Create operations provision connections, devices, global networks,
	// links and sites.
	CreateConnection(*networkmanager.CreateConnectionInput) (*networkmanager.CreateConnectionOutput, error)
	CreateConnectionWithContext(aws.Context, *networkmanager.CreateConnectionInput, ...request.Option) (*networkmanager.CreateConnectionOutput, error)
	CreateConnectionRequest(*networkmanager.CreateConnectionInput) (*request.Request, *networkmanager.CreateConnectionOutput)

	CreateDevice(*networkmanager.CreateDeviceInput) (*networkmanager.CreateDeviceOutput, error)
	CreateDeviceWithContext(aws.Context, *networkmanager.CreateDeviceInput, ...request.Option) (*networkmanager.CreateDeviceOutput, error)
	CreateDeviceRequest(*networkmanager.CreateDeviceInput) (*request.Request, *networkmanager.CreateDeviceOutput)

	CreateGlobalNetwork(*networkmanager.CreateGlobalNetworkInput) (*networkmanager.CreateGlobalNetworkOutput, error)
	CreateGlobalNetworkWithContext(aws.Context, *networkmanager.CreateGlobalNetworkInput, ...request.Option) (*networkmanager.CreateGlobalNetworkOutput, error)
	CreateGlobalNetworkRequest(*networkmanager.CreateGlobalNetworkInput) (*request.Request, *networkmanager.CreateGlobalNetworkOutput)

	CreateLink(*networkmanager.CreateLinkInput) (*networkmanager.CreateLinkOutput, error)
	CreateLinkWithContext(aws.Context, *networkmanager.CreateLinkInput, ...request.Option) (*networkmanager.CreateLinkOutput, error)
	CreateLinkRequest(*networkmanager.CreateLinkInput) (*request.Request, *networkmanager.CreateLinkOutput)

	CreateSite(*networkmanager.CreateSiteInput) (*networkmanager.CreateSiteOutput, error)
	CreateSiteWithContext(aws.Context, *networkmanager.CreateSiteInput, ...request.Option) (*networkmanager.CreateSiteOutput, error)
	CreateSiteRequest(*networkmanager.CreateSiteInput) (*request.Request, *networkmanager.CreateSiteOutput)

	// Delete operations remove previously created resources.
	DeleteConnection(*networkmanager.DeleteConnectionInput) (*networkmanager.DeleteConnectionOutput, error)
	DeleteConnectionWithContext(aws.Context, *networkmanager.DeleteConnectionInput, ...request.Option) (*networkmanager.DeleteConnectionOutput, error)
	DeleteConnectionRequest(*networkmanager.DeleteConnectionInput) (*request.Request, *networkmanager.DeleteConnectionOutput)

	DeleteDevice(*networkmanager.DeleteDeviceInput) (*networkmanager.DeleteDeviceOutput, error)
	DeleteDeviceWithContext(aws.Context, *networkmanager.DeleteDeviceInput, ...request.Option) (*networkmanager.DeleteDeviceOutput, error)
	DeleteDeviceRequest(*networkmanager.DeleteDeviceInput) (*request.Request, *networkmanager.DeleteDeviceOutput)

	DeleteGlobalNetwork(*networkmanager.DeleteGlobalNetworkInput) (*networkmanager.DeleteGlobalNetworkOutput, error)
	DeleteGlobalNetworkWithContext(aws.Context, *networkmanager.DeleteGlobalNetworkInput, ...request.Option) (*networkmanager.DeleteGlobalNetworkOutput, error)
	DeleteGlobalNetworkRequest(*networkmanager.DeleteGlobalNetworkInput) (*request.Request, *networkmanager.DeleteGlobalNetworkOutput)

	DeleteLink(*networkmanager.DeleteLinkInput) (*networkmanager.DeleteLinkOutput, error)
	DeleteLinkWithContext(aws.Context, *networkmanager.DeleteLinkInput, ...request.Option) (*networkmanager.DeleteLinkOutput, error)
	DeleteLinkRequest(*networkmanager.DeleteLinkInput) (*request.Request, *networkmanager.DeleteLinkOutput)

	DeleteSite(*networkmanager.DeleteSiteInput) (*networkmanager.DeleteSiteOutput, error)
	DeleteSiteWithContext(aws.Context, *networkmanager.DeleteSiteInput, ...request.Option) (*networkmanager.DeleteSiteOutput, error)
	DeleteSiteRequest(*networkmanager.DeleteSiteInput) (*request.Request, *networkmanager.DeleteSiteOutput)

	// Transit gateway registration with a global network.
	DeregisterTransitGateway(*networkmanager.DeregisterTransitGatewayInput) (*networkmanager.DeregisterTransitGatewayOutput, error)
	DeregisterTransitGatewayWithContext(aws.Context, *networkmanager.DeregisterTransitGatewayInput, ...request.Option) (*networkmanager.DeregisterTransitGatewayOutput, error)
	DeregisterTransitGatewayRequest(*networkmanager.DeregisterTransitGatewayInput) (*request.Request, *networkmanager.DeregisterTransitGatewayOutput)

	// Describe/Get operations are read-only and paginated.
	DescribeGlobalNetworks(*networkmanager.DescribeGlobalNetworksInput) (*networkmanager.DescribeGlobalNetworksOutput, error)
	DescribeGlobalNetworksWithContext(aws.Context, *networkmanager.DescribeGlobalNetworksInput, ...request.Option) (*networkmanager.DescribeGlobalNetworksOutput, error)
	DescribeGlobalNetworksRequest(*networkmanager.DescribeGlobalNetworksInput) (*request.Request, *networkmanager.DescribeGlobalNetworksOutput)

	DescribeGlobalNetworksPages(*networkmanager.DescribeGlobalNetworksInput, func(*networkmanager.DescribeGlobalNetworksOutput, bool) bool) error
	DescribeGlobalNetworksPagesWithContext(aws.Context, *networkmanager.DescribeGlobalNetworksInput, func(*networkmanager.DescribeGlobalNetworksOutput, bool) bool, ...request.Option) error

	// Disassociation operations undo the Associate* calls above.
	DisassociateCustomerGateway(*networkmanager.DisassociateCustomerGatewayInput) (*networkmanager.DisassociateCustomerGatewayOutput, error)
	DisassociateCustomerGatewayWithContext(aws.Context, *networkmanager.DisassociateCustomerGatewayInput, ...request.Option) (*networkmanager.DisassociateCustomerGatewayOutput, error)
	DisassociateCustomerGatewayRequest(*networkmanager.DisassociateCustomerGatewayInput) (*request.Request, *networkmanager.DisassociateCustomerGatewayOutput)

	DisassociateLink(*networkmanager.DisassociateLinkInput) (*networkmanager.DisassociateLinkOutput, error)
	DisassociateLinkWithContext(aws.Context, *networkmanager.DisassociateLinkInput, ...request.Option) (*networkmanager.DisassociateLinkOutput, error)
	DisassociateLinkRequest(*networkmanager.DisassociateLinkInput) (*request.Request, *networkmanager.DisassociateLinkOutput)

	DisassociateTransitGatewayConnectPeer(*networkmanager.DisassociateTransitGatewayConnectPeerInput) (*networkmanager.DisassociateTransitGatewayConnectPeerOutput, error)
	DisassociateTransitGatewayConnectPeerWithContext(aws.Context, *networkmanager.DisassociateTransitGatewayConnectPeerInput, ...request.Option) (*networkmanager.DisassociateTransitGatewayConnectPeerOutput, error)
	DisassociateTransitGatewayConnectPeerRequest(*networkmanager.DisassociateTransitGatewayConnectPeerInput) (*request.Request, *networkmanager.DisassociateTransitGatewayConnectPeerOutput)

	GetConnections(*networkmanager.GetConnectionsInput) (*networkmanager.GetConnectionsOutput, error)
	GetConnectionsWithContext(aws.Context, *networkmanager.GetConnectionsInput, ...request.Option) (*networkmanager.GetConnectionsOutput, error)
	GetConnectionsRequest(*networkmanager.GetConnectionsInput) (*request.Request, *networkmanager.GetConnectionsOutput)

	GetConnectionsPages(*networkmanager.GetConnectionsInput, func(*networkmanager.GetConnectionsOutput, bool) bool) error
	GetConnectionsPagesWithContext(aws.Context, *networkmanager.GetConnectionsInput, func(*networkmanager.GetConnectionsOutput, bool) bool, ...request.Option) error

	GetCustomerGatewayAssociations(*networkmanager.GetCustomerGatewayAssociationsInput) (*networkmanager.GetCustomerGatewayAssociationsOutput, error)
	GetCustomerGatewayAssociationsWithContext(aws.Context, *networkmanager.GetCustomerGatewayAssociationsInput, ...request.Option) (*networkmanager.GetCustomerGatewayAssociationsOutput, error)
	GetCustomerGatewayAssociationsRequest(*networkmanager.GetCustomerGatewayAssociationsInput) (*request.Request, *networkmanager.GetCustomerGatewayAssociationsOutput)

	GetCustomerGatewayAssociationsPages(*networkmanager.GetCustomerGatewayAssociationsInput, func(*networkmanager.GetCustomerGatewayAssociationsOutput, bool) bool) error
	GetCustomerGatewayAssociationsPagesWithContext(aws.Context, *networkmanager.GetCustomerGatewayAssociationsInput, func(*networkmanager.GetCustomerGatewayAssociationsOutput, bool) bool, ...request.Option) error

	GetDevices(*networkmanager.GetDevicesInput) (*networkmanager.GetDevicesOutput, error)
	GetDevicesWithContext(aws.Context, *networkmanager.GetDevicesInput, ...request.Option) (*networkmanager.GetDevicesOutput, error)
	GetDevicesRequest(*networkmanager.GetDevicesInput) (*request.Request, *networkmanager.GetDevicesOutput)

	GetDevicesPages(*networkmanager.GetDevicesInput, func(*networkmanager.GetDevicesOutput, bool) bool) error
	GetDevicesPagesWithContext(aws.Context, *networkmanager.GetDevicesInput, func(*networkmanager.GetDevicesOutput, bool) bool, ...request.Option) error

	GetLinkAssociations(*networkmanager.GetLinkAssociationsInput) (*networkmanager.GetLinkAssociationsOutput, error)
	GetLinkAssociationsWithContext(aws.Context, *networkmanager.GetLinkAssociationsInput, ...request.Option) (*networkmanager.GetLinkAssociationsOutput, error)
	GetLinkAssociationsRequest(*networkmanager.GetLinkAssociationsInput) (*request.Request, *networkmanager.GetLinkAssociationsOutput)

	GetLinkAssociationsPages(*networkmanager.GetLinkAssociationsInput, func(*networkmanager.GetLinkAssociationsOutput, bool) bool) error
	GetLinkAssociationsPagesWithContext(aws.Context, *networkmanager.GetLinkAssociationsInput, func(*networkmanager.GetLinkAssociationsOutput, bool) bool, ...request.Option) error

	GetLinks(*networkmanager.GetLinksInput) (*networkmanager.GetLinksOutput, error)
	GetLinksWithContext(aws.Context, *networkmanager.GetLinksInput, ...request.Option) (*networkmanager.GetLinksOutput, error)
	GetLinksRequest(*networkmanager.GetLinksInput) (*request.Request, *networkmanager.GetLinksOutput)

	GetLinksPages(*networkmanager.GetLinksInput, func(*networkmanager.GetLinksOutput, bool) bool) error
	GetLinksPagesWithContext(aws.Context, *networkmanager.GetLinksInput, func(*networkmanager.GetLinksOutput, bool) bool, ...request.Option) error

	GetSites(*networkmanager.GetSitesInput) (*networkmanager.GetSitesOutput, error)
	GetSitesWithContext(aws.Context, *networkmanager.GetSitesInput, ...request.Option) (*networkmanager.GetSitesOutput, error)
	GetSitesRequest(*networkmanager.GetSitesInput) (*request.Request, *networkmanager.GetSitesOutput)

	GetSitesPages(*networkmanager.GetSitesInput, func(*networkmanager.GetSitesOutput, bool) bool) error
	GetSitesPagesWithContext(aws.Context, *networkmanager.GetSitesInput, func(*networkmanager.GetSitesOutput, bool) bool, ...request.Option) error

	GetTransitGatewayConnectPeerAssociations(*networkmanager.GetTransitGatewayConnectPeerAssociationsInput) (*networkmanager.GetTransitGatewayConnectPeerAssociationsOutput, error)
	GetTransitGatewayConnectPeerAssociationsWithContext(aws.Context, *networkmanager.GetTransitGatewayConnectPeerAssociationsInput, ...request.Option) (*networkmanager.GetTransitGatewayConnectPeerAssociationsOutput, error)
	GetTransitGatewayConnectPeerAssociationsRequest(*networkmanager.GetTransitGatewayConnectPeerAssociationsInput) (*request.Request, *networkmanager.GetTransitGatewayConnectPeerAssociationsOutput)

	GetTransitGatewayConnectPeerAssociationsPages(*networkmanager.GetTransitGatewayConnectPeerAssociationsInput, func(*networkmanager.GetTransitGatewayConnectPeerAssociationsOutput, bool) bool) error
	GetTransitGatewayConnectPeerAssociationsPagesWithContext(aws.Context, *networkmanager.GetTransitGatewayConnectPeerAssociationsInput, func(*networkmanager.GetTransitGatewayConnectPeerAssociationsOutput, bool) bool, ...request.Option) error

	GetTransitGatewayRegistrations(*networkmanager.GetTransitGatewayRegistrationsInput) (*networkmanager.GetTransitGatewayRegistrationsOutput, error)
	GetTransitGatewayRegistrationsWithContext(aws.Context, *networkmanager.GetTransitGatewayRegistrationsInput, ...request.Option) (*networkmanager.GetTransitGatewayRegistrationsOutput, error)
	GetTransitGatewayRegistrationsRequest(*networkmanager.GetTransitGatewayRegistrationsInput) (*request.Request, *networkmanager.GetTransitGatewayRegistrationsOutput)

	GetTransitGatewayRegistrationsPages(*networkmanager.GetTransitGatewayRegistrationsInput, func(*networkmanager.GetTransitGatewayRegistrationsOutput, bool) bool) error
	GetTransitGatewayRegistrationsPagesWithContext(aws.Context, *networkmanager.GetTransitGatewayRegistrationsInput, func(*networkmanager.GetTransitGatewayRegistrationsOutput, bool) bool, ...request.Option) error

	// Resource tagging.
	ListTagsForResource(*networkmanager.ListTagsForResourceInput) (*networkmanager.ListTagsForResourceOutput, error)
	ListTagsForResourceWithContext(aws.Context, *networkmanager.ListTagsForResourceInput, ...request.Option) (*networkmanager.ListTagsForResourceOutput, error)
	ListTagsForResourceRequest(*networkmanager.ListTagsForResourceInput) (*request.Request, *networkmanager.ListTagsForResourceOutput)

	RegisterTransitGateway(*networkmanager.RegisterTransitGatewayInput) (*networkmanager.RegisterTransitGatewayOutput, error)
	RegisterTransitGatewayWithContext(aws.Context, *networkmanager.RegisterTransitGatewayInput, ...request.Option) (*networkmanager.RegisterTransitGatewayOutput, error)
	RegisterTransitGatewayRequest(*networkmanager.RegisterTransitGatewayInput) (*request.Request, *networkmanager.RegisterTransitGatewayOutput)

	TagResource(*networkmanager.TagResourceInput) (*networkmanager.TagResourceOutput, error)
	TagResourceWithContext(aws.Context, *networkmanager.TagResourceInput, ...request.Option) (*networkmanager.TagResourceOutput, error)
	TagResourceRequest(*networkmanager.TagResourceInput) (*request.Request, *networkmanager.TagResourceOutput)

	UntagResource(*networkmanager.UntagResourceInput) (*networkmanager.UntagResourceOutput, error)
	UntagResourceWithContext(aws.Context, *networkmanager.UntagResourceInput, ...request.Option) (*networkmanager.UntagResourceOutput, error)
	UntagResourceRequest(*networkmanager.UntagResourceInput) (*request.Request, *networkmanager.UntagResourceOutput)

	// Update operations modify existing resources in place.
	UpdateConnection(*networkmanager.UpdateConnectionInput) (*networkmanager.UpdateConnectionOutput, error)
	UpdateConnectionWithContext(aws.Context, *networkmanager.UpdateConnectionInput, ...request.Option) (*networkmanager.UpdateConnectionOutput, error)
	UpdateConnectionRequest(*networkmanager.UpdateConnectionInput) (*request.Request, *networkmanager.UpdateConnectionOutput)

	UpdateDevice(*networkmanager.UpdateDeviceInput) (*networkmanager.UpdateDeviceOutput, error)
	UpdateDeviceWithContext(aws.Context, *networkmanager.UpdateDeviceInput, ...request.Option) (*networkmanager.UpdateDeviceOutput, error)
	UpdateDeviceRequest(*networkmanager.UpdateDeviceInput) (*request.Request, *networkmanager.UpdateDeviceOutput)

	UpdateGlobalNetwork(*networkmanager.UpdateGlobalNetworkInput) (*networkmanager.UpdateGlobalNetworkOutput, error)
	UpdateGlobalNetworkWithContext(aws.Context, *networkmanager.UpdateGlobalNetworkInput, ...request.Option) (*networkmanager.UpdateGlobalNetworkOutput, error)
	UpdateGlobalNetworkRequest(*networkmanager.UpdateGlobalNetworkInput) (*request.Request, *networkmanager.UpdateGlobalNetworkOutput)

	UpdateLink(*networkmanager.UpdateLinkInput) (*networkmanager.UpdateLinkOutput, error)
	UpdateLinkWithContext(aws.Context, *networkmanager.UpdateLinkInput, ...request.Option) (*networkmanager.UpdateLinkOutput, error)
	UpdateLinkRequest(*networkmanager.UpdateLinkInput) (*request.Request, *networkmanager.UpdateLinkOutput)

	UpdateSite(*networkmanager.UpdateSiteInput) (*networkmanager.UpdateSiteOutput, error)
	UpdateSiteWithContext(aws.Context, *networkmanager.UpdateSiteInput, ...request.Option) (*networkmanager.UpdateSiteOutput, error)
	UpdateSiteRequest(*networkmanager.UpdateSiteInput) (*request.Request, *networkmanager.UpdateSiteOutput)
}
var _ NetworkManagerAPI = (*networkmanager.NetworkManager)(nil)<|fim▁end|> | // // Define a mock struct to be used in your unit tests of myFunc.
// type mockNetworkManagerClient struct { |
<|file_name|>DownloadRemoteFileDialog.cpp<|end_file_name|><|fim▁begin|>/**
* UGENE - Integrated Bioinformatics Tools.
* Copyright (C) 2008-2017 UniPro <[email protected]>
* http://ugene.net
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
 * MA 02110-1301, USA.
 */
#include <QNetworkAccessManager>
#include <QNetworkRequest>
#include <QNetworkReply>
#include <QXmlInputSource>
#include <QMessageBox>
#include <U2Core/AppContext.h>
#include <U2Core/GUrlUtils.h>
#include <U2Core/L10n.h>
#include <U2Core/LoadRemoteDocumentTask.h>
#include <U2Core/Log.h>
#include <U2Core/MultiTask.h>
#include <U2Core/Settings.h>
#include <U2Core/U2OpStatusUtils.h>
#include <U2Core/U2SafePoints.h>
#include <U2Gui/HelpButton.h>
#include <QPushButton>
#include <U2Gui/LastUsedDirHelper.h>
#include <U2Gui/U2FileDialog.h>
#include "DownloadRemoteFileDialog.h"
#include "OpenViewTask.h"
#include "ui_DownloadRemoteFileDialog.h"
static const QString SAVE_DIR("downloadremotefiledialog/savedir");
static const QString HINT_STYLE_SHEET = "color:green; font:bold";
namespace U2 {
QString DownloadRemoteFileDialog::defaultDB("");
DownloadRemoteFileDialog::DownloadRemoteFileDialog(QWidget *p):QDialog(p), isQueryDB(false) {
ui = new Ui_DownloadRemoteFileDialog;
ui->setupUi(this);
new HelpButton(this, ui->buttonBox, "19766692");
ui->buttonBox->button(QDialogButtonBox::Ok)->setText(tr("OK"));
ui->buttonBox->button(QDialogButtonBox::Cancel)->setText(tr("Cancel"));
ui->formatBox->hide();
ui->formatLabel->hide();
adjustSize();
RemoteDBRegistry& registry = RemoteDBRegistry::getRemoteDBRegistry();
const QList<QString> dataBases = registry.getDBs();
foreach(const QString& dbName, dataBases) {
ui->databasesBox->addItem(dbName, dbName);
}
if (!defaultDB.isEmpty()) {
int index = ui->databasesBox->findData(defaultDB);
if (index != -1){
ui->databasesBox->setCurrentIndex(index);
}
}
ui->hintLabel->setStyleSheet( HINT_STYLE_SHEET );
connect(ui->databasesBox, SIGNAL(currentIndexChanged ( int)), SLOT( sl_onDbChanged()));
connect(ui->saveFilenameToolButton, SIGNAL(clicked()), SLOT(sl_saveFilenameButtonClicked()));
connect(ui->hintLabel, SIGNAL(linkActivated(const QString&)), SLOT(sl_linkActivated(const QString& )));
sl_onDbChanged();
setSaveFilename();
}
DownloadRemoteFileDialog::DownloadRemoteFileDialog( const QString& id, const QString& dbId, QWidget *p /* = NULL*/ )
:QDialog(p), isQueryDB(false)
{
ui = new Ui_DownloadRemoteFileDialog;
ui->setupUi(this);
new HelpButton(this, ui->buttonBox, "19766704");
ui->formatBox->addItem(GENBANK_FORMAT);
ui->formatBox->addItem(FASTA_FORMAT);
connect(ui->formatBox, SIGNAL(currentIndexChanged(const QString &)), SLOT(sl_formatChanged(const QString &)));
adjustSize();
ui->databasesBox->clear();
const QString dbName =
dbId == EntrezUtils::NCBI_DB_PROTEIN ? RemoteDBRegistry::GENBANK_PROTEIN : RemoteDBRegistry::GENBANK_DNA;
ui->databasesBox->addItem(dbName,dbName);
ui->idLineEdit->setText(id);
ui->idLineEdit->setReadOnly(true);
delete ui->hintLabel;
ui->hintLabel = NULL;
setMinimumSize( 500, 0 );
connect(ui->saveFilenameToolButton, SIGNAL(clicked()), SLOT(sl_saveFilenameButtonClicked()));
setSaveFilename();
}
const QString DOWNLOAD_REMOTE_FILE_DOMAIN = "DownloadRemoteFileDialog";
void DownloadRemoteFileDialog::sl_saveFilenameButtonClicked() {
LastUsedDirHelper lod(DOWNLOAD_REMOTE_FILE_DOMAIN);
QString filename = U2FileDialog::getExistingDirectory(this, tr("Select folder to save"), lod.dir);
if(!filename.isEmpty()) {
ui->saveFilenameLineEdit->setText(filename);
lod.url = filename;
}
}
static const QString DEFAULT_FILENAME = "file.format";
void DownloadRemoteFileDialog::setSaveFilename() {
QString dir = AppContext::getSettings()->getValue(SAVE_DIR, "").value<QString>();
if(dir.isEmpty()) {
dir = LoadRemoteDocumentTask::getDefaultDownloadDirectory();
assert(!dir.isEmpty());
}
ui->saveFilenameLineEdit->setText(QDir::toNativeSeparators(dir));
}
QString DownloadRemoteFileDialog::getResourceId() const
{
return ui->idLineEdit->text().trimmed();
}
QString DownloadRemoteFileDialog::getDBId() const
{
int curIdx = ui->databasesBox->currentIndex();
if (curIdx == -1){
return QString("");
}
return ui->databasesBox->itemData(curIdx).toString();
}
QString DownloadRemoteFileDialog::getFullpath() const {
return ui->saveFilenameLineEdit->text();
}
void DownloadRemoteFileDialog::accept()
{
defaultDB = getDBId();
QString resourceId = getResourceId();
if( resourceId.isEmpty() ) {
QMessageBox::critical(this, L10N::errorTitle(), tr("Resource id is empty!"));
ui->idLineEdit->setFocus();
return;
}
QString fullPath = getFullpath();
if( ui->saveFilenameLineEdit->text().isEmpty() ) {
QMessageBox::critical(this, L10N::errorTitle(), tr("No folder selected for saving file!"));
ui->saveFilenameLineEdit->setFocus();
return;
}
U2OpStatus2Log os;
fullPath = GUrlUtils::prepareDirLocation(fullPath, os);
if (fullPath.isEmpty()) {
QMessageBox::critical(this, L10N::errorTitle(), os.getError());
ui->saveFilenameLineEdit->setFocus();
return;
}
QString dbId = getDBId();
QStringList resIds = resourceId.split(QRegExp("[\\s,;]+"));
QList<Task*> tasks;
QString fileFormat;
if (ui->formatBox->count() > 0) {
fileFormat = ui->formatBox->currentText();
}
QVariantMap hints;
hints.insert(FORCE_DOWNLOAD_SEQUENCE_HINT, ui->chbForceDownloadSequence->isVisible() && ui->chbForceDownloadSequence->isChecked());
int taskCount = 0;
bool addToProject = ui->chbAddToProjectCheck->isChecked();
if (addToProject && resIds.size() >= 100) {
QString message = tr("There are more than 100 files found for download.\nAre you sure you want to open all of them?");
int button = QMessageBox::question(QApplication::activeWindow(), tr("Warning"), message,
tr("Cancel"), tr("Open anyway"), tr("Don't open"));
if (button == 0) {
return; // return to dialog
} else if (button == 2) {
addToProject = false;
}
}
foreach (const QString &resId, resIds) {
LoadRemoteDocumentMode mode = LoadRemoteDocumentMode_LoadOnly;
if (addToProject) {
mode = taskCount < OpenViewTask::MAX_DOC_NUMBER_TO_OPEN_VIEWS ? LoadRemoteDocumentMode_OpenView : LoadRemoteDocumentMode_AddToProject;
}
tasks.append(new LoadRemoteDocumentAndAddToProjectTask(resId, dbId, fullPath, fileFormat, hints, mode));
taskCount++;
}
AppContext::getTaskScheduler()->registerTopLevelTask(new MultiTask(tr("Download remote documents"), tasks));
QDialog::accept();
}
DownloadRemoteFileDialog::~DownloadRemoteFileDialog() {
AppContext::getSettings()->setValue(SAVE_DIR, ui->saveFilenameLineEdit->text());
delete ui;
}
bool DownloadRemoteFileDialog::isNcbiDb(const QString &dbId) const {
return dbId == RemoteDBRegistry::GENBANK_DNA || dbId == RemoteDBRegistry::GENBANK_PROTEIN;
}
void DownloadRemoteFileDialog::sl_onDbChanged(){
QString dbId = getDBId();
QString hint;
QString description;
ui->chbForceDownloadSequence->setVisible(isNcbiDb(dbId));
RemoteDBRegistry& registry = RemoteDBRegistry::getRemoteDBRegistry();
hint = description = registry.getHint(dbId);
setupHintText( hint );
ui->idLineEdit->setToolTip(description);
}
void DownloadRemoteFileDialog::sl_formatChanged(const QString &format) {
ui->chbForceDownloadSequence->setVisible(GENBANK_FORMAT == format);
}
void DownloadRemoteFileDialog::sl_linkActivated( const QString& link ){
if (!link.isEmpty()){
ui->idLineEdit->setText(link);
}
}
void DownloadRemoteFileDialog::setupHintText( const QString &text ) {
SAFE_POINT( NULL != ui && NULL != ui->hintLabel, "Invalid dialog content!", );
const QString hintStart( tr( "Hint: " ) );
const QString hintSample = ( text.isEmpty( ) ? tr( "Use database unique identifier." ) : text )
+ "<br>";
const QString hintEnd( tr( "You can download multiple items by separating IDs with space "
"or semicolon." ) );
ui->hintLabel->setText( hintStart + hintSample + hintEnd );
}
} //namespace<|fim▁end|> | * along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, |
<|file_name|>jquery.mobile.js<|end_file_name|><|fim▁begin|>//>>excludeStart("jqmBuildExclude", pragmas.jqmBuildExclude);
//>>group: exclude
define([
"require",
"./widgets/loader",
"./events/navigate",
"./navigation/path",
"./navigation/history",
"./navigation/navigator",
"./navigation/method",
"./transitions/handlers",
"./transitions/visuals",
"./animationComplete",
"./navigation",
"./degradeInputs",
"./widgets/page.dialog",
"./widgets/dialog",
"./widgets/collapsible",
"./widgets/collapsibleSet",
"./fieldContain",
"./grid",
"./widgets/navbar",
"./widgets/listview",
"./widgets/listview.autodividers",
"./widgets/listview.hidedividers",
"./nojs",
"./widgets/forms/checkboxradio",
"./widgets/forms/button",
"./widgets/forms/slider",
"./widgets/forms/slider.tooltip",
"./widgets/forms/flipswitch",
"./widgets/forms/rangeslider",
"./widgets/forms/textinput",
"./widgets/forms/clearButton",<|fim▁hole|> "./buttonMarkup",
"./widgets/controlgroup",
"./links",
"./widgets/toolbar",
"./widgets/fixedToolbar",
"./widgets/fixedToolbar.workarounds",
"./widgets/popup",
"./widgets/popup.arrow",
"./widgets/panel",
"./widgets/table",
"./widgets/table.columntoggle",
"./widgets/table.reflow",
"./widgets/filterable",
"./widgets/filterable.backcompat",
"./widgets/tabs",
"./zoom",
"./zoom/iosorientationfix"
], function( require ) {
require( [ "./init" ], function() {} );
});
//>>excludeEnd("jqmBuildExclude");<|fim▁end|> | "./widgets/forms/autogrow",
"./widgets/forms/select.custom",
"./widgets/forms/select", |
<|file_name|>davfs.py<|end_file_name|><|fim▁begin|>'''
Datastore via remote webdav connection
'''
from __future__ import unicode_literals
from future import standard_library
standard_library.install_aliases()
import os
import tarfile
import logging
from fs.contrib.davfs import DAVFS
from urllib.parse import urlparse
from contextlib import closing
from sumatra.core import component
from .archivingfs import ArchivingFileSystemDataStore, ArchivedDataFile, TIMESTAMP_FORMAT
class DavFsDataItem(ArchivedDataFile):
    """Data item stored inside a tar archive on a WebDAV server."""

    def __init__(self, path, store):
        # 'store' must be set before calling the base constructor because
        # ArchivedDataFile.__init__ calls _get_info(), which uses self.store.
        self.store = store
        super(DavFsDataItem, self).__init__(path, store)

    def get_content(self, max_length=None):
        """Return the content of the archived file.

        The original body had been truncated by a corrupted edit; it is
        reconstructed here following the _get_info() access pattern.

        Parameters
        ----------
        max_length : int, optional
            If given, read at most this many bytes; otherwise read the
            whole file.
        """
        obj = self.store.dav_fs.open(self.tarfile_path, 'rb')
        with closing(tarfile.open(fileobj=obj)) as data_archive:
            f = data_archive.extractfile(self.path)
            if max_length is not None:
                content = f.read(max_length)
            else:
                content = f.read()
            f.close()
        return content

    # mandatory repeat: redefine the property so it uses this class's
    # get_content rather than the base class implementation.
    content = property(fget=get_content)

    def _get_info(self):
        """Return the TarInfo record describing this item inside its archive."""
        # The unreachable trailing 'return tarfile.TarInfo()' was removed:
        # the 'with' block always returns (or propagates an exception).
        obj = self.store.dav_fs.open(self.tarfile_path, 'rb')
        with closing(tarfile.open(fileobj=obj)) as data_archive:
            return data_archive.getmember(self.path)
@component
class DavFsDataStore(ArchivingFileSystemDataStore):
    """ArchivingFileSystemDataStore that archives to WebDAV storage."""
    data_item_class = DavFsDataItem

    def __init__(self, root, dav_url, dav_user=None, dav_pw=None):
        """
        :param root: local root directory containing the data files.
        :param dav_url: URL of the WebDAV server; may embed credentials.
        :param dav_user: username (falls back to the one embedded in dav_url).
        :param dav_pw: password (falls back to the one embedded in dav_url).
        """
        super(DavFsDataStore, self).__init__(root)
        parsed = urlparse(dav_url)
        self.dav_user = dav_user or parsed.username
        self.dav_pw = dav_pw or parsed.password
        self.dav_url = parsed.geturl()
        self.dav_fs = DAVFS(url=self.dav_url,
                            credentials={'username': self.dav_user,
                                         'password': self.dav_pw})

    def __getstate__(self):
        # Serialise only the connection parameters; the DAVFS handle is
        # re-created on restore.
        return {'root': self.root, 'dav_url': self.dav_url,
                'dav_user': self.dav_user, 'dav_pw': self.dav_pw}

    def find_new_data(self, timestamp):
        """Find newly created/changed data items, archive them and
        return the list of keys for the archived items."""
        new_files = self._find_new_data_files(timestamp)
        label = timestamp.strftime(TIMESTAMP_FORMAT)
        archive_paths = self._archive(label, new_files)
        return [DavFsDataItem(path, self).generate_key()
                for path in archive_paths]

    def _archive(self, label, files, delete_originals=True):
        """
        Archive *files* into <archive_store>/<label>.tar.gz on the WebDAV
        server and, by default, delete the local originals.

        Returns the paths of the files inside the archive.
        """
        fs = self.dav_fs
        if not fs.isdir(self.archive_store):
            fs.makedir(self.archive_store, recursive=True)
        tarball_path = os.path.join(self.archive_store, label + ".tar.gz")
        archive_paths = []
        # Context managers guarantee the DAV file handle and the tar
        # stream are closed even if writing fails part-way through
        # (the old explicit tf.close()/tf_obj.close() pair leaked the
        # handle on error and the tf.close() was redundant inside `with`).
        with closing(fs.open(tarball_path, mode='wb')) as tf_obj:
            with tarfile.open(fileobj=tf_obj, mode='w:gz') as tf:
                logging.info("Archiving data to file %s", tf.name)
                # Add data files
                for file_path in files:
                    archive_path = os.path.join(label, file_path)
                    tf.add(os.path.join(self.root, file_path), archive_path)
                    archive_paths.append(archive_path)
        # Delete original files only after the archive was fully written.
        if delete_originals:
            for file_path in files:
                os.remove(os.path.join(self.root, file_path))
        self._last_label = label  # useful for testing
        return archive_paths
with closing(tarfile.open(fileobj=obj)) as data_archive:
f = data_archive.extractfile(self.path)
if max_length: |
<|file_name|>helpers.js<|end_file_name|><|fim▁begin|>'use strict'
const _ = require('lodash')
module.exports = {<|fim▁hole|> const qs = {}
_.forEach(url.split('?').pop().split('&'), s => {
if (!s) return
const kv = s.split('=')
if (kv[0]) {
qs[kv[0]] = decodeURIComponent(kv[1])
}
})
return qs
},
  // Serialise a plain object into a query string ("a=1&b=2").
  // Values are URI-encoded; keys are emitted verbatim.
  toQueryString(o) {
    return _.keys(o).map(k => k + '=' + encodeURIComponent(o[k])).join('&')
  },
  // True when `v` is an 11-digit mobile number starting with 13x/15x/18x.
  // NOTE(review): newer prefixes (14x/17x/19x) are rejected — confirm intended.
  isMobile(v) {
    return /^1[358]\d{9}$/.test(v)
  },
  // Return a pseudo-random base-36 string of up to 16 characters.
  // NOTE(review): backed by Math.random() — NOT cryptographically secure.
  getRandomStr() {
    return (1e32 * Math.random()).toString(36).slice(0, 16)
  }
}<|fim▁end|> | getQueryString(url) { |
<|file_name|>IndexedTextFile.cpp<|end_file_name|><|fim▁begin|>/*
* This file is part of Dune Legacy.
*
* Dune Legacy is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 2 of the License, or
* (at your option) any later version.
*
* Dune Legacy is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Dune Legacy. If not, see <http://www.gnu.org/licenses/>.
*/
#include <FileClasses/IndexedTextFile.h>
#include <SDL_endian.h>
#include <string>
#include <algorithm>
#include <stdexcept>
IndexedTextFile::IndexedTextFile(SDL_RWops* RWop) {
int IndexedTextFilesize;
unsigned char* Filedata;
Uint16* Index;
if(RWop == NULL) {
throw std::invalid_argument("IndexedTextFile:IndexedTextFile(): RWop == NULL!");
}
IndexedTextFilesize = SDL_RWseek(RWop,0,SEEK_END);
if(IndexedTextFilesize <= 0) {
throw std::runtime_error("IndexedTextFile:IndexedTextFile(): Cannot determine size of this file!");
}
if(IndexedTextFilesize < 2) {
throw std::runtime_error("IndexedTextFile:IndexedTextFile(): No valid indexed textfile: File too small!");
}
<|fim▁hole|> throw std::runtime_error("IndexedTextFile:IndexedTextFile(): Seeking in this indexed textfile failed!");
}
if( (Filedata = (unsigned char*) malloc(IndexedTextFilesize)) == NULL) {
throw std::bad_alloc();
}
if(SDL_RWread(RWop, Filedata, IndexedTextFilesize, 1) != 1) {
free(Filedata);
throw std::runtime_error("IndexedTextFile:IndexedTextFile(): Reading this indexed textfile failed!");
}
numIndexedStrings = (SDL_SwapLE16(((Uint16*) Filedata)[0]))/2 - 1;
Index = (Uint16*) Filedata;
for(unsigned int i=0; i <= numIndexedStrings; i++) {
Index[i] = SDL_SwapLE16(Index[i]);
}
IndexedStrings = new std::string[numIndexedStrings];
for(unsigned int i=0; i < numIndexedStrings;i++) {
IndexedStrings[i] = (const char*) (Filedata+Index[i]);
// Now convert DOS ASCII to ANSI ASCII
replace(IndexedStrings[i].begin(),IndexedStrings[i].end(), '\x0D','\x0A'); // '\r' -> '\n'
replace(IndexedStrings[i].begin(),IndexedStrings[i].end(), '\x84','\xE4'); // german umlaut "ae"
replace(IndexedStrings[i].begin(),IndexedStrings[i].end(), '\x94','\xF6'); // german umlaut "oe"
replace(IndexedStrings[i].begin(),IndexedStrings[i].end(), '\x81','\xFC'); // german umlaut "ue"
replace(IndexedStrings[i].begin(),IndexedStrings[i].end(), '\x8E','\xC4'); // german umlaut "AE"
replace(IndexedStrings[i].begin(),IndexedStrings[i].end(), '\x99','\xD6'); // german umlaut "OE"
replace(IndexedStrings[i].begin(),IndexedStrings[i].end(), '\x9A','\xDC'); // german umlaut "UE"
replace(IndexedStrings[i].begin(),IndexedStrings[i].end(), '\xE1','\xDF'); // german umlaut "ss"
}
free(Filedata);
}
// Release the array of indexed strings allocated in the constructor
// (each std::string owns its own copy of the text; the raw file buffer
// was already freed at the end of the constructor).
IndexedTextFile::~IndexedTextFile() {
    delete [] IndexedStrings;
}
<|file_name|>MantidPlotProjectSerialiseTest.py<|end_file_name|><|fim▁begin|>"""
Test of basic project saving and loading
"""
import mantidplottests
from mantidplottests import *
import shutil
import numpy as np
import re
from PyQt4 import QtGui, QtCore
class MantidPlotProjectSerialiseTest(unittest.TestCase):
    def setUp(self):
        # Build the paths used by every test: a project folder under the
        # user's home directory and the .mantid project file inside it.
        self._project_name = "MantidPlotTestProject"
        self._project_folder = os.path.join(os.path.expanduser("~"),
                                            self._project_name)
        file_name = "%s.mantid" % self._project_name
        self._project_file = os.path.join(self._project_folder, file_name)
    def tearDown(self):
        # Clean up project files written by the test, if any.
        if os.path.isdir(self._project_folder):
            remove_folder(self._project_folder)
        # Reset MantidPlot state (windows, workspaces, project) between tests.
        clear_mantid()
def test_project_file_with_no_data(self):
workspace_name = "fake_workspace"
create_dummy_workspace(workspace_name)
saveProjectAs(self._project_folder)
self.assertTrue(os.path.isdir(self._project_folder))
self.assertTrue(os.path.isfile(self._project_file))
file_text = "MantidPlot 0.9.5 project file\n" \
"<scripting-lang>\tPython\n" \
"<windows>\t0\n" \
"<mantidworkspaces>\n" \
"WorkspaceNames\tfake_workspace\n" \
"</mantidworkspaces>"
exp_contents = parse_project_file(file_text)
contents = read_project_file(self._project_folder)
self.assertEqual(contents, exp_contents)
<|fim▁hole|> def test_project_file_with_plotted_spectrum(self):
workspace_name = "fake_workspace"
create_dummy_workspace(workspace_name)
plotSpectrum(workspace_name, 1)
saveProjectAs(self._project_folder)
self.assert_project_files_saved(workspace_name)
contents = read_project_file(self._project_folder)
# Check corrent number of windows
self.assertEqual(int(contents['<windows>']), 1)
# Check workspace list was written
workspace_list = contents['mantidworkspaces']['WorkspaceNames']
self.assertEqual(workspace_list, workspace_name)
# Check plot was written
plot_titles = contents['multiLayer']['graph']['PlotTitle']
self.assertEqual(len(plot_titles), 3)
self.assertEqual(plot_titles[0], workspace_name)
def test_project_file_1D_plot_with_labels_modified(self):
workspace_name = "fake_workspace"
create_dummy_workspace(workspace_name)
plotSpectrum(workspace_name, [0, 1])
# modify axes labels
graph = windows()[0]
layer = graph.layer(1)
# call using threadsafe_call to ensure things are executed on the GUI
# thread, otherwise we get segfaults.
threadsafe_call(layer.setTitle, "Hello World")
threadsafe_call(layer.setAxisTitle, 0, "Y Axis Modified")
threadsafe_call(layer.setAxisTitle, 2, "X Axis Modified")
saveProjectAs(self._project_folder)
self.assert_project_files_saved(workspace_name)
contents = read_project_file(self._project_folder)
# Check corrent number of windows
self.assertEqual(int(contents['<windows>']), 1)
# Check plot title is correct
plot_title = contents['multiLayer']['graph']['PlotTitle']
self.assertEqual(len(plot_title), 3)
self.assertEqual(plot_title[0], "Hello World")
# Check axes titles are correct
axes_titles = contents['multiLayer']['graph']['AxesTitles']
self.assertEqual(len(axes_titles), 2)
self.assertEqual(axes_titles[0], 'X Axis Modified')
self.assertEqual(axes_titles[1], 'Y Axis Modified')
def test_project_file_1D_plot_with_error_bars(self):
workspace_name = "fake_workspace"
create_dummy_workspace(workspace_name)
plotSpectrum(workspace_name, 0, error_bars=True)
saveProjectAs(self._project_folder)
self.assert_project_files_saved(workspace_name)
contents = read_project_file(self._project_folder)
error_bars = contents['multiLayer']['graph']['MantidYErrors']['1']
self.assertEqual(len(error_bars), 5)
def test_project_file_1D_plot_with_axes_scaling(self):
workspace_name = "fake_workspace"
create_dummy_workspace(workspace_name)
plotSpectrum(workspace_name, 0)
# modify axes scales
graph = windows()[0]
layer = graph.layer(1)
# call using threadsafe_call to ensure things are executed on the GUI
# thread. Otherwise we get segfaults.
threadsafe_call(layer.setAxisScale, 0, 10, 10)
threadsafe_call(layer.logYlinX)
saveProjectAs(self._project_folder)
self.assert_project_files_saved(workspace_name)
contents = read_project_file(self._project_folder)
# Check axis scales are as expected
scales = contents['multiLayer']['graph']['scale']
scale1, scale2, scale3, scale4 = scales[0], scales[1], scales[2], scales[3]
self.assertAlmostEqual(float(scale1[1]), 110.6670313)
self.assertEqual(int(scale1[2]), 1000)
self.assertAlmostEqual(float(scale2[1]), 110.6670313)
self.assertEqual(int(scale2[2]), 1000)
self.assertEqual(int(scale3[1]), 0)
self.assertEqual(int(scale3[2]), 12)
self.assertEqual(int(scale4[1]), 0)
self.assertEqual(int(scale4[2]), 12)
def test_serialise_with_no_data(self):
workspace_name = "fake_workspace"
create_dummy_workspace(workspace_name)
self.save_and_reopen_project()
# Check that objects were reloaded
self.assertEqual(rootFolder().name(), self._project_name)
self.assertEqual(len(windows()), 0)
self.assertEqual(len(mtd.getObjectNames()), 1)
def test_serialise_1D_plot_with_plotted_spectrum(self):
workspace_name = "fake_workspace"
create_dummy_workspace(workspace_name)
plotSpectrum(workspace_name, 1)
self.save_and_reopen_project()
# Check that objects were reloaded
self.assertEqual(rootFolder().name(), self._project_name)
self.assertEqual(len(windows()), 1)
self.assertEqual(len(mtd.getObjectNames()), 1)
def test_serialise_1D_plot_with_two_plot_windows(self):
create_dummy_workspace("ws1")
create_dummy_workspace("ws2")
plotSpectrum("ws1", 1)
plotSpectrum("ws2", 1)
self.save_and_reopen_project()
# Check that objects were reloaded
self.assertEqual(rootFolder().name(), self._project_name)
self.assertEqual(len(windows()), 2)
self.assertEqual(len(mtd.getObjectNames()), 2)
# Check both windows are graph objects
for window in windows():
# slight hack as 'type' only returns
# an MDIWindow instance
self.assertTrue('Graph' in str(window))
def test_serialise_1D_plot_with_one_plot_and_multiple_spectra(self):
workspace_name = "fake_workspace"
create_dummy_workspace(workspace_name)
plotSpectrum(workspace_name, [0, 1])
self.save_and_reopen_project()
self.assertEqual(rootFolder().name(), self._project_name)
self.assertEqual(len(mtd.getObjectNames()), 1)
self.assertEqual(len(windows()), 1)
graph = windows()[0]
layer = graph.layer(1)
# Check graph and layer exist
self.assertTrue('Graph' in str(graph))
self.assertTrue(layer is not None)
# Check plot curves exist
curve1 = layer.curve(0)
curve2 = layer.curve(1)
self.assertTrue('QwtPlotCurve', str(type(curve1)))
self.assertTrue('QwtPlotCurve', str(type(curve2)))
def test_serialise_waterfall_plot(self):
workspace_name = "fake_workspace"
create_dummy_workspace(workspace_name)
plotSpectrum(workspace_name, [0, 1], waterfall=True)
self.save_and_reopen_project()
# Check that objects were reloaded
self.assertEqual(rootFolder().name(), self._project_name)
self.assertEqual(len(windows()), 1)
self.assertEqual(len(mtd.getObjectNames()), 1)
# Check window exists
graph = windows()[0]
self.assertTrue('Graph' in str(graph))
# Check plot curves exist
layer = graph.layer(1)
curve1 = layer.curve(0)
curve2 = layer.curve(1)
self.assertTrue('QwtPlotCurve', str(type(curve1)))
self.assertTrue('QwtPlotCurve', str(type(curve2)))
def test_serialise_2D_plot(self):
workspace_name = "fake_workspace"
create_dummy_workspace(workspace_name)
plot2D(workspace_name)
self.save_and_reopen_project()
# Check that objects were reloaded
self.assertEqual(rootFolder().name(), self._project_name)
self.assertEqual(len(windows()), 1)
self.assertEqual(len(mtd.getObjectNames()), 1)
# Check window exists
graph = windows()[0]
self.assertTrue('Graph' in str(graph))
def test_save_instrument_view(self):
workspace_name = 'fake_workspace'
instrument_name = 'IRIS'
# make a workspace with an instrument
CreateSampleWorkspace(OutputWorkspace=workspace_name)
LoadInstrument(Workspace=workspace_name, MonitorList='1,2',
InstrumentName=instrument_name, RewriteSpectraMap=True)
window = getInstrumentView(workspace_name)
render_tab = window.getTab("Render")
# range options
render_tab.setMinValue(1.25)
render_tab.setMaxValue(1.75)
render_tab.setRange(1.35,1.85)
render_tab.showAxes(True)
# display options
render_tab.displayDetectorsOnly(True)
render_tab.setColorMapAutoscaling(True)
render_tab.setSurfaceType(InstrumentWidgetRenderTab.CYLINDRICAL_Y)
render_tab.flipUnwrappedView(True)
# pick tab
pick_tab = window.getTab(InstrumentWidget.PICK)
pick_tab.selectTool(InstrumentWidgetPickTab.PeakSelect)
# mask tab
mask_tab = window.getTab(InstrumentWidget.MASK)
mask_tab.setMode(InstrumentWidgetMaskTab.Group)
mask_tab.selectTool(InstrumentWidgetMaskTab.DrawEllipse)
tree_tab = window.getTab(InstrumentWidget.TREE)
tree_tab.selectComponentByName("graphite")
saveProjectAs(self._project_folder)
self.assert_project_files_saved(workspace_name)
contents = read_project_file(self._project_folder)
window_options = contents['instrumentwindow']
self.assertEquals(int(window_options['SurfaceType']), 2)
self.assertEquals(int(window_options['CurrentTab']), 0)
# render tab options
render_options = contents['instrumentwindow']['tabs']['rendertab']
self.assertEqual(bool(render_options["DisplayDetectorsOnly"]), True)
self.assertEqual(bool(render_options["AutoScaling"]), True)
self.assertEqual(bool(render_options["FlipView"]), True)
# pick tab options
pick_options = contents['instrumentwindow']['tabs']['picktab']
self.assertEqual(bool(pick_options['ActiveTools'][9]), True)
# mask tab options
mask_options = contents['instrumentwindow']['tabs']['masktab']
self.assertEqual(bool(mask_options['ActiveType'][1]), True)
self.assertEqual(bool(mask_options['ActiveTools'][2]), True)
    def assert_project_files_saved(self, workspace_name):
        """Check that the project folder, the .mantid project file and the
        saved workspace's .nxs file all exist on disk."""
        file_name = '%s.nxs' % workspace_name
        file_path = os.path.join(self._project_folder, file_name)
        self.assertTrue(os.path.isdir(self._project_folder))
        self.assertTrue(os.path.isfile(self._project_file))
        self.assertTrue(os.path.isfile(file_path))
    def save_and_reopen_project(self):
        """Save the project, wipe MantidPlot, then reopen the saved project
        so tests can verify a full serialise/deserialise round trip."""
        saveProjectAs(self._project_folder)
        clear_mantid()
        openProject(self._project_file)
def clear_mantid():
    """Clear plots and workspaces from Mantid.

    This will also start a new project and remove any previous
    project data.
    """
    # Remove windows and plots; confirmClose(False) suppresses the
    # confirmation dialog so the close is non-interactive.
    for window in windows():
        window.confirmClose(False)
        window.close()
        # Let Qt process the deferred close events before continuing.
        QtCore.QCoreApplication.processEvents()
    # Clear workspaces
    mtd.clear()
    # Start a blank project to remove anything else
    newProject()
def create_dummy_workspace(ws_name):
    """Create a dummy two-spectrum Mantid workspace with some data.

    :param ws_name: name under which the workspace is registered.
    """
    # First spectrum: sin^2 signal plus a linear trend.
    X1 = np.linspace(0, 10, 100)
    Y1 = 1000*(np.sin(X1)**2) + X1*10
    X1 = np.append(X1, 10.1)  # extra X value so each spectrum has n+1 x-points
    # Second spectrum: cos^2 signal plus a constant offset.
    X2 = np.linspace(2, 12, 100)
    Y2 = 500*(np.cos(X2/2.)**2) + 20
    X2 = np.append(X2, 12.10)  # extra X value, as above
    X = np.append(X1, X2)
    Y = np.append(Y1, Y2)
    E = np.sqrt(Y)  # Poisson-style errors
    CreateWorkspace(OutputWorkspace=ws_name, DataX=list(X),
                    DataY=list(Y), DataE=list(E), NSpec=2,
                    UnitX="TOF", YUnitLabel="Counts",
                    WorkspaceTitle="Faked data Workspace")
def remove_folder(folder_name):
    """Remove a project folder after a test.

    :param folder_name: path of the directory to delete.
    :raises IOError: if the path is not a directory or cannot be removed.
    """
    if not os.path.isdir(folder_name):
        raise IOError('Path is not a directory')
    try:
        shutil.rmtree(folder_name)
    except OSError:
        # Narrowed from a bare `except:` so unrelated exceptions
        # (e.g. KeyboardInterrupt) are no longer swallowed.
        raise IOError('Could not clean up folder after test')
def get_project_file_contents(folder_name):
    """Return the raw text of the Mantid project file inside *folder_name*.

    The project file is expected to be named after the folder itself,
    with a '.mantid' extension.

    :raises IOError: if *folder_name* is not a directory.
    """
    if not os.path.isdir(folder_name):
        raise IOError('Path is not a directory')
    file_name = os.path.basename(folder_name) + '.mantid'
    with open(os.path.join(folder_name, file_name), 'r') as file_handle:
        return file_handle.read()
def parse_project_file(contents, pattern=""):
    """Create a dictionary of the Mantid project file entries.

    :param contents: raw text of the project file (or of a nested section
        when called recursively).
    :param pattern: compiled regex matching <tag>...</tag> sections; built
        on the first (outermost) call and reused for the recursion.
    """
    if pattern == "":
        # Matches a whole <tag>...</tag> section; DOTALL lets a section
        # span lines and the backreference enforces matching open/close tags.
        pattern_str = "<(?P<tag>[a-zA-Z]*)>(.*)</(?P=tag)>"
        pattern = re.compile(pattern_str, flags=re.MULTILINE | re.DOTALL)

    # Pull out all tagged sections, then strip them from the remaining text.
    match = re.findall(pattern, contents)
    contents = re.sub(pattern, '', contents)

    data = {}
    # recursively parse sections
    if len(match) > 0:
        data = {}
        for x, y in match:
            data[x] = y
        for key in data.keys():
            data[key] = parse_project_file(data[key], pattern)

    # parse individual (tab-separated) property lines
    lines = contents.strip().split('\n')
    for line in lines:
        properties = line.strip().split('\t')
        key = properties[0]
        values = properties[1:]
        if key in data.keys():
            # if it already exists then add multiple entries as a dictionary
            # with numerical keys corresponding to the order added
            if not isinstance(data[key], dict):
                data[key] = {0: data[key]}
            data[key][max(data[key])+1] = values
        elif len(properties) == 2:
            data[key] = values[0]
        else:
            data[key] = values
    return data
def read_project_file(folder_name):
    """Read and parse the Mantid project file found in *folder_name*."""
    raw_text = get_project_file_contents(folder_name)
    return parse_project_file(raw_text)
# Run the unit tests
mantidplottests.runTests(MantidPlotProjectSerialiseTest)<|fim▁end|> | |
<|file_name|>trumbowyg.fontfamily.js<|end_file_name|><|fim▁begin|>(function ($) {
'use strict';
$.extend(true, $.trumbowyg, {
langs: {
// jshint camelcase:false
en: {
fontFamily: 'Font'
},
es: {
fontFamily: 'Fuente'
},
da: {
fontFamily: 'Skrifttype'
},
fr: {
fontFamily: 'Police'
},
de: {
fontFamily: 'Schriftart'
},
nl: {
fontFamily: 'Lettertype'
},
tr: {
fontFamily: 'Yazı Tipi'
},
zh_tw: {
fontFamily: '字體',
},
pt_br: {
fontFamily: 'Fonte',
}
}
});
// jshint camelcase:true
var defaultOptions = {
fontList: [
{name: 'Arial', family: 'Arial, Helvetica, sans-serif'},
{name: 'Arial Black', family: '\'Arial Black\', Gadget, sans-serif'},
{name: 'Comic Sans', family: '\'Comic Sans MS\', Textile, cursive, sans-serif'},
{name: 'Courier New', family: '\'Courier New\', Courier, monospace'},
{name: 'Georgia', family: 'Georgia, serif'},
{name: 'Impact', family: 'Impact, Charcoal, sans-serif'},
{name: 'Lucida Console', family: '\'Lucida Console\', Monaco, monospace'},
{name: 'Lucida Sans', family: '\'Lucida Sans Uncide\', \'Lucida Grande\', sans-serif'},
{name: 'Palatino', family: '\'Palatino Linotype\', \'Book Antiqua\', Palatino, serif'},
{name: 'Tahoma', family: 'Tahoma, Geneva, sans-serif'},
{name: 'Times New Roman', family: '\'Times New Roman\', Times, serif'},
{name: 'Trebuchet', family: '\'Trebuchet MS\', Helvetica, sans-serif'},
{name: 'Verdana', family: 'Verdana, Geneva, sans-serif'}
]
};
// Add dropdown with web safe fonts
$.extend(true, $.trumbowyg, {
plugins: {<|fim▁hole|> trumbowyg.o.plugins.fontfamily || {}
);
trumbowyg.addBtnDef('fontfamily', {
dropdown: buildDropdown(trumbowyg),
hasIcon: false,
text: trumbowyg.lang.fontFamily
});
}
}
}
});
// Register one editor button per configured font and return the list of
// button names (in fontList order) to be used as the dropdown definition.
function buildDropdown(trumbowyg) {
    var btnNames = [];

    $.each(trumbowyg.o.plugins.fontfamily.fontList, function (fontIndex, font) {
        var btnName = 'fontfamily_' + fontIndex;

        trumbowyg.addBtnDef(btnName, {
            title: '<span style="font-family: ' + font.family + ';">' + font.name + '</span>',
            hasIcon: false,
            fn: function () {
                trumbowyg.execCmd('fontName', font.family, true);
            }
        });

        btnNames.push(btnName);
    });

    return btnNames;
}
})(jQuery);<|fim▁end|> | fontfamily: {
init: function (trumbowyg) {
trumbowyg.o.plugins.fontfamily = $.extend(true, {},
defaultOptions, |
<|file_name|>0027_auto_20150220_0305.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('statmaps', '0026_populate_cogatlas'),
]
operations = [
migrations.AddField(
model_name='statisticmap',
name='cognitive_paradigm_cogatlas',
field=models.CharField(help_text=b"Task (or lack of it) performed by the subjects in the scanner described using <a href='http://www.cognitiveatlas.org/'>Cognitive Atlas</a> terms", max_length=200, null=True, verbose_name=b'Cognitive Paradigm'),
preserve_default=True,
),
migrations.AlterField(
model_name='statisticmap',
name='modality',<|fim▁hole|> ),
]<|fim▁end|> | field=models.CharField(help_text=b'Brain imaging procedure that was used to acquire the data.', max_length=200, verbose_name=b'Modality & Acquisition Type', choices=[(b'fMRI-BOLD', b'fMRI-BOLD'), (b'fMRI-CBF', b'fMRI-CBF'), (b'fMRI-CBV', b'fMRI-CBV'), (b'Diffusion MRI', b'Diffusion MRI'), (b'Structural MRI', b'Structural MRI'), (b'PET FDG', b'PET FDG'), (b'PET [15O]-water', b'PET [15O]-water'), (b'PET other', b'PET other'), (b'MEG', b'MEG'), (b'EEG', b'EEG'), (b'Other', b'Other')]),
preserve_default=True, |
<|file_name|>header.js<|end_file_name|><|fim▁begin|>'use strict';
// Disable eval and Buffer.
window.eval = global.eval = global.Buffer = function() {
throw new Error("Can't use eval and Buffer.");
}
const Electron = require('electron')
const IpcRenderer = Electron.ipcRenderer;
var Urlin = null; // element of input text
window.addEventListener('load', ()=> {
Urlin = document.getElementById('input-url');
Urlin.addEventListener("keypress", (event)=>{
if(13!=event.keyCode) return;<|fim▁hole|>
IpcRenderer.on('url-input', (event, s_url)=>{
Urlin.value = s_url;
});<|fim▁end|> | Urlin.blur();
IpcRenderer.sendToHost('url-input', Urlin.value);
}, false);
}, false); |
<|file_name|>CylinderZoneEditWidget.hpp<|end_file_name|><|fim▁begin|>/*
Copyright_License {
XCSoar Glide Computer - http://www.xcsoar.org/
Copyright (C) 2000-2013 The XCSoar Project
A detailed list of copyright holders can be found in the file "AUTHORS".
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
}
*/
#ifndef XCSOAR_CYLINDER_ZONE_EDIT_WIDGET_HPP
#define XCSOAR_CYLINDER_ZONE_EDIT_WIDGET_HPP
#include "ObservationZoneEditWidget.hpp"
#include <assert.h>
class CylinderZone;
class CylinderZoneEditWidget : public ObservationZoneEditWidget {
const bool radius_editable;
public:
CylinderZoneEditWidget(CylinderZone &oz, bool _length_editable);
protected:
const CylinderZone &GetObject() const {
return (const CylinderZone &)ObservationZoneEditWidget::GetObject();
}
CylinderZone &GetObject() {
return (CylinderZone &)ObservationZoneEditWidget::GetObject();
}<|fim▁hole|> const PixelRect &rc) override;
virtual bool Save(bool &changed) override;
};
#endif<|fim▁end|> |
public:
/* virtual methods from class Widget */
virtual void Prepare(ContainerWindow &parent, |
<|file_name|>tplink.py<|end_file_name|><|fim▁begin|>"""
Support for TP-Link routers.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.tplink/
"""
import base64
from datetime import datetime
import hashlib
import logging
import re
from aiohttp.hdrs import (
ACCEPT, COOKIE, PRAGMA, REFERER, CONNECTION, KEEP_ALIVE, USER_AGENT,
CONTENT_TYPE, CACHE_CONTROL, ACCEPT_ENCODING, ACCEPT_LANGUAGE)
import requests
import voluptuous as vol
from homeassistant.components.device_tracker import (
DOMAIN, PLATFORM_SCHEMA, DeviceScanner)
from homeassistant.const import (
CONF_HOST, CONF_PASSWORD, CONF_USERNAME, HTTP_HEADER_X_REQUESTED_WITH)
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['tplink==0.2.1']
_LOGGER = logging.getLogger(__name__)
HTTP_HEADER_NO_CACHE = 'no-cache'
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_USERNAME): cv.string
})
def get_scanner(hass, config):
    """
    Validate the configuration and return a TP-Link scanner.

    Scanner implementations are tried in order; the first one whose
    initialisation succeeds against the router is returned.  The
    pypi-backed TplinkDeviceScanner is preferred; the numbered classes
    are legacy firmware-specific fallbacks.
    """
    scanner_classes = (
        TplinkDeviceScanner, Tplink5DeviceScanner, Tplink4DeviceScanner,
        Tplink3DeviceScanner, Tplink2DeviceScanner, Tplink1DeviceScanner,
    )
    for scanner_cls in scanner_classes:
        candidate = scanner_cls(config[DOMAIN])
        if candidate.success_init:
            return candidate

    return None
class TplinkDeviceScanner(DeviceScanner):
    """Queries the router for connected devices.

    Delegates the HTTP/API work to the ``tplink`` pypi package.
    """

    def __init__(self, config):
        """Initialize the scanner from the device_tracker config entry."""
        from tplink.tplink import TpLinkClient

        host = config[CONF_HOST]
        password = config[CONF_PASSWORD]
        username = config[CONF_USERNAME]

        self.success_init = False
        try:
            self.tplink_client = TpLinkClient(
                password, host=host, username=username)

            self.last_results = {}
            # success_init doubles as a connectivity probe; get_scanner()
            # falls through to the next scanner class when it stays False.
            self.success_init = self._update_info()
        except requests.exceptions.ConnectionError:
            _LOGGER.debug("ConnectionError in TplinkDeviceScanner")

    def scan_devices(self):
        """Scan for new devices and return a list with found device IDs."""
        self._update_info()
        return self.last_results.keys()

    def get_device_name(self, device):
        """Get the name of the device, or None if unknown."""
        return self.last_results.get(device)

    def _update_info(self):
        """Ensure the information from the TP-Link router is up to date.

        Return boolean if scanning successful.
        """
        _LOGGER.info("Loading wireless clients...")

        # Presumably a mapping of device ID -> name (keys are returned by
        # scan_devices, values by get_device_name) — defined by the
        # tplink package's get_connected_devices().
        result = self.tplink_client.get_connected_devices()

        if result:
            self.last_results = result
            return True

        return False
class Tplink1DeviceScanner(DeviceScanner):
    """This class queries a wireless router running TP-Link firmware."""

    def __init__(self, config):
        """Initialize the scanner."""
        host = config[CONF_HOST]
        username, password = config[CONF_USERNAME], config[CONF_PASSWORD]

        # The firmware reports MACs in AA-BB-CC-DD-EE-FF form.
        self.parse_macs = re.compile('[0-9A-F]{2}-[0-9A-F]{2}-[0-9A-F]{2}-' +
                                     '[0-9A-F]{2}-[0-9A-F]{2}-[0-9A-F]{2}')

        self.host = host
        self.username = username
        self.password = password

        self.last_results = {}
        self.success_init = False
        try:
            self.success_init = self._update_info()
        except requests.exceptions.ConnectionError:
            _LOGGER.debug("ConnectionError in Tplink1DeviceScanner")

    def scan_devices(self):
        """Scan for new devices and return a list with found device IDs."""
        self._update_info()
        return self.last_results

    def get_device_name(self, device):
        """Get firmware doesn't save the name of the wireless device."""
        return None

    def _update_info(self):
        """Ensure the information from the TP-Link router is up to date.

        Return boolean if scanning successful.
        """
        _LOGGER.info("Loading wireless clients...")

        url = 'http://{}/userRpm/WlanStationRpm.htm'.format(self.host)
        referer = 'http://{}'.format(self.host)
        # This firmware uses HTTP basic auth and requires a Referer header.
        page = requests.get(
            url, auth=(self.username, self.password),
            headers={REFERER: referer}, timeout=4)

        result = self.parse_macs.findall(page.text)

        if result:
            # Normalise to colon-separated MAC addresses.
            self.last_results = [mac.replace("-", ":") for mac in result]
            return True

        return False
class Tplink2DeviceScanner(Tplink1DeviceScanner):
    """This class queries a router with newer version of TP-Link firmware."""

    def scan_devices(self):
        """Scan for new devices and return a list with found device IDs."""
        self._update_info()
        return self.last_results.keys()

    def get_device_name(self, device):
        """Get firmware doesn't save the name of the wireless device."""
        return self.last_results.get(device)

    def _update_info(self):
        """Ensure the information from the TP-Link router is up to date.

        Return boolean if scanning successful.
        """
        _LOGGER.info("Loading wireless clients...")

        url = 'http://{}/data/map_access_wireless_client_grid.json' \
            .format(self.host)
        referer = 'http://{}'.format(self.host)

        # Router uses Authorization cookie instead of header.
        # Build the Basic-auth value and pass it as a cookie.
        username_password = '{}:{}'.format(self.username, self.password)
        b64_encoded_username_password = base64.b64encode(
            username_password.encode('ascii')
        ).decode('ascii')
        cookie = 'Authorization=Basic {}' \
            .format(b64_encoded_username_password)

        response = requests.post(
            url, headers={REFERER: referer, COOKIE: cookie},
            timeout=4)

        try:
            result = response.json().get('data')
        except ValueError:
            # Non-JSON reply usually means the login was rejected.
            _LOGGER.error("Router didn't respond with JSON. "
                          "Check if credentials are correct.")
            return False

        if result:
            # Map colon-separated MAC address -> device name.
            self.last_results = {
                device['mac_addr'].replace('-', ':'): device['name']
                for device in result
            }
            return True

        return False
class Tplink3DeviceScanner(Tplink1DeviceScanner):
"""This class queries the Archer C9 router with version 150811 or high."""
def __init__(self, config):
"""Initialize the scanner."""
self.stok = ''
self.sysauth = ''
super(Tplink3DeviceScanner, self).__init__(config)
def scan_devices(self):
"""Scan for new devices and return a list with found device IDs."""
self._update_info()
self._log_out()
return self.last_results.keys()
def get_device_name(self, device):
"""Get the firmware doesn't save the name of the wireless device.
We are forced to use the MAC address as name here.
"""
return self.last_results.get(device)
def _get_auth_tokens(self):
"""Retrieve auth tokens from the router."""
_LOGGER.info("Retrieving auth tokens...")
url = 'http://{}/cgi-bin/luci/;stok=/login?form=login' \
.format(self.host)
referer = 'http://{}/webpages/login.html'.format(self.host)
# If possible implement RSA encryption of password here.
response = requests.post(
url, params={'operation': 'login', 'username': self.username,
'password': self.password},
headers={REFERER: referer}, timeout=4)
try:
self.stok = response.json().get('data').get('stok')
_LOGGER.info(self.stok)
regex_result = re.search(
'sysauth=(.*);', response.headers['set-cookie'])
self.sysauth = regex_result.group(1)
_LOGGER.info(self.sysauth)
return True
except (ValueError, KeyError):
_LOGGER.error("Couldn't fetch auth tokens! Response was: %s",
response.text)
return False
def _update_info(self):
"""Ensure the information from the TP-Link router is up to date.
Return boolean if scanning successful.
"""
if (self.stok == '') or (self.sysauth == ''):
self._get_auth_tokens()
_LOGGER.info("Loading wireless clients...")
url = ('http://{}/cgi-bin/luci/;stok={}/admin/wireless?'
'form=statistics').format(self.host, self.stok)
referer = 'http://{}/webpages/index.html'.format(self.host)
response = requests.post(
url, params={'operation': 'load'}, headers={REFERER: referer},
cookies={'sysauth': self.sysauth}, timeout=5)
try:
json_response = response.json()
if json_response.get('success'):
result = response.json().get('data')
else:
if json_response.get('errorcode') == 'timeout':
_LOGGER.info("Token timed out. Relogging on next scan")
self.stok = ''
self.sysauth = ''
return False
_LOGGER.error(<|fim▁hole|> except ValueError:
_LOGGER.error("Router didn't respond with JSON. "
"Check if credentials are correct")
return False
if result:
self.last_results = {
device['mac'].replace('-', ':'): device['mac']
for device in result
}
return True
return False
def _log_out(self):
_LOGGER.info("Logging out of router admin interface...")
url = ('http://{}/cgi-bin/luci/;stok={}/admin/system?'
'form=logout').format(self.host, self.stok)
referer = 'http://{}/webpages/index.html'.format(self.host)
requests.post(
url, params={'operation': 'write'}, headers={REFERER: referer},
cookies={'sysauth': self.sysauth})
self.stok = ''
self.sysauth = ''
class Tplink4DeviceScanner(Tplink1DeviceScanner):
"""This class queries an Archer C7 router with TP-Link firmware 150427."""
def __init__(self, config):
"""Initialize the scanner."""
self.credentials = ''
self.token = ''
super(Tplink4DeviceScanner, self).__init__(config)
def scan_devices(self):
"""Scan for new devices and return a list with found device IDs."""
self._update_info()
return self.last_results
def get_device_name(self, device):
"""Get the name of the wireless device."""
return None
    def _get_auth_tokens(self):
        """Retrieve auth tokens from the router.

        Logs in via basic-auth cookie and scrapes the per-session URL token
        out of the login redirect.  Returns True on success.
        """
        _LOGGER.info("Retrieving auth tokens...")
        url = 'http://{}/userRpm/LoginRpm.htm?Save=Save'.format(self.host)
        # Generate md5 hash of password. The C7 appears to use the first 15
        # characters of the password only, so we truncate to remove additional
        # characters from being hashed.
        password = hashlib.md5(self.password.encode('utf')[:15]).hexdigest()
        credentials = '{}:{}'.format(self.username, password).encode('utf')
        # Encode the credentials to be sent as a cookie.
        self.credentials = base64.b64encode(credentials).decode('utf')
        # Create the authorization cookie.
        cookie = 'Authorization=Basic {}'.format(self.credentials)
        # NOTE(review): no timeout on this request, unlike the other
        # scanners in this file -- confirm whether that is intentional.
        response = requests.get(url, headers={COOKIE: cookie})
        try:
            # The login page redirects to /<token>/userRpm/Index.htm;
            # capture that token for subsequent admin-page URLs.
            result = re.search(r'window.parent.location.href = '
                               r'"https?:\/\/.*\/(.*)\/userRpm\/Index.htm";',
                               response.text)
            if not result:
                return False
            self.token = result.group(1)
            return True
        except ValueError:
            # NOTE(review): neither re.search nor group() raises ValueError,
            # so this handler looks unreachable -- confirm before removing.
            _LOGGER.error("Couldn't fetch auth tokens")
            return False
def _update_info(self):
"""Ensure the information from the TP-Link router is up to date.
Return boolean if scanning successful.
"""
if (self.credentials == '') or (self.token == ''):
self._get_auth_tokens()
_LOGGER.info("Loading wireless clients...")
mac_results = []
# Check both the 2.4GHz and 5GHz client list URLs
for clients_url in ('WlanStationRpm.htm', 'WlanStationRpm_5g.htm'):
url = 'http://{}/{}/userRpm/{}' \
.format(self.host, self.token, clients_url)
referer = 'http://{}'.format(self.host)
cookie = 'Authorization=Basic {}'.format(self.credentials)
page = requests.get(url, headers={
COOKIE: cookie,
REFERER: referer,
})
mac_results.extend(self.parse_macs.findall(page.text))
if not mac_results:
return False
self.last_results = [mac.replace("-", ":") for mac in mac_results]
return True
class Tplink5DeviceScanner(Tplink1DeviceScanner):
"""This class queries a TP-Link EAP-225 AP with newer TP-Link FW."""
def scan_devices(self):
"""Scan for new devices and return a list with found MAC IDs."""
self._update_info()
return self.last_results.keys()
def get_device_name(self, device):
"""Get firmware doesn't save the name of the wireless device."""
return None
def _update_info(self):
"""Ensure the information from the TP-Link AP is up to date.
Return boolean if scanning successful.
"""
_LOGGER.info("Loading wireless clients...")
base_url = 'http://{}'.format(self.host)
header = {
USER_AGENT:
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.12;"
" rv:53.0) Gecko/20100101 Firefox/53.0",
ACCEPT: "application/json, text/javascript, */*; q=0.01",
ACCEPT_LANGUAGE: "Accept-Language: en-US,en;q=0.5",
ACCEPT_ENCODING: "gzip, deflate",
CONTENT_TYPE: "application/x-www-form-urlencoded; charset=UTF-8",
HTTP_HEADER_X_REQUESTED_WITH: "XMLHttpRequest",
REFERER: "http://{}/".format(self.host),
CONNECTION: KEEP_ALIVE,
PRAGMA: HTTP_HEADER_NO_CACHE,
CACHE_CONTROL: HTTP_HEADER_NO_CACHE,
}
password_md5 = hashlib.md5(
self.password.encode('utf')).hexdigest().upper()
# Create a session to handle cookie easier
session = requests.session()
session.get(base_url, headers=header)
login_data = {"username": self.username, "password": password_md5}
session.post(base_url, login_data, headers=header)
# A timestamp is required to be sent as get parameter
timestamp = int(datetime.now().timestamp() * 1e3)
client_list_url = '{}/data/monitor.client.client.json'.format(
base_url)
get_params = {
'operation': 'load',
'_': timestamp,
}
response = session.get(
client_list_url, headers=header, params=get_params)
session.close()
try:
list_of_devices = response.json()
except ValueError:
_LOGGER.error("AP didn't respond with JSON. "
"Check if credentials are correct")
return False
if list_of_devices:
self.last_results = {
device['MAC'].replace('-', ':'): device['DeviceName']
for device in list_of_devices['data']
}
return True
return False<|fim▁end|> | "An unknown error happened while fetching data")
return False |
<|file_name|>16270000.jsonp.js<|end_file_name|><|fim▁begin|><|fim▁hole|>jsonp({"cep":"16270000","cidade":"Glic\u00e9rio","uf":"SP","estado":"S\u00e3o Paulo"});<|fim▁end|> | |
<|file_name|>LabeledSelect.tsx<|end_file_name|><|fim▁begin|>//
// LESERKRITIKK v2 (aka Reader Critics)
// Copyright (C) 2017 DB Medialab/Aller Media AS, Oslo, Norway
// https://github.com/dbmedialab/reader-critics/
//
// This program is free software: you can redistribute it and/or modify it under
// the terms of the GNU General Public License as published by the Free Software
// Foundation, either version 3 of the License, or (at your option) any later
// version.
//
// This program is distributed in the hope that it will be useful, but WITHOUT
// ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
// FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along with
// this program. If not, see <http://www.gnu.org/licenses/>.
//
<|fim▁hole|> onChange: (e) => void;
value: string;
label: string | JSX.Element;
ID: string;
options: IOption[];
name: string;
chosen?: boolean;
defaultOptionText?: string;
disabled?: boolean;
}
export interface IOption {
	// Value submitted when this option is selected.
	value: string;
	// Human-readable text shown in the dropdown.
	name: string;
}
export class LabeledSelect extends React.Component <ILabeledSelect, any> {
	// Plain pass-through constructor; the component keeps no local state.
	constructor (props: ILabeledSelect) {
		super(props);
	}
createSelectOptions (options: IOption[]) {
return options.map((option, index) => (
<option value={option.value} key={option.value}>{option.name}</option>
));
}
	render () {
		const {
			ID,
			value,
			onChange,
			name,
			label,
			options,
			// When nothing is pre-chosen, a placeholder option is prepended.
			chosen = true,
			defaultOptionText = '-- None --',
			disabled,
		} = this.props;
		// Controlled <select> wrapped in the grid row used across admin forms.
		return (
			<div className="row">
				<Label label={label} ID={ID} />
				<div className="small-12 columns">
					<select
						id={`${ID}-input`}
						value={value || ''}
						className="small-12 large-12"
						onChange={onChange}
						name={name}
						disabled={ disabled }
					>
						{defaultOptionText && !chosen && <option value="">{defaultOptionText}</option> }
						{options.length && this.createSelectOptions(options)}
					</select>
				</div>
			</div>
		);
	}
}<|fim▁end|> | import * as React from 'react';
import { Label } from 'admin/components/website/additionalComponents/Label';
export interface ILabeledSelect { |
<|file_name|>websocket_loader.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use crate::connector::create_ssl_connector_builder;
use crate::cookie::Cookie;
use crate::fetch::methods::should_be_blocked_due_to_bad_port;
use crate::hosts::replace_host;
use crate::http_loader::HttpState;
use embedder_traits::resources::{self, Resource};
use headers_ext::Host;
use http::header::{self, HeaderMap, HeaderName, HeaderValue};
use http::uri::Authority;<|fim▁hole|>use openssl::ssl::SslStream;
use servo_config::opts;
use servo_url::ServoUrl;
use std::fs;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread;
use url::Url;
use ws::util::TcpStream;
use ws::{
CloseCode, Factory, Handler, Handshake, Message, Request, Response as WsResponse, Sender,
WebSocket,
};
use ws::{Error as WebSocketError, ErrorKind as WebSocketErrorKind, Result as WebSocketResult};
/// A client for connecting to a websocket server
#[derive(Clone)]
struct Client<'a> {
    // Serialized origin sent in the handshake's Origin header.
    origin: &'a str,
    // Host header value built from the original request URL.
    host: &'a Host,
    // Subprotocols offered by the caller.
    protocols: &'a [String],
    // Shared HTTP state; used here for the cookie jar.
    http_state: &'a Arc<HttpState>,
    // The original ws(s):// URL being connected to.
    resource_url: &'a ServoUrl,
    // Channel used to notify the DOM side of network events.
    event_sender: &'a IpcSender<WebSocketNetworkEvent>,
    // Subprotocol selected by the server, once negotiated.
    protocol_in_use: Option<String>,
}
impl<'a> Factory for Client<'a> {
    type Handler = Self;

    /// ws-rs calls this once per connection; the client itself acts as
    /// the handler, so a clone is handed out.
    fn connection_made(&mut self, _: Sender) -> Self::Handler {
        self.clone()
    }

    /// A dropped connection is surfaced to the DOM as a failed socket.
    fn connection_lost(&mut self, _: Self::Handler) {
        let _ = self.event_sender.send(WebSocketNetworkEvent::Fail);
    }
}
impl<'a> Handler for Client<'a> {
    /// Build the opening handshake request: Origin/Host headers, the
    /// client-requested subprotocols, and any cookies stored for the URL.
    fn build_request(&mut self, url: &Url) -> WebSocketResult<Request> {
        let mut req = Request::from_url(url)?;
        req.headers_mut()
            .push(("Origin".to_string(), self.origin.as_bytes().to_owned()));
        req.headers_mut().push((
            "Host".to_string(),
            format!("{}", self.host).as_bytes().to_owned(),
        ));
        for protocol in self.protocols {
            req.add_protocol(protocol);
        }
        let mut cookie_jar = self.http_state.cookie_jar.write().unwrap();
        if let Some(cookie_list) = cookie_jar.cookies_for_url(self.resource_url, CookieSource::HTTP)
        {
            req.headers_mut()
                .push(("Cookie".into(), cookie_list.as_bytes().to_owned()))
        }
        Ok(req)
    }
    /// Handshake complete: persist any Set-Cookie headers into the shared
    /// jar and notify the DOM side that the connection is established.
    fn on_open(&mut self, shake: Handshake) -> WebSocketResult<()> {
        let mut headers = HeaderMap::new();
        // Convert ws-rs's raw (name, bytes) pairs into a typed HeaderMap.
        for &(ref name, ref value) in shake.response.headers().iter() {
            let name = HeaderName::from_bytes(name.as_bytes()).unwrap();
            let value = HeaderValue::from_bytes(&value).unwrap();
            headers.insert(name, value);
        }
        let mut jar = self.http_state.cookie_jar.write().unwrap();
        // TODO(eijebong): Replace this once typed headers settled on a cookie impl
        for cookie in headers.get_all(header::SET_COOKIE) {
            if let Ok(s) = cookie.to_str() {
                if let Some(cookie) =
                    Cookie::from_cookie_string(s.into(), self.resource_url, CookieSource::HTTP)
                {
                    jar.push(cookie, self.resource_url, CookieSource::HTTP);
                }
            }
        }
        let _ = self
            .event_sender
            .send(WebSocketNetworkEvent::ConnectionEstablished {
                protocol_in_use: self.protocol_in_use.clone(),
            });
        Ok(())
    }
    /// Forward an incoming frame (text or binary) to the DOM side.
    fn on_message(&mut self, message: Message) -> WebSocketResult<()> {
        let message = match message {
            Message::Text(message) => MessageData::Text(message),
            Message::Binary(message) => MessageData::Binary(message),
        };
        let _ = self
            .event_sender
            .send(WebSocketNetworkEvent::MessageReceived(message));
        Ok(())
    }
    /// Any transport error fails the WebSocket on the DOM side.
    fn on_error(&mut self, err: WebSocketError) {
        debug!("Error in WebSocket communication: {:?}", err);
        let _ = self.event_sender.send(WebSocketNetworkEvent::Fail);
    }
    /// Validate the server-selected subprotocol against the list the
    /// client offered; an unknown protocol is a handshake failure.
    fn on_response(&mut self, res: &WsResponse) -> WebSocketResult<()> {
        let protocol_in_use = res.protocol()?;
        if let Some(protocol_name) = protocol_in_use {
            if !self.protocols.is_empty() && !self.protocols.iter().any(|p| protocol_name == (*p)) {
                let error = WebSocketError::new(
                    WebSocketErrorKind::Protocol,
                    "Protocol in Use not in client-supplied protocol list",
                );
                return Err(error);
            }
            self.protocol_in_use = Some(protocol_name.into());
        }
        Ok(())
    }
    /// Relay the close code and reason to the DOM side.
    fn on_close(&mut self, code: CloseCode, reason: &str) {
        debug!("Connection closing due to ({:?}) {}", code, reason);
        let _ = self.event_sender.send(WebSocketNetworkEvent::Close(
            Some(code.into()),
            reason.to_owned(),
        ));
    }
    /// Wrap the TCP stream in TLS for wss:, validating against either the
    /// user-supplied certificate file or the bundled SSL certificates.
    fn upgrade_ssl_client(
        &mut self,
        stream: TcpStream,
        url: &Url,
    ) -> WebSocketResult<SslStream<TcpStream>> {
        let certs = match opts::get().certificate_path {
            Some(ref path) => fs::read_to_string(path).expect("Couldn't not find certificate file"),
            None => resources::read_string(Resource::SSLCertificates),
        };
        // TLS SNI/verification needs a domain name, not an IP.
        let domain = self
            .resource_url
            .as_url()
            .domain()
            .ok_or(WebSocketError::new(
                WebSocketErrorKind::Protocol,
                format!("Unable to parse domain from {}. Needed for SSL.", url),
            ))?;
        let connector = create_ssl_connector_builder(&certs).build();
        connector
            .connect(domain, stream)
            .map_err(WebSocketError::from)
    }
}
pub fn init(
req_init: RequestInit,
resource_event_sender: IpcSender<WebSocketNetworkEvent>,
dom_action_receiver: IpcReceiver<WebSocketDomAction>,
http_state: Arc<HttpState>,
) {
thread::Builder::new()
.name(format!("WebSocket connection to {}", req_init.url))
.spawn(move || {
let protocols = match req_init.mode {
RequestMode::WebSocket { protocols } => protocols.clone(),
_ => panic!("Received a RequestInit with a non-websocket mode in websocket_loader"),
};
let scheme = req_init.url.scheme();
let mut req_url = req_init.url.clone();
if scheme == "ws" {
req_url.as_mut_url().set_scheme("http").unwrap();
} else if scheme == "wss" {
req_url.as_mut_url().set_scheme("https").unwrap();
}
if should_be_blocked_due_to_bad_port(&req_url) {
debug!("Failed to establish a WebSocket connection: port blocked");
let _ = resource_event_sender.send(WebSocketNetworkEvent::Fail);
return;
}
let host = replace_host(req_init.url.host_str().unwrap());
let mut net_url = req_init.url.clone().into_url();
net_url.set_host(Some(&host)).unwrap();
let host = Host::from(
format!(
"{}{}",
req_init.url.host_str().unwrap(),
req_init
.url
.port_or_known_default()
.map(|v| format!(":{}", v))
.unwrap_or("".into())
)
.parse::<Authority>()
.unwrap(),
);
let client = Client {
origin: &req_init.origin.ascii_serialization(),
host: &host,
protocols: &protocols,
http_state: &http_state,
resource_url: &req_init.url,
event_sender: &resource_event_sender,
protocol_in_use: None,
};
let mut ws = WebSocket::new(client).unwrap();
if let Err(e) = ws.connect(net_url) {
debug!("Failed to establish a WebSocket connection: {:?}", e);
return;
};
let ws_sender = ws.broadcaster();
let initiated_close = Arc::new(AtomicBool::new(false));
thread::spawn(move || {
while let Ok(dom_action) = dom_action_receiver.recv() {
match dom_action {
WebSocketDomAction::SendMessage(MessageData::Text(data)) => {
ws_sender.send(Message::text(data)).unwrap();
},
WebSocketDomAction::SendMessage(MessageData::Binary(data)) => {
ws_sender.send(Message::binary(data)).unwrap();
},
WebSocketDomAction::Close(code, reason) => {
if !initiated_close.fetch_or(true, Ordering::SeqCst) {
match code {
Some(code) => ws_sender
.close_with_reason(
code.into(),
reason.unwrap_or("".to_owned()),
)
.unwrap(),
None => ws_sender.close(CloseCode::Status).unwrap(),
};
}
},
}
}
});
if let Err(e) = ws.run() {
debug!("Failed to run WebSocket: {:?}", e);
let _ = resource_event_sender.send(WebSocketNetworkEvent::Fail);
};
})
.expect("Thread spawning failed");
}<|fim▁end|> | use ipc_channel::ipc::{IpcReceiver, IpcSender};
use net_traits::request::{RequestInit, RequestMode};
use net_traits::{CookieSource, MessageData};
use net_traits::{WebSocketDomAction, WebSocketNetworkEvent}; |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![feature(plugin_registrar, quote, rustc_private, box_patterns)]
extern crate rustc_plugin;
#[macro_use] pub extern crate syntax;
extern crate rustc_errors as errors;
extern crate peg;
use syntax::ast;
use syntax::codemap;
use syntax::codemap::FileName;
use syntax::ext::base::{ExtCtxt, MacResult, MacEager, DummyResult};
use syntax::tokenstream::TokenTree;
use syntax::parse;
use syntax::parse::token;
use syntax::symbol::Symbol;
use syntax::fold::Folder;
use syntax::util::small_vector::SmallVector;
use rustc_plugin::Registry;
use std::io::Read;
use std::fs::File;
use std::iter;
#[plugin_registrar]
// Entry point invoked by rustc when the plugin is loaded: registers the
// `peg!` (inline grammar) and `peg_file!` (external grammar file) macros.
pub fn plugin_registrar(reg: &mut Registry) {
    reg.register_syntax_extension(
        Symbol::intern("peg"),
        syntax::ext::base::IdentTT(Box::new(expand_peg_str), None, false));
    reg.register_syntax_extension(
        Symbol::intern("peg_file"),
        syntax::ext::base::IdentTT(Box::new(expand_peg_file), None, false));
}
/// Expand `peg! ident("…grammar…")`: compile the inline grammar string
/// into a module named `ident`.
fn expand_peg_str<'s>(cx: &'s mut ExtCtxt, sp: codemap::Span, ident: ast::Ident, tts: Vec<TokenTree>) -> Box<MacResult + 's> {
    let (source, span) = match parse_arg(cx, &tts) {
        Some((source, span)) => (source, span),
        None => return DummyResult::any(sp),
    };
    let loc = cx.codemap().lookup_char_pos(span.lo());
    let fname = loc.file.name.to_string();
    // Make PEG line numbers match source line numbers by prepending one
    // newline for every line preceding the string literal.
    let source = iter::repeat('\n').take(loc.line - 1).collect::<String>() + &source;
    expand_peg(cx, sp, ident, fname, source)
}
/// Expand `peg_file! ident("relative/path.peg")`: load a grammar file
/// located relative to the including source file and compile it.
fn expand_peg_file<'s>(cx: &'s mut ExtCtxt, sp: codemap::Span, ident: ast::Ident, tts: Vec<TokenTree>) -> Box<MacResult + 's> {
    let fname = match parse_arg(cx, &tts) {
        Some((fname, _)) => fname,
        None => return DummyResult::any(sp),
    };
    // Resolve the grammar path relative to the file containing the macro
    // call; non-file sources (e.g. macro expansions) cannot be resolved.
    let path = match cx.codemap().span_to_filename(sp) {
        FileName::Real(path) => path.parent().unwrap().join(&fname),
        other => {
            cx.span_err(sp, &format!("cannot resolve relative path in non-file source `{}`", other));
            return DummyResult::any(sp)
        },
    };
    let mut source = String::new();
    if let Err(e) = File::open(&path).map(|mut f| f.read_to_string(&mut source)) {
        cx.span_err(sp, &e.to_string());
        return DummyResult::any(sp);
    }
    // Register the grammar file with the codemap -- presumably so the
    // compiler tracks it as an input; TODO confirm.
    cx.codemap().new_filemap(path.into(), "".to_string());
    expand_peg(cx, sp, ident, fname.to_owned(), source)
}
fn expand_peg(cx: &mut ExtCtxt, sp: codemap::Span, ident: ast::Ident, filename: String, source: String) -> Box<MacResult + 'static> {
let code = match peg::compile(filename, source) {
Ok(code) => code,
Err(()) => {
cx.span_err(sp, "Errors above in rust-peg grammar");
return DummyResult::any(sp)
}
};
let mut p = parse::new_parser_from_source_str(&cx.parse_sess, FileName::Custom("peg expansion".into()), code);
let tts = panictry!(p.parse_all_token_trees());
let module = quote_item! { cx,
mod $ident {
$tts
}
}.unwrap();
<|fim▁hole|>}
fn parse_arg(cx: &mut ExtCtxt, tts: &[TokenTree]) -> Option<(String, codemap::Span)> {
use syntax::print::pprust;
let mut parser = parse::new_parser_from_tts(cx.parse_sess(), tts.to_vec());
// The `expand_expr` method is called so that any macro calls in the
// parsed expression are expanded.
let arg = cx.expander().fold_expr(panictry!(parser.parse_expr()));
match arg.node {
ast::ExprKind::Lit(ref spanned) => {
match spanned.node {
ast::LitKind::Str(ref n, _) => {
if !parser.eat(&token::Eof) {
cx.span_err(parser.span,
"expected only one string literal");
return None
}
return Some((n.to_string(), spanned.span))
}
_ => {}
}
}
_ => {}
}
let err = format!("expected string literal but got `{}`",
pprust::expr_to_string(&*arg));
cx.span_err(parser.span, &err);
None
}<|fim▁end|> | MacEager::items(SmallVector::one(module)) |
<|file_name|>alex.py<|end_file_name|><|fim▁begin|>import chainer
import chainer.functions as F
import chainer.links as L
<|fim▁hole|>class Alex(chainer.Chain):
"""Single-GPU AlexNet without partition toward the channel axis."""
def __init__(self, n_class=1000, threshold=0.5, pt_func=None):
self.threshold = threshold
self.pt_func = pt_func
self.n_class = n_class
super(Alex, self).__init__()
with self.init_scope():
self.conv1 = L.Convolution2D(3, 96, 11, stride=4, pad=4)
self.bn1 = L.BatchNormalization(96)
self.conv2 = L.Convolution2D(96, 256, 5, stride=1, pad=1)
self.bn2 = L.BatchNormalization(256)
self.conv3 = L.Convolution2D(256, 384, 3, stride=1, pad=1)
self.conv4 = L.Convolution2D(384, 384, 3, stride=1, pad=1)
self.conv5 = L.Convolution2D(384, 256, 3, stride=1, pad=1)
self.bn5 = L.BatchNormalization(256)
self.fc6 = L.Linear(33280, 4096)
self.fc7 = L.Linear(4096, 4096)
self.fc8 = L.Linear(4096, 2*n_class)
def __call__(self, x, t=None):
n_batch = len(x)
assert n_batch == len(t)
h = F.relu(self.bn1(self.conv1(x)))
h = F.max_pooling_2d(h, 3, stride=2)
h = F.relu(self.bn2(self.conv2(h)))
h = F.max_pooling_2d(h, 3, stride=2)
h = F.relu(self.conv3(h))
h = F.relu(self.conv4(h))
h = F.relu(self.bn5(self.conv5(h)))
h = F.max_pooling_2d(h, 3, stride=3)
if not self.train_conv:
h.unchain_backward()
h = F.dropout(F.relu(self.fc6(h)), ratio=0.5)
h = F.dropout(F.relu(self.fc7(h)), ratio=0.5)
h = self.fc8(h)
h = h.reshape((-1, 2, self.n_class))
h_prob = F.softmax(h, axis=1)[:, 1, :]
self.h_prob = h_prob
if t is None:
assert not chainer.config.train
return
half_n = self.n_class / 2
is_singlearm_mask = t[:, half_n] == -1
# loss for single arm
h_single = h[is_singlearm_mask][:, :, :half_n]
t_single = t[is_singlearm_mask][:, :half_n]
# Requires: https://github.com/chainer/chainer/pull/3310
if h_single.data.shape[0] > 0:
loss_single = F.softmax_cross_entropy(
h_single, t_single, normalize=False)
else:
loss_single = None
# loss for dual arm
h_dual = h[~is_singlearm_mask][:, :, half_n:]
t_dual = t[~is_singlearm_mask][:, half_n:]
# Requires: https://github.com/chainer/chainer/pull/3310
if h_dual.data.shape[0] > 0:
loss_dual = F.softmax_cross_entropy(
h_dual, t_dual, normalize=False)
else:
loss_dual = None
if loss_single is None:
self.loss = loss_dual
elif loss_dual is None:
self.loss = loss_single
else:
self.loss = loss_single + loss_dual
# calculate acc on CPU
h_prob_single = h_prob[is_singlearm_mask][:, :half_n]
h_prob_single = chainer.cuda.to_cpu(h_prob_single.data)
t_single = chainer.cuda.to_cpu(t_single)
h_prob_dual = h_prob[~is_singlearm_mask][:, half_n:]
h_prob_dual = chainer.cuda.to_cpu(h_prob_dual.data)
t_dual = chainer.cuda.to_cpu(t_dual)
label_single = (h_prob_single > self.threshold).astype(self.xp.int32)
label_dual = (h_prob_dual > self.threshold).astype(self.xp.int32)
acc_single = (t_single == label_single).all(axis=1)
acc_single = acc_single.astype(self.xp.int32).flatten()
acc_dual = (t_dual == label_dual).all(axis=1)
acc_dual = acc_dual.astype(self.xp.int32).flatten()
self.acc = self.xp.sum(acc_single) + self.xp.sum(acc_dual)
self.acc = self.acc / float(len(acc_single) + len(acc_dual))
chainer.reporter.report({
'loss': self.loss,
'acc': self.acc,
}, self)
if chainer.config.train:
return self.loss<|fim▁end|> | |
<|file_name|>profile.ts<|end_file_name|><|fim▁begin|>import {Observable} from 'rxjs/Observable';
import {
GraphApiObject,
GraphApiObjectType,
DUMMY_GRAPH_API_OBJECT_TYPE
} from './graph-api-object';
import {ConfService} from './conf.service';
import {GraphApiResponse} from './graph-api-response';
import {Post} from './post';
import {CoverPhoto} from './cover-photo';
/*
* Classes related to Facebook profiles.
*
* Profiles are Users, Pages, Groups, Events and Applications.
*/
/*
* A Facebook profile as returned by the Facebook API.
*/
export interface ProfileType extends GraphApiObjectType {
	// Display name of the profile.
	name: string;
}
/*
* A Facebook profile as used internally.
*/
export class Profile extends GraphApiObject {
constructor(kwargs: ProfileType) {
super(kwargs);
}
/*
* The description of this Profile.
*
* This is implemented by the subclasses.
*/
description?: string;<|fim▁hole|>
/*
* The CoverPhoto of this Profile.
*
* This is implemented by the subclasses.
*/
cover?: CoverPhoto;
protected get confService() {
return this.serviceService.confService;
}
/*
* Get the url to the icon for this Profile.
*/
get picture() {
return this.confService.fb.apiUrl + '/' + this.id + '/picture';
}
/*
* Get the Feed of Posts for this Profile.
*
* This is implemented by the subclasses.
*/
get feed(): Observable<GraphApiResponse<Post>> {
return null;
}
}
export interface Profile extends ProfileType {}
/*
* The simplest valid profile.
*
* This exists, so the Users and Pages can use it to build their dummy
* constants.
*/
export const DUMMY_PROFILE_TYPE: ProfileType = {
...DUMMY_GRAPH_API_OBJECT_TYPE,
name: ''
};<|fim▁end|> | |
<|file_name|>dbus.go<|end_file_name|><|fim▁begin|>package main
import (
"fmt"
"github.com/coreos/go-systemd/dbus"
)
// dbusConnectioner abstracts the single systemd D-Bus call used in this
// file, so restartNetworkD can be exercised with a fake connection.
type dbusConnectioner interface {
	RestartUnit(string, string, chan<- string) (int, error)
}
// newDBusConnection is a package-level factory variable (rather than a
// plain function) so tests can swap in a stub instead of dialing the
// real system bus.
var newDBusConnection = func() (dbusConnectioner, error) {
	return dbus.New()
}
func restartNetworkD(maxRetries int) error {
conn, err := newDBusConnection()
if err != nil {
return err
}
c := make(chan string)
var lastStatus string
for i := 0; i < maxRetries; i++ {
if _, err = conn.RestartUnit("systemd-networkd.service", "replace", c); err != nil {
return err
}
lastStatus = <-c
if lastStatus == "done" {<|fim▁hole|> }
return fmt.Errorf("Failed to restart dbus - last attemt's status was '%s'", lastStatus)
}<|fim▁end|> | return nil
} |
<|file_name|>agent3.py<|end_file_name|><|fim▁begin|>import random
from datetime import datetime
from multiprocessing import Pool
import numpy as np
from scipy.optimize import minimize
def worker_func(args):
    """Squared residual between J(r) and the TD-corrected target at (m, k).

    `args` is an (agent, m, k, r) tuple, packed this way so the function
    can be dispatched through multiprocessing's `pool.map`.
    """
    agent, m, k, r = args[0], args[1], args[2], args[3]
    current = agent.eval_func(m, k, r)
    baseline = agent.eval_func(m, k, agent.rt)
    correction = agent.temporal_diff_sum(m, k)
    return (current - baseline - correction) ** 2
def optimized_func_i_der(args):
"""
The derivative of the optimized function with respect to the
ith component of the vector r
"""
self = args[0]
r = args[1]
i = args[2]
result = 0
M = len(self.data)
for m in range(M):
Nm = self.data[m].shape[0] - 1
for k in range(Nm + 1):
result += ((self.eval_func(m, k, r) -
self.eval_func(m, k, self.rt) -
self.temporal_diff_sum(m, k)) * 2 *
self.eval_func_der(m, k, r, i))
<|fim▁hole|>
def worker_func_der(args):
    """Partial derivative of one squared-residual term w.r.t. r[i].

    `args` is an (agent, m, k, r, i) tuple, packed for `pool.map` dispatch.
    """
    agent, m, k, r, i = args[0], args[1], args[2], args[3], args[4]
    residual = (agent.eval_func(m, k, r) -
                agent.eval_func(m, k, agent.rt) -
                agent.temporal_diff_sum(m, k))
    # Chain rule: d/dr_i (residual^2) = residual * 2 * dJ/dr_i.
    return residual * 2 * agent.eval_func_der(m, k, r, i)
class Agent:
num_features = 22
    def __init__(self):
        self.lf = 0.2  # Learning factor lambda (TD decay)
        self.data = []  # The features' values for all the games
        self.rewards = []  # Reward values for moving from 1 state to the next
        self.rt = np.array([])  # Current weight vector r_t
        self.max_iter = 50  # Iteration cap passed to the BFGS optimizer
def set_learning_factor(self, learning_factor):
assert(learning_factor >= 0 and learning_factor <= 1)
self.lf = learning_factor
def set_rt(self, rt):
assert(len(rt) == self.num_features)
self.rt = rt
    def set_iter(self, max_iter):
        # Upper bound on optimizer iterations used by compute_next_rt().
        self.max_iter = max_iter
    def set_data(self, data):
        """Load game matrices: last column is the reward, the rest features."""
        self.data = []
        self.rewards = []
        for game in data:
            # Append a terminal all-zero row so state k+1 exists for the
            # final transition (its features and reward are implicitly 0).
            game = np.vstack((game, np.zeros(self.num_features + 1)))
            self.data.append(game[:, :-1])
            self.rewards.append(game[:, -1:])
def eval_func(self, m, k, r):
"""
The evaluation function value for the set of weights (vector) r
at the mth game and kth board state """
return np.dot(r, self.data[m][k])
    def eval_func_der(self, m, k, r, i):
        """
        Find the derivative of the evaluation function with respect
        to the ith component of the vector r
        """
        # J is linear in r, so dJ/dr_i is just the ith feature value.
        return self.data[m][k][i]
    def get_reward(self, m, s):
        """
        Get reward for moving from state s to state (s + 1)
        """
        # rewards[m] is a column vector; the trailing [0] unwraps the scalar.
        return self.rewards[m][s + 1][0]
    def temporal_diff(self, m, s):
        """
        The temporal difference value for state s to state (s+1) in the mth game
        """
        # d_s = reward + J(s+1; rt) - J(s; rt), evaluated at the current rt.
        return (self.get_reward(m, s) + self.eval_func(m, s + 1, self.rt) -
                self.eval_func(m, s, self.rt))
def temporal_diff_sum(self, m, k):
Nm = self.data[m].shape[0] - 1
result = 0
for s in range(k, Nm):
result += self.lf**(s - k) * self.temporal_diff(m, s)
return result
def optimized_func(self, r):
result = 0
M = len(self.data)
pool = Pool(processes=4)
for m in range(M):
Nm = self.data[m].shape[0] - 1
k_args = range(Nm + 1)
self_args = [self] * len(k_args)
m_args = [m] * len(k_args)
r_args = [r] * len(k_args)
result += sum(pool.map(worker_func,
zip(self_args, m_args, k_args, r_args)))
return result
    def optimized_func_i_der(self, r, i):
        """
        The derivative of the optimized function with respect to the
        ith component of the vector r
        """
        # NOTE(review): this duplicates the module-level
        # optimized_func_i_der helper used by the worker pool -- confirm
        # whether both are still needed.
        result = 0
        M = len(self.data)
        for m in range(M):
            Nm = self.data[m].shape[0] - 1
            for k in range(Nm + 1):
                # Chain rule on the squared residual at (m, k).
                result += ((self.eval_func(m, k, r) -
                            self.eval_func(m, k, self.rt) -
                            self.temporal_diff_sum(m, k)) * 2 *
                           self.eval_func_der(m, k, r, i))
        return result
def optimized_func_der(self, r):
p = Pool(processes=4)
self_args = [self] * len(r)
i_args = range(len(r))
r_args = [r] * len(r)
return np.array(p.map(optimized_func_i_der,
zip(self_args, r_args, i_args)))
    def callback(self, r):
        # Progress hook handed to scipy.optimize.minimize; logs one line
        # per completed iteration.
        print("Iteration %d completed at %s" %
              (self.cur_iter, datetime.now().strftime("%d/%m/%Y %H:%M:%S")))
        self.cur_iter += 1
def compute_next_rt(self):
print("Start computing at %s" %
(datetime.now().strftime("%d/%m/%Y %H:%M:%S")))
self.cur_iter = 1
r0 = np.array([random.randint(-10, 10)
for i in range(self.num_features)])
res = minimize(self.optimized_func, r0, method='BFGS',
jac=self.optimized_func_der,
options={'maxiter': self.max_iter, 'disp': True},
callback=self.callback)
return res.x<|fim▁end|> | return result
|
<|file_name|>controllers.js<|end_file_name|><|fim▁begin|>(function () {
'use strict';
/* Controllers */
var pollsControllers = angular.module('pollsControllers', []);
var author = 'Patrick Nicholls';
pollsControllers.controller('PollListCtrl', ['$scope', '$http',
function ($scope, $http) {
var resource = "/~pjn59/365/polls/index.php/services/polls";
$scope.polls = undefined;
$scope.author = author;
$http.get(resource)
.success(function(data){
$scope.polls = data;
})
.error(function(){
console.log("Couldn't get data");
});
}]);
pollsControllers.controller('PollDetailCtrl', ['$scope', '$routeParams', '$http',
function($scope, $routeParams, $http) {
$scope.pollId = $routeParams.pollId;
$scope.title = undefined;
$scope.quesiton = undefined;
$scope.choice = undefined;
var base_url = "/~pjn59/365/polls/index.php/services/";<|fim▁hole|> .success(function(data){
console.log(data);
var choices = [];
for (var i = 0; i < data.choices.length; i++) {
choices[i] = {
'choice': data.choices[i],
'votes' : parseInt(data.votes[i])
};
}
$scope.choices = choices;
$scope.question = data.question;
$scope.title = data.title;
console.log($scope.choices);
})
.error(function(){
console.log("Couldn't get data");
});
$scope.vote = function() {
//Increment database through PHP somehow
$scope.choices[$scope.choice-1].votes += 1;
$http.post(base_url + "votes/" + $scope.pollId + "/" + $scope.choice)
.success(function(data){
console.log("Vote succeeded")
})
.error(function(){
console.log("Vote unsuccessful");
});
};
$scope.reset = function() {
for (var i = 0; i < $scope.choices.length; i++) {
$scope.choices[i].votes = 0;
}
$http.delete(base_url + "votes/" + $scope.pollId)
.success(function(data){
console.log("Reset succeeded")
})
.error(function(){
console.log("Reset unsuccessful");
});
}
}]);
pollsControllers.controller('AboutCtrl', ['$scope',
function ($scope) {
$scope.author = author;
}]);
}())<|fim▁end|> |
$http.get(base_url + "polls/" + $scope.pollId) |
<|file_name|>runner.js<|end_file_name|><|fim▁begin|>"use strict";
var __extends = (this && this.__extends) || function (d, b) {
for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p];
function __() {
this.constructor = d;
}
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
var q = require('q');
var util = require('util');
var events_1 = require('events');
var helper = require('./util');
var logger2_1 = require('./logger2');
var driverProviders_1 = require('./driverProviders');
var plugins_1 = require('./plugins');
var protractor = require('./protractor'), webdriver = require('selenium-webdriver');
var logger = new logger2_1.Logger('runner');
/*
* Runner is responsible for starting the execution of a test run and triggering
* setup, teardown, managing config, etc through its various dependencies.
*
* The Protractor Runner is a node EventEmitter with the following events:
* - testPass
* - testFail
* - testsDone
*
* @param {Object} config
* @constructor
*/
var Runner = (function (_super) {
__extends(Runner, _super);
function Runner(config) {
_super.call(this);
/**
* Responsible for cleaning up test run and exiting the process.
* @private
* @param {int} Standard unix exit code
*/
this.exit_ = function (exitCode) {
return helper
.runFilenameOrFn_(this.config_.configDir, this.config_.onCleanUp, [exitCode])
.then(function (returned) {
if (typeof returned === 'number') {
return returned;
}
else {
return exitCode;
}
});
};
this.config_ = config;
if (config.v8Debug) {
// Call this private function instead of sending SIGUSR1 because Windows.
process['_debugProcess'](process.pid);
}
if (config.nodeDebug) {
process['_debugProcess'](process.pid);
var flow = webdriver.promise.controlFlow();
flow.execute(function () {
var nodedebug = require('child_process').fork('debug', ['localhost:5858']);
process.on('exit', function () {
nodedebug.kill('SIGTERM');
});
nodedebug.on('exit', function () {
process.exit(1);
});
}, 'start the node debugger');
flow.timeout(1000, 'waiting for debugger to attach');
}
if (config.capabilities && config.capabilities.seleniumAddress) {
config.seleniumAddress = config.capabilities.seleniumAddress;
}
this.loadDriverProvider_(config);
this.setTestPreparer(config.onPrepare);
}
/**
* Registrar for testPreparers - executed right before tests run.
* @public
* @param {string/Fn} filenameOrFn
*/
Runner.prototype.setTestPreparer = function (filenameOrFn) {
this.preparer_ = filenameOrFn;
};
/**
* Executor of testPreparer<|fim▁hole|> * @public
* @return {q.Promise} A promise that will resolve when the test preparers
* are finished.
*/
Runner.prototype.runTestPreparer = function () {
return helper.runFilenameOrFn_(this.config_.configDir, this.preparer_);
};
/**
* Grab driver provider based on type
* @private
*
* Priority
* 1) if directConnect is true, use that
* 2) if seleniumAddress is given, use that
* 3) if a Sauce Labs account is given, use that
* 4) if a seleniumServerJar is specified, use that
* 5) try to find the seleniumServerJar in protractor/selenium
*/
Runner.prototype.loadDriverProvider_ = function (config) {
this.config_ = config;
if (this.config_.directConnect) {
this.driverprovider_ = new driverProviders_1.Direct(this.config_);
}
else if (this.config_.seleniumAddress) {
if (this.config_.seleniumSessionId) {
this.driverprovider_ = new driverProviders_1.AttachSession(this.config_);
}
else {
this.driverprovider_ = new driverProviders_1.Hosted(this.config_);
}
}
else if (this.config_.browserstackUser && this.config_.browserstackKey) {
this.driverprovider_ = new driverProviders_1.BrowserStack(this.config_);
}
else if (this.config_.sauceUser && this.config_.sauceKey) {
this.driverprovider_ = new driverProviders_1.Sauce(this.config_);
}
else if (this.config_.seleniumServerJar) {
this.driverprovider_ = new driverProviders_1.Local(this.config_);
}
else if (this.config_.mockSelenium) {
this.driverprovider_ = new driverProviders_1.Mock(this.config_);
}
else {
this.driverprovider_ = new driverProviders_1.Local(this.config_);
}
};
/**
* Getter for the Runner config object
* @public
* @return {Object} config
*/
Runner.prototype.getConfig = function () {
return this.config_;
};
/**
* Get the control flow used by this runner.
* @return {Object} WebDriver control flow.
*/
Runner.prototype.controlFlow = function () {
return webdriver.promise.controlFlow();
};
/**
* Sets up convenience globals for test specs
* @private
*/
Runner.prototype.setupGlobals_ = function (browser_) {
// Keep $, $$, element, and by/By under the global protractor namespace
protractor.browser = browser_;
protractor.$ = browser_.$;
protractor.$$ = browser_.$$;
protractor.element = browser_.element;
protractor.by = protractor.By;
if (!this.config_.noGlobals) {
// Export protractor to the global namespace to be used in tests.
global.browser = browser_;
global.$ = browser_.$;
global.$$ = browser_.$$;
global.element = browser_.element;
global.by = global.By = protractor.By;
}
global.protractor = protractor;
if (!this.config_.skipSourceMapSupport) {
// Enable sourcemap support for stack traces.
require('source-map-support').install();
}
// Required by dart2js machinery.
// https://code.google.com/p/dart/source/browse/branches/bleeding_edge/dart/sdk/lib/js/dart2js/js_dart2js.dart?spec=svn32943&r=32943#487
global.DartObject = function (o) {
this.o = o;
};
};
/**
* Create a new driver from a driverProvider. Then set up a
* new protractor instance using this driver.
* This is used to set up the initial protractor instances and any
* future ones.
*
* @param {?Plugin} The plugin functions
*
* @return {Protractor} a protractor instance.
* @public
*/
Runner.prototype.createBrowser = function (plugins) {
var _this = this;
var config = this.config_;
var driver = this.driverprovider_.getNewDriver();
var browser_ = protractor.wrapDriver(driver, config.baseUrl, config.rootElement, config.untrackOutstandingTimeouts);
browser_.params = config.params;
if (plugins) {
browser_.plugins_ = plugins;
}
if (config.getPageTimeout) {
browser_.getPageTimeout = config.getPageTimeout;
}
if (config.allScriptsTimeout) {
browser_.allScriptsTimeout = config.allScriptsTimeout;
}
if (config.debuggerServerPort) {
browser_.debuggerServerPort_ = config.debuggerServerPort;
}
if (config.useAllAngular2AppRoots) {
browser_.useAllAngular2AppRoots();
}
browser_.ready =
driver.manage().timeouts().setScriptTimeout(config.allScriptsTimeout);
browser_.getProcessedConfig =
function () {
return webdriver.promise.fulfilled(config);
};
browser_.forkNewDriverInstance =
function (opt_useSameUrl, opt_copyMockModules) {
var newBrowser = _this.createBrowser(plugins);
if (opt_copyMockModules) {
newBrowser.mockModules_ = browser_.mockModules_;
}
if (opt_useSameUrl) {
browser_.driver.getCurrentUrl().then(function (url) {
newBrowser.get(url);
});
}
return newBrowser;
};
browser_.restart = function () {
// Note: because tests are not paused at this point, any async
// calls here are not guaranteed to complete before the tests resume.
_this.driverprovider_.quitDriver(browser_.driver);
// Copy mock modules, but do not navigate to previous URL.
browser_ = browser_.forkNewDriverInstance(false, true);
_this.setupGlobals_(browser_);
};
return browser_;
};
/**
* Final cleanup on exiting the runner.
*
* @return {q.Promise} A promise which resolves on finish.
* @private
*/
Runner.prototype.shutdown_ = function () {
return q.all(this.driverprovider_.getExistingDrivers().map(this.driverprovider_.quitDriver.bind(this.driverprovider_)));
};
/**
* The primary workhorse interface. Kicks off the test running process.
*
* @return {q.Promise} A promise which resolves to the exit code of the tests.
* @public
*/
Runner.prototype.run = function () {
var _this = this;
var testPassed;
var plugins = new plugins_1.Plugins(this.config_);
var pluginPostTestPromises;
var browser_;
var results;
if (this.config_.framework !== 'explorer' && !this.config_.specs.length) {
throw new Error('Spec patterns did not match any files.');
}
// 1) Setup environment
// noinspection JSValidateTypes
return this.driverprovider_.setupEnv()
.then(function () {
// 2) Create a browser and setup globals
browser_ = _this.createBrowser(plugins);
_this.setupGlobals_(browser_);
return browser_.ready.then(browser_.getSession)
.then(function (session) {
logger.debug('WebDriver session successfully started with capabilities ' +
util.inspect(session.getCapabilities()));
}, function (err) {
logger.error('Unable to start a WebDriver session.');
throw err;
});
// 3) Setup plugins
})
.then(function () {
return plugins.setup();
// 4) Execute test cases
})
.then(function () {
// Do the framework setup here so that jasmine and mocha globals are
// available to the onPrepare function.
var frameworkPath = '';
if (_this.config_.framework === 'jasmine' ||
_this.config_.framework === 'jasmine2') {
frameworkPath = './frameworks/jasmine.js';
}
else if (_this.config_.framework === 'mocha') {
frameworkPath = './frameworks/mocha.js';
}
else if (_this.config_.framework === 'debugprint') {
// Private framework. Do not use.
frameworkPath = './frameworks/debugprint.js';
}
else if (_this.config_.framework === 'explorer') {
// Private framework. Do not use.
frameworkPath = './frameworks/explorer.js';
}
else if (_this.config_.framework === 'custom') {
if (!_this.config_.frameworkPath) {
throw new Error('When config.framework is custom, ' +
'config.frameworkPath is required.');
}
frameworkPath = _this.config_.frameworkPath;
}
else {
throw new Error('config.framework (' + _this.config_.framework +
') is not a valid framework.');
}
if (_this.config_.restartBrowserBetweenTests) {
var restartDriver = function () {
browser_.restart();
};
_this.on('testPass', restartDriver);
_this.on('testFail', restartDriver);
}
// We need to save these promises to make sure they're run, but we
// don't
// want to delay starting the next test (because we can't, it's just
// an event emitter).
pluginPostTestPromises = [];
_this.on('testPass', function (testInfo) {
pluginPostTestPromises.push(plugins.postTest(true, testInfo));
});
_this.on('testFail', function (testInfo) {
pluginPostTestPromises.push(plugins.postTest(false, testInfo));
});
logger.debug('Running with spec files ' + _this.config_.specs);
return require(frameworkPath).run(_this, _this.config_.specs);
// 5) Wait for postTest plugins to finish
})
.then(function (testResults) {
results = testResults;
return q.all(pluginPostTestPromises);
// 6) Teardown plugins
})
.then(function () {
return plugins.teardown();
// 7) Teardown
})
.then(function () {
results = helper.joinTestLogs(results, plugins.getResults());
_this.emit('testsDone', results);
testPassed = results.failedCount === 0;
if (_this.driverprovider_.updateJob) {
return _this.driverprovider_.updateJob({'passed': testPassed})
.then(function () {
return _this.driverprovider_.teardownEnv();
});
}
else {
return _this.driverprovider_.teardownEnv();
}
// 8) Let plugins do final cleanup
})
.then(function () {
return plugins.postResults();
// 9) Exit process
})
.then(function () {
var exitCode = testPassed ? 0 : 1;
return _this.exit_(exitCode);
})
.fin(function () {
return _this.shutdown_();
});
};
return Runner;
}(events_1.EventEmitter));
exports.Runner = Runner;<|fim▁end|> | |
<|file_name|>admin_navigation.js<|end_file_name|><|fim▁begin|>/*
Copyright (C) 2016 PencilBlue, LLC
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
//dependencies
var util = require('./util.js');
module.exports = function AdminNavigationModule(pb) {
//PB dependencies
var SecurityService = pb.SecurityService;
var GLOBAL_SITE = pb.SiteService.GLOBAL_SITE;
/**
* Provides function to construct the structure needed to display the navigation
* in the Admin section of the application.
*
* @module Services
* @submodule Admin
* @class AdminNavigation
* @constructor
*/
function AdminNavigation() {}
/**
*
* @private
* @static
* @property additions
* @type {Array}
*/
AdminNavigation.additions = {};
/**
*
* @private
* @static
* @property childrenAdditions
* @type {Object}
*/
AdminNavigation.childrenAdditions = {};
/**
*
* @private
* @static
* @readonly
* @property MULTISITE_NAV
* @return {Array}
*/
var MULTISITE_NAV = Object.freeze({
id: 'site_entity',
title: 'admin.MANAGE_SITES',
icon: 'sitemap',
href: '/admin/sites',
access: SecurityService.ACCESS_ADMINISTRATOR
}
);
/**
*
* @private
* @static
* @readonly
* @property CONTENT_NAV
* @return {Array}
*/
var CONTENT_NAV = Object.freeze({
id: 'content',
title: 'generic.CONTENT',
icon: 'quote-right',
href: '#',
access: SecurityService.ACCESS_WRITER,
children: [
{
id: 'navigation',
title: 'generic.NAVIGATION',
icon: 'th-large',
href: '/admin/content/navigation',
access: SecurityService.ACCESS_EDITOR
},
{
id: 'topics',
title: 'admin.TOPICS',
icon: 'tags',
href: '/admin/content/topics',
access: SecurityService.ACCESS_EDITOR
},
{
id: 'pages',
title: 'admin.PAGES',
icon: 'file-o',
href: '/admin/content/pages',
access: SecurityService.ACCESS_EDITOR
},
{
id: 'articles',
title: 'admin.ARTICLES',
icon: 'files-o',
href: '/admin/content/articles',
access: SecurityService.ACCESS_WRITER
},
{
id: 'media',
title: 'admin.MEDIA',
icon: 'camera',
href: '/admin/content/media',
access: SecurityService.ACCESS_WRITER
},
{
id: 'comments',
title: 'generic.COMMENTS',
icon: 'comments',
href: '/admin/content/comments',
access: SecurityService.ACCESS_EDITOR
},
{
id: 'custom_objects',
title: 'admin.CUSTOM_OBJECTS',
icon: 'cubes',
href: '/admin/content/objects/types',
access: SecurityService.ACCESS_EDITOR
}
]
});
var PLUGINS_NAV = Object.freeze({
id: 'plugins',
title: 'admin.PLUGINS',
icon: 'puzzle-piece',
href: '#',
access: SecurityService.ACCESS_ADMINISTRATOR,
children: [
{
divider: true,
id: 'manage',
title: 'generic.MANAGE',
icon: 'upload',
href: '/admin/plugins'
},
{
id: 'themes',
title: 'admin.THEMES',
icon: 'magic',
href: '/admin/themes'
}
]
});
var USERS_NAV = Object.freeze({
id: 'users',
title: 'admin.USERS',
icon: 'users',
href: '#',
access: SecurityService.ACCESS_EDITOR,
children: [
{
id: 'manage',
title: 'generic.MANAGE',
icon: 'users',
href: '/admin/users',
access: SecurityService.ACCESS_EDITOR
},
{
id: 'permissions',
title: 'generic.PERMISSIONS',
icon: 'lock',
href: '/admin/users/permissions',
access: SecurityService.ACCESS_ADMINISTRATOR
}
]
});
var VIEW_SITE_NAV = Object.freeze({
id: 'view_site',
title: 'admin.VIEW_SITE',
icon: 'desktop',
href: '/',
access: SecurityService.ACCESS_WRITER
});
var LOGOUT_NAV = Object.freeze({
id: 'logout',
title: 'generic.LOGOUT',
icon: 'power-off',
href: '/actions/logout',
access: SecurityService.ACCESS_WRITER
});
function buildSettingsNavigation(site) {
var settingsNav = {
id: 'settings',
title: 'admin.SETTINGS',
icon: 'cogs',
href: '#',
access: SecurityService.ACCESS_ADMINISTRATOR,
children: [
{
id: 'site_settings',
title: 'admin.SITE_SETTINGS',
icon: 'cog',
href: '/admin/site_settings',
access: SecurityService.ACCESS_ADMINISTRATOR
},
{
id: 'content_settings',
title: 'admin.CONTENT',
icon: 'quote-right',
href: '/admin/site_settings/content',
access: SecurityService.ACCESS_ADMINISTRATOR
},
{
id: 'email_settings',
title: 'users.EMAIL',
icon: 'envelope',
href: '/admin/site_settings/email',
access: SecurityService.ACCESS_ADMINISTRATOR
}
]
};
if (pb.SiteService.isGlobal(site)) {
settingsNav.children.push({
id: 'library_settings',
title: 'site_settings.LIBRARIES',
icon: 'book',
href: '/admin/site_settings/libraries',
access: SecurityService.ACCESS_ADMINISTRATOR
});
}
return Object.freeze(settingsNav);
}
function getDefaultNavigation(site) {
return util.clone([CONTENT_NAV, PLUGINS_NAV, USERS_NAV, buildSettingsNavigation(site), VIEW_SITE_NAV, LOGOUT_NAV]);
}
function getMultiSiteNavigation() {
return util.clone([MULTISITE_NAV]);
}
function getGlobalScopeNavigation(site) {
return util.clone([PLUGINS_NAV, USERS_NAV, buildSettingsNavigation(site), LOGOUT_NAV]);
}
/**
*
* @private
* @static
* @method getAdditions
* @return {Array}
*/
function getAdditions(site) {
return getAdditionsInScope(AdminNavigation.additions, site);
}
/**
*
* @private
* @static
* @method getChildrenAdditions
* @return {Object}
*/
function getChildrenAdditions(site) {
return getAdditionsInScope(AdminNavigation.childrenAdditions, site);
}
/**
* @private
* @method getAdditionsInScope
* @param {Object} additions
* @param {String} site
*/
function getAdditionsInScope(additions, site) {
if (additions[site]) {
return util.clone(additions[site]);
}
else if (additions[pb.SiteService.GLOBAL_SITE]) {
return util.clone(additions[pb.SiteService.GLOBAL_SITE]);
}
return util.clone(additions);
}
/**
*
* @private
* @static
* @method buildNavigation
* @return {Array}
*/
function buildNavigation(site) {
var i;
var navigation = [];
var additions = getAdditions(site);
var childrenAdditions = getChildrenAdditions(site);
if (pb.config.multisite.enabled) {
var multiSiteAdditions = getMultiSiteNavigation();
util.arrayPushAll(multiSiteAdditions, navigation);
}
if (pb.config.multisite.enabled && pb.SiteService.isGlobal(site)) {
// Don't include content or view site in the nav for multitenancy global scope.
util.arrayPushAll(getGlobalScopeNavigation(site), navigation);
}
else {
var defaultNavigation = getDefaultNavigation(site);
util.arrayPushAll(defaultNavigation, navigation);
}
util.arrayPushAll(additions, navigation);
//retrieve the nav items to iterate over
var ids = Object.keys(childrenAdditions);
if (ids.length === 0) {
return navigation;
}
//convert to hash to create quick lookup
var lookup = util.arrayToHash(navigation, function(navigation, i) {
return navigation[i].id;
});
//add additions
Object.keys(childrenAdditions).forEach(function(id) {
var children = childrenAdditions[id];
//find the nav that the children should be added to
var nav = lookup[id];
if (!nav) {
return;
}
if (!util.isArray(nav.children)) {
nav.children = [];
}
util.arrayPushAll(children, nav.children);
});
return navigation;
}
/**<|fim▁hole|> * @method localizeNavigation
* @param {Array} navigation
* @param {Localization} ls
* @return {Array}
*/
function localizeNavigation(navigation, ls) {
navigation.forEach(function(nav) {
nav.title = ls.g(nav.title);
if(util.isArray(nav.children)) {
nav.children = localizeNavigation(nav.children, ls);
}
});
return navigation;
}
/**
* @private
* @static
* @method isDuplicate
* @param {String} id
* @param {Array} navigation
* @return {boolean}
*/
function isDuplicate(id, navigation, site) {
if (!navigation) {
navigation = buildNavigation(site);
}
for (var i = 0; i < navigation.length; i++) {
var node = navigation[i];
if (node.id === id) {
return true;
}
if (node.children && isDuplicate(id, node.children, site)) {
return true;
}
}
return false;
}
function exists(id, site) {
var isGlobal = pb.SiteService.isGlobal(site);
var nav = buildNavigation(site);
return isDuplicate(id, nav) ||
(!isGlobal && isDuplicate(id, buildNavigation(pb.SiteService.GLOBAL_SITE)));
}
/**
* @private
* @static
* @method isDefaultNode
* @param {String} id
* @return {Boolean}
*/
function isDefaultNode(id, site) {
return isDuplicate(id, getDefaultNavigation(site));
}
/**
* Retrive the admin navigation hierarchy
* @static
* @method get
* @param {object} session
* @param {array} activeMenuItems Array of nav item names that are active
* @param {Object} ls Localization service
* @return {object} Admin navigation
*/
AdminNavigation.get = function (session, activeMenuItems, ls, site) {
var navigation = AdminNavigation.removeUnauthorized(
session,
buildNavigation(site),
activeMenuItems
);
return localizeNavigation(navigation, ls);
};
AdminNavigation.addChild = function(parentId, node) {
AdminNavigation.addChildToSite(parentId, node, pb.SiteService.GLOBAL_SITE);
}
/**
* Adds a new child node to an existing top level node
* @static
* @method addChildToSite
* @param {String} parentId
* @param {Object} node
* @param {String} site - site unique id
* @return {Boolean}
*/
AdminNavigation.addChildToSite = function (parentId, node, site) {
if (util.isNullOrUndefined(site)) {
site = GLOBAL_SITE;
}
if (exists(node.id, site)) {
return false;
}
var additionsMap;
if (!(site in AdminNavigation.childrenAdditions)) {
additionsMap = AdminNavigation.childrenAdditions[site] = {};
} else {
additionsMap = AdminNavigation.childrenAdditions[site];
}
if (!additionsMap[parentId]) {
additionsMap[parentId] = [];
}
additionsMap[parentId].push(node);
return true;
};
/**
* Adds a new top level node
* @static
* @method add
* @param {Object} node
* @param {String} [site='global']
* @return {Boolean}
*/
AdminNavigation.add = function(node, site) {
if (util.isNullOrUndefined(site)) {
site = GLOBAL_SITE;
}
if (exists(node.id, site)) {
return false;
}
if (!(site in AdminNavigation.additions)) {
AdminNavigation.additions[site] = [];
}
AdminNavigation.additions[site].push(node);
return true;
};
/**
* Adds a new top level node
* @static
* @method addToSite
* @param {Object} node
* @param {String} site
* @return {Boolean}
*/
AdminNavigation.addToSite = function (node, site) {
return AdminNavigation.add(node, site);
};
/**
* Remove a navigation node
* @static
* @method remove
* @param id
* @param {String} [site='global']
* @return {boolean}
*/
AdminNavigation.remove = function(id, site) {
if (util.isNullOrUndefined(site)) {
site = GLOBAL_SITE;
}
if (!isDuplicate(id, buildNavigation(site))) {
return false;
}
if (isDefaultNode(id)) {
pb.log.warn("Admin Navigation: Attempting to remove default Node %s", id);
return false;
}
function removeNode(id, navigation) {
for (var i = 0; i < navigation.length; i++) {
if (navigation[i].id === id) {
navigation.splice(i, 1);
return navigation;
}
if (navigation[i].children) {
navigation[i].children = removeNode(id, navigation[i].children);
}
}
return navigation;
}
AdminNavigation.additions[site] = removeNode(id, AdminNavigation.additions[site]);
var childAdditionsMap = AdminNavigation.childrenAdditions[site];
util.forEach(childAdditionsMap, function(value, key) {
if(key === id){
delete childAdditionsMap[key];
}else {
childAdditionsMap[key] = removeNode(id, value);
}
});
return true;
};
/**
* Remove a navigation node
* @static
* @method removeFromSite
* @param id
* @param {String} site
* @return {boolean}
*/
AdminNavigation.removeFromSite = function (id, site) {
return AdminNavigation.remove(id, site);
};
/**
* @static
* @method removeUnauthorized
* @param {Object} session
* @param {Array} adminNavigation
* @param {Array} activeItems
* @return {Array}
*/
AdminNavigation.removeUnauthorized = function (session, adminNavigation, activeItems) {
for (var i = 0; i < adminNavigation.length; i++) {
if (typeof adminNavigation[i].access !== 'undefined') {
if (!pb.security.isAuthorized(session, {admin_level: adminNavigation[i].access})) {
adminNavigation.splice(i, 1);
i--;
continue;
}
}
for (var o = 0; o < activeItems.length; o++) {
if (activeItems[o] === adminNavigation[i].id) {
adminNavigation[i].active = 'active';
break;
}
}
if (typeof adminNavigation[i].children !== 'undefined') {
if (adminNavigation[i].children.length > 0) {
adminNavigation[i].dropdown = 'dropdown';
for (var j = 0; j < adminNavigation[i].children.length; j++) {
if (typeof adminNavigation[i].children[j].access !== 'undefined') {
if (!pb.security.isAuthorized(session, {admin_level: adminNavigation[i].children[j].access})) {
adminNavigation[i].children.splice(j, 1);
j--;
continue;
}
}
for (var p = 0; p < activeItems.length; p++) {
if (activeItems[p] == adminNavigation[i].children[j].id) {
adminNavigation[i].children[j].active = 'active';
break;
}
}
}
}
}
}
return adminNavigation;
};
//exports
return AdminNavigation;
};<|fim▁end|> | * @private
* @static |
<|file_name|>app.po.ts<|end_file_name|><|fim▁begin|>import { browser, by, element } from 'protractor';
export class GphotoPage {
navigateTo() {
return browser.get('/');
}
getParagraphText() {
return element(by.css('app-root h1')).getText();
}<|fim▁hole|><|fim▁end|> | } |
<|file_name|>language.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# enzyme - Video metadata parser
# Copyright 2011-2012 Antoine Bertin <[email protected]>
# Copyright 2003-2006 Dirk Meyer <[email protected]>
#
# This file is part of enzyme.
#
# enzyme is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.<|fim▁hole|># GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with enzyme. If not, see <http://www.gnu.org/licenses/>.
import re
from six import string_types
__all__ = ['resolve']
def resolve(code):
"""
Transform the given (2- or 3-letter) language code to a human readable
language name. The return value is a 2-tuple containing the given
language code and the language name. If the language code cannot be
resolved, name will be 'Unknown (<code>)'.
"""
if not code:
return None, None
if not isinstance(code, string_types):
raise ValueError('Invalid language code specified by parser')
# Take up to 3 letters from the code.
code = re.split(r'[^a-z]', code.lower())[0][:3]
for spec in codes:
if code in spec[:-1]:
return code, spec[-1]
return code, u'Unknown (%r)' % code
# Parsed from http://www.loc.gov/standards/iso639-2/ISO-639-2_utf-8.txt
codes = (
('aar', 'aa', u'Afar'),
('abk', 'ab', u'Abkhazian'),
('ace', u'Achinese'),
('ach', u'Acoli'),
('ada', u'Adangme'),
('ady', u'Adyghe'),
('afa', u'Afro-Asiatic '),
('afh', u'Afrihili'),
('afr', 'af', u'Afrikaans'),
('ain', u'Ainu'),
('aka', 'ak', u'Akan'),
('akk', u'Akkadian'),
('alb', 'sq', u'Albanian'),
('ale', u'Aleut'),
('alg', u'Algonquian languages'),
('alt', u'Southern Altai'),
('amh', 'am', u'Amharic'),
('ang', u'English, Old '),
('anp', u'Angika'),
('apa', u'Apache languages'),
('ara', 'ar', u'Arabic'),
('arc', u'Official Aramaic '),
('arg', 'an', u'Aragonese'),
('arm', 'hy', u'Armenian'),
('arn', u'Mapudungun'),
('arp', u'Arapaho'),
('art', u'Artificial '),
('arw', u'Arawak'),
('asm', 'as', u'Assamese'),
('ast', u'Asturian'),
('ath', u'Athapascan languages'),
('aus', u'Australian languages'),
('ava', 'av', u'Avaric'),
('ave', 'ae', u'Avestan'),
('awa', u'Awadhi'),
('aym', 'ay', u'Aymara'),
('aze', 'az', u'Azerbaijani'),
('bad', u'Banda languages'),
('bai', u'Bamileke languages'),
('bak', 'ba', u'Bashkir'),
('bal', u'Baluchi'),
('bam', 'bm', u'Bambara'),
('ban', u'Balinese'),
('baq', 'eu', u'Basque'),
('bas', u'Basa'),
('bat', u'Baltic '),
('bej', u'Beja'),
('bel', 'be', u'Belarusian'),
('bem', u'Bemba'),
('ben', 'bn', u'Bengali'),
('ber', u'Berber '),
('bho', u'Bhojpuri'),
('bih', 'bh', u'Bihari'),
('bik', u'Bikol'),
('bin', u'Bini'),
('bis', 'bi', u'Bislama'),
('bla', u'Siksika'),
('bnt', u'Bantu '),
('bos', 'bs', u'Bosnian'),
('bra', u'Braj'),
('bre', 'br', u'Breton'),
('btk', u'Batak languages'),
('bua', u'Buriat'),
('bug', u'Buginese'),
('bul', 'bg', u'Bulgarian'),
('bur', 'my', u'Burmese'),
('byn', u'Blin'),
('cad', u'Caddo'),
('cai', u'Central American Indian '),
('car', u'Galibi Carib'),
('cat', 'ca', u'Catalan'),
('cau', u'Caucasian '),
('ceb', u'Cebuano'),
('cel', u'Celtic '),
('cha', 'ch', u'Chamorro'),
('chb', u'Chibcha'),
('che', 'ce', u'Chechen'),
('chg', u'Chagatai'),
('chi', 'zh', u'Chinese'),
('chk', u'Chuukese'),
('chm', u'Mari'),
('chn', u'Chinook jargon'),
('cho', u'Choctaw'),
('chp', u'Chipewyan'),
('chr', u'Cherokee'),
('chu', 'cu', u'Church Slavic'),
('chv', 'cv', u'Chuvash'),
('chy', u'Cheyenne'),
('cmc', u'Chamic languages'),
('cop', u'Coptic'),
('cor', 'kw', u'Cornish'),
('cos', 'co', u'Corsican'),
('cpe', u'Creoles and pidgins, English based '),
('cpf', u'Creoles and pidgins, French-based '),
('cpp', u'Creoles and pidgins, Portuguese-based '),
('cre', 'cr', u'Cree'),
('crh', u'Crimean Tatar'),
('crp', u'Creoles and pidgins '),
('csb', u'Kashubian'),
('cus', u'Cushitic '),
('cze', 'cs', u'Czech'),
('dak', u'Dakota'),
('dan', 'da', u'Danish'),
('dar', u'Dargwa'),
('day', u'Land Dayak languages'),
('del', u'Delaware'),
('den', u'Slave '),
('dgr', u'Dogrib'),
('din', u'Dinka'),
('div', 'dv', u'Divehi'),
('doi', u'Dogri'),
('dra', u'Dravidian '),
('dsb', u'Lower Sorbian'),
('dua', u'Duala'),
('dum', u'Dutch, Middle '),
('dut', 'nl', u'Dutch'),
('dyu', u'Dyula'),
('dzo', 'dz', u'Dzongkha'),
('efi', u'Efik'),
('egy', u'Egyptian '),
('eka', u'Ekajuk'),
('elx', u'Elamite'),
('eng', 'en', u'English'),
('enm', u'English, Middle '),
('epo', 'eo', u'Esperanto'),
('est', 'et', u'Estonian'),
('ewe', 'ee', u'Ewe'),
('ewo', u'Ewondo'),
('fan', u'Fang'),
('fao', 'fo', u'Faroese'),
('fat', u'Fanti'),
('fij', 'fj', u'Fijian'),
('fil', u'Filipino'),
('fin', 'fi', u'Finnish'),
('fiu', u'Finno-Ugrian '),
('fon', u'Fon'),
('fre', 'fr', u'French'),
('frm', u'French, Middle '),
('fro', u'French, Old '),
('frr', u'Northern Frisian'),
('frs', u'Eastern Frisian'),
('fry', 'fy', u'Western Frisian'),
('ful', 'ff', u'Fulah'),
('fur', u'Friulian'),
('gaa', u'Ga'),
('gay', u'Gayo'),
('gba', u'Gbaya'),
('gem', u'Germanic '),
('geo', 'ka', u'Georgian'),
('ger', 'de', u'German'),
('gez', u'Geez'),
('gil', u'Gilbertese'),
('gla', 'gd', u'Gaelic'),
('gle', 'ga', u'Irish'),
('glg', 'gl', u'Galician'),
('glv', 'gv', u'Manx'),
('gmh', u'German, Middle High '),
('goh', u'German, Old High '),
('gon', u'Gondi'),
('gor', u'Gorontalo'),
('got', u'Gothic'),
('grb', u'Grebo'),
('grc', u'Greek, Ancient '),
('gre', 'el', u'Greek, Modern '),
('grn', 'gn', u'Guarani'),
('gsw', u'Swiss German'),
('guj', 'gu', u'Gujarati'),
('gwi', u"Gwich'in"),
('hai', u'Haida'),
('hat', 'ht', u'Haitian'),
('hau', 'ha', u'Hausa'),
('haw', u'Hawaiian'),
('heb', 'he', u'Hebrew'),
('her', 'hz', u'Herero'),
('hil', u'Hiligaynon'),
('him', u'Himachali'),
('hin', 'hi', u'Hindi'),
('hit', u'Hittite'),
('hmn', u'Hmong'),
('hmo', 'ho', u'Hiri Motu'),
('hsb', u'Upper Sorbian'),
('hun', 'hu', u'Hungarian'),
('hup', u'Hupa'),
('iba', u'Iban'),
('ibo', 'ig', u'Igbo'),
('ice', 'is', u'Icelandic'),
('ido', 'io', u'Ido'),
('iii', 'ii', u'Sichuan Yi'),
('ijo', u'Ijo languages'),
('iku', 'iu', u'Inuktitut'),
('ile', 'ie', u'Interlingue'),
('ilo', u'Iloko'),
('ina', 'ia', u'Interlingua '),
('inc', u'Indic '),
('ind', 'id', u'Indonesian'),
('ine', u'Indo-European '),
('inh', u'Ingush'),
('ipk', 'ik', u'Inupiaq'),
('ira', u'Iranian '),
('iro', u'Iroquoian languages'),
('ita', 'it', u'Italian'),
('jav', 'jv', u'Javanese'),
('jbo', u'Lojban'),
('jpn', 'ja', u'Japanese'),
('jpr', u'Judeo-Persian'),
('jrb', u'Judeo-Arabic'),
('kaa', u'Kara-Kalpak'),
('kab', u'Kabyle'),
('kac', u'Kachin'),
('kal', 'kl', u'Kalaallisut'),
('kam', u'Kamba'),
('kan', 'kn', u'Kannada'),
('kar', u'Karen languages'),
('kas', 'ks', u'Kashmiri'),
('kau', 'kr', u'Kanuri'),
('kaw', u'Kawi'),
('kaz', 'kk', u'Kazakh'),
('kbd', u'Kabardian'),
('kha', u'Khasi'),
('khi', u'Khoisan '),
('khm', 'km', u'Central Khmer'),
('kho', u'Khotanese'),
('kik', 'ki', u'Kikuyu'),
('kin', 'rw', u'Kinyarwanda'),
('kir', 'ky', u'Kirghiz'),
('kmb', u'Kimbundu'),
('kok', u'Konkani'),
('kom', 'kv', u'Komi'),
('kon', 'kg', u'Kongo'),
('kor', 'ko', u'Korean'),
('kos', u'Kosraean'),
('kpe', u'Kpelle'),
('krc', u'Karachay-Balkar'),
('krl', u'Karelian'),
('kro', u'Kru languages'),
('kru', u'Kurukh'),
('kua', 'kj', u'Kuanyama'),
('kum', u'Kumyk'),
('kur', 'ku', u'Kurdish'),
('kut', u'Kutenai'),
('lad', u'Ladino'),
('lah', u'Lahnda'),
('lam', u'Lamba'),
('lao', 'lo', u'Lao'),
('lat', 'la', u'Latin'),
('lav', 'lv', u'Latvian'),
('lez', u'Lezghian'),
('lim', 'li', u'Limburgan'),
('lin', 'ln', u'Lingala'),
('lit', 'lt', u'Lithuanian'),
('lol', u'Mongo'),
('loz', u'Lozi'),
('ltz', 'lb', u'Luxembourgish'),
('lua', u'Luba-Lulua'),
('lub', 'lu', u'Luba-Katanga'),
('lug', 'lg', u'Ganda'),
('lui', u'Luiseno'),
('lun', u'Lunda'),
('luo', u'Luo '),
('lus', u'Lushai'),
('mac', 'mk', u'Macedonian'),
('mad', u'Madurese'),
('mag', u'Magahi'),
('mah', 'mh', u'Marshallese'),
('mai', u'Maithili'),
('mak', u'Makasar'),
('mal', 'ml', u'Malayalam'),
('man', u'Mandingo'),
('mao', 'mi', u'Maori'),
('map', u'Austronesian '),
('mar', 'mr', u'Marathi'),
('mas', u'Masai'),
('may', 'ms', u'Malay'),
('mdf', u'Moksha'),
('mdr', u'Mandar'),
('men', u'Mende'),
('mga', u'Irish, Middle '),
('mic', u"Mi'kmaq"),
('min', u'Minangkabau'),
('mis', u'Uncoded languages'),
('mkh', u'Mon-Khmer '),
('mlg', 'mg', u'Malagasy'),
('mlt', 'mt', u'Maltese'),
('mnc', u'Manchu'),
('mni', u'Manipuri'),
('mno', u'Manobo languages'),
('moh', u'Mohawk'),
('mol', 'mo', u'Moldavian'),
('mon', 'mn', u'Mongolian'),
('mos', u'Mossi'),
('mul', u'Multiple languages'),
('mun', u'Munda languages'),
('mus', u'Creek'),
('mwl', u'Mirandese'),
('mwr', u'Marwari'),
('myn', u'Mayan languages'),
('myv', u'Erzya'),
('nah', u'Nahuatl languages'),
('nai', u'North American Indian'),
('nap', u'Neapolitan'),
('nau', 'na', u'Nauru'),
('nav', 'nv', u'Navajo'),
('nbl', 'nr', u'Ndebele, South'),
('nde', 'nd', u'Ndebele, North'),
('ndo', 'ng', u'Ndonga'),
('nds', u'Low German'),
('nep', 'ne', u'Nepali'),
('new', u'Nepal Bhasa'),
('nia', u'Nias'),
('nic', u'Niger-Kordofanian '),
('niu', u'Niuean'),
('nno', 'nn', u'Norwegian Nynorsk'),
('nob', 'nb', u'Bokm\xe5l, Norwegian'),
('nog', u'Nogai'),
('non', u'Norse, Old'),
('nor', 'no', u'Norwegian'),
('nqo', u"N'Ko"),
('nso', u'Pedi'),
('nub', u'Nubian languages'),
('nwc', u'Classical Newari'),
('nya', 'ny', u'Chichewa'),
('nym', u'Nyamwezi'),
('nyn', u'Nyankole'),
('nyo', u'Nyoro'),
('nzi', u'Nzima'),
('oci', 'oc', u'Occitan '),
('oji', 'oj', u'Ojibwa'),
('ori', 'or', u'Oriya'),
('orm', 'om', u'Oromo'),
('osa', u'Osage'),
('oss', 'os', u'Ossetian'),
('ota', u'Turkish, Ottoman '),
('oto', u'Otomian languages'),
('paa', u'Papuan '),
('pag', u'Pangasinan'),
('pal', u'Pahlavi'),
('pam', u'Pampanga'),
('pan', 'pa', u'Panjabi'),
('pap', u'Papiamento'),
('pau', u'Palauan'),
('peo', u'Persian, Old '),
('per', 'fa', u'Persian'),
('phi', u'Philippine '),
('phn', u'Phoenician'),
('pli', 'pi', u'Pali'),
('pol', 'pl', u'Polish'),
('pon', u'Pohnpeian'),
('por', 'pt', u'Portuguese'),
('pra', u'Prakrit languages'),
('pro', u'Proven\xe7al, Old '),
('pus', 'ps', u'Pushto'),
('qaa-qtz', u'Reserved for local use'),
('que', 'qu', u'Quechua'),
('raj', u'Rajasthani'),
('rap', u'Rapanui'),
('rar', u'Rarotongan'),
('roa', u'Romance '),
('roh', 'rm', u'Romansh'),
('rom', u'Romany'),
('rum', 'ro', u'Romanian'),
('run', 'rn', u'Rundi'),
('rup', u'Aromanian'),
('rus', 'ru', u'Russian'),
('sad', u'Sandawe'),
('sag', 'sg', u'Sango'),
('sah', u'Yakut'),
('sai', u'South American Indian '),
('sal', u'Salishan languages'),
('sam', u'Samaritan Aramaic'),
('san', 'sa', u'Sanskrit'),
('sas', u'Sasak'),
('sat', u'Santali'),
('scc', 'sr', u'Serbian'),
('scn', u'Sicilian'),
('sco', u'Scots'),
('scr', 'hr', u'Croatian'),
('sel', u'Selkup'),
('sem', u'Semitic '),
('sga', u'Irish, Old '),
('sgn', u'Sign Languages'),
('shn', u'Shan'),
('sid', u'Sidamo'),
('sin', 'si', u'Sinhala'),
('sio', u'Siouan languages'),
('sit', u'Sino-Tibetan '),
('sla', u'Slavic '),
('slo', 'sk', u'Slovak'),
('slv', 'sl', u'Slovenian'),
('sma', u'Southern Sami'),
('sme', 'se', u'Northern Sami'),
('smi', u'Sami languages '),
('smj', u'Lule Sami'),
('smn', u'Inari Sami'),
('smo', 'sm', u'Samoan'),
('sms', u'Skolt Sami'),
('sna', 'sn', u'Shona'),
('snd', 'sd', u'Sindhi'),
('snk', u'Soninke'),
('sog', u'Sogdian'),
('som', 'so', u'Somali'),
('son', u'Songhai languages'),
('sot', 'st', u'Sotho, Southern'),
('spa', 'es', u'Spanish'),
('srd', 'sc', u'Sardinian'),
('srn', u'Sranan Tongo'),
('srr', u'Serer'),
('ssa', u'Nilo-Saharan '),
('ssw', 'ss', u'Swati'),
('suk', u'Sukuma'),
('sun', 'su', u'Sundanese'),
('sus', u'Susu'),
('sux', u'Sumerian'),
('swa', 'sw', u'Swahili'),
('swe', 'sv', u'Swedish'),
('syc', u'Classical Syriac'),
('syr', u'Syriac'),
('tah', 'ty', u'Tahitian'),
('tai', u'Tai '),
('tam', 'ta', u'Tamil'),
('tat', 'tt', u'Tatar'),
('tel', 'te', u'Telugu'),
('tem', u'Timne'),
('ter', u'Tereno'),
('tet', u'Tetum'),
('tgk', 'tg', u'Tajik'),
('tgl', 'tl', u'Tagalog'),
('tha', 'th', u'Thai'),
('tib', 'bo', u'Tibetan'),
('tig', u'Tigre'),
('tir', 'ti', u'Tigrinya'),
('tiv', u'Tiv'),
('tkl', u'Tokelau'),
('tlh', u'Klingon'),
('tli', u'Tlingit'),
('tmh', u'Tamashek'),
('tog', u'Tonga '),
('ton', 'to', u'Tonga '),
('tpi', u'Tok Pisin'),
('tsi', u'Tsimshian'),
('tsn', 'tn', u'Tswana'),
('tso', 'ts', u'Tsonga'),
('tuk', 'tk', u'Turkmen'),
('tum', u'Tumbuka'),
('tup', u'Tupi languages'),
('tur', 'tr', u'Turkish'),
('tut', u'Altaic '),
('tvl', u'Tuvalu'),
('twi', 'tw', u'Twi'),
('tyv', u'Tuvinian'),
('udm', u'Udmurt'),
('uga', u'Ugaritic'),
('uig', 'ug', u'Uighur'),
('ukr', 'uk', u'Ukrainian'),
('umb', u'Umbundu'),
('und', u'Undetermined'),
('urd', 'ur', u'Urdu'),
('uzb', 'uz', u'Uzbek'),
('vai', u'Vai'),
('ven', 've', u'Venda'),
('vie', 'vi', u'Vietnamese'),
('vol', 'vo', u'Volap\xfck'),
('vot', u'Votic'),
('wak', u'Wakashan languages'),
('wal', u'Walamo'),
('war', u'Waray'),
('was', u'Washo'),
('wel', 'cy', u'Welsh'),
('wen', u'Sorbian languages'),
('wln', 'wa', u'Walloon'),
('wol', 'wo', u'Wolof'),
('xal', u'Kalmyk'),
('xho', 'xh', u'Xhosa'),
('yao', u'Yao'),
('yap', u'Yapese'),
('yid', 'yi', u'Yiddish'),
('yor', 'yo', u'Yoruba'),
('ypk', u'Yupik languages'),
('zap', u'Zapotec'),
('zbl', u'Blissymbols'),
('zen', u'Zenaga'),
('zha', 'za', u'Zhuang'),
('znd', u'Zande languages'),
('zul', 'zu', u'Zulu'),
('zun', u'Zuni'),
('zxx', u'No linguistic content'),
('zza', u'Zaza'),
)<|fim▁end|> | #
# enzyme is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
<|file_name|>backdrop.ts<|end_file_name|><|fim▁begin|>import {Animate} from "../../core/util/animate";
import {ElementRef, ViewEncapsulation, Component, Input, Output, EventEmitter} from "angular2/core";
import {DOM} from "angular2/src/platform/dom/dom_adapter";
/**
* An overlay for content on the page.
* Can optionally dismiss when clicked on.
* Has outputs for show/showing and hide/hiding.
*/
@Component({
selector: 'md-backdrop',
template: '',
encapsulation: ViewEncapsulation.None,
host: {
'class': 'md-backdrop',
'(click)': 'onClick()',
},
})
export class MdBackdrop {
/**
* When true, clicking on the backdrop will close it
*/
@Input()
clickClose: boolean = false;
/**
* When true, disable the parent container scroll while the backdrop is active.
*/
@Input()
hideScroll: boolean = true;
/**
* Emits when the backdrop begins to hide.
*/
@Output()
onHiding: EventEmitter<MdBackdrop> = new EventEmitter<MdBackdrop>(false);
/**
* Emits when the backdrop has finished being hidden.
*/
@Output()
onHidden: EventEmitter<MdBackdrop> = new EventEmitter<MdBackdrop>(false);
/**
* Emits when the backdrop begins to be shown.
*/
@Output()
onShowing: EventEmitter<MdBackdrop> = new EventEmitter<MdBackdrop>();
/**
* Emits when the backdrop has finished being shown.
*/
@Output()
onShown: EventEmitter<MdBackdrop> = new EventEmitter<MdBackdrop>();
constructor(public element: ElementRef) {
}
/**
* The CSS class name to transition on/off when the backdrop is hidden/shown.
*/
@Input()
public transitionClass: string = 'md-active';
/**
* Whether to add the {@see transitionClass} or remove it when the backdrop is shown. The
* opposite will happen when the backdrop is hidden.
*/
@Input()
public transitionAddClass = true;
/**
* Whether the backdrop is visible.
*/
get visible(): boolean {
return this._visible;
}
@Input()
set visible(value: boolean) {
this.toggle(value);
}
private _visible: boolean = false;
private _transitioning: boolean = false;
private _previousOverflow: string = null;
private _body: HTMLBodyElement = DOM.query('body');
  /**
   * Host click handler (wired through the component's `(click)` host binding).
   * Dismisses the backdrop, but only when click-to-close is enabled, no
   * show/hide transition is currently in flight, and the backdrop is visible.
   */
  onClick() {
    if (this.clickClose && !this._transitioning && this.visible) {
      this.hide();
    }
  }
/**
* Hide the backdrop and return a promise that is resolved when the hide animations are
* complete.
*/
  hide(): Promise<any> {
    // Delegate to toggle(); the promise resolves when the hide animation ends.
    return this.toggle(false);
  }
/**<|fim▁hole|> * Show the backdrop and return a promise that is resolved when the show animations are
* complete.
*/
  show(): Promise<any> {
    // Delegate to toggle(); the promise resolves when the show animation ends.
    return this.toggle(true);
  }
/**
* Toggle the visibility of the backdrop.
* @param visible whether or not the backdrop should be visible
* @returns {any}
*/
  toggle(visible: boolean = !this.visible): any {
    // No-op when the requested state equals the current state.
    if (visible === this._visible) {
      return Promise.resolve();
    }
    // Pick the begin/end event pair matching the direction of the change.
    let beginEvent = visible ? this.onShowing : this.onHiding;
    let endEvent = visible ? this.onShown : this.onHidden;
    // Mark the transition in-flight so onClick() ignores clicks meanwhile.
    this._visible = visible;
    this._transitioning = true;
    beginEvent.emit(this);
    // transitionAddClass decides whether "showing" means adding or removing
    // the transition class; Animate.enter adds it, Animate.leave removes it.
    let action = visible ?
        (this.transitionAddClass ? Animate.enter : Animate.leave) :
        (this.transitionAddClass ? Animate.leave : Animate.enter);
    // Page scroll: lock body scrolling while the backdrop is visible,
    // remembering the previous overflow style so it can be restored on hide.
    if (visible && this.hideScroll && this.element && !this._previousOverflow) {
      let style = DOM.getStyle(this._body, 'overflow');
      if (style !== 'hidden') {
        this._previousOverflow = style;
        DOM.setStyle(this._body, 'overflow', 'hidden');
      }
    }
    else if (!visible && this.hideScroll && this.element && this._previousOverflow !== null) {
      DOM.setStyle(this._body, 'overflow', this._previousOverflow);
      this._previousOverflow = null;
    }
    // Animate transition class in/out and then finally emit the completed event.
    return action(this.element.nativeElement, this.transitionClass).then(() => {
      this._transitioning = false;
      endEvent.emit(this);
    });
  }
}<|fim▁end|> | |
<|file_name|>assignment.d.ts<|end_file_name|><|fim▁begin|>import * as React from 'react';
import {SvgIconProps} from '../../SvgIcon';<|fim▁hole|><|fim▁end|> |
export default function Assignment(props: SvgIconProps): React.ReactElement<SvgIconProps>; |
<|file_name|>openrc.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
import os<|fim▁hole|># With the addition of Keystone, to use an openstack cloud you should
# authenticate against keystone, which returns a **Token** and **Service
# Catalog**. The catalog contains the endpoint for all services the
# user/tenant has access to - including nova, glance, keystone, swift.
#
# *NOTE*: Using the 2.0 *auth api* does not mean that compute api is 2.0. We
# will use the 1.1 *compute api*
os.environ['OS_AUTH_URL'] = "https://keystone.rc.nectar.org.au:5000/v2.0/"
# With the addition of Keystone we have standardized on the term **tenant**
# as the entity that owns the resources.
os.environ['OS_TENANT_ID'] = "123456789012345678901234567890"
os.environ['OS_TENANT_NAME'] = "tenant_name"
# In addition to the owning entity (tenant), openstack stores the entity
# performing the action as the **user**.
os.environ['OS_USERNAME'] = "[email protected]"
# With Keystone you pass the keystone password.
os.environ['OS_PASSWORD'] = "????????????????????"<|fim▁end|> | |
<|file_name|>beam_centre_model_test.py<|end_file_name|><|fim▁begin|>from __future__ import (absolute_import, division, print_function)
import unittest
import sys
from sans.gui_logic.models.beam_centre_model import BeamCentreModel
from sans.common.enums import FindDirectionEnum, SANSInstrument
if sys.version_info.major == 3:
from unittest import mock
else:
import mock
class BeamCentreModelTest(unittest.TestCase):
def setUp(self):
self.result = {'pos1':300, 'pos2':-300}
self.centre_finder_instance = mock.MagicMock(return_value = self.result)<|fim▁hole|> self.beam_centre_model = BeamCentreModel(self.SANSCentreFinder)
    def test_that_model_initialises_with_correct_values(self):
        """All defaults expected straight after construction, before any
        instrument-specific reset is applied."""
        self.assertEqual(self.beam_centre_model.max_iterations, 10)
        self.assertEqual(self.beam_centre_model.r_min, 60)
        self.assertEqual(self.beam_centre_model.r_max, 280)
        self.assertEqual(self.beam_centre_model.left_right, True)
        self.assertEqual(self.beam_centre_model.up_down, True)
        self.assertEqual(self.beam_centre_model.tolerance, 0.000125)
        self.assertEqual(self.beam_centre_model.lab_pos_1, '')
        self.assertEqual(self.beam_centre_model.lab_pos_2, '')
        self.assertEqual(self.beam_centre_model.hab_pos_2, '')
        self.assertEqual(self.beam_centre_model.hab_pos_1, '')
        self.assertEqual(self.beam_centre_model.scale_1, 1000)
        self.assertEqual(self.beam_centre_model.scale_2, 1000)
        self.assertEqual(self.beam_centre_model.COM, False)
        self.assertEqual(self.beam_centre_model.verbose, False)
        self.assertEqual(self.beam_centre_model.q_min, 0.01)
        self.assertEqual(self.beam_centre_model.q_max, 0.1)
    def test_that_can_update_model_values(self):
        """Plain attribute assignment must be reflected by the getter."""
        self.beam_centre_model.scale_2 = 1.0
        self.assertEqual(self.beam_centre_model.scale_2, 1.0)
    def test_that_correct_values_are_set_for_LARMOR(self):
        """Resetting to LARMOR defaults changes the position-1 scale to 1.0."""
        self.beam_centre_model.reset_to_defaults_for_instrument(SANSInstrument.LARMOR)
        self.assertEqual(self.beam_centre_model.scale_1, 1.0)
    def test_that_correct_values_are_set_for_LOQ(self):
        """Resetting to LOQ defaults changes the outer radius to 200."""
        self.beam_centre_model.reset_to_defaults_for_instrument(SANSInstrument.LOQ)
        self.assertEqual(self.beam_centre_model.r_max, 200)
    def test_that_find_beam_centre_calls_centre_finder_once_when_COM_is_False(self):
        """Without a centre-of-mass pre-pass the centre finder runs exactly
        once, seeded from the model's LAB positions, and the model's q limits
        are copied onto the reduction state."""
        state = mock.MagicMock()
        self.beam_centre_model.find_beam_centre(state)
        # Single full (reduction-method) search started at the stored LAB centre.
        self.SANSCentreFinder.return_value.assert_called_once_with(state, r_min=self.beam_centre_model.r_min,
                                                                   r_max=self.beam_centre_model.r_max,
                                                                   max_iter= self.beam_centre_model.max_iterations,
                                                                   x_start=self.beam_centre_model.lab_pos_1,
                                                                   y_start=self.beam_centre_model.lab_pos_2,
                                                                   tolerance=self.beam_centre_model.tolerance,
                                                                   find_direction=FindDirectionEnum.All,
                                                                   reduction_method=True,
                                                                   verbose=False)
        # The q range of the model must be propagated to the state's Q conversion.
        self.assertEqual(state.convert_to_q.q_min, self.beam_centre_model.q_min)
        self.assertEqual(state.convert_to_q.q_max, self.beam_centre_model.q_max)
    def test_that_find_beam_centre_calls_centre_finder_twice_when_COM_is_TRUE(self):
        """With COM enabled the finder runs twice: first a centre-of-mass pass
        (reduction_method=False) from the LAB start position, then the full
        search seeded with the COM result ('pos1'/'pos2')."""
        state = mock.MagicMock()
        self.beam_centre_model.COM = True
        self.beam_centre_model.find_beam_centre(state)
        self.assertEqual(self.SANSCentreFinder.return_value.call_count, 2)
        # The most recent call is the refinement seeded from the COM result.
        self.SANSCentreFinder.return_value.assert_called_with(state, r_min=self.beam_centre_model.r_min,
                                                              r_max=self.beam_centre_model.r_max,
                                                              max_iter= self.beam_centre_model.max_iterations,
                                                              x_start=self.result['pos1'],
                                                              y_start=self.result['pos2'],
                                                              tolerance=self.beam_centre_model.tolerance,
                                                              find_direction=FindDirectionEnum.All,
                                                              reduction_method=True,
                                                              verbose=False)
        # The earlier call is the COM pass started at the LAB positions.
        self.SANSCentreFinder.return_value.assert_any_call(state, r_min=self.beam_centre_model.r_min,
                                                           r_max=self.beam_centre_model.r_max,
                                                           max_iter=self.beam_centre_model.max_iterations,
                                                           x_start=self.beam_centre_model.lab_pos_1,
                                                           y_start=self.beam_centre_model.lab_pos_2,
                                                           tolerance=self.beam_centre_model.tolerance,
                                                           find_direction=FindDirectionEnum.All,
                                                           reduction_method=False)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | self.SANSCentreFinder = mock.MagicMock(return_value = self.centre_finder_instance) |
<|file_name|>muhkuh_split_testdescription.cpp<|end_file_name|><|fim▁begin|><|fim▁hole|> * This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU General Public License for more details. *
* *
* You should have received a copy of the GNU General Public License *
* along with this program; if not, write to the *
* Free Software Foundation, Inc., *
* 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
***************************************************************************/
#include "muhkuh_split_testdescription.h"
#include <wx/filename.h>
#include <wx/log.h>
#include <wx/txtstrm.h>
#include <wx/wfstream.h>
/* The splitter keeps no state besides the parsed XML document and the
 * working folder; construction is trivial. */
muhkuh_split_testdescription::muhkuh_split_testdescription(void)
{
}

/* Trivial destructor; members clean themselves up. */
muhkuh_split_testdescription::~muhkuh_split_testdescription(void)
{
}
/* Load <working folder>/test_description.xml, verify it exists and is
 * readable, then split it into the generated Lua files: one index file
 * (see generate_description) plus, per test section, a code file and a
 * parameter file (see subtests_read_test).  Section 0 is the init block
 * taken from the TestDescription node itself; sections 1..n come from
 * its <Test> children.  Returns true on success. */
bool muhkuh_split_testdescription::split(wxString strWorkingFolder)
{
    bool fResult;
    wxFileName tFileName;
    wxString strXmlFullPath;
    wxXmlNode *ptNodeTestDescription;
    size_t sizSubTestIndex;
    wxXmlNode *ptNode;

    /* Set the working folder. */
    m_strWorkingFolder = strWorkingFolder;

    /* Create the full path to the test description. */
    tFileName.AssignDir(strWorkingFolder);
    tFileName.SetFullName("test_description.xml");
    strXmlFullPath = tFileName.GetFullPath();

    /* Does the test description exist and is it readable? */
    if( tFileName.FileExists()!=true )
    {
        wxLogError(_("The file %s does not exist!"), strXmlFullPath);
        fResult = false;
    }
    else if( tFileName.IsFileReadable()!=true )
    {
        wxLogError(_("The file %s can not be read!"), strXmlFullPath);
        fResult = false;
    }
    else
    {
        /* Ok, we can access the file -> parse the XML tree. */
        fResult = m_tXmlDoc.Load(strXmlFullPath);
        if( fResult!=true )
        {
            /* FIXME: How can I get more information what went wrong here? */
            wxLogError(_("Failed to load the XML document!"));
        }
        else
        {
            /* Search the TestDescription node. */
            ptNodeTestDescription = search_node(m_tXmlDoc.GetRoot(), "TestDescription");
            if( ptNodeTestDescription==NULL )
            {
                wxLogError(_("Can not find the TestDescription node!"));
                fResult = false;
            }
            else
            {
                /* Emit the name/version index file first. */
                fResult = generate_description(ptNodeTestDescription);
                if( fResult==true )
                {
                    sizSubTestIndex = 0;
                    /* Add the init code block (index 0). */
                    fResult = subtests_read_test(ptNodeTestDescription, sizSubTestIndex);
                    if( fResult==true )
                    {
                        ++sizSubTestIndex;
                        /* Search all subtests; stop at the first failure. */
                        ptNode = ptNodeTestDescription->GetChildren();
                        while( ptNode!=NULL )
                        {
                            if( ptNode->GetType()==wxXML_ELEMENT_NODE && ptNode->GetName()=="Test" )
                            {
                                fResult = subtests_read_test(ptNode, sizSubTestIndex);
                                if( fResult!=true )
                                {
                                    break;
                                }
                                ++sizSubTestIndex;
                            }
                            ptNode = ptNode->GetNext();
                        }
                    }
                }
            }
        }
    }

    return fResult;
}
/* Write "test_description.lua": a Lua table _G.__MUHKUH_ALL_TESTS that maps
 * a numeric index to the name/version attributes of the TestDescription
 * node (index 0) and of every <Test> child (1..n).  Missing attributes
 * become empty strings.  Returns the result of the file write. */
bool muhkuh_split_testdescription::generate_description(wxXmlNode *ptNodeTestDescription)
{
    wxArrayString astrTestNames;
    wxArrayString astrTestVersions;
    wxArrayString astrTestDescription;
    wxXmlNode *ptNode;
    size_t sizSubTestIndex;
    wxString strArg;
    bool fResult;

    /* Get the name and version attribute. */
    astrTestNames.Add(ptNodeTestDescription->GetAttribute("name", wxEmptyString));
    astrTestVersions.Add(ptNodeTestDescription->GetAttribute("version", wxEmptyString));

    /* Search all subtests. */
    ptNode = ptNodeTestDescription->GetChildren();
    while( ptNode!=NULL )
    {
        if( ptNode->GetType()==wxXML_ELEMENT_NODE && ptNode->GetName()=="Test" )
        {
            astrTestNames.Add(ptNode->GetAttribute("name", wxEmptyString));
            astrTestVersions.Add(ptNode->GetAttribute("version", wxEmptyString));
        }
        ptNode = ptNode->GetNext();
    }

    /* Write all test names and versions to the file "test_description.lua".
     * NOTE(review): entries use 0-based keys; confirm the Lua consumer
     * expects [0] rather than Lua's conventional 1-based indices. */
    astrTestDescription.Add("_G.__MUHKUH_ALL_TESTS = {\n");
    for(sizSubTestIndex=0; sizSubTestIndex<astrTestNames.GetCount(); ++sizSubTestIndex)
    {
        strArg.Printf("\t[%d] = { [\"name\"]=\"%s\", [\"version\"]=\"%s\" },\n", sizSubTestIndex, astrTestNames.Item(sizSubTestIndex), astrTestVersions.Item(sizSubTestIndex));
        astrTestDescription.Add(strArg);
    }
    astrTestDescription.Add("}\n");

    /* Write this to a file. */
    fResult = write_textfile(MUHKUH_TESTDESCRIPTION_TYP_DESCRIPTION, 0, astrTestDescription);
    return fResult;
}
/* Extract one test section from the XML tree: write the section's <Code>
 * block to test_description_<n>_code.lua and all of its <Parameter>
 * children to test_description_<n>_par.lua.
 * Fails when the section has no <Code> child, when a parameter lacks its
 * "name" attribute, or when a file can not be written. */
bool muhkuh_split_testdescription::subtests_read_test(wxXmlNode *ptParent, size_t sizSubTestIndex)
{
    bool fResult;
    wxXmlNode *ptNode;
    wxString strData;
    wxArrayString astrParameter;
    wxString strParameterName;
    wxString strParameterValue;

    /* Expect failure. */
    fResult = false;

    /* Search the code node. */
    ptNode = search_node(ptParent->GetChildren(), "Code");
    if( ptNode!=NULL )
    {
        /* Get the node contents and write the code file. */
        strData = ptNode->GetNodeContent();
        fResult = write_textfile(MUHKUH_TESTDESCRIPTION_TYP_CODE, sizSubTestIndex, strData);
        if( fResult==true )
        {
            /* Collect all parameters. */
            ptNode = ptParent->GetChildren();
            while( ptNode!=NULL )
            {
                if( ptNode->GetType()==wxXML_ELEMENT_NODE && ptNode->GetName()=="Parameter" )
                {
                    /* Get the name attribute. */
                    if( ptNode->GetAttribute("name", &strParameterName)==false )
                    {
                        wxLogError(_("The parameter has no name attribute."));
                        fResult = false;
                        break;
                    }
                    /* Get the parameter value. */
                    strParameterValue = ptNode->GetNodeContent();
                    /* Combine the name and value into one Lua assignment.
                     * NOTE(review): name/value are pasted into a Lua string
                     * literal without escaping; embedded quotes would break
                     * the generated file. */
                    strData.Printf("_G.__MUHKUH_TEST_PARAMETER[\"%s\"] = \"%s\"\n", strParameterName, strParameterValue);
                    astrParameter.Add(strData);
                }
                ptNode = ptNode->GetNext();
            }
            /* Fix: only write the parameter file when all parameters were
             * collected successfully.  Previously the write result was
             * assigned unconditionally, so a missing-name error (logged
             * above, fResult=false) was overwritten by a successful partial
             * write and silently turned into success. */
            if( fResult==true )
            {
                /* Write all parameters to a file. */
                fResult = write_textfile(MUHKUH_TESTDESCRIPTION_TYP_PARAMETER, sizSubTestIndex, astrParameter);
            }
        }
    }

    return fResult;
}
/* Walk the sibling chain starting at ptNode and return the first element
 * node whose name equals strName, or NULL when the chain is exhausted. */
wxXmlNode *muhkuh_split_testdescription::search_node(wxXmlNode *ptNode, wxString strName)
{
    for(wxXmlNode *ptCursor=ptNode; ptCursor!=NULL; ptCursor=ptCursor->GetNext())
    {
        if( ptCursor->GetType()==wxXML_ELEMENT_NODE && ptCursor->GetName()==strName )
        {
            return ptCursor;
        }
    }
    return NULL;
}
/* Build the absolute path (inside the working folder) of the generated Lua
 * file for the given output type.  The sub-test index is only used for the
 * per-test code/parameter file names; the description file name is fixed. */
wxString muhkuh_split_testdescription::get_lua_filename(MUHKUH_TESTDESCRIPTION_TYP_T tTyp, size_t sizSubTextIndex)
{
    wxFileName tFileName;
    wxString strFileName;

    /* Construct the name and extension part of the filename.
     * NOTE(review): "%d" paired with a size_t argument relies on wxPrintf's
     * vararg handling; "%zu" (or a cast to int) would be safer -- confirm. */
    switch( tTyp )
    {
    case MUHKUH_TESTDESCRIPTION_TYP_DESCRIPTION:
        strFileName = "test_description.lua";
        break;

    case MUHKUH_TESTDESCRIPTION_TYP_CODE:
        strFileName.Printf("test_description_%d_code.lua", sizSubTextIndex);
        break;

    case MUHKUH_TESTDESCRIPTION_TYP_PARAMETER:
        strFileName.Printf("test_description_%d_par.lua", sizSubTextIndex);
        break;
    }

    /* Construct the complete path. */
    tFileName.AssignDir(m_strWorkingFolder);
    tFileName.SetFullName(strFileName);
    return tFileName.GetFullPath();
}
/* Create (truncate) the Lua file for tTyp/sizSubTextIndex and write
 * strContents to it through a wxTextOutputStream.  Returns false when the
 * file can not be opened for writing. */
bool muhkuh_split_testdescription::write_textfile(MUHKUH_TESTDESCRIPTION_TYP_T tTyp, size_t sizSubTextIndex, wxString strContents)
{
    bool fResult;
    wxString strFileName;
    wxFFileOutputStream *ptOutputStream;
    wxTextOutputStream *ptTextOutputStream;

    /* Create a new file. */
    strFileName = get_lua_filename(tTyp, sizSubTextIndex);
    ptOutputStream = new wxFFileOutputStream(strFileName, "w");
    if( ptOutputStream->IsOk()!=true )
    {
        wxLogError("Failed to create new file %s!", strFileName);
        fResult = false;
    }
    else
    {
        /* Create the text output stream. */
        ptTextOutputStream = new wxTextOutputStream(*ptOutputStream);
        /* Write the complete data to the file. */
        ptTextOutputStream->WriteString(strContents);
        delete ptTextOutputStream;
        ptOutputStream->Close();
        fResult = true;
    }
    /* The output stream is deleted on both paths, so the handle is not
     * leaked when opening fails. */
    delete ptOutputStream;

    return fResult;
}
/* Array overload: create (truncate) the Lua file for tTyp/sizSubTextIndex
 * and write every string of astrContents to it, in array order, through a
 * wxTextOutputStream.  Returns false when the file can not be opened for
 * writing.  (Also drops the stray fill-in-the-middle marker that was fused
 * to the closing brace of this function.) */
bool muhkuh_split_testdescription::write_textfile(MUHKUH_TESTDESCRIPTION_TYP_T tTyp, size_t sizSubTextIndex, wxArrayString &astrContents)
{
    bool fResult;
    wxString strFileName;
    wxFFileOutputStream *ptOutputStream;
    wxTextOutputStream *ptTextOutputStream;
    size_t sizStringCnt;
    size_t sizStringEnd;

    /* Create a new file. */
    strFileName = get_lua_filename(tTyp, sizSubTextIndex);
    ptOutputStream = new wxFFileOutputStream(strFileName, "w");
    if( ptOutputStream->IsOk()!=true )
    {
        wxLogError("Failed to create new file %s!", strFileName);
        fResult = false;
    }
    else
    {
        /* Create the text output stream. */
        ptTextOutputStream = new wxTextOutputStream(*ptOutputStream);
        /* Write all strings in array order. */
        sizStringCnt = 0;
        sizStringEnd = astrContents.GetCount();
        while( sizStringCnt<sizStringEnd )
        {
            ptTextOutputStream->WriteString(astrContents.Item(sizStringCnt));
            ++sizStringCnt;
        }
        delete ptTextOutputStream;
        ptOutputStream->Close();
        fResult = true;
    }
    /* Delete the stream on both paths so the handle is never leaked. */
    delete ptOutputStream;

    return fResult;
}
* Copyright (C) 2011 by Christoph Thelen *
* [email protected] *
* * |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
Microformats2 is a general way to mark up any HTML document with
classes and propeties. This library parses structured data from
a microformatted HTML document and returns a well-formed JSON
dictionary.
"""
from .version import __version__
from .parser import Parser, parse
from .mf_helpers import get_url<|fim▁hole|><|fim▁end|> |
__all__ = ['Parser', 'parse', 'get_url', '__version__'] |
<|file_name|>TauPump.cpp<|end_file_name|><|fim▁begin|>#ifndef _pymoose_TauPump_cpp
#define _pymoose_TauPump_cpp
#include "TauPump.h"
using namespace pymoose;
const std::string TauPump::className_ = "TauPump";
TauPump::TauPump(std::string className, std::string objectName, Id parentId):Neutral(className, objectName, parentId){}
TauPump::TauPump(std::string className, std::string path):Neutral(className, path){}
TauPump::TauPump(std::string className, std::string objectName, PyMooseBase& parent):Neutral(className, objectName, parent){}
TauPump::TauPump(Id id):Neutral(id){}
TauPump::TauPump(std::string path):Neutral(className_, path){}<|fim▁hole|>TauPump::TauPump(const TauPump& src, std::string objectName, Id& parent):Neutral(src, objectName, parent){}
TauPump::TauPump(const TauPump& src, std::string path):Neutral(src, path){}
TauPump::TauPump(const Id& src, std::string name, Id& parent):Neutral(src, name, parent){}
TauPump::TauPump(const Id& src, std::string path):Neutral(src, path){}
// Trivial destructor: cleanup is handled by the Neutral base class.
TauPump::~TauPump(){}

// Return the MOOSE class name of this wrapper.
const std::string& TauPump::getType(){ return className_; }

// The accessors below all follow the same pymoose pattern: they forward to
// the underlying MOOSE element through the templated get<>/set<> helpers,
// addressing the element by id_() and the field by its string name.

// Field "pumpRate".
double TauPump::__get_pumpRate() const
{
    double pumpRate;
    get < double > (id_(), "pumpRate",pumpRate);
    return pumpRate;
}
void TauPump::__set_pumpRate( double pumpRate )
{
    set < double > (id_(), "pumpRate", pumpRate);
}
// Field "eqConc" (equilibrium concentration).
double TauPump::__get_eqConc() const
{
    double eqConc;
    get < double > (id_(), "eqConc",eqConc);
    return eqConc;
}
void TauPump::__set_eqConc( double eqConc )
{
    set < double > (id_(), "eqConc", eqConc);
}
// Fields "TA".."TV": coefficients of the pump's time-constant expression.
// NOTE(review): exact semantics are defined by the underlying TauPump model
// in MOOSE -- confirm against its documentation.
double TauPump::__get_TA() const
{
    double TA;
    get < double > (id_(), "TA",TA);
    return TA;
}
void TauPump::__set_TA( double TA )
{
    set < double > (id_(), "TA", TA);
}
double TauPump::__get_TB() const
{
    double TB;
    get < double > (id_(), "TB",TB);
    return TB;
}
void TauPump::__set_TB( double TB )
{
    set < double > (id_(), "TB", TB);
}
double TauPump::__get_TC() const
{
    double TC;
    get < double > (id_(), "TC",TC);
    return TC;
}
void TauPump::__set_TC( double TC )
{
    set < double > (id_(), "TC", TC);
}
double TauPump::__get_TV() const
{
    double TV;
    get < double > (id_(), "TV",TV);
    return TV;
}
void TauPump::__set_TV( double TV )
{
    set < double > (id_(), "TV", TV);
}
#endif<|fim▁end|> | TauPump::TauPump(std::string name, Id parentId):Neutral(className_, name, parentId){}
TauPump::TauPump(std::string name, PyMooseBase& parent):Neutral(className_, name, parent){}
TauPump::TauPump(const TauPump& src, std::string objectName, PyMooseBase& parent):Neutral(src, objectName, parent){} |
<|file_name|>sepiasearch.py<|end_file_name|><|fim▁begin|># SPDX-License-Identifier: AGPL-3.0-or-later
"""
SepiaSearch (Videos)
"""
from json import loads
from dateutil import parser, relativedelta
from urllib.parse import urlencode
from datetime import datetime
# about
about = {
"website": 'https://sepiasearch.org',
"wikidata_id": None,
"official_api_documentation": "https://framagit.org/framasoft/peertube/search-index/-/tree/master/server/controllers/api", # NOQA
"use_official_api": True,
"require_api_key": False,
"results": 'JSON',
}
categories = ['videos']
paging = True
time_range_support = True
safesearch = True
supported_languages = [
'en', 'fr', 'ja', 'eu', 'ca', 'cs', 'eo', 'el',
'de', 'it', 'nl', 'es', 'oc', 'gd', 'zh', 'pt',
'sv', 'pl', 'fi', 'ru'
]
base_url = 'https://sepiasearch.org/api/v1/search/videos'
# searx safe-search level (0 = off, 1 = moderate, 2 = strict) mapped onto the
# API's ``nsfw`` parameter; moderate and strict both exclude NSFW content.
safesearch_table = {
    0: 'both',
    1: 'false',
    2: 'false'
}

# Relative offset added to today's date to build the ``startDate`` filter for
# each supported time range ('day' uses a zero offset, i.e. today).
time_range_table = {
    'day': relativedelta.relativedelta(),
    'week': relativedelta.relativedelta(weeks=-1),
    'month': relativedelta.relativedelta(months=-1),
    'year': relativedelta.relativedelta(years=-1)
}

# iframe wrapper used for the embedded video player in results.
embedded_url = '<iframe width="540" height="304" src="{url}" frameborder="0" allowfullscreen></iframe>'
def minute_to_hm(minute):
    """Render an integer minute count as "H:MM"; any other input gives None."""
    if not isinstance(minute, int):
        return None
    hours, minutes = divmod(minute, 60)
    return "%d:%02d" % (hours, minutes)
def request(query, params):
    """Populate ``params['url']`` with the SepiaSearch query URL.

    Applies paging, the safe-search mapping, an optional language filter and
    an optional ``startDate`` derived from the requested time range.
    (Also removes the stray fill-in-the-middle marker that corrupted the
    ``urlencode`` call.)
    """
    params['url'] = base_url + '?' + urlencode({
        'search': query,
        'start': (params['pageno'] - 1) * 10,
        'count': 10,
        'sort': '-match',
        'nsfw': safesearch_table[params['safesearch']]
    })

    # Only the primary language subtag is used ("en-US" -> "en").
    language = params['language'].split('-')[0]
    if language in supported_languages:
        params['url'] += '&languageOneOf[]=' + language

    if params['time_range'] in time_range_table:
        # Lower bound for the publication date, relative to today.
        time = datetime.now().date() + time_range_table[params['time_range']]
        params['url'] += '&startDate=' + time.isoformat()

    return params
def response(resp):
    """Parse the JSON reply from the search index into searx result dicts.

    Returns an empty list when the response carries no ``data`` array.
    (Also removes the stray fill-in-the-middle marker that was fused to the
    final ``return`` statement.)
    """
    results = []

    search_results = loads(resp.text)

    if 'data' not in search_results:
        return []

    for result in search_results['data']:
        title = result['name']
        content = result['description']
        thumbnail = result['thumbnailUrl']
        publishedDate = parser.parse(result['publishedAt'])
        # Wrap the embed URL in the iframe template for the video player.
        embedded = embedded_url.format(url=result.get('embedUrl'))
        author = result.get('account', {}).get('displayName')
        # 'duration' appears to be minutes (cf. minute_to_hm) -- shown as H:MM.
        length = minute_to_hm(result.get('duration'))
        url = result['url']

        results.append({'url': url,
                        'title': title,
                        'content': content,
                        'author': author,
                        'length': length,
                        'template': 'videos.html',
                        'publishedDate': publishedDate,
                        'embedded': embedded,
                        'thumbnail': thumbnail})

    return results
<|file_name|>PairwiseAlignment.cc<|end_file_name|><|fim▁begin|>/* Copyright (c) 2006
Colin Dewey (University of Wisconsin-Madison)
[email protected]
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "util/stl.hh"
#include "bio/alignment/PairwiseAlignment.hh"
namespace bio { namespace alignment {
/* Count alignment columns in which seq1 and seq2 carry identical
 * characters.  NOTE(review): shared gap columns ('-' vs '-') presumably
 * count as identities too -- confirm util::stl::matches semantics. */
size_t PairwiseAlignment::getNumIdentities() const {
	return util::stl::matches(seq1.begin(), seq1.end(), seq2.begin());
}
/* Return the sub-alignment whose columns cover residue positions
 * [start, end) of sequence seqNum (0 -> seq1, otherwise seq2), including
 * any gap columns that fall between the selected residues.
 * Fix: the first loop had lost its `--start; --end;` decrements to a stray
 * fill-in-the-middle marker, which made it non-terminating; they are
 * restored here (`end` stays relative to the advanced start position). */
PairwiseAlignment
PairwiseAlignment::slice(size_t seqNum, size_t start, size_t end) const {
	std::string seq = (seqNum == 0 ? seq1 : seq2);
	/* Column of residue 0 of the chosen sequence. */
	size_t startCol = seq.find_first_not_of('-');
	/* Advance to the column of residue `start`. */
	while (start > 0) {
		startCol = seq.find_first_not_of('-', startCol + 1);
		--start;
		--end;
	}
	/* Walk forward until `end` residues are covered; the final residue
	 * contributes exactly one column (trailing gaps are excluded). */
	size_t endCol = startCol;
	while (end > 0) {
		if (end == 1) {
			++endCol;
		} else {
			endCol = seq.find_first_not_of('-', endCol + 1);
		}
		--end;
	}
	return PairwiseAlignment(seq1.substr(startCol, endCol - startCol),
	                         seq2.substr(startCol, endCol - startCol));
}
} }<|fim▁end|> | --start;
--end; |
<|file_name|>EntityBullet.java<|end_file_name|><|fim▁begin|>package com.exilegl.ld34.entity.enemy;
import java.util.Random;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.OrthographicCamera;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.math.Rectangle;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.math.Vector3;
import com.badlogic.gdx.physics.box2d.World;
import com.exilegl.ld34.ai.AiFollowLocation;
import com.exilegl.ld34.entity.Entity;
import com.exilegl.ld34.entity.EntityCoin;
import com.exilegl.ld34.entity.EntityDirection;
import com.exilegl.ld34.entity.EntityPlayer;
import com.exilegl.ld34.map.Map;
import com.exilegl.ld34.sound.Sound;
import com.exilegl.ld34.tile.Tile;
import box2dLight.ChainLight;
import box2dLight.ConeLight;
import box2dLight.PointLight;
public class EntityBullet extends EntityEnemy{
//The bullet's light color
private Color color;
private float distance;
private Vector2 offset;
private float age;
private boolean textured;
private Texture texture;
//Whether or not the bullet kills enemies. If not, it kills non enemies.
private boolean killsEnemies;
private float duration;
private boolean ranged;
	/**
	 * Spawn a bullet at {@code location}.
	 *
	 * @param location     spawn position in world coordinates
	 * @param color        colour of the bullet's point light
	 * @param direction    LEFT or RIGHT travel direction
	 * @param distance     base travel distance / light radius scale
	 * @param killsEnemies true for a bullet that kills enemies; false for one
	 *                     that kills non-enemies (incl. the player) and plays
	 *                     the enemy-shoot sound on spawn
	 * @param duration     lifetime in seconds before the bullet expires
	 * @param textured     whether to render a sprite (otherwise light only)
	 * @param ranged       adds a random +/-64 vertical scatter and 1.5x distance
	 * @param moveY        true to travel vertically instead of horizontally
	 */
	public EntityBullet(Vector2 location, Color color, EntityDirection direction, float distance, boolean killsEnemies, float duration, boolean textured, boolean ranged, boolean moveY) {
		super(null, location, 10, 10, true);
		this.setColor(color);
		this.setDistance(distance);
		this.offset = new Vector2(0, 0);
		float y = (this.getLocation().y);
		// Ranged shooters scatter their shots 64 units up or down at random.
		if(ranged){
			Random r = new Random();
			if(r.nextBoolean()){
				y = (y + 64);
			}else{
				y = (y - 64);
			}
		}
		// Movement is delegated to an AiFollowLocation action aimed at a
		// point far along the travel direction.
		if(!moveY){
			if(direction == EntityDirection.LEFT){
				this.addAction(new AiFollowLocation(this, new Vector2(this.getLocation().x - this.getDistance() * 5, y), this.getSpeed(), false, false));
			}else{
				this.addAction(new AiFollowLocation(this, new Vector2(this.getLocation().x + this.getDistance() * 5, y), this.getSpeed(), false, false));
			}
		}else{
			// NOTE(review): `x * 5` in the LEFT branch looks inconsistent with
			// the other branches (they offset the coordinate instead of
			// scaling it) -- confirm intended vertical-shot behaviour.
			if(direction == EntityDirection.LEFT){
				this.addAction(new AiFollowLocation(this, new Vector2(this.getLocation().x * 5, y + distance * 3), this.getSpeed(), false, false));
			}else{
				this.addAction(new AiFollowLocation(this, new Vector2(this.getLocation().x, y - distance * 3), this.getSpeed(), false, false));
			}
		}
		setAge(0);
		// NOTE(review): isTextured() is queried before setTextured(textured)
		// below, so this branch sees the field's default value -- confirm.
		if(this.isTextured()){
			this.setTexture(new Texture(Gdx.files.internal("assets/texture/entity/redbullet.png")));
		}
		this.setKillsEnemies(killsEnemies);
		this.setDuration(duration);
		if(textured){
			this.setTexture(new Texture(Gdx.files.internal("assets/texture/misc/bullet.png")));
		}
		this.setTextured(textured);
		if(!killsEnemies){
			Sound.play(Sound.Enemy_Shoot, 0.5f);
		}
		this.ranged = ranged;
		if(ranged){
			this.setDistance(distance * 1.5f);
		}
	}
@Override
public void update() {
setAge((getAge() + 1 * Gdx.graphics.getDeltaTime()));
if(this.getLight() == null){
this.setLight(new PointLight(this.getMap().getRay(), Map.RAYS, this.getColor(), this.getDistance() / 5, this.getLocation().x, this.getLocation().y));
}
this.flickerLight(20, (int) ((int) getDistance() * 1.5f), (int) getDistance());
this.getLight().setPosition(this.getLocation().x + offset.x, this.getLocation().y + offset.y);
this.setRectangle(new Rectangle(this.getLocation().x, this.getLocation().y, this.getLight().getDistance(), this.getLight().getDistance()));
this.performAi();
if(this.getAge() > this.getDuration()){
this.kill();
}
for(Entity e : this.getMap().getEntities()){
if(this.isKillsEnemies()){
if(e instanceof EntityEnemy && !(e instanceof EntityPlayer) && !(e instanceof EntityBullet) && this.getRectangle().overlaps(e.getRectangle()) && !(e instanceof EntityDeathTile)){
e.kill();
this.kill();
Sound.play(Sound.Kill_Enemy);
}
}else{
try{
if(!(e instanceof EntityEnemy) && !(e instanceof EntityCoin) && this.getRectangle().overlaps(e.getRectangle())){
e.kill();
this.kill();
}
}catch(NullPointerException n){
}
}
if(e instanceof EntityPlayer && this.getRectangle().overlaps(e.getRectangle()) && !this.isKillsEnemies()){
e.kill();
this.kill();
}
}
for(Tile t : this.getMap().getTiles()){
if(t.getType().SOLID){
Rectangle tileRect = new Rectangle(t.getLocation().x, t.getLocation().y, t.getSingleAnimation().getWidth(), t.getSingleAnimation().getHeight());
Rectangle bulletRect = new Rectangle(this.getLocation().x, this.getLocation().y, this.getLight().getDistance(), this.getLight().getDistance());
if(bulletRect.overlaps(tileRect)){
this.kill();
}
}
}
}
@Override
public void draw(SpriteBatch batch){
if(this.isTextured()){
batch.draw(this.getTexture(), this.getLocation().x, this.getLocation().y);
}
}
@Override
public void create(Map map) {
this.setMap(map);
if(isKillsEnemies()){
Vector3 m = new Vector3(Gdx.input.getX(), Gdx.input.getY(), 0);
this.getMap().getCamera().unproject(m);
((AiFollowLocation) this.getActions().get(0)).getDestination().set(m.x, m.y);
}
}
public boolean isTextured() {
return textured;
}
public void setTextured(boolean textured) {
this.textured = textured;
}
public Texture getTexture() {
return texture;
}
public void setTexture(Texture texture) {
this.texture = texture;
}
public float getAge() {
return age;
}
public void setAge(float age) {
this.age = age;
}
public float getDistance() {
return distance;
}
public void setDistance(float distance) {
this.distance = distance;
}
public Color getColor() {
return color;
}
public void setColor(Color color) {
this.color = color;
}
public float getDuration() {
return duration;
}
public void setDuration(float duration) {
this.duration = duration;
}
public boolean isKillsEnemies() {
return killsEnemies;
}
public void setKillsEnemies(boolean killsEnemies) {
this.killsEnemies = killsEnemies;
}
<|fim▁hole|> return this.offset;
}
}<|fim▁end|> | public Vector2 getOffset(){ |
<|file_name|>capitalize_spec.js<|end_file_name|><|fim▁begin|>const {capitalize} = require('../capitalize');
const {expect} = require('chai');
describe('capitalize() Capitalizes the first letter of a string', function() {
it('should capitalize the first letter of a string', function(){
expect(capitalize("github")).to.equal("Github")<|fim▁hole|><|fim▁end|> | })
}) |
<|file_name|>NavDrawerItem.java<|end_file_name|><|fim▁begin|>/* Android IMSI-Catcher Detector | (c) AIMSICD Privacy Project
* -----------------------------------------------------------
* LICENSE: http://git.io/vki47 | TERMS: http://git.io/vki4o
* -----------------------------------------------------------
*/
package com.secupwn.aimsicd.ui.drawer;
import android.support.annotation.DrawableRes;
<|fim▁hole|>public interface NavDrawerItem {
int getId();
String getLabel();
void setLabel(String label);
void setIconId(@DrawableRes int icon);
int getType();
boolean isEnabled();
boolean updateActionBarTitle();
}<|fim▁end|> | |
<|file_name|>entrypoint.go<|end_file_name|><|fim▁begin|>package mocks
import (
cli "github.com/stackanetes/kubernetes-entrypoint/client"
)
type MockEntrypoint struct {
client cli.ClientInterface
namespace string
}
func (m MockEntrypoint) Resolve() {
}
func (m MockEntrypoint) Client() (client cli.ClientInterface) {
return m.client
}
func (m MockEntrypoint) GetNamespace() (namespace string) {<|fim▁hole|>}
func NewEntrypointInNamespace(namespace string) MockEntrypoint {
return MockEntrypoint{
client: NewClient(),
namespace: namespace,
}
}
func NewEntrypoint() MockEntrypoint {
return NewEntrypointInNamespace("test")
}<|fim▁end|> | return m.namespace |
<|file_name|>constraints.py<|end_file_name|><|fim▁begin|># Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import urllib
from urllib import unquote
from ConfigParser import ConfigParser, NoSectionError, NoOptionError
from swift.common import utils, exceptions
from swift.common.swob import HTTPBadRequest, HTTPLengthRequired, \
HTTPRequestEntityTooLarge, HTTPPreconditionFailed
MAX_FILE_SIZE = 5368709122
MAX_META_NAME_LENGTH = 128
MAX_META_VALUE_LENGTH = 256
MAX_META_COUNT = 90
MAX_META_OVERALL_SIZE = 4096
MAX_HEADER_SIZE = 8192
MAX_OBJECT_NAME_LENGTH = 1024
CONTAINER_LISTING_LIMIT = 10000
ACCOUNT_LISTING_LIMIT = 10000
MAX_ACCOUNT_NAME_LENGTH = 256
MAX_CONTAINER_NAME_LENGTH = 256
# If adding an entry to DEFAULT_CONSTRAINTS, note that
# these constraints are automatically published by the
# proxy server in responses to /info requests, with values
# updated by reload_constraints()
DEFAULT_CONSTRAINTS = {
'max_file_size': MAX_FILE_SIZE,
'max_meta_name_length': MAX_META_NAME_LENGTH,
'max_meta_value_length': MAX_META_VALUE_LENGTH,
'max_meta_count': MAX_META_COUNT,
'max_meta_overall_size': MAX_META_OVERALL_SIZE,
'max_header_size': MAX_HEADER_SIZE,
'max_object_name_length': MAX_OBJECT_NAME_LENGTH,
'container_listing_limit': CONTAINER_LISTING_LIMIT,
'account_listing_limit': ACCOUNT_LISTING_LIMIT,
'max_account_name_length': MAX_ACCOUNT_NAME_LENGTH,
'max_container_name_length': MAX_CONTAINER_NAME_LENGTH,
}
SWIFT_CONSTRAINTS_LOADED = False
OVERRIDE_CONSTRAINTS = {} # any constraints overridden by SWIFT_CONF_FILE
EFFECTIVE_CONSTRAINTS = {} # populated by reload_constraints
def reload_constraints():
"""
Parse SWIFT_CONF_FILE and reset module level global contraint attrs,
populating OVERRIDE_CONSTRAINTS AND EFFECTIVE_CONSTRAINTS along the way.
"""
global SWIFT_CONSTRAINTS_LOADED, OVERRIDE_CONSTRAINTS
SWIFT_CONSTRAINTS_LOADED = False
OVERRIDE_CONSTRAINTS = {}
constraints_conf = ConfigParser()
if constraints_conf.read(utils.SWIFT_CONF_FILE):
SWIFT_CONSTRAINTS_LOADED = True
for name in DEFAULT_CONSTRAINTS:
try:
value = int(constraints_conf.get('swift-constraints', name))
except NoOptionError:
pass
except NoSectionError:
# We are never going to find the section for another option
break
else:
OVERRIDE_CONSTRAINTS[name] = value
for name, default in DEFAULT_CONSTRAINTS.items():
value = OVERRIDE_CONSTRAINTS.get(name, default)
EFFECTIVE_CONSTRAINTS[name] = value
# "globals" in this context is module level globals, always.
globals()[name.upper()] = value
reload_constraints()
# Maximum slo segments in buffer
MAX_BUFFERED_SLO_SEGMENTS = 10000
#: Query string format= values to their corresponding content-type values
FORMAT2CONTENT_TYPE = {'plain': 'text/plain', 'json': 'application/json',
'xml': 'application/xml'}
def check_metadata(req, target_type):
"""
Check metadata sent in the request headers.
:param req: request object
:param target_type: str: one of: object, container, or account: indicates
which type the target storage for the metadata is
:returns: HTTPBadRequest with bad metadata otherwise None
"""
prefix = 'x-%s-meta-' % target_type.lower()
meta_count = 0
meta_size = 0
for key, value in req.headers.iteritems():
if isinstance(value, basestring) and len(value) > MAX_HEADER_SIZE:
return HTTPBadRequest(body='Header value too long: %s' %
key[:MAX_META_NAME_LENGTH],
request=req, content_type='text/plain')
if not key.lower().startswith(prefix):
continue
key = key[len(prefix):]
if not key:
return HTTPBadRequest(body='Metadata name cannot be empty',
request=req, content_type='text/plain')<|fim▁hole|> body='Metadata name too long: %s%s' % (prefix, key),
request=req, content_type='text/plain')
elif len(value) > MAX_META_VALUE_LENGTH:
return HTTPBadRequest(
body='Metadata value longer than %d: %s%s' % (
MAX_META_VALUE_LENGTH, prefix, key),
request=req, content_type='text/plain')
elif meta_count > MAX_META_COUNT:
return HTTPBadRequest(
body='Too many metadata items; max %d' % MAX_META_COUNT,
request=req, content_type='text/plain')
elif meta_size > MAX_META_OVERALL_SIZE:
return HTTPBadRequest(
body='Total metadata too large; max %d'
% MAX_META_OVERALL_SIZE,
request=req, content_type='text/plain')
return None
def check_object_creation(req, object_name):
"""
Check to ensure that everything is alright about an object to be created.
:param req: HTTP request object
:param object_name: name of object to be created
:returns HTTPRequestEntityTooLarge: the object is too large
:returns HTTPLengthRequired: missing content-length header and not
a chunked request
:returns HTTPBadRequest: missing or bad content-type header, or
bad metadata
"""
if req.content_length and req.content_length > MAX_FILE_SIZE:
return HTTPRequestEntityTooLarge(body='Your request is too large.',
request=req,
content_type='text/plain')
if req.content_length is None and \
req.headers.get('transfer-encoding') != 'chunked':
return HTTPLengthRequired(request=req)
if 'X-Copy-From' in req.headers and req.content_length:
return HTTPBadRequest(body='Copy requests require a zero byte body',
request=req, content_type='text/plain')
if len(object_name) > MAX_OBJECT_NAME_LENGTH:
return HTTPBadRequest(body='Object name length of %d longer than %d' %
(len(object_name), MAX_OBJECT_NAME_LENGTH),
request=req, content_type='text/plain')
if 'Content-Type' not in req.headers:
return HTTPBadRequest(request=req, content_type='text/plain',
body='No content type')
if not check_utf8(req.headers['Content-Type']):
return HTTPBadRequest(request=req, body='Invalid Content-Type',
content_type='text/plain')
return check_metadata(req, 'object')
def check_mount(root, drive):
"""
Verify that the path to the device is a mount point and mounted. This
allows us to fast fail on drives that have been unmounted because of
issues, and also prevents us for accidentally filling up the root
partition.
:param root: base path where the devices are mounted
:param drive: drive name to be checked
:returns: True if it is a valid mounted device, False otherwise
"""
if not (urllib.quote_plus(drive) == drive):
return False
path = os.path.join(root, drive)
return utils.ismount(path)
def check_float(string):
"""
Helper function for checking if a string can be converted to a float.
:param string: string to be verified as a float
:returns: True if the string can be converted to a float, False otherwise
"""
try:
float(string)
return True
except ValueError:
return False
def valid_timestamp(request):
"""
Helper function to extract a timestamp from requests that require one.
:param request: the swob request object
:returns: a valid Timestamp instance
:raises: HTTPBadRequest on missing or invalid X-Timestamp
"""
try:
return request.timestamp
except exceptions.InvalidTimestamp as e:
raise HTTPBadRequest(body=str(e), request=request,
content_type='text/plain')
def check_utf8(string):
"""
Validate if a string is valid UTF-8 str or unicode and that it
does not contain any null character.
:param string: string to be validated
:returns: True if the string is valid utf-8 str or unicode and
contains no null characters, False otherwise
"""
if not string:
return False
try:
if isinstance(string, unicode):
string.encode('utf-8')
else:
string.decode('UTF-8')
return '\x00' not in string
# If string is unicode, decode() will raise UnicodeEncodeError
# So, we should catch both UnicodeDecodeError & UnicodeEncodeError
except UnicodeError:
return False
def check_copy_from_header(req):
"""
Validate that the value from x-copy-from header is
well formatted. We assume the caller ensures that
x-copy-from header is present in req.headers.
:param req: HTTP request object
:returns: A tuple with container name and object name
:raise: HTTPPreconditionFailed if x-copy-from value
is not well formatted.
"""
src_header = unquote(req.headers.get('X-Copy-From'))
if not src_header.startswith('/'):
src_header = '/' + src_header
try:
return utils.split_path(src_header, 2, 2, True)
except ValueError:
raise HTTPPreconditionFailed(
request=req,
body='X-Copy-From header must be of the form'
'<container name>/<object name>')<|fim▁end|> | meta_count += 1
meta_size += len(key) + len(value)
if len(key) > MAX_META_NAME_LENGTH:
return HTTPBadRequest( |
<|file_name|>test_frontend.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.api import Environment
from odoo.tools import DEFAULT_SERVER_DATE_FORMAT
from datetime import date, timedelta
import odoo.tests
class TestUi(odoo.tests.HttpCase):
def test_01_pos_basic_order(self):
env = self.env(user=self.env.ref('base.user_admin'))
journal_obj = env['account.journal']
account_obj = env['account.account']
main_company = env.ref('base.main_company')
main_pos_config = env.ref('point_of_sale.pos_config_main')
account_receivable = account_obj.create({'code': 'X1012',
'name': 'Account Receivable - Test',
'user_type_id': env.ref('account.data_account_type_receivable').id,
'reconcile': True})
field = env['ir.model.fields']._get('res.partner', 'property_account_receivable_id')
env['ir.property'].create({'name': 'property_account_receivable_id',
'company_id': main_company.id,
'fields_id': field.id,
'value': 'account.account,' + str(account_receivable.id)})
# test an extra price on an attribute
pear = env.ref('point_of_sale.whiteboard')
attribute_value = env['product.attribute.value'].create({
'name': 'add 2',
'attribute_id': env['product.attribute'].create({
'name': 'add 2',
}).id,
})
env['product.template.attribute.value'].create({
'product_tmpl_id': pear.product_tmpl_id.id,
'price_extra': 2,
'product_attribute_value_id': attribute_value.id,
})
fixed_pricelist = env['product.pricelist'].create({
'name': 'Fixed',
'item_ids': [(0, 0, {
'compute_price': 'fixed',
'fixed_price': 1,
}), (0, 0, {
'compute_price': 'fixed',
'fixed_price': 2,
'applied_on': '0_product_variant',
'product_id': env.ref('point_of_sale.wall_shelf').id,
}), (0, 0, {
'compute_price': 'fixed',
'fixed_price': 13.95, # test for issues like in 7f260ab517ebde634fc274e928eb062463f0d88f
'applied_on': '0_product_variant',
'product_id': env.ref('point_of_sale.small_shelf').id,
})],
})
env['product.pricelist'].create({
'name': 'Percentage',
'item_ids': [(0, 0, {
'compute_price': 'percentage',
'percent_price': 100,
'applied_on': '0_product_variant',
'product_id': env.ref('point_of_sale.wall_shelf').id,
}), (0, 0, {
'compute_price': 'percentage',
'percent_price': 99,
'applied_on': '0_product_variant',
'product_id': env.ref('point_of_sale.small_shelf').id,
}), (0, 0, {
'compute_price': 'percentage',
'percent_price': 0,
'applied_on': '0_product_variant',
'product_id': env.ref('point_of_sale.magnetic_board').id,
})],
})
env['product.pricelist'].create({
'name': 'Formula',
'item_ids': [(0, 0, {
'compute_price': 'formula',
'price_discount': 6,
'price_surcharge': 5,
'applied_on': '0_product_variant',
'product_id': env.ref('point_of_sale.wall_shelf').id,
}), (0, 0, {
# .99 prices
'compute_price': 'formula',
'price_surcharge': -0.01,
'price_round': 1,
'applied_on': '0_product_variant',
'product_id': env.ref('point_of_sale.small_shelf').id,
}), (0, 0, {
'compute_price': 'formula',
'price_min_margin': 10,
'price_max_margin': 100,
'applied_on': '0_product_variant',
'product_id': env.ref('point_of_sale.magnetic_board').id,
}), (0, 0, {
'compute_price': 'formula',
'price_surcharge': 10,
'price_max_margin': 5,
'applied_on': '0_product_variant',
'product_id': env.ref('point_of_sale.monitor_stand').id,
}), (0, 0, {
'compute_price': 'formula',
'price_discount': -100,
'price_min_margin': 5,
'price_max_margin': 20,
'applied_on': '0_product_variant',
'product_id': env.ref('point_of_sale.desk_pad').id,
})],
})
env['product.pricelist'].create({
'name': 'min_quantity ordering',
'item_ids': [(0, 0, {
'compute_price': 'fixed',
'fixed_price': 1,
'applied_on': '0_product_variant',
'min_quantity': 2,
'product_id': env.ref('point_of_sale.wall_shelf').id,
}), (0, 0, {
'compute_price': 'fixed',
'fixed_price': 2,
'applied_on': '0_product_variant',
'min_quantity': 1,
'product_id': env.ref('point_of_sale.wall_shelf').id,
}), (0, 0, {
'compute_price': 'fixed',
'fixed_price': 2,
'applied_on': '0_product_variant',
'min_quantity': 2,
'product_id': env.ref('point_of_sale.product_product_consumable').id,
})],
})
env['product.pricelist'].create({
'name': 'Product template',
'item_ids': [(0, 0, {
'compute_price': 'fixed',
'fixed_price': 1,
'applied_on': '1_product',
'product_tmpl_id': env.ref('point_of_sale.wall_shelf_product_template').id,
}), (0, 0, {
'compute_price': 'fixed',
'fixed_price': 2,
})],
})<|fim▁hole|> env['product.pricelist'].create({
# no category has precedence over category
'name': 'Category vs no category',
'item_ids': [(0, 0, {
'compute_price': 'fixed',
'fixed_price': 1,
'applied_on': '2_product_category',
'categ_id': env.ref('product.product_category_3').id, # All / Saleable / Services
}), (0, 0, {
'compute_price': 'fixed',
'fixed_price': 2,
})],
})
p = env['product.pricelist'].create({
'name': 'Category',
'item_ids': [(0, 0, {
'compute_price': 'fixed',
'fixed_price': 2,
'applied_on': '2_product_category',
'categ_id': env.ref('product.product_category_all').id,
}), (0, 0, {
'compute_price': 'fixed',
'fixed_price': 1,
'applied_on': '2_product_category',
'categ_id': env.ref('product.product_category_3').id, # All / Saleable / Services
})],
})
today = date.today()
one_week_ago = today - timedelta(weeks=1)
two_weeks_ago = today - timedelta(weeks=2)
one_week_from_now = today + timedelta(weeks=1)
two_weeks_from_now = today + timedelta(weeks=2)
public_pricelist = env['product.pricelist'].create({
'name': 'Public Pricelist',
})
env['product.pricelist'].create({
'name': 'Dates',
'item_ids': [(0, 0, {
'compute_price': 'fixed',
'fixed_price': 1,
'date_start': two_weeks_ago.strftime(DEFAULT_SERVER_DATE_FORMAT),
'date_end': one_week_ago.strftime(DEFAULT_SERVER_DATE_FORMAT),
}), (0, 0, {
'compute_price': 'fixed',
'fixed_price': 2,
'date_start': today.strftime(DEFAULT_SERVER_DATE_FORMAT),
'date_end': one_week_from_now.strftime(DEFAULT_SERVER_DATE_FORMAT),
}), (0, 0, {
'compute_price': 'fixed',
'fixed_price': 3,
'date_start': one_week_from_now.strftime(DEFAULT_SERVER_DATE_FORMAT),
'date_end': two_weeks_from_now.strftime(DEFAULT_SERVER_DATE_FORMAT),
})],
})
cost_base_pricelist = env['product.pricelist'].create({
'name': 'Cost base',
'item_ids': [(0, 0, {
'base': 'standard_price',
'compute_price': 'percentage',
'percent_price': 55,
})],
})
pricelist_base_pricelist = env['product.pricelist'].create({
'name': 'Pricelist base',
'item_ids': [(0, 0, {
'base': 'pricelist',
'base_pricelist_id': cost_base_pricelist.id,
'compute_price': 'percentage',
'percent_price': 15,
})],
})
env['product.pricelist'].create({
'name': 'Pricelist base 2',
'item_ids': [(0, 0, {
'base': 'pricelist',
'base_pricelist_id': pricelist_base_pricelist.id,
'compute_price': 'percentage',
'percent_price': 3,
})],
})
env['product.pricelist'].create({
'name': 'Pricelist base rounding',
'item_ids': [(0, 0, {
'base': 'pricelist',
'base_pricelist_id': fixed_pricelist.id,
'compute_price': 'percentage',
'percent_price': 0.01,
})],
})
excluded_pricelist = env['product.pricelist'].create({
'name': 'Not loaded'
})
env.ref('base.res_partner_18').property_product_pricelist = excluded_pricelist
# set the company currency to USD, otherwise it will assume
# euro's. this will cause issues as the sales journal is in
# USD, because of this all products would have a different
# price
main_company.currency_id = env.ref('base.USD')
test_sale_journal = journal_obj.create({'name': 'Sales Journal - Test',
'code': 'TSJ',
'type': 'sale',
'company_id': main_company.id})
all_pricelists = env['product.pricelist'].search([('id', '!=', excluded_pricelist.id)])
all_pricelists.write(dict(currency_id=main_company.currency_id.id))
src_tax = env['account.tax'].create({'name': "SRC", 'amount': 10})
dst_tax = env['account.tax'].create({'name': "DST", 'amount': 5})
env.ref('point_of_sale.letter_tray').taxes_id = [(6, 0, [src_tax.id])]
main_pos_config.write({
'tax_regime_selection': True,
'fiscal_position_ids': [(0, 0, {
'name': "FP-POS-2M",
'tax_ids': [
(0,0,{'tax_src_id': src_tax.id,
'tax_dest_id': src_tax.id}),
(0,0,{'tax_src_id': src_tax.id,
'tax_dest_id': dst_tax.id})]
})],
'journal_id': test_sale_journal.id,
'invoice_journal_id': test_sale_journal.id,
'journal_ids': [(0, 0, {'name': 'Cash Journal - Test',
'code': 'TSC',
'type': 'cash',
'company_id': main_company.id,
'journal_user': True})],
'use_pricelist': True,
'pricelist_id': public_pricelist.id,
'available_pricelist_ids': [(4, pricelist.id) for pricelist in all_pricelists],
})
# Change the default sale pricelist of customers,
# so the js tests can expect deterministically this pricelist when selecting a customer.
field = env['ir.model.fields']._get('res.partner', 'property_product_pricelist')
env['ir.property'].search([
('name', '=', 'property_product_pricelist'),
('fields_id', '=', field.id),
('res_id', '=', False)
]).write({'value_reference': 'product.pricelist,%s' % public_pricelist.id})
# open a session, the /pos/web controller will redirect to it
main_pos_config.open_session_cb()
# needed because tests are run before the module is marked as
# installed. In js web will only load qweb coming from modules
# that are returned by the backend in module_boot. Without
# this you end up with js, css but no qweb.
env['ir.module.module'].search([('name', '=', 'point_of_sale')], limit=1).state = 'installed'
self.start_tour("/pos/web", 'pos_pricelist', login="admin")
self.start_tour("/pos/web", 'pos_basic_order', login="admin")
for order in env['pos.order'].search([]):
self.assertEqual(order.state, 'paid', "Validated order has payment of " + str(order.amount_paid) + " and total of " + str(order.amount_total))<|fim▁end|> | |
<|file_name|>rest_helpers.py<|end_file_name|><|fim▁begin|># Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helpers for rest transports."""
import functools
import operator
def flatten_query_params(obj):
"""Flatten a nested dict into a list of (name,value) tuples.
The result is suitable for setting query params on an http request.
.. code-block:: python
>>> obj = {'a':
... {'b':
... {'c': ['x', 'y', 'z']} },
... 'd': 'uvw', }
>>> flatten_query_params(obj)
[('a.b.c', 'x'), ('a.b.c', 'y'), ('a.b.c', 'z'), ('d', 'uvw')]
Note that, as described in
https://github.com/googleapis/googleapis/blob/48d9fb8c8e287c472af500221c6450ecd45d7d39/google/api/http.proto#L117,
repeated fields (i.e. list-valued fields) may only contain primitive types (not lists or dicts).
This is enforced in this function.
Args:
obj: a nested dictionary (from json), or None
Returns: a list of tuples, with each tuple having a (possibly) multi-part name
and a scalar value.
Raises:
TypeError if obj is not a dict or None
ValueError if obj contains a list of non-primitive values.
"""
if obj is not None and not isinstance(obj, dict):
raise TypeError("flatten_query_params must be called with dict object")
return _flatten(obj, key_path=[])
def _flatten(obj, key_path):
if obj is None:
return []
if isinstance(obj, dict):
return _flatten_dict(obj, key_path=key_path)
if isinstance(obj, list):
return _flatten_list(obj, key_path=key_path)<|fim▁hole|>
def _is_primitive_value(obj):
if obj is None:
return False
if isinstance(obj, (list, dict)):
raise ValueError("query params may not contain repeated dicts or lists")
return True
def _flatten_value(obj, key_path):
return [(".".join(key_path), obj)]
def _flatten_dict(obj, key_path):
items = (_flatten(value, key_path=key_path + [key]) for key, value in obj.items())
return functools.reduce(operator.concat, items, [])
def _flatten_list(elems, key_path):
# Only lists of scalar values are supported.
# The name (key_path) is repeated for each value.
items = (
_flatten_value(elem, key_path=key_path)
for elem in elems
if _is_primitive_value(elem)
)
return functools.reduce(operator.concat, items, [])<|fim▁end|> | return _flatten_value(obj, key_path=key_path) |
<|file_name|>msp.py<|end_file_name|><|fim▁begin|>import sys
import cv2
import helper as hp
class MSP():
name = "MSP"
def __init__(self):
self.__patterns_num = []
self.__patterns_sym = []
self.__labels_num = []
self.__labels_sym = []
msp_num, msp_sym = "msp/num", "msp/sym"
self.__load_num_patterns(msp_num)
self.__load_sym_patterns(msp_sym)
print 'loading MSP...'
def __load_num_patterns(self, input_dir):
paths = hp.get_paths(input_dir)
self.__patterns_num = [hp.get_gray_image(input_dir, path) for path in paths]<|fim▁hole|> def __load_sym_patterns(self, input_dir):
paths = hp.get_paths(input_dir)
self.__patterns_sym = [hp.get_gray_image(input_dir, path) for path in paths]
self.__labels_sym = [hp.get_test(path, "sym")[0] for path in paths]
def __get_mode(self, mode):
if mode == "num":
return self.__labels_num, self.__patterns_num
elif mode == "sym":
return self.__labels_sym, self.__patterns_sym
def rec(self, img, mode):
tmp_max, tmp, rec = sys.maxint, 0, 0
labels, patterns = self.__get_mode(mode)
for pattern, label in zip(patterns, labels):
tmp = cv2.countNonZero(pattern - img)
if tmp < tmp_max: tmp_max, rec = tmp, label
return rec<|fim▁end|> | self.__labels_num = [hp.get_test(path, "num")[0] for path in paths]
|
<|file_name|>reed_5_2_5.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Copyright (c) 2016, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#<|fim▁hole|>from autothreadharness.harness_case import HarnessCase
class REED_5_2_5(HarnessCase):
role = HarnessCase.ROLE_REED
case = '5 2 5'
golden_devices_required = 17
def on_dialog(self, dialog, title):
pass
if __name__ == '__main__':
unittest.main()<|fim▁end|> |
import unittest
|
<|file_name|>1Prelude.js<|end_file_name|><|fim▁begin|>// Copyright (c) 2012, Event Store LLP
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// Neither the name of the Event Store LLP nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
"use strict";
// these $ globals are defined by external environment
// they are redefined here to make R# like tools understand them
var _log = $log;
var _load_module = $load_module;
function log(message) {
_log("PROJECTIONS (JS): " + message);
}
function initializeModules() {
// load module load new instance of the given module every time
// this is a responsibility of prelude to manage instances of modules
var modules = _load_module('Modules');
// TODO: replace with createRequire($load_module)
modules.$load_module = _load_module;
return modules;
}
function initializeProjections() {
var projections = _load_module('Projections');
return projections;
}
var modules = initializeModules();
var projections = initializeProjections();
var eventProcessor;
function scope($on, $notify) {<|fim▁hole|> eventProcessor = projections.createEventProcessor(log, $notify);
eventProcessor.register_command_handlers($on);
function queryLog(message) {
if (typeof message === "string")
_log(message);
else
_log(JSON.stringify(message));
}
function translateOn(handlers) {
for (var name in handlers) {
if (name == 0 || name === "$init") {
eventProcessor.on_init_state(handlers[name]);
} else if (name === "$initShared") {
eventProcessor.on_init_shared_state(handlers[name]);
} else if (name === "$any") {
eventProcessor.on_any(handlers[name]);
} else if (name === "$deleted") {
eventProcessor.on_deleted_notification(handlers[name]);
} else if (name === "$created") {
eventProcessor.on_created_notification(handlers[name]);
} else {
eventProcessor.on_event(name, handlers[name]);
}
}
}
function $defines_state_transform() {
eventProcessor.$defines_state_transform();
}
function transformBy(by) {
eventProcessor.chainTransformBy(by);
return {
transformBy: transformBy,
filterBy: filterBy,
outputState: outputState,
outputTo: outputTo,
};
}
function filterBy(by) {
eventProcessor.chainTransformBy(function (s) {
var result = by(s);
return result ? s : null;
});
return {
transformBy: transformBy,
filterBy: filterBy,
outputState: outputState,
outputTo: outputTo,
};
}
function outputTo(resultStream, partitionResultStreamPattern) {
eventProcessor.$defines_state_transform();
eventProcessor.options({
resultStreamName: resultStream,
partitionResultStreamNamePattern: partitionResultStreamPattern,
});
}
function outputState() {
eventProcessor.$outputState();
return {
transformBy: transformBy,
filterBy: filterBy,
outputTo: outputTo,
};
}
function when(handlers) {
translateOn(handlers);
return {
$defines_state_transform: $defines_state_transform,
transformBy: transformBy,
filterBy: filterBy,
outputTo: outputTo,
outputState: outputState,
};
}
function foreachStream() {
eventProcessor.byStream();
return {
when: when,
};
}
function partitionBy(byHandler) {
eventProcessor.partitionBy(byHandler);
return {
when: when,
};
}
function fromCategory(category) {
eventProcessor.fromCategory(category);
return {
partitionBy: partitionBy,
foreachStream: foreachStream,
when: when,
outputState: outputState,
};
}
function fromAll() {
eventProcessor.fromAll();
return {
partitionBy: partitionBy,
when: when,
foreachStream: foreachStream,
outputState: outputState,
};
}
function fromStream(stream) {
eventProcessor.fromStream(stream);
return {
partitionBy: partitionBy,
when: when,
outputState: outputState,
};
}
function fromStreamCatalog(streamCatalog, transformer) {
eventProcessor.fromStreamCatalog(streamCatalog, transformer ? transformer : null);
return {
foreachStream: foreachStream,
};
}
function fromStreamsMatching(filter) {
eventProcessor.fromStreamsMatching(filter);
return {
when: when,
};
}
function fromStreams(streams) {
var arr = Array.isArray(streams) ? streams : arguments;
for (var i = 0; i < arr.length; i++)
eventProcessor.fromStream(arr[i]);
return {
partitionBy: partitionBy,
when: when,
outputState: outputState,
};
}
function emit(streamId, eventName, eventBody, metadata) {
var message = { streamId: streamId, eventName: eventName , body: JSON.stringify(eventBody), metadata: metadata, isJson: true };
eventProcessor.emit(message);
}
function linkTo(streamId, event, metadata) {
var message = { streamId: streamId, eventName: "$>", body: event.sequenceNumber + "@" + event.streamId, metadata: metadata, isJson: false };
eventProcessor.emit(message);
}
function copyTo(streamId, event, metadata) {
var m = {};
var em = event.metadata;
if (em)
for (var p1 in em)
if (p1.indexOf("$") !== 0 || p1 === "$correlationId")
m[p1] = em[p1];
if (metadata)
for (var p2 in metadata)
if (p2.indexOf("$") !== 0)
m[p2] = metadata[p2];
var message = { streamId: streamId, eventName: event.eventType, body: event.bodyRaw, metadata: m };
eventProcessor.emit(message);
}
function linkStreamTo(streamId, linkedStreamId, metadata) {
var message = { streamId: streamId, eventName: "$@", body: linkedStreamId, metadata: metadata, isJson: false };
eventProcessor.emit(message);
}
function options(options_object) {
eventProcessor.options(options_object);
}
return {
log: queryLog,
on_any: eventProcessor.on_any,
on_raw: eventProcessor.on_raw,
fromAll: fromAll,
fromCategory: fromCategory,
fromStream: fromStream,
fromStreams: fromStreams,
fromStreamCatalog: fromStreamCatalog,
fromStreamsMatching: fromStreamsMatching,
options: options,
emit: emit,
linkTo: linkTo,
copyTo: copyTo,
linkStreamTo: linkStreamTo,
require: modules.require,
};
};
scope;<|fim▁end|> | |
<|file_name|>index_mem.go<|end_file_name|><|fim▁begin|>package fileindex
import (
"crypto/sha1"
"fmt"
log "github.com/sirupsen/logrus"
"os"
"path"
"path/filepath"
"time"
)
// NewMemIndex creates an in-memory file index rooted at root and identified
// by id. The filter parameter is currently unused. A background goroutine
// rescans the tree every 300 seconds; use WaitForReady to block until the
// first scan has completed.
func NewMemIndex(root string, id string, filter Filter) (Index, error) {
	rootPath := filepath.Clean(root)
	fi, err := os.Stat(rootPath)
	if err != nil {
		return nil, err
	}
	if !fi.IsDir() {
		// BUG FIX: the format string previously had a %v verb but no
		// argument, producing "%!v(MISSING) is not a directory".
		return nil, fmt.Errorf("%v is not a directory", rootPath)
	}
	idx := &memIndex{id, rootPath, nil}
	go func() {
		for {
			go idx.update()
			<-time.After(300 * time.Second)
		}
	}()
	return idx, nil
}
/* Index Entry */
// entryId derives a stable identifier for a filesystem entry by hashing its
// path together with size, modification time and directory flag; the id
// therefore changes whenever the entry's metadata changes (file content is
// never read).
func entryId(path string, fi os.FileInfo) string {
	h := sha1.New()
	h.Write([]byte(fmt.Sprintf("%v\n", path)))
	h.Write([]byte(fmt.Sprintf("%v\n", fi.Size())))
	h.Write([]byte(fmt.Sprintf("%v\n", fi.ModTime())))
	h.Write([]byte(fmt.Sprintf("%v\n", fi.IsDir())))
	return fmt.Sprintf("%x", h.Sum(nil))
}
// memEntry is the in-memory implementation of Entry.
type memEntry struct {
	index *memIndex // owning index, used to resolve the absolute path
	id string // metadata hash, see entryId
	path string // path relative to the index root
	parentId string // id of the parent directory entry ("" for top level)
	isDir bool
}
// newMemEntry builds an Entry for the file described by fi at the given
// root-relative path.
func newMemEntry(index *memIndex, path string, fi os.FileInfo, parentId string) Entry {
	id := entryId(path, fi)
	return &memEntry{index, id, path, parentId, fi.IsDir()}
}
// Id returns the entry's metadata-derived identifier.
func (e *memEntry) Id() string {
	return e.id
}
// Name returns the entry's base file name.
func (e *memEntry) Name() string {
	return filepath.Base(e.path)
}
// IsDir reports whether the entry is a directory.
func (e *memEntry) IsDir() bool {
	return e.isDir
}
// ParentId returns the id of the entry's parent directory entry.
func (e *memEntry) ParentId() string {
	return e.parentId
}
// Path returns the entry's full path under the index root.
func (e *memEntry) Path() string {
	return path.Join(e.index.root, e.path)
}
/* Index */
// memIndex is an in-memory Index whose contents are refreshed periodically
// by a background goroutine (see NewMemIndex).
type memIndex struct {
	id string
	root string // cleaned root path all entries are relative to
	data *memIndexData // nil until the first scan completes; replaced wholesale on rescan
}
// Id returns the index identifier.
func (i *memIndex) Id() string {
	return i.id
}
// Root returns the filesystem path this index is rooted at.
// BUG FIX: previously returned the index id (copy-paste from Id()) instead
// of the root path.
func (i *memIndex) Root() string {
	return i.root
}
// Get returns the entry with the given id (nil if unknown); the error is
// always nil in this implementation.
// NOTE(review): panics if called before the first scan has finished, because
// i.data is still nil — call WaitForReady first; confirm intended contract.
func (i *memIndex) Get(id string) (Entry, error) {
	return i.data.entries[id], nil
}
// WaitForReady blocks until the first background scan has populated the
// index, polling once per second; it always returns nil.
// NOTE(review): i.data is written by the scan goroutine without
// synchronization — confirm whether a mutex/atomic is needed here.
func (i *memIndex) WaitForReady() error {
	for {
		if i.data != nil {
			return nil
		}
		<-time.After(1 * time.Second)
	}
}
// List returns the child entries of the entry with the given id ("" lists
// the top level).
// NOTE(review): like Get, panics before the first scan completes (nil i.data).
func (i *memIndex) List(parent string) ([]Entry, error) {
	return i.data.children[parent], nil
}
func (i *memIndex) updateDir(d *memIndexData, path string, parentId string) error {
dir, err := readDirInfo(path)
if err != nil {
return err
}
dirEntry, err := i.entryFromInfo(dir.info, dir.path, parentId)
if err != nil {
return err
}
d.add(dirEntry.(*memEntry))
<|fim▁hole|>
return nil
}
// updateChildren adds index entries for every child of dir; parentId is the
// entry id of dir itself. Subdirectories are recursed into via updateDir.
func (i *memIndex) updateChildren(d *memIndexData, dir *dirInfo, parentId string) error {
	for _, fi := range dir.children {
		if fi.IsDir() {
			err := i.updateDir(d, filepath.Join(dir.path, fi.Name()), parentId)
			if err != nil {
				return err
			}
		} else {
			fileEntry, err := i.entryFromInfo(fi, filepath.Join(dir.path, fi.Name()), parentId)
			if err != nil {
				return err
			}
			d.add(fileEntry.(*memEntry))
		}
	}
	return nil
}
// entryFromInfo builds an Entry for fi, storing its path relative to the
// index root.
func (i *memIndex) entryFromInfo(fi os.FileInfo, path string, parentId string) (Entry, error) {
	rp, err := filepath.Rel(i.root, path)
	if err != nil {
		return nil, fmt.Errorf("Could not determine relative path of %v in %v", path, i)
	}
	e := newMemEntry(i, rp, fi, parentId)
	return e, nil
}
// update rescans the whole tree into a fresh memIndexData snapshot and swaps
// it in. A failed scan is logged and leaves the previous data in place.
func (i *memIndex) update() {
	log.Infof("Starting index scan for %v", i)
	d := newMemIndexData()
	dir, err := readDirInfo(i.root)
	if err != nil {
		log.Errorf("Error during index scan for %v: %v", i, err)
		return
	}
	err = i.updateChildren(d, dir, "")
	if err != nil {
		log.Errorf("Error during index scan for %v: %v", i, err)
		return
	}
	i.data = d
	log.Infof("Finished index scan for %v. Found %v entries", i, len(i.data.entries))
}
// String implements fmt.Stringer for log messages.
func (i *memIndex) String() string {
	return fmt.Sprintf("filepath.MemIndex(%v)", i.root)
}
/* Index Data */
// memIndexData is one snapshot of a scan: all entries keyed by id, plus a
// children adjacency list keyed by parent entry id ("" = top level).
type memIndexData struct {
	entries map[string]Entry
	children map[string][]Entry
}
// newMemIndexData returns an empty snapshot ready to be filled by a scan.
func newMemIndexData() *memIndexData {
	return &memIndexData{make(map[string]Entry), make(map[string][]Entry)}
}
func (d *memIndexData) add(e *memEntry) {
log.Tracef("Adding index entry %v", e.Path())
d.entries[e.Id()] = e
d.children[e.ParentId()] = append(d.children[e.ParentId()], e)
}<|fim▁end|> | err = i.updateChildren(d, dir, dirEntry.Id()) |
<|file_name|>merge.js<|end_file_name|><|fim▁begin|>// Ember.merge only supports 2 arguments
// Ember.assign isn't available in older Embers
// Ember.$.extend isn't available in Fastboot
import Ember from 'ember';
export default function(...objects) {
let merged = {};
objects.forEach(obj => {<|fim▁hole|> merged = Ember.merge(merged, obj);
});
return merged;
}<|fim▁end|> | |
<|file_name|>test_conf.py<|end_file_name|><|fim▁begin|># Case Conductor is a Test Case Management system.
# Copyright (C) 2011 uTest Inc.
#
# This file is part of Case Conductor.
#
# Case Conductor is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Case Conductor is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Case Conductor. If not, see <http://www.gnu.org/licenses/>.
from unittest2 import TestCase
from mock import patch
class FakeSettings(object):
    """Stand-in for ``django.conf.settings`` that records attribute lookups.

    Successful lookups are appended to ``accessed``; lookups of unknown
    names are appended to ``failed`` and raise ``AttributeError``.
    """
    def __init__(self, **kwargs):
        self.accessed = []
        self.failed = []
        self.values = dict(kwargs)
    def __getattr__(self, attr):
        if attr not in self.values:
            self.failed.append(attr)
            raise AttributeError
        self.accessed.append(attr)
        return self.values[attr]
class ConfigurationTest(TestCase):
    """Tests for the ``Configuration`` settings-with-defaults wrapper."""
    @property
    def cls(self):
        # Imported lazily so merely importing this test module does not
        # require Django settings to be configured.
        from ccui.core.conf import Configuration
        return Configuration
    def test_default(self):
        """The constructor-supplied default is used when the Django setting
        is absent, and the failed settings lookup is recorded."""
        conf = self.cls(SOME_SETTING="some val")
        settings = FakeSettings()
        with patch("ccui.core.conf.settings", settings):
            val = conf.SOME_SETTING
        self.assertEqual(val, "some val")
        self.assertEqual(settings.failed, ["SOME_SETTING"])
def test_no_default(self):
from django.core.exceptions import ImproperlyConfigured
conf = self.cls()<|fim▁hole|> conf.SOME_SETTING
self.assertEqual(settings.failed, ["SOME_SETTING"])
    def test_exists(self):
        """A value present in the Django settings is returned and the
        successful lookup is recorded."""
        conf = self.cls()
        settings = FakeSettings(SOME_SETTING="a val")
        with patch("ccui.core.conf.settings", settings):
            val = conf.SOME_SETTING
        self.assertEqual(val, "a val")
        self.assertEqual(settings.accessed, ["SOME_SETTING"])
def test_default_is_fallback(self):
conf = self.cls(SOME_SETTING="default val")
settings = FakeSettings(SOME_SETTING="set val")
with patch("ccui.core.conf.settings", settings):
val = conf.SOME_SETTING
self.assertEqual(val, "set val")
self.assertEqual(settings.accessed, ["SOME_SETTING"])<|fim▁end|> |
settings = FakeSettings()
with patch("ccui.core.conf.settings", settings):
with self.assertRaises(ImproperlyConfigured): |
<|file_name|>play.js<|end_file_name|><|fim▁begin|><|fim▁hole|>game.PlayScreen = me.ScreenObject.extend({
    /**
     * Set up a fresh play session: reset the score, load the level, spawn
     * the player, create the manager entities, bind input and add the HUD.
     */
    onResetEvent: function() {
        // reset the score
        game.data.score = 0;
        // load the level and place the player at its start position
        me.levelDirector.loadLevel("level01");
        this.resetPlayer(0, 420);
        // world-level manager entities (added at z-order 0)
        var gameTimerManager = me.pool.pull("GameTimerManager", 0, 0, {});
        me.game.world.addChild(gameTimerManager, 0);
        var heroDeathManager = me.pool.pull("HeroDeathManager", 0, 0, {});
        me.game.world.addChild(heroDeathManager, 0);
        var experienceManager = me.pool.pull("ExperienceManager", 0, 0, {});
        me.game.world.addChild(experienceManager, 0);
        // keyboard bindings for movement and actions
        me.input.bindKey(me.input.KEY.RIGHT, "right");
        me.input.bindKey(me.input.KEY.LEFT, "left");
        me.input.bindKey(me.input.KEY.SPACE, "jump");
        me.input.bindKey(me.input.KEY.A, "attack");
        // add our HUD to the game world
        this.HUD = new game.HUD.Container();
        me.game.world.addChild(this.HUD);
    },
    /**
     * action to perform when leaving this screen (state change)
     */
    onDestroyEvent: function() {
        // remove the HUD from the game world
        me.game.world.removeChild(this.HUD);
    },
    // Spawn (or respawn) the player entity at the given world coordinates,
    // at z-order 5 so it renders above the manager entities.
    resetPlayer: function(x, y){
        game.data.player = me.pool.pull("player", x, y, {});
        me.game.world.addChild(game.data.player, 5);
    }
});
// where the game starts<|fim▁end|> | |
<|file_name|>snapshot.go<|end_file_name|><|fim▁begin|>/* -------------------------------------------------------------------------- */
/* Copyright 2002-2020, OpenNebula Project, OpenNebula Systems */
/* */
/* Licensed under the Apache License, Version 2.0 (the "License"); you may */
/* not use this file except in compliance with the License. You may obtain */
/* a copy of the License at */
/* */
/* http://www.apache.org/licenses/LICENSE-2.0 */
/* */
/* Unless required by applicable law or agreed to in writing, software */
/* distributed under the License is distributed on an "AS IS" BASIS, */
/* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. */
/* See the License for the specific language governing permissions and */
/* limitations under the License. */
/*--------------------------------------------------------------------------- */
package shared
// There are two types of snapshots: a user can take a snapshot of a VM, or
// of the VM's disks.
// Snapshot is the part common to the external snapshot structures.
type Snapshot struct {
	Children string `xml:"CHILDREN"` //minOccur=0
	Active string `xml:"ACTIVE"` //minOccur=0
	Date int `xml:"DATE"`
	ID int `xml:"ID"`
	Name string `xml:"NAME"` //minOccur=0
	Parent int `xml:"PARENT"`
	Size int `xml:"SIZE"`
}
// DiskSnapshot represent a disk snapshot
type DiskSnapshot struct {
AllowOrphans string `xml:"ALLOW_ORPHANS"`<|fim▁hole|> Snapshots []Snapshot `xml:"SNAPSHOT"`
}<|fim▁end|> | CurrentBase int `xml:"CURRENT_BASE"`
NextSnapshot int `xml:"NEXT_SNAPSHOT"` |
<|file_name|>fieldLabel.tsx<|end_file_name|><|fim▁begin|><|fim▁hole|>
import space from 'app/styles/space';
// Forward every valid DOM prop except our styling-only `disabled` flag, so
// it does not leak onto the underlying DOM element.
const shouldForwardProp = p => p !== 'disabled' && isPropValid(p);
// Label row for a form field: a two-column grid, rendered in the disabled
// color when the field is disabled.
const FieldLabel = styled('div', {shouldForwardProp})<{disabled?: boolean}>`
  color: ${p => (!p.disabled ? p.theme.textColor : p.theme.disabled)};
  display: grid;
  grid-gap: ${space(0.5)};
  grid-template-columns: repeat(2, max-content);
  line-height: 16px;
`;
export default FieldLabel;<|fim▁end|> | import isPropValid from '@emotion/is-prop-valid';
import styled from '@emotion/styled'; |
<|file_name|>UndefinedSampleLengthException.java<|end_file_name|><|fim▁begin|>package dataset;
<|fim▁hole|> private static final long serialVersionUID = 1L;
}<|fim▁end|> |
public class UndefinedSampleLengthException extends Exception { |
<|file_name|>vistoolspline.cpp<|end_file_name|><|fim▁begin|>/************************************************************************
**
** @file vistoolspline.cpp
** @author Roman Telezhynskyi <dismine(at)gmail.com>
** @date 18 8, 2014
**
** @brief
** @copyright
** This source code is part of the Valentina project, a pattern making
** program, whose allow create and modeling patterns of clothing.
** Copyright (C) 2013-2015 Valentina project
** <https://bitbucket.org/dismine/valentina> All Rights Reserved.
**
** Valentina is free software: you can redistribute it and/or modify
** it under the terms of the GNU General Public License as published by
** the Free Software Foundation, either version 3 of the License, or
** (at your option) any later version.
**
** Valentina is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
** GNU General Public License for more details.
**
** You should have received a copy of the GNU General Public License
** along with Valentina. If not, see <http://www.gnu.org/licenses/>.
**
*************************************************************************/
#include "vistoolspline.h"
#include <QLineF>
#include <QPainterPath>
#include <QSharedPointer>
#include <Qt>
#include <new>
#include "../ifc/ifcdef.h"
#include "../vgeometry/vabstractcurve.h"
#include "../vgeometry/vgeometrydef.h"
#include "../vgeometry/vpointf.h"
#include "../vgeometry/vspline.h"
#include "../vpatterndb/vcontainer.h"
#include "../vwidgets/vcontrolpointspline.h"
#include "../vwidgets/scalesceneitems.h"
#include "../visualization.h"
#include "vispath.h"
#include "../vmisc/vmodifierkey.h"
const int EMPTY_ANGLE = -1;
//---------------------------------------------------------------------------------------------------------------------
VisToolSpline::VisToolSpline(const VContainer *data, QGraphicsItem *parent)
: VisPath(data, parent),
object4Id(NULL_ID),
point1(nullptr),
point4(nullptr),
angle1(EMPTY_ANGLE),
angle2(EMPTY_ANGLE),
kAsm1(1),
kAsm2(1),
kCurve(1),
isLeftMousePressed(false),
p2Selected(false),
p3Selected(false),
p2(),
p3(),<|fim▁hole|>{
point1 = InitPoint(supportColor, this);
point4 = InitPoint(supportColor, this); //-V656
auto *controlPoint1 = new VControlPointSpline(1, SplinePointPosition::FirstPoint, this);
controlPoint1->hide();
controlPoints.append(controlPoint1);
auto *controlPoint2 = new VControlPointSpline(1, SplinePointPosition::LastPoint, this);
controlPoint2->hide();
controlPoints.append(controlPoint2);
}
//---------------------------------------------------------------------------------------------------------------------
// Destructor: clear any tooltip this visualization has shown.
VisToolSpline::~VisToolSpline()
{
    emit ToolTip(QString());
}
//---------------------------------------------------------------------------------------------------------------------
// Redraws the interactive spline preview. While the user is placing the
// curve (Mode::Creation) the control points follow the mouse; once both end
// points are known the spline is drawn either from the picked control points
// or, when explicit angles have been set, from angles/coefficients.
void VisToolSpline::RefreshGeometry()
{
    //Radius of point circle, but little bigger. Need handle with hover sizes.
    const static qreal radius = ScaledRadius(SceneScale(qApp->getCurrentScene()))*1.5;
    if (object1Id > NULL_ID)
    {
        const auto first = Visualization::data->GeometricObject<VPointF>(object1Id);
        DrawPoint(point1, static_cast<QPointF>(*first), supportColor);
        if (mode == Mode::Creation)
        {
            // Drag the first control point (P2) with the mouse until released.
            if (isLeftMousePressed && not p2Selected)
            {
                p2 = Visualization::scenePos;
                controlPoints[0]->RefreshCtrlPoint(1, SplinePointPosition::FirstPoint, p2,
                                                   static_cast<QPointF>(*first));
                if (not controlPoints[0]->isVisible())
                {
                    // Only show the handle once dragged outside the point circle;
                    // otherwise snap P2 back onto the first point.
                    if (QLineF(static_cast<QPointF>(*first), p2).length() > radius)
                    {
                        controlPoints[0]->show();
                    }
                    else
                    {
                        p2 = static_cast<QPointF>(*first);
                    }
                }
            }
            else
            {
                p2Selected = true;
            }
        }
        if (object4Id <= NULL_ID)
        {
            // Only the first point is fixed yet: preview toward the cursor.
            VSpline spline(*first, p2, Visualization::scenePos, VPointF(Visualization::scenePos));
            spline.SetApproximationScale(m_approximationScale);
            DrawPath(this, spline.GetPath(), mainColor, lineStyle, Qt::RoundCap);
        }
        else
        {
            const auto second = Visualization::data->GeometricObject<VPointF>(object4Id);
            DrawPoint(point4, static_cast<QPointF>(*second), supportColor);
            if (mode == Mode::Creation)
            {
                // Drag the second control point (P3), mirrored about the last
                // point so the handle points away from the cursor.
                if (isLeftMousePressed && not p3Selected)
                {
                    QLineF ctrlLine (static_cast<QPointF>(*second), Visualization::scenePos);
                    ctrlLine.setAngle(ctrlLine.angle()+180);
                    p3 = ctrlLine.p2();
                    controlPoints[1]->RefreshCtrlPoint(1, SplinePointPosition::LastPoint, p3,
                                                       static_cast<QPointF>(*second));
                    if (not controlPoints[1]->isVisible())
                    {
                        if (QLineF(static_cast<QPointF>(*second), p3).length() > radius)
                        {
                            controlPoints[1]->show();
                        }
                        else
                        {
                            p3 = static_cast<QPointF>(*second);
                        }
                    }
                }
                else
                {
                    p3Selected = true;
                }
            }
            // EMPTY_ANGLE in either slot means the angles were never set, so
            // build the spline from the interactive control points instead.
            if (VFuzzyComparePossibleNulls(angle1, EMPTY_ANGLE) || VFuzzyComparePossibleNulls(angle2, EMPTY_ANGLE))
            {
                VSpline spline(*first, p2, p3, *second);
                spline.SetApproximationScale(m_approximationScale);
                DrawPath(this, spline.GetPath(), mainColor, lineStyle, Qt::RoundCap);
            }
            else
            {
                VSpline spline(*first, *second, angle1, angle2, kAsm1, kAsm2, kCurve);
                spline.SetApproximationScale(m_approximationScale);
                DrawPath(this, spline.GetPath(), spline.DirectionArrows(), mainColor, lineStyle, Qt::RoundCap);
                Visualization::toolTip = tr("Use <b>%1</b> for sticking angle!")
                        .arg(VModifierKey::Shift());
                emit ToolTip(Visualization::toolTip);
            }
        }
    }
}
//---------------------------------------------------------------------------------------------------------------------
// Sets the id of the spline's last point (P4).
void VisToolSpline::setObject4Id(const quint32 &value)
{
    object4Id = value;
}
//---------------------------------------------------------------------------------------------------------------------
// Sets the angle at the first point; EMPTY_ANGLE means "not set".
void VisToolSpline::SetAngle1(const qreal &value)
{
    angle1 = value;
}
//---------------------------------------------------------------------------------------------------------------------
// Sets the angle at the second point; EMPTY_ANGLE means "not set".
void VisToolSpline::SetAngle2(const qreal &value)
{
    angle2 = value;
}
//---------------------------------------------------------------------------------------------------------------------
// Sets the first asymmetry coefficient used with explicit angles.
void VisToolSpline::SetKAsm1(const qreal &value)
{
    kAsm1 = value;
}
//---------------------------------------------------------------------------------------------------------------------
// Sets the second asymmetry coefficient used with explicit angles.
void VisToolSpline::SetKAsm2(const qreal &value)
{
    kAsm2 = value;
}
//---------------------------------------------------------------------------------------------------------------------
// Sets the curvature coefficient used with explicit angles.
void VisToolSpline::SetKCurve(const qreal &value)
{
    kCurve = value;
}
//---------------------------------------------------------------------------------------------------------------------
// Returns the current position of the first control point.
QPointF VisToolSpline::GetP2() const
{
    return p2;
}
//---------------------------------------------------------------------------------------------------------------------
// Returns the current position of the second control point.
QPointF VisToolSpline::GetP3() const
{
    return p3;
}
//---------------------------------------------------------------------------------------------------------------------
// Records that the left mouse button is down during interactive creation;
// RefreshGeometry uses this flag to drag the control points.
void VisToolSpline::MouseLeftPressed()
{
    if (mode == Mode::Creation)
    {
        isLeftMousePressed = true;
    }
}
//---------------------------------------------------------------------------------------------------------------------
void VisToolSpline::MouseLeftReleased()
{
if (mode == Mode::Creation)
{
isLeftMousePressed = false;
RefreshGeometry();
}
}<|fim▁end|> | controlPoints() |
<|file_name|>script_msg.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use AnimationState;
use CompositorEvent;
use DocumentState;
use IFrameLoadInfo;
use IFrameLoadInfoWithData;
use LayoutControlMsg;
use LoadData;
use MozBrowserEvent;
use WorkerGlobalScopeInit;
use WorkerScriptLoadOrigin;
use canvas_traits::canvas::CanvasMsg;
use devtools_traits::{ScriptToDevtoolsControlMsg, WorkerId};
use euclid::{Point2D, Size2D, TypedSize2D};
use gfx_traits::Epoch;
use ipc_channel::ipc::{IpcReceiver, IpcSender};
use msg::constellation_msg::{BrowsingContextId, FrameType, PipelineId, TraversalDirection};
use msg::constellation_msg::{Key, KeyModifiers, KeyState};
use net_traits::CoreResourceMsg;
use net_traits::request::RequestInit;
use net_traits::storage_thread::StorageType;
use servo_url::ImmutableOrigin;
use servo_url::ServoUrl;
use style_traits::CSSPixel;
use style_traits::cursor::CursorKind;
use style_traits::viewport::ViewportConstraints;
/// Messages from the layout thread to the constellation.
#[derive(Deserialize, Serialize)]
pub enum LayoutMsg {
    /// Indicates whether this pipeline is currently running animations.
    ChangeRunningAnimationsState(PipelineId, AnimationState),
    /// Inform the constellation of the size of each iframe's viewport.
    IFrameSizes(Vec<(BrowsingContextId, TypedSize2D<f32, CSSPixel>)>),
    /// Requests that the constellation inform the compositor that it needs to record
    /// the time when the frame with the given ID (epoch) is painted.
    PendingPaintMetric(PipelineId, Epoch),
    /// Requests that the constellation inform the compositor of a cursor change.
    SetCursor(CursorKind),
    /// Notifies the constellation that the viewport has been constrained in some manner.
    ViewportConstrained(PipelineId, ViewportConstraints),
}
/// Whether a DOM event was prevented by web content.
#[derive(Deserialize, Serialize)]
pub enum EventResult {
    /// Allowed by web content.
    DefaultAllowed,
    /// Prevented by web content.
    DefaultPrevented,
}
/// A log entry reported to the constellation.
/// We don't report all log entries, just serious ones.
/// We need a separate type for this because `LogLevel` isn't serializable.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum LogEntry {
    /// Panic, with a reason and backtrace.
    Panic(String, String),
    /// Error, with a reason.
    Error(String),
    /// Warning, with a reason.
    Warn(String),
}
/// Messages from the script to the constellation.
#[derive(Deserialize, Serialize)]
pub enum ScriptMsg {
/// Requests are sent to constellation and fetches are checked manually
/// for cross-origin loads
InitiateNavigateRequest(RequestInit, /* cancellation_chan */ IpcReceiver<()>),
/// Broadcast a storage event to every same-origin pipeline.
/// The strings are key, old value and new value.
BroadcastStorageEvent(StorageType, ServoUrl, Option<String>, Option<String>, Option<String>),
/// Indicates whether this pipeline is currently running animations.
ChangeRunningAnimationsState(AnimationState),
/// Requests that a new 2D canvas thread be created. (This is done in the constellation because
/// 2D canvases may use the GPU and we don't want to give untrusted content access to the GPU.)
CreateCanvasPaintThread(Size2D<i32>, IpcSender<IpcSender<CanvasMsg>>),
/// Notifies the constellation that this frame has received focus.
Focus,
/// Forward an event that was sent to the parent window.
ForwardEvent(PipelineId, CompositorEvent),
/// Requests that the constellation retrieve the current contents of the clipboard
GetClipboardContents(IpcSender<String>),<|fim▁hole|> /// <head> tag finished parsing
HeadParsed,
/// All pending loads are complete, and the `load` event for this pipeline
/// has been dispatched.
LoadComplete,
/// A new load has been requested, with an option to replace the current entry once loaded
/// instead of adding a new entry.
LoadUrl(LoadData, bool),
/// Abort loading after sending a LoadUrl message.
AbortLoadUrl,
/// Post a message to the currently active window of a given browsing context.
PostMessage(BrowsingContextId, Option<ImmutableOrigin>, Vec<u8>),
/// Dispatch a mozbrowser event to the parent of a mozbrowser iframe.
MozBrowserEvent(PipelineId, MozBrowserEvent),
/// HTMLIFrameElement Forward or Back traversal.
TraverseHistory(TraversalDirection),
/// Gets the length of the joint session history from the constellation.
JointSessionHistoryLength(IpcSender<u32>),
/// Favicon detected
NewFavicon(ServoUrl),
/// Status message to be displayed in the chrome, eg. a link URL on mouseover.
NodeStatus(Option<String>),
/// Notification that this iframe should be removed.
/// Returns a list of pipelines which were closed.
RemoveIFrame(BrowsingContextId, IpcSender<Vec<PipelineId>>),
/// Change pipeline visibility
SetVisible(bool),
/// Notifies constellation that an iframe's visibility has been changed.
VisibilityChangeComplete(bool),
/// A load has been requested in an IFrame.
ScriptLoadedURLInIFrame(IFrameLoadInfoWithData),
/// A load of the initial `about:blank` has been completed in an IFrame.
ScriptNewIFrame(IFrameLoadInfo, IpcSender<LayoutControlMsg>),
/// Requests that the constellation set the contents of the clipboard
SetClipboardContents(String),
/// Mark a new document as active
ActivateDocument,
/// Set the document state for a pipeline (used by screenshot / reftests)
SetDocumentState(DocumentState),
/// Update the pipeline Url, which can change after redirections.
SetFinalUrl(ServoUrl),
/// Check if an alert dialog box should be presented
Alert(String, IpcSender<bool>),
/// Set title of current page
/// <https://html.spec.whatwg.org/multipage/#document.title>
SetTitle(Option<String>),
/// Send a key event
SendKeyEvent(Option<char>, Key, KeyState, KeyModifiers),
/// Get Window Informations size and position
GetClientWindow(IpcSender<(Size2D<u32>, Point2D<i32>)>),
/// Move the window to a point
MoveTo(Point2D<i32>),
/// Resize the window to size
ResizeTo(Size2D<u32>),
/// Script has handled a touch event, and either prevented or allowed default actions.
TouchEventProcessed(EventResult),
/// A log entry, with the top-level browsing context id and thread name
LogEntry(Option<String>, LogEntry),
/// Notifies the constellation that this pipeline has exited.
PipelineExited,
/// Send messages from postMessage calls from serviceworker
/// to constellation for storing in service worker manager
ForwardDOMMessage(DOMMessage, ServoUrl),
/// Store the data required to activate a service worker for the given scope
RegisterServiceWorker(ScopeThings, ServoUrl),
/// Enter or exit fullscreen
SetFullscreenState(bool),
/// Get the screen size (pixel)
GetScreenSize(IpcSender<(Size2D<u32>)>),
/// Get the available screen size (pixel)
GetScreenAvailSize(IpcSender<(Size2D<u32>)>),
/// Requests that the compositor shut down.
Exit,
}
/// Entities required to spawn service workers
#[derive(Clone, Deserialize, Serialize)]
pub struct ScopeThings {
/// script resource url
pub script_url: ServoUrl,
/// network load origin of the resource
pub worker_load_origin: WorkerScriptLoadOrigin,
/// base resources required to create worker global scopes
pub init: WorkerGlobalScopeInit,
/// the port to receive devtools message from
pub devtools_chan: Option<IpcSender<ScriptToDevtoolsControlMsg>>,
/// service worker id
pub worker_id: WorkerId,
}
/// Message that gets passed to service worker scope on postMessage
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct DOMMessage(pub Vec<u8>);
/// Channels to allow service worker manager to communicate with constellation and resource thread
pub struct SWManagerSenders {
/// sender for communicating with constellation
pub swmanager_sender: IpcSender<SWManagerMsg>,
/// sender for communicating with resource thread
pub resource_sender: IpcSender<CoreResourceMsg>,
}
/// Messages sent to Service Worker Manager thread
#[derive(Deserialize, Serialize)]
pub enum ServiceWorkerMsg {
/// Message to register the service worker
RegisterServiceWorker(ScopeThings, ServoUrl),
/// Timeout message sent by active service workers
Timeout(ServoUrl),
/// Message sent by constellation to forward to a running service worker
ForwardDOMMessage(DOMMessage, ServoUrl),
/// Exit the service worker manager
Exit,
}
/// Messages outgoing from the Service Worker Manager thread to constellation
#[derive(Deserialize, Serialize)]
pub enum SWManagerMsg {
/// Provide the constellation with a means of communicating with the Service Worker Manager
OwnSender(IpcSender<ServiceWorkerMsg>),
}<|fim▁end|> | /// Get the browsing context id for a given pipeline.
GetBrowsingContextId(PipelineId, IpcSender<Option<BrowsingContextId>>),
/// Get the parent info for a given pipeline.
GetParentInfo(PipelineId, IpcSender<Option<(PipelineId, FrameType)>>), |
<|file_name|>word_break.cpp<|end_file_name|><|fim▁begin|>#include "common_header.h"
namespace {
using ArrayType = std::vector<std::string>;
using DictType = std::unordered_set<std::string_view>;
/** Word Break Problem
*
* @reference Word Break Problem | DP-32
* https://www.geeksforgeeks.org/word-break-problem-dp-32/
*
* Given an input string and a dictionary of words, find out if the input string can be
* segmented into a space-separated sequence of dictionary words.
*/
bool WordBreak(const std::string_view a_string, const DictType &words) {
std::vector<bool> word_breaks(a_string.size() + 1, false);
word_breaks[0] = true;
for (std::size_t i = 0; i <= a_string.size(); ++i) {
if (word_breaks[i]) {
for (auto j = i; j <= a_string.size(); ++j) {
if (word_breaks[j] == false and words.find(a_string.substr(i, j - i)) != words.cend()) {
word_breaks[j] = true;
}
}
}
}
return word_breaks[a_string.size()];<|fim▁hole|>}
/**
* @reference Word Break Problem | (Trie solution)
* https://www.geeksforgeeks.org/word-break-problem-trie-solution/
*
* @note This solution replaces unordered_set with Trie.
*/
/**
* @reference Word Break Problem using Backtracking
* https://www.geeksforgeeks.org/word-break-problem-using-backtracking/
*
* Given a valid sentence without any spaces between the words and a dictionary of valid
* English words, find all possible ways to break the sentence in individual dictionary
* words.
*/
// Recursively tries every dictionary word that is a prefix of a_string;
// prefix accumulates the space-separated sentence built so far, and each
// complete segmentation is appended to results.
void AllWayToWordBreak(const std::string &a_string, const DictType &words,
                       const std::string &prefix, ArrayType &results) {
    for (std::size_t i = 1; i <= a_string.size(); ++i) {
        const auto substring = a_string.substr(0, i);
        if (words.find(substring) != words.cend()) {
            const auto new_prefix = prefix + substring;
            if (i == a_string.size()) {
                // Whole string consumed: record one complete segmentation.
                results.emplace_back(new_prefix);
            } else {
                AllWayToWordBreak(
                    a_string.substr(i, a_string.size() - i), words, new_prefix + " ", results);
            }
        }
    }
}
// Returns every way to split a_string into space-separated dictionary words.
inline auto
AllWayToWordBreak(const std::string &a_string, const DictType &words) {
    ArrayType results;
    AllWayToWordBreak(a_string, words, "", results);
    return results;
}
/**
* @reference Minimum Word Break
* https://www.geeksforgeeks.org/minimum-word-break/
*
* Given a string s, break s such that every substring of the partition can be found in
* the dictionary. Return the minimum break needed.
*/
bool MinimumWordBreak(const std::string_view a_string, const DictType &words) {
std::vector<int> word_breaks(a_string.size() + 1, 0);
word_breaks[0] = 1;
for (std::size_t i = 0; i <= a_string.size(); ++i) {
if (word_breaks[i]) {
const auto number_breaks = word_breaks[i] + 1;
for (auto j = i; j <= a_string.size(); ++j) {
if (words.find(a_string.substr(i, j - i)) != words.cend()) {
if ((not word_breaks[j]) or (word_breaks[i] + 1 < word_breaks[j])) {
word_breaks[j] = number_breaks;
}
}
}
}
}
return word_breaks[a_string.size()] - 1;
}
/**
* @reference Gayle Laakmann McDowell. Cracking the Coding Interview, Fifth Edition.
* Questions 17.14.
*
* Given a dictionary (a list of words), design an algorithm to find the optimal way of
* "unconcatenating" a sequence of words. In this case, "optimal" is defined to be the
* parsing which minimizes the number of unrecognized sequences of characters.
*/
}//namespace
const DictType DICTIONARY1 =
{"mobile", "samsung", "sam", "sung", "man", "mango", "icecream", "and", "go", "i", "like", "ice", "cream"};
THE_BENCHMARK(WordBreak, "ilike", DICTIONARY1);
SIMPLE_TEST(WordBreak, TestSAMPLE0, true, "", DICTIONARY1);
SIMPLE_TEST(WordBreak, TestSAMPLE1, true, "ilike", DICTIONARY1);
SIMPLE_TEST(WordBreak, TestSAMPLE2, true, "ilikesamsung", DICTIONARY1);
SIMPLE_TEST(WordBreak, TestSAMPLE3, true, "iiiiiiii", DICTIONARY1);
SIMPLE_TEST(WordBreak, TestSAMPLE4, true, "ilikelikeimangoiii", DICTIONARY1);
SIMPLE_TEST(WordBreak, TestSAMPLE5, true, "samsungandmango", DICTIONARY1);
SIMPLE_TEST(WordBreak, TestSAMPLE6, false, "samsungandmangok", DICTIONARY1);
const ArrayType EXPECTED1 =
{"i like ice cream and man go", "i like ice cream and mango", "i like icecream and man go", "i like icecream and mango"};
const ArrayType EXPECTED2 =
{"i like sam sung mobile", "i like samsung mobile"};
THE_BENCHMARK(AllWayToWordBreak, "ilikeicecreamandmango", DICTIONARY1);
SIMPLE_TEST(AllWayToWordBreak, TestSAMPLE1, EXPECTED1,
"ilikeicecreamandmango", DICTIONARY1);
SIMPLE_TEST(AllWayToWordBreak, TestSAMPLE2, EXPECTED2,
"ilikesamsungmobile", DICTIONARY1);
const DictType DICTIONARY2 =
{"Cat", "Mat", "Ca", "tM", "at", "C", "Dog", "og", "Do"};
THE_BENCHMARK(MinimumWordBreak, "CatMat", DICTIONARY1);
SIMPLE_TEST(MinimumWordBreak, TestSAMPLE0, 0, "", DICTIONARY2);
SIMPLE_TEST(MinimumWordBreak, TestSAMPLE1, 1, "CatMat", DICTIONARY2);
SIMPLE_TEST(MinimumWordBreak, TestSAMPLE2, 2, "CatMatat", DICTIONARY2);
SIMPLE_TEST(MinimumWordBreak, TestSAMPLE3, -1, "samsungandmangok", DICTIONARY2);<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>mod actor_state;
mod character;
mod openable;
mod respawnable;
mod stats_item;
pub use actor_state::*;<|fim▁hole|>pub use openable::*;
pub use respawnable::*;
pub use stats_item::*;<|fim▁end|> | pub use character::*; |
<|file_name|>runtests.py<|end_file_name|><|fim▁begin|>#
# (c) Simon Marlow 2002
#
import sys
import os
import string
import getopt
import platform
import time
import re
from testutil import *
from testglobals import *
# Readline sometimes spews out ANSI escapes for some values of TERM,
# which result in test failures. Thus set TERM to a nice, simple, safe
# value.
os.environ['TERM'] = 'vt100'
if sys.platform == "cygwin":
cygwin = True
else:
cygwin = False
global config
config = getConfig() # get it from testglobals
# -----------------------------------------------------------------------------
# cmd-line options
long_options = [
"config=", # config file
"rootdir=", # root of tree containing tests (default: .)
"output-summary=", # file in which to save the (human-readable) summary
"only=", # just this test (can be give multiple --only= flags)
"way=", # just this way
"skipway=", # skip this way
"threads=", # threads to run simultaneously
]
opts, args = getopt.getopt(sys.argv[1:], "e:", long_options)
for opt,arg in opts:
if opt == '--config':
execfile(arg)
# -e is a string to execute from the command line. For example:
# testframe -e 'config.compiler=ghc-5.04'
if opt == '-e':
exec arg
if opt == '--rootdir':
config.rootdirs.append(arg)
if opt == '--output-summary':
config.output_summary = arg
if opt == '--only':
config.only.append(arg)
if opt == '--way':
if (arg not in config.run_ways and arg not in config.compile_ways and arg not in config.other_ways):
sys.stderr.write("ERROR: requested way \'" +
arg + "\' does not exist\n")
sys.exit(1)
config.cmdline_ways = [arg] + config.cmdline_ways
if (arg in config.other_ways):
config.run_ways = [arg] + config.run_ways
config.compile_ways = [arg] + config.compile_ways
if opt == '--skipway':
if (arg not in config.run_ways and arg not in config.compile_ways and arg not in config.other_ways):
sys.stderr.write("ERROR: requested way \'" +
arg + "\' does not exist\n")
sys.exit(1)
config.other_ways = filter(neq(arg), config.other_ways)
config.run_ways = filter(neq(arg), config.run_ways)<|fim▁hole|> config.threads = int(arg)
config.use_threads = 1
if config.use_threads == 1:
# Trac #1558 says threads don't work in python 2.4.4, but do
# in 2.5.2. Probably >= 2.5 is sufficient, but let's be
# conservative here.
# Some versions of python have things like '1c1' for some of
# these components (see trac #3091), but int() chokes on the
# 'c1', so we drop it.
(maj, min, pat) = platform.python_version_tuple()
# We wrap maj, min, and pat in str() to work around a bug in python
# 2.6.1
maj = int(re.sub('[^0-9].*', '', str(maj)))
min = int(re.sub('[^0-9].*', '', str(min)))
pat = int(re.sub('[^0-9].*', '', str(pat)))
if (maj, min, pat) < (2, 5, 2):
print "Warning: Ignoring request to use threads as python version < 2.5.2"
config.use_threads = 0
if windows:
print "Warning: Ignoring request to use threads as running on Windows"
config.use_threads = 0
# Try to use UTF8
if windows:
import ctypes
if cygwin:
# Is this actually right? Which calling convention does it use?
# As of the time of writing, ctypes.windll doesn't exist in the
# cygwin python, anyway.
mydll = ctypes.cdll
else:
mydll = ctypes.windll
# This actually leaves the terminal in codepage 65001 (UTF8) even
# after python terminates. We ought really remember the old codepage
# and set it back.
if mydll.kernel32.SetConsoleCP(65001) == 0:
raise Exception("Failure calling SetConsoleCP(65001)")
if mydll.kernel32.SetConsoleOutputCP(65001) == 0:
raise Exception("Failure calling SetConsoleOutputCP(65001)")
else:
# Try and find a utf8 locale to use
# First see if we already have a UTF8 locale
h = os.popen('locale | grep LC_CTYPE | grep -i utf', 'r')
v = h.read()
h.close()
if v == '':
# We don't, so now see if 'locale -a' works
h = os.popen('locale -a', 'r')
v = h.read()
h.close()
if v != '':
# If it does then use the first utf8 locale that is available
h = os.popen('locale -a | grep -i "utf8\|utf-8" 2>/dev/null', 'r')
v = h.readline().strip()
h.close()
if v != '':
os.environ['LC_ALL'] = v
print "setting LC_ALL to", v
else:
print 'WARNING: No UTF8 locale found.'
print 'You may get some spurious test failures.'
# This has to come after arg parsing as the args can change the compiler
get_compiler_info()
# Can't import this earlier as we need to know if threading will be
# enabled or not
from testlib import *
# On Windows we need to set $PATH to include the paths to all the DLLs
# in order for the dynamic library tests to work.
if windows or darwin:
pkginfo = getStdout([config.ghc_pkg, 'dump'])
topdir = re.sub('\\\\','/',getStdout([config.compiler, '--print-libdir'])).rstrip()
for line in pkginfo.split('\n'):
if line.startswith('library-dirs:'):
path = line.rstrip()
path = re.sub('^library-dirs: ', '', path)
path = re.sub('\\$topdir', topdir, path)
if path.startswith('"'):
path = re.sub('^"(.*)"$', '\\1', path)
path = re.sub('\\\\(.)', '\\1', path)
if windows:
if cygwin:
# On cygwin we can't put "c:\foo" in $PATH, as : is a
# field separator. So convert to /cygdrive/c/foo instead.
# Other pythons use ; as the separator, so no problem.
path = re.sub('([a-zA-Z]):', '/cygdrive/\\1', path)
path = re.sub('\\\\', '/', path)
os.environ['PATH'] = os.pathsep.join([path, os.environ.get("PATH", "")])
else:
# darwin
os.environ['DYLD_LIBRARY_PATH'] = os.pathsep.join([path, os.environ.get("DYLD_LIBRARY_PATH", "")])
global testopts_local
testopts_local.x = TestOptions()
global thisdir_testopts
thisdir_testopts = getThisDirTestOpts()
if config.use_threads:
t.lock = threading.Lock()
t.thread_pool = threading.Condition(t.lock)
t.running_threads = 0
# if timeout == -1 then we try to calculate a sensible value
if config.timeout == -1:
config.timeout = int(read_no_crs(config.top + '/timeout/calibrate.out'))
print 'Timeout is ' + str(config.timeout)
# -----------------------------------------------------------------------------
# The main dude
if config.rootdirs == []:
config.rootdirs = ['.']
t_files = findTFiles(config.rootdirs)
print 'Found', len(t_files), '.T files...'
t = getTestRun()
# Avoid cmd.exe built-in 'date' command on Windows
if not windows:
t.start_time = chop(os.popen('date').read())
else:
t.start_time = 'now'
print 'Beginning test run at', t.start_time
# set stdout to unbuffered (is this the best way to do it?)
sys.stdout.flush()
sys.stdout = os.fdopen(sys.__stdout__.fileno(), "w", 0)
# First collect all the tests to be run
for file in t_files:
print '====> Scanning', file
newTestDir(os.path.dirname(file))
try:
execfile(file)
except:
print '*** framework failure: found an error while executing ', file, ':'
t.n_framework_failures = t.n_framework_failures + 1
traceback.print_exc()
# Now run all the tests
if config.use_threads:
t.running_threads=0
for oneTest in allTests:
oneTest()
if config.use_threads:
t.thread_pool.acquire()
while t.running_threads>0:
t.thread_pool.wait()
t.thread_pool.release()
summary(t, sys.stdout)
if config.output_summary != '':
summary(t, open(config.output_summary, 'w'))
sys.exit(0)<|fim▁end|> | config.compile_ways = filter(neq(arg), config.compile_ways)
if opt == '--threads': |
<|file_name|>HTMLAnchorElement.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 1999 Lars Knoll ([email protected])
* (C) 1999 Antti Koivisto ([email protected])
* (C) 2000 Simon Hausmann <[email protected]>
* Copyright (C) 2003, 2006, 2007, 2008, 2009, 2010 Apple Inc. All rights reserved.
* (C) 2006 Graham Dennis ([email protected])
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public License
* along with this library; see the file COPYING.LIB. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include "config.h"
#include "HTMLAnchorElement.h"
#ifdef ARTEMIS
#include "ScriptEventListener.h"
#endif
#include "Attribute.h"
#include "DNS.h"
#include "EventNames.h"
#include "Frame.h"
#include "FrameLoaderClient.h"
#include "FrameLoaderTypes.h"
#include "HTMLImageElement.h"
#include "HTMLNames.h"
#include "HTMLParserIdioms.h"
#include "KeyboardEvent.h"
#include "MouseEvent.h"
#include "Page.h"
#include "PingLoader.h"
#include "RenderImage.h"
#include "SecurityOrigin.h"
#include "SecurityPolicy.h"
#include "Settings.h"
namespace WebCore {
using namespace HTMLNames;
HTMLAnchorElement::HTMLAnchorElement(const QualifiedName& tagName, Document* document)
: HTMLElement(tagName, document)
, m_hasRootEditableElementForSelectionOnMouseDown(false)
, m_wasShiftKeyDownOnMouseDown(false)
, m_linkRelations(0)
, m_cachedVisitedLinkHash(0)
{
}
PassRefPtr<HTMLAnchorElement> HTMLAnchorElement::create(Document* document)
{
return adoptRef(new HTMLAnchorElement(aTag, document));
}
PassRefPtr<HTMLAnchorElement> HTMLAnchorElement::create(const QualifiedName& tagName, Document* document)
{
return adoptRef(new HTMLAnchorElement(tagName, document));
}
HTMLAnchorElement::~HTMLAnchorElement()
{
clearRootEditableElementForSelectionOnMouseDown();
}
// This function does not allow leading spaces before the port number.
static unsigned parsePortFromStringPosition(const String& value, unsigned portStart, unsigned& portEnd)
{
portEnd = portStart;
while (isASCIIDigit(value[portEnd]))
++portEnd;
return value.substring(portStart, portEnd - portStart).toUInt();
}
bool HTMLAnchorElement::supportsFocus() const
{
if (rendererIsEditable())
return HTMLElement::supportsFocus();
// If not a link we should still be able to focus the element if it has tabIndex.
return isLink() || HTMLElement::supportsFocus();
}
bool HTMLAnchorElement::isMouseFocusable() const
{
// Anchor elements should be mouse focusable, https://bugs.webkit.org/show_bug.cgi?id=26856
#if !PLATFORM(GTK) && !PLATFORM(QT) && !PLATFORM(EFL)
if (isLink())
// Only allow links with tabIndex or contentEditable to be mouse focusable.
return HTMLElement::supportsFocus();
#endif
// Allow tab index etc to control focus.
return HTMLElement::isMouseFocusable();
}
bool HTMLAnchorElement::isKeyboardFocusable(KeyboardEvent* event) const
{
if (!isLink())
return HTMLElement::isKeyboardFocusable(event);
if (!isFocusable())
return false;
if (!document()->frame())
return false;
if (!document()->frame()->eventHandler()->tabsToLinks(event))
return false;
return hasNonEmptyBoundingBox();
}
static void appendServerMapMousePosition(String& url, Event* event)
{
if (!event->isMouseEvent())
return;
ASSERT(event->target());
Node* target = event->target()->toNode();
ASSERT(target);
if (!target->hasTagName(imgTag))
return;
HTMLImageElement* imageElement = static_cast<HTMLImageElement*>(event->target()->toNode());
if (!imageElement || !imageElement->isServerMap())
return;
RenderImage* renderer = toRenderImage(imageElement->renderer());
if (!renderer)
return;
// FIXME: This should probably pass true for useTransforms.
FloatPoint absolutePosition = renderer->absoluteToLocal(FloatPoint(static_cast<MouseEvent*>(event)->pageX(), static_cast<MouseEvent*>(event)->pageY()));
int x = absolutePosition.x();
int y = absolutePosition.y();
url += "?";
url += String::number(x);
url += ",";
url += String::number(y);
}
void HTMLAnchorElement::defaultEventHandler(Event* event)
{
if (isLink()) {
if (focused() && isEnterKeyKeydownEvent(event) && treatLinkAsLiveForEventType(NonMouseEvent)) {
event->setDefaultHandled();
dispatchSimulatedClick(event);
return;
}
if (isLinkClick(event) && treatLinkAsLiveForEventType(eventType(event))) {
handleClick(event);
return;
}
if (rendererIsEditable()) {
// This keeps track of the editable block that the selection was in (if it was in one) just before the link was clicked
// for the LiveWhenNotFocused editable link behavior
if (event->type() == eventNames().mousedownEvent && event->isMouseEvent() && static_cast<MouseEvent*>(event)->button() != RightButton && document()->frame() && document()->frame()->selection()) {
setRootEditableElementForSelectionOnMouseDown(document()->frame()->selection()->rootEditableElement());
m_wasShiftKeyDownOnMouseDown = static_cast<MouseEvent*>(event)->shiftKey();
} else if (event->type() == eventNames().mouseoverEvent) {
// These are cleared on mouseover and not mouseout because their values are needed for drag events,
// but drag events happen after mouse out events.
clearRootEditableElementForSelectionOnMouseDown();
m_wasShiftKeyDownOnMouseDown = false;
}
}
}
HTMLElement::defaultEventHandler(event);
}
void HTMLAnchorElement::setActive(bool down, bool pause)
{
if (rendererIsEditable()) {
EditableLinkBehavior editableLinkBehavior = EditableLinkDefaultBehavior;
if (Settings* settings = document()->settings())
editableLinkBehavior = settings->editableLinkBehavior();
switch (editableLinkBehavior) {
default:
case EditableLinkDefaultBehavior:
case EditableLinkAlwaysLive:
break;
case EditableLinkNeverLive:
return;
// Don't set the link to be active if the current selection is in the same editable block as
// this link
case EditableLinkLiveWhenNotFocused:
if (down && document()->frame() && document()->frame()->selection()->rootEditableElement() == rootEditableElement())
return;
break;
case EditableLinkOnlyLiveWithShiftKey:
return;
}
}
ContainerNode::setActive(down, pause);
}
void HTMLAnchorElement::parseAttribute(Attribute* attr)
{
if (attr->name() == hrefAttr) {
bool wasLink = isLink();
setIsLink(!attr->isNull());
if (wasLink != isLink())
setNeedsStyleRecalc();
if (isLink()) {
String parsedURL = stripLeadingAndTrailingHTMLSpaces(attr->value());
if (document()->isDNSPrefetchEnabled()) {
if (protocolIs(parsedURL, "http") || protocolIs(parsedURL, "https") || parsedURL.startsWith("//"))
prefetchDNS(document()->completeURL(parsedURL).host());
}
if (document()->page() && !document()->page()->javaScriptURLsAreAllowed() && protocolIsJavaScript(parsedURL)) {
clearIsLink();
// FIXME: This is horribly factored.
if (Attribute* hrefAttribute = getAttributeItem(hrefAttr))
hrefAttribute->setValue(nullAtom);
}
#ifdef ARTEMIS
else if(protocolIsJavaScript(parsedURL) && !hasEventListeners(eventNames().clickEvent)) {
// Don't set the onclick event handler if it already has one.
setAttributeEventListener(eventNames().clickEvent, createAttributeEventListener(this, attr));
}
#endif
}
invalidateCachedVisitedLinkHash();
} else if (attr->name() == nameAttr || attr->name() == titleAttr) {
// Do nothing.
} else if (attr->name() == relAttr)
setRel(attr->value());
else
HTMLElement::parseAttribute(attr);
}
void HTMLAnchorElement::accessKeyAction(bool sendMouseEvents)
{
// send the mouse button events if the caller specified sendMouseEvents
dispatchSimulatedClick(0, sendMouseEvents);
}
bool HTMLAnchorElement::isURLAttribute(Attribute *attr) const
{
return attr->name() == hrefAttr || HTMLElement::isURLAttribute(attr);
}
bool HTMLAnchorElement::canStartSelection() const
{
// FIXME: We probably want this same behavior in SVGAElement too
if (!isLink())
return HTMLElement::canStartSelection();
return rendererIsEditable();
}
bool HTMLAnchorElement::draggable() const
{
// Should be draggable if we have an href attribute.
const AtomicString& value = getAttribute(draggableAttr);
if (equalIgnoringCase(value, "true"))
return true;
if (equalIgnoringCase(value, "false"))
return false;
return hasAttribute(hrefAttr);
}
KURL HTMLAnchorElement::href() const
{
return document()->completeURL(stripLeadingAndTrailingHTMLSpaces(getAttribute(hrefAttr)));
}
void HTMLAnchorElement::setHref(const AtomicString& value)
{
setAttribute(hrefAttr, value);
}
bool HTMLAnchorElement::hasRel(uint32_t relation) const
{
return m_linkRelations & relation;
}
void HTMLAnchorElement::setRel(const String& value)
{
m_linkRelations = 0;
SpaceSplitString newLinkRelations(value, true);
// FIXME: Add link relations as they are implemented
if (newLinkRelations.contains("noreferrer"))
m_linkRelations |= RelationNoReferrer;
}
const AtomicString& HTMLAnchorElement::name() const
{
return getNameAttribute();
}
short HTMLAnchorElement::tabIndex() const<|fim▁hole|>}
String HTMLAnchorElement::target() const
{
return getAttribute(targetAttr);
}
String HTMLAnchorElement::hash() const
{
String fragmentIdentifier = href().fragmentIdentifier();
return fragmentIdentifier.isEmpty() ? emptyString() : "#" + fragmentIdentifier;
}
void HTMLAnchorElement::setHash(const String& value)
{
KURL url = href();
if (value[0] == '#')
url.setFragmentIdentifier(value.substring(1));
else
url.setFragmentIdentifier(value);
setHref(url.string());
}
String HTMLAnchorElement::host() const
{
const KURL& url = href();
if (url.hostEnd() == url.pathStart())
return url.host();
if (isDefaultPortForProtocol(url.port(), url.protocol()))
return url.host();
return url.host() + ":" + String::number(url.port());
}
void HTMLAnchorElement::setHost(const String& value)
{
if (value.isEmpty())
return;
KURL url = href();
if (!url.canSetHostOrPort())
return;
size_t separator = value.find(':');
if (!separator)
return;
if (separator == notFound)
url.setHostAndPort(value);
else {
unsigned portEnd;
unsigned port = parsePortFromStringPosition(value, separator + 1, portEnd);
if (!port) {
// http://dev.w3.org/html5/spec/infrastructure.html#url-decomposition-idl-attributes
// specifically goes against RFC 3986 (p3.2) and
// requires setting the port to "0" if it is set to empty string.
url.setHostAndPort(value.substring(0, separator + 1) + "0");
} else {
if (isDefaultPortForProtocol(port, url.protocol()))
url.setHostAndPort(value.substring(0, separator));
else
url.setHostAndPort(value.substring(0, portEnd));
}
}
setHref(url.string());
}
String HTMLAnchorElement::hostname() const
{
return href().host();
}
void HTMLAnchorElement::setHostname(const String& value)
{
// Before setting new value:
// Remove all leading U+002F SOLIDUS ("/") characters.
unsigned i = 0;
unsigned hostLength = value.length();
while (value[i] == '/')
i++;
if (i == hostLength)
return;
KURL url = href();
if (!url.canSetHostOrPort())
return;
url.setHost(value.substring(i));
setHref(url.string());
}
String HTMLAnchorElement::pathname() const
{
return href().path();
}
void HTMLAnchorElement::setPathname(const String& value)
{
KURL url = href();
if (!url.canSetPathname())
return;
if (value[0] == '/')
url.setPath(value);
else
url.setPath("/" + value);
setHref(url.string());
}
String HTMLAnchorElement::port() const
{
if (href().hasPort())
return String::number(href().port());
return emptyString();
}
void HTMLAnchorElement::setPort(const String& value)
{
KURL url = href();
if (!url.canSetHostOrPort())
return;
// http://dev.w3.org/html5/spec/infrastructure.html#url-decomposition-idl-attributes
// specifically goes against RFC 3986 (p3.2) and
// requires setting the port to "0" if it is set to empty string.
unsigned port = value.toUInt();
if (isDefaultPortForProtocol(port, url.protocol()))
url.removePort();
else
url.setPort(port);
setHref(url.string());
}
String HTMLAnchorElement::protocol() const
{
return href().protocol() + ":";
}
void HTMLAnchorElement::setProtocol(const String& value)
{
KURL url = href();
url.setProtocol(value);
setHref(url.string());
}
String HTMLAnchorElement::search() const
{
String query = href().query();
return query.isEmpty() ? emptyString() : "?" + query;
}
String HTMLAnchorElement::origin() const
{
RefPtr<SecurityOrigin> origin = SecurityOrigin::create(href());
return origin->toString();
}
void HTMLAnchorElement::setSearch(const String& value)
{
KURL url = href();
String newSearch = (value[0] == '?') ? value.substring(1) : value;
// Make sure that '#' in the query does not leak to the hash.
url.setQuery(newSearch.replace('#', "%23"));
setHref(url.string());
}
String HTMLAnchorElement::text()
{
return innerText();
}
String HTMLAnchorElement::toString() const
{
return href().string();
}
bool HTMLAnchorElement::isLiveLink() const
{
return isLink() && treatLinkAsLiveForEventType(m_wasShiftKeyDownOnMouseDown ? MouseEventWithShiftKey : MouseEventWithoutShiftKey);
}
void HTMLAnchorElement::sendPings(const KURL& destinationURL)
{
if (!hasAttribute(pingAttr) || !document()->settings()->hyperlinkAuditingEnabled())
return;
SpaceSplitString pingURLs(getAttribute(pingAttr), false);
for (unsigned i = 0; i < pingURLs.size(); i++)
PingLoader::sendPing(document()->frame(), document()->completeURL(pingURLs[i]), destinationURL);
}
void HTMLAnchorElement::handleClick(Event* event)
{
event->setDefaultHandled();
Frame* frame = document()->frame();
if (!frame)
return;
String url = stripLeadingAndTrailingHTMLSpaces(fastGetAttribute(hrefAttr));
appendServerMapMousePosition(url, event);
KURL kurl = document()->completeURL(url);
#if ENABLE(DOWNLOAD_ATTRIBUTE)
if (hasAttribute(downloadAttr)) {
ResourceRequest request(kurl);
if (!hasRel(RelationNoReferrer)) {
String referrer = SecurityPolicy::generateReferrerHeader(document()->referrerPolicy(), kurl, frame->loader()->outgoingReferrer());
if (!referrer.isEmpty())
request.setHTTPReferrer(referrer);
frame->loader()->addExtraFieldsToMainResourceRequest(request);
}
frame->loader()->client()->startDownload(request, fastGetAttribute(downloadAttr));
} else
#endif
frame->loader()->urlSelected(kurl, target(), event, false, false, hasRel(RelationNoReferrer) ? NeverSendReferrer : MaybeSendReferrer);
sendPings(kurl);
}
HTMLAnchorElement::EventType HTMLAnchorElement::eventType(Event* event)
{
if (!event->isMouseEvent())
return NonMouseEvent;
return static_cast<MouseEvent*>(event)->shiftKey() ? MouseEventWithShiftKey : MouseEventWithoutShiftKey;
}
bool HTMLAnchorElement::treatLinkAsLiveForEventType(EventType eventType) const
{
if (!rendererIsEditable())
return true;
Settings* settings = document()->settings();
if (!settings)
return true;
switch (settings->editableLinkBehavior()) {
case EditableLinkDefaultBehavior:
case EditableLinkAlwaysLive:
return true;
case EditableLinkNeverLive:
return false;
// If the selection prior to clicking on this link resided in the same editable block as this link,
// and the shift key isn't pressed, we don't want to follow the link.
case EditableLinkLiveWhenNotFocused:
return eventType == MouseEventWithShiftKey || (eventType == MouseEventWithoutShiftKey && rootEditableElementForSelectionOnMouseDown() != rootEditableElement());
case EditableLinkOnlyLiveWithShiftKey:
return eventType == MouseEventWithShiftKey;
}
ASSERT_NOT_REACHED();
return false;
}
bool isEnterKeyKeydownEvent(Event* event)
{
return event->type() == eventNames().keydownEvent && event->isKeyboardEvent() && static_cast<KeyboardEvent*>(event)->keyIdentifier() == "Enter";
}
bool isMiddleMouseButtonEvent(Event* event)
{
return event->isMouseEvent() && static_cast<MouseEvent*>(event)->button() == MiddleButton;
}
bool isLinkClick(Event* event)
{
return event->type() == eventNames().clickEvent && (!event->isMouseEvent() || static_cast<MouseEvent*>(event)->button() != RightButton);
}
void handleLinkClick(Event* event, Document* document, const String& url, const String& target, bool hideReferrer)
{
event->setDefaultHandled();
Frame* frame = document->frame();
if (!frame)
return;
frame->loader()->urlSelected(document->completeURL(url), target, event, false, false, hideReferrer ? NeverSendReferrer : MaybeSendReferrer);
}
#if ENABLE(MICRODATA)
String HTMLAnchorElement::itemValueText() const
{
return getURLAttribute(hrefAttr);
}
void HTMLAnchorElement::setItemValueText(const String& value, ExceptionCode&)
{
setAttribute(hrefAttr, value);
}
#endif
typedef HashMap<const HTMLAnchorElement*, RefPtr<Element> > RootEditableElementMap;
static RootEditableElementMap& rootEditableElementMap()
{
DEFINE_STATIC_LOCAL(RootEditableElementMap, map, ());
return map;
}
Element* HTMLAnchorElement::rootEditableElementForSelectionOnMouseDown() const
{
if (!m_hasRootEditableElementForSelectionOnMouseDown)
return 0;
return rootEditableElementMap().get(this).get();
}
void HTMLAnchorElement::clearRootEditableElementForSelectionOnMouseDown()
{
if (!m_hasRootEditableElementForSelectionOnMouseDown)
return;
rootEditableElementMap().remove(this);
m_hasRootEditableElementForSelectionOnMouseDown = false;
}
void HTMLAnchorElement::setRootEditableElementForSelectionOnMouseDown(Element* element)
{
if (!element) {
clearRootEditableElementForSelectionOnMouseDown();
return;
}
rootEditableElementMap().set(this, element);
m_hasRootEditableElementForSelectionOnMouseDown = true;
}
}<|fim▁end|> | {
// Skip the supportsFocus check in HTMLElement.
return Element::tabIndex(); |
<|file_name|>RS485.java<|end_file_name|><|fim▁begin|>/*-------------------------------------------------------------------------
| RXTX License v 2.1 - LGPL v 2.1 + Linking Over Controlled Interface.
| RXTX is a native interface to serial ports in java.
| Copyright 1997-2007 by Trent Jarvi [email protected] and others who
| actually wrote it. See individual source files for more information.
|
| A copy of the LGPL v 2.1 may be found at
| http://www.gnu.org/licenses/lgpl.txt on March 4th 2007. A copy is
| here for your convenience.
|
| This library is free software; you can redistribute it and/or
| modify it under the terms of the GNU Lesser General Public
| License as published by the Free Software Foundation; either
| version 2.1 of the License, or (at your option) any later version.
|
| This library is distributed in the hope that it will be useful,
| but WITHOUT ANY WARRANTY; without even the implied warranty of
| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
| Lesser General Public License for more details.
|
| An executable that contains no derivative of any portion of RXTX, but
| is designed to work with RXTX by being dynamically linked with it,
| is considered a "work that uses the Library" subject to the terms and
| conditions of the GNU Lesser General Public License.
|
| The following has been added to the RXTX License to remove
| any confusion about linking to RXTX. We want to allow in part what
| section 5, paragraph 2 of the LGPL does not permit in the special<|fim▁hole|>| Java Specification Request or standards body defined interface in the
| future as another exception but one is not currently available.
|
| http://www.fsf.org/licenses/gpl-faq.html#LinkingOverControlledInterface
|
| As a special exception, the copyright holders of RXTX give you
| permission to link RXTX with independent modules that communicate with
| RXTX solely through the Sun Microsytems CommAPI interface version 2,
| regardless of the license terms of these independent modules, and to copy
| and distribute the resulting combined work under terms of your choice,
| provided that every copy of the combined work is accompanied by a complete
| copy of the source code of RXTX (the version of RXTX used to produce the
| combined work), being distributed under the terms of the GNU Lesser General
| Public License plus this exception. An independent module is a
| module which is not derived from or based on RXTX.
|
| Note that people who make modified versions of RXTX are not obligated
| to grant this special exception for their modified versions; it is
| their choice whether to do so. The GNU Lesser General Public License
| gives permission to release a modified version without this exception; this
| exception also makes it possible to release a modified version which
| carries forward this exception.
|
| You should have received a copy of the GNU Lesser General Public
| License along with this library; if not, write to the Free
| Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
| All trademarks belong to their respective owners.
--------------------------------------------------------------------------*/
package gnu.io;
import java.io.*;
import java.util.*;
import java.lang.Math;
/**
* @author Trent Jarvi
* @version %I%, %G%
* @since JDK1.0
*/
final class RS485 extends RS485Port {
	static
	{
		//System.loadLibrary( "rxtxRS485" );
		Initialize();
	}
	/** Initialize the native library */
	private native static void Initialize();
	/** Actual RS485Port wrapper class */
	/** Open the named port */
	public RS485( String name ) throws PortInUseException {
		fd = open( name );
	}
	private native int open( String name ) throws PortInUseException;
	/** File descriptor returned by the native open(); 0 once closed. */
	private int fd;
	/** DSR flag **/
	static boolean dsrFlag = false;
	/** Output stream */
	private final RS485OutputStream out = new RS485OutputStream();
	public OutputStream getOutputStream() { return out; }
	/** Input stream */
	private final RS485InputStream in = new RS485InputStream();
	public InputStream getInputStream() { return in; }
	/** Set the RS485Port parameters and cache them for the getters below. */
	public void setRS485PortParams( int b, int d, int s, int p )
		throws UnsupportedCommOperationException
	{
		nativeSetRS485PortParams( b, d, s, p );
		speed = b;
		dataBits = d;
		stopBits = s;
		parity = p;
	}
	/** Set the native RS485 port parameters */
	private native void nativeSetRS485PortParams( int speed, int dataBits,
		int stopBits, int parity ) throws UnsupportedCommOperationException;
	/** Line speed in bits-per-second */
	private int speed=9600;
	public int getBaudRate() { return speed; }
	/** Data bits port parameter */
	private int dataBits=DATABITS_8;
	public int getDataBits() { return dataBits; }
	/** Stop bits port parameter */
	private int stopBits=RS485Port.STOPBITS_1;
	public int getStopBits() { return stopBits; }
	/** Parity port parameter */
	private int parity= RS485Port.PARITY_NONE;
	public int getParity() { return parity; }
	/** Flow control mode; updated only when the native call succeeds. */
	private int flowmode = RS485Port.FLOWCONTROL_NONE;
	public void setFlowControlMode( int flowcontrol ) {
		try { setflowcontrol( flowcontrol ); }
		catch( IOException e ) {
			e.printStackTrace();
			return;
		}
		flowmode=flowcontrol;
	}
	public int getFlowControlMode() { return flowmode; }
	native void setflowcontrol( int flowcontrol ) throws IOException;
	/*
	linux/drivers/char/n_hdlc.c? FIXME
		[email protected]
	*/
	/** Receive framing control — not supported by this implementation. */
	public void enableReceiveFraming( int f )
		throws UnsupportedCommOperationException
	{
		throw new UnsupportedCommOperationException( "Not supported" );
	}
	public void disableReceiveFraming() {}
	public boolean isReceiveFramingEnabled() { return false; }
	public int getReceiveFramingByte() { return 0; }
	/** Receive timeout control */
	private int timeout = 0;
	public native int NativegetReceiveTimeout();
	public native boolean NativeisReceiveTimeoutEnabled();
	public native void NativeEnableReceiveTimeoutThreshold(int time, int threshold,int InputBuffer);
	public void disableReceiveTimeout(){
		enableReceiveTimeout(0);
	}
	public void enableReceiveTimeout( int time ){
		if( time >= 0 ) {
			timeout = time;
			NativeEnableReceiveTimeoutThreshold( time , threshold, InputBuffer );
		}
		else {
			System.out.println("Invalid timeout");
		}
	}
	public boolean isReceiveTimeoutEnabled(){
		return(NativeisReceiveTimeoutEnabled());
	}
	public int getReceiveTimeout(){
		return(NativegetReceiveTimeout( ));
	}
	/** Receive threshold control */
	private int threshold = 0;
	public void enableReceiveThreshold( int thresh ){
		if(thresh >=0)
		{
			threshold=thresh;
			NativeEnableReceiveTimeoutThreshold(timeout, threshold, InputBuffer);
		}
		else /* invalid thresh */
		{
			System.out.println("Invalid Threshold");
		}
	}
	public void disableReceiveThreshold() {
		enableReceiveThreshold(0);
	}
	public int getReceiveThreshold(){
		return threshold;
	}
	public boolean isReceiveThresholdEnabled(){
		return(threshold>0);
	}
	/** Input/output buffers */
	/** FIXME I think this refers to
		FOPEN(3)/SETBUF(3)/FREAD(3)/FCLOSE(3)
		[email protected]
		These are native stubs...
	*/
	private int InputBuffer=0;
	private int OutputBuffer=0;
	public void setInputBufferSize( int size )
	{
		InputBuffer=size;
	}
	public int getInputBufferSize()
	{
		return(InputBuffer);
	}
	public void setOutputBufferSize( int size )
	{
		OutputBuffer=size;
	}
	public int getOutputBufferSize()
	{
		return(OutputBuffer);
	}
	/** Line status methods */
	public native boolean isDTR();
	public native void setDTR( boolean state );
	public native void setRTS( boolean state );
	private native void setDSR( boolean state );
	public native boolean isCTS();
	public native boolean isDSR();
	public native boolean isCD();
	public native boolean isRI();
	public native boolean isRTS();
	/** Write to the port */
	public native void sendBreak( int duration );
	private native void writeByte( int b ) throws IOException;
	private native void writeArray( byte b[], int off, int len )
		throws IOException;
	private native void drain() throws IOException;
	/** RS485 read methods */
	private native int nativeavailable() throws IOException;
	private native int readByte() throws IOException;
	private native int readArray( byte b[], int off, int len )
		throws IOException;
	/** RS485 Port Event listener */
	private RS485PortEventListener SPEventListener;
	/** Thread to monitor data */
	private MonitorThread monThread;
	/** Process RS485PortEvents */
	native void eventLoop();
	private int dataAvailable=0;
	/**
	 * Called from the native event loop; dispatches the event to the
	 * registered listener if the corresponding monitor flag is enabled.
	 */
	public void sendEvent( int event, boolean state ) {
		switch( event ) {
			case RS485PortEvent.DATA_AVAILABLE:
				dataAvailable=1;
				if( monThread.Data ) break;
				return;
			case RS485PortEvent.OUTPUT_BUFFER_EMPTY:
				if( monThread.Output ) break;
				return;
/*
				if( monThread.DSR ) break;
				return;
				if (isDSR())
				{
					if (!dsrFlag)
					{
						dsrFlag = true;
						RS485PortEvent e = new RS485PortEvent(this, RS485PortEvent.DSR, !dsrFlag, dsrFlag );
					}
				}
				else if (dsrFlag)
				{
					dsrFlag = false;
					RS485PortEvent e = new RS485PortEvent(this, RS485PortEvent.DSR, !dsrFlag, dsrFlag );
				}
*/
			case RS485PortEvent.CTS:
				if( monThread.CTS ) break;
				return;
			case RS485PortEvent.DSR:
				if( monThread.DSR ) break;
				return;
			case RS485PortEvent.RI:
				if( monThread.RI ) break;
				return;
			case RS485PortEvent.CD:
				if( monThread.CD ) break;
				return;
			case RS485PortEvent.OE:
				if( monThread.OE ) break;
				return;
			case RS485PortEvent.PE:
				if( monThread.PE ) break;
				return;
			case RS485PortEvent.FE:
				if( monThread.FE ) break;
				return;
			case RS485PortEvent.BI:
				if( monThread.BI ) break;
				return;
			default:
				System.err.println("unknown event:"+event);
				return;
		}
		RS485PortEvent e = new RS485PortEvent(this, event, !state, state );
		if( SPEventListener != null ) SPEventListener.RS485Event( e );
	}
	/** Add an event listener (only one listener is supported). */
	public void addEventListener( RS485PortEventListener lsnr )
		throws TooManyListenersException
	{
		if( SPEventListener != null ) throw new TooManyListenersException();
		SPEventListener = lsnr;
		monThread = new MonitorThread();
		monThread.start();
	}
	/** Remove the RS485 port event listener */
	public void removeEventListener() {
		SPEventListener = null;
		if( monThread != null ) {
			monThread.interrupt();
			monThread = null;
		}
	}
	public void notifyOnDataAvailable( boolean enable ) { monThread.Data = enable; }
	public void notifyOnOutputEmpty( boolean enable ) { monThread.Output = enable; }
	public void notifyOnCTS( boolean enable ) { monThread.CTS = enable; }
	public void notifyOnDSR( boolean enable ) { monThread.DSR = enable; }
	public void notifyOnRingIndicator( boolean enable ) { monThread.RI = enable; }
	public void notifyOnCarrierDetect( boolean enable ) { monThread.CD = enable; }
	public void notifyOnOverrunError( boolean enable ) { monThread.OE = enable; }
	public void notifyOnParityError( boolean enable ) { monThread.PE = enable; }
	public void notifyOnFramingError( boolean enable ) { monThread.FE = enable; }
	public void notifyOnBreakInterrupt( boolean enable ) { monThread.BI = enable; }
	/** Close the port */
	private native void nativeClose();
	public void close() {
		setDTR(false);
		setDSR(false);
		nativeClose();
		super.close();
		fd = 0;
	}
	/** Finalize the port */
	protected void finalize() {
		if( fd > 0 ) close();
	}
	/** Inner class for RS485OutputStream */
	class RS485OutputStream extends OutputStream {
		public void write( int b ) throws IOException {
			writeByte( b );
		}
		public void write( byte b[] ) throws IOException {
			writeArray( b, 0, b.length );
		}
		public void write( byte b[], int off, int len ) throws IOException {
			writeArray( b, off, len );
		}
		public void flush() throws IOException {
			drain();
		}
	}
	/** Inner class for RS485InputStream */
	class RS485InputStream extends InputStream {
		public int read() throws IOException {
			dataAvailable=0;
			return readByte();
		}
		public int read( byte b[] ) throws IOException
		{
			return read ( b, 0, b.length);
		}
		public int read( byte b[], int off, int len ) throws IOException
		{
			dataAvailable=0;
			int i=0, Minimum=0;
			int intArray[] =
			{
				b.length,
				InputBuffer,
				len
			};
			/*
				find the lowest nonzero value
				timeout and threshold are handled on the native side
				see NativeEnableReceiveTimeoutThreshold in
				RS485Imp.c
			*/
			/* FIX: test the index bound before dereferencing.  The original
			   order (intArray[i]==0 && i < intArray.length) walked one past
			   the end of the array when every entry was zero (e.g. a
			   zero-length buffer with len 0), throwing
			   ArrayIndexOutOfBoundsException. */
			while( i < intArray.length && intArray[i]==0 ) i++;
			Minimum = ( i < intArray.length ) ? intArray[i] : 1;
			while( i < intArray.length )
			{
				if(intArray[i] > 0 )
				{
					Minimum=Math.min(Minimum,intArray[i]);
				}
				i++;
			}
			/* NOTE(review): when threshold is 0 (the default) this clamps
			   Minimum to 0 and the next line bumps it back to 1, i.e. a
			   single byte is requested; behavior preserved as-is. */
			Minimum=Math.min(Minimum,threshold);
			if(Minimum == 0) Minimum=1;
			/* return value unused, but the native availability query is
			   kept to preserve the original call sequence */
			int Available=available();
			int Ret = readArray( b, off, Minimum);
			return Ret;
		}
		public int available() throws IOException {
			return nativeavailable();
		}
	}
	class MonitorThread extends Thread {
		/** Note: these have to be separate boolean flags because the
		    RS485PortEvent constants are NOT bit-flags, they are just
		    defined as integers from 1 to 10 -DPL */
		private boolean CTS=false;
		private boolean DSR=false;
		private boolean RI=false;
		private boolean CD=false;
		private boolean OE=false;
		private boolean PE=false;
		private boolean FE=false;
		private boolean BI=false;
		private boolean Data=false;
		private boolean Output=false;
		MonitorThread() { }
		public void run() {
			eventLoop();
		}
	}
}<|fim▁end|> | | case of linking over a controlled interface. The intent is to add a |
<|file_name|>node_test.go<|end_file_name|><|fim▁begin|>// Copyright 2015 The go-krypton Authors
// This file is part of the go-krypton library.
//
// The go-krypton library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The go-krypton library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-krypton library. If not, see <http://www.gnu.org/licenses/>.
package discover
import (
"math/big"
"math/rand"
"net"
"reflect"
"testing"
"testing/quick"
"time"
"github.com/krypton/go-krypton/common"
"github.com/krypton/go-krypton/crypto"
)
// parseNodeTests drives TestParseNode and TestNodeString: each fixture pairs
// a raw enode URL with either the exact error message ParseNode must return
// or the fully populated Node it must produce.
var parseNodeTests = []struct {
	rawurl     string
	wantError  string
	wantResult *Node
}{
	{
		rawurl:    "http://foobar",
		wantError: `invalid URL scheme, want "enode"`,
	},
	{
		rawurl:    "enode://foobar",
		wantError: `does not contain node ID`,
	},
	{
		rawurl:    "enode://[email protected]:3",
		wantError: `invalid node ID (wrong length, need 64 hex bytes)`,
	},
	{
		rawurl:    "enode://1dd9d65c4552b5eb43d5ad55a2ee3f56c6cbc1c64a5c8d659f51fcd51bace24351232b8d7821617d2b29b54b81cdefb9b3e9c37d7fd5f63270bcc9e1a6f6a439@hostname:3",
		wantError: `invalid IP address`,
	},
	{
		rawurl:    "enode://1dd9d65c4552b5eb43d5ad55a2ee3f56c6cbc1c64a5c8d659f51fcd51bace24351232b8d7821617d2b29b54b81cdefb9b3e9c37d7fd5f63270bcc9e1a6f6a439@127.0.0.1:foo",
		wantError: `invalid port`,
	},
	{
		rawurl:    "enode://1dd9d65c4552b5eb43d5ad55a2ee3f56c6cbc1c64a5c8d659f51fcd51bace24351232b8d7821617d2b29b54b81cdefb9b3e9c37d7fd5f63270bcc9e1a6f6a439@127.0.0.1:3?discport=foo",
		wantError: `invalid discport in query`,
	},
	// Success cases below: IPv4, wildcard IPv6, explicit IPv6 and a
	// separate discovery port given via the discport query parameter.
	{
		rawurl: "enode://1dd9d65c4552b5eb43d5ad55a2ee3f56c6cbc1c64a5c8d659f51fcd51bace24351232b8d7821617d2b29b54b81cdefb9b3e9c37d7fd5f63270bcc9e1a6f6a439@127.0.0.1:52150",
		wantResult: newNode(
			MustHexID("0x1dd9d65c4552b5eb43d5ad55a2ee3f56c6cbc1c64a5c8d659f51fcd51bace24351232b8d7821617d2b29b54b81cdefb9b3e9c37d7fd5f63270bcc9e1a6f6a439"),
			net.IP{0x7f, 0x0, 0x0, 0x1},
			52150,
			52150,
		),
	},
	{
		rawurl: "enode://1dd9d65c4552b5eb43d5ad55a2ee3f56c6cbc1c64a5c8d659f51fcd51bace24351232b8d7821617d2b29b54b81cdefb9b3e9c37d7fd5f63270bcc9e1a6f6a439@[::]:52150",
		wantResult: newNode(
			MustHexID("0x1dd9d65c4552b5eb43d5ad55a2ee3f56c6cbc1c64a5c8d659f51fcd51bace24351232b8d7821617d2b29b54b81cdefb9b3e9c37d7fd5f63270bcc9e1a6f6a439"),
			net.ParseIP("::"),
			52150,
			52150,
		),
	},
	{
		rawurl: "enode://1dd9d65c4552b5eb43d5ad55a2ee3f56c6cbc1c64a5c8d659f51fcd51bace24351232b8d7821617d2b29b54b81cdefb9b3e9c37d7fd5f63270bcc9e1a6f6a439@[2001:db8:3c4d:15::abcd:ef12]:52150",
		wantResult: newNode(
			MustHexID("0x1dd9d65c4552b5eb43d5ad55a2ee3f56c6cbc1c64a5c8d659f51fcd51bace24351232b8d7821617d2b29b54b81cdefb9b3e9c37d7fd5f63270bcc9e1a6f6a439"),
			net.ParseIP("2001:db8:3c4d:15::abcd:ef12"),
			52150,
			52150,
		),
	},
	{
		rawurl: "enode://1dd9d65c4552b5eb43d5ad55a2ee3f56c6cbc1c64a5c8d659f51fcd51bace24351232b8d7821617d2b29b54b81cdefb9b3e9c37d7fd5f63270bcc9e1a6f6a439@127.0.0.1:52150?discport=22334",
		wantResult: newNode(
			MustHexID("0x1dd9d65c4552b5eb43d5ad55a2ee3f56c6cbc1c64a5c8d659f51fcd51bace24351232b8d7821617d2b29b54b81cdefb9b3e9c37d7fd5f63270bcc9e1a6f6a439"),
			net.IP{0x7f, 0x0, 0x0, 0x1},
			22334,
			52150,
		),
	},
}
// TestParseNode runs ParseNode over every fixture in parseNodeTests and
// checks that either the exact expected error message or the exact expected
// Node value is produced.
func TestParseNode(t *testing.T) {
	for i, test := range parseNodeTests {
		n, err := ParseNode(test.rawurl)
		if test.wantError != "" {
			if err == nil {
				t.Errorf("test %d: got nil error, expected %#q", i, test.wantError)
				continue
			} else if err.Error() != test.wantError {
				t.Errorf("test %d: got error %#q, expected %#q", i, err.Error(), test.wantError)
				continue
			}
		} else {
			if err != nil {
				t.Errorf("test %d: unexpected error: %v", i, err)
				continue
			}
			if !reflect.DeepEqual(n, test.wantResult) {
				t.Errorf("test %d: result mismatch:\ngot: %#v, want: %#v", i, n, test.wantResult)
			}
		}
	}
}
// TestNodeString verifies that Node.String reproduces the exact raw URL for
// every fixture that is expected to parse successfully.
func TestNodeString(t *testing.T) {
	for i, tt := range parseNodeTests {
		if tt.wantError != "" {
			continue // error fixtures have no Node to stringify
		}
		got := tt.wantResult.String()
		if got != tt.rawurl {
			t.Errorf("test %d: Node.String() mismatch:\ngot: %s\nwant: %s", i, got, tt.rawurl)
		}
	}
}
func TestHexID(t *testing.T) {
ref := NodeID{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 106, 217, 182, 31, 165, 174, 1, 67, 7, 235, 220, 150, 66, 83, 173, 205, 159, 44, 10, 57, 42, 161, 26, 188}
id1 := MustHexID("0x000000000000000000000000000000000000000000000000000000000000000000000000000000806ad9b61fa5ae014307ebdc964253adcd9f2c0a392aa11abc")
id2 := MustHexID("000000000000000000000000000000000000000000000000000000000000000000000000000000806ad9b61fa5ae014307ebdc964253adcd9f2c0a392aa11abc")<|fim▁hole|>
if id1 != ref {
t.Errorf("wrong id1\ngot %v\nwant %v", id1[:], ref[:])
}
if id2 != ref {
t.Errorf("wrong id2\ngot %v\nwant %v", id2[:], ref[:])
}
}
// TestNodeID_recover signs an all-zero 32-byte hash, then checks that
// recoverNodeID yields the signer's NodeID and that NodeID.Pubkey converts
// back to the original ECDSA public key.
func TestNodeID_recover(t *testing.T) {
	prv := newkey()
	hash := make([]byte, 32)
	sig, err := crypto.Sign(hash, prv)
	if err != nil {
		t.Fatalf("signing error: %v", err)
	}
	pub := PubkeyID(&prv.PublicKey)
	recpub, err := recoverNodeID(hash, sig)
	if err != nil {
		t.Fatalf("recovery error: %v", err)
	}
	if pub != recpub {
		t.Errorf("recovered wrong pubkey:\ngot: %v\nwant: %v", recpub, pub)
	}
	ecdsa, err := pub.Pubkey()
	if err != nil {
		t.Errorf("Pubkey error: %v", err)
	}
	if !reflect.DeepEqual(ecdsa, &prv.PublicKey) {
		t.Errorf("Pubkey mismatch:\n got: %#v\n want: %#v", ecdsa, &prv.PublicKey)
	}
}
// TestNodeID_pubkeyBad checks that the all-zero NodeID cannot be converted
// into a public key.
func TestNodeID_pubkeyBad(t *testing.T) {
	key, err := NodeID{}.Pubkey()
	if err == nil {
		t.Error("expected error for zero ID")
	}
	if key != nil {
		t.Error("expected nil result")
	}
}
// TestNodeID_distcmp cross-checks distcmp against a big.Int reference
// implementation: compare the XOR distances to the target as big integers.
func TestNodeID_distcmp(t *testing.T) {
	distcmpBig := func(target, a, b common.Hash) int {
		tbig := new(big.Int).SetBytes(target[:])
		abig := new(big.Int).SetBytes(a[:])
		bbig := new(big.Int).SetBytes(b[:])
		return new(big.Int).Xor(tbig, abig).Cmp(new(big.Int).Xor(tbig, bbig))
	}
	if err := quick.CheckEqual(distcmp, distcmpBig, quickcfg()); err != nil {
		t.Error(err)
	}
}
// The random tests are likely to miss the case where the operands are equal,
// so pin it explicitly: the distance from any hash to itself must be zero.
func TestNodeID_distcmpEqual(t *testing.T) {
	base := common.Hash{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15}
	x := common.Hash{15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0}
	if got := distcmp(base, x, x); got != 0 {
		t.Errorf("distcmp(base, x, x) != 0")
	}
}
// TestNodeID_logdist cross-checks logdist against a big.Int reference
// implementation (the bit length of the XOR of both hashes).
func TestNodeID_logdist(t *testing.T) {
	logdistBig := func(a, b common.Hash) int {
		abig := new(big.Int).SetBytes(a[:])
		bbig := new(big.Int).SetBytes(b[:])
		return new(big.Int).Xor(abig, bbig).BitLen()
	}
	if err := quick.CheckEqual(logdist, logdistBig, quickcfg()); err != nil {
		t.Error(err)
	}
}
// The random tests are likely to miss the case where the operands are equal,
// so pin it explicitly: logdist of a hash with itself must be zero.
func TestNodeID_logdistEqual(t *testing.T) {
	h := common.Hash{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15}
	if got := logdist(h, h); got != 0 {
		t.Errorf("logdist(x, x) != 0")
	}
}
// TestNodeID_hashAtDistance checks that hashAtDistance produces a hash whose
// logdist from the input equals the requested distance, for many random
// inputs and distances.
func TestNodeID_hashAtDistance(t *testing.T) {
	// we don't use quick.Check here because its output isn't
	// very helpful when the test fails.
	cfg := quickcfg()
	for i := 0; i < cfg.MaxCount; i++ {
		a := gen(common.Hash{}, cfg.Rand).(common.Hash)
		dist := cfg.Rand.Intn(len(common.Hash{}) * 8)
		result := hashAtDistance(a, dist)
		actualdist := logdist(result, a)
		if dist != actualdist {
			t.Log("a:     ", a)
			t.Log("result:", result)
			t.Fatalf("#%d: distance of result is %d, want %d", i, actualdist, dist)
		}
	}
}
// quickcfg returns the testing/quick configuration shared by the
// property-based tests: 5000 iterations with a time-seeded RNG.
func quickcfg() *quick.Config {
	cfg := &quick.Config{
		MaxCount: 5000,
		Rand:     rand.New(rand.NewSource(time.Now().Unix())),
	}
	return cfg
}
// TODO: The Generate method can be dropped when we require Go >= 1.5
// because testing/quick learned to generate arrays in 1.5.
func (NodeID) Generate(rand *rand.Rand, size int) reflect.Value {
	var id NodeID
	// Randomize only the bytes after a random cut-off m, leaving a
	// zero prefix of varying length so generated IDs exercise many
	// different log-distances.
	m := rand.Intn(len(id))
	for i := len(id) - 1; i > m; i-- {
		id[i] = byte(rand.Uint32())
	}
	return reflect.ValueOf(id)
}<|fim▁end|> | |
<|file_name|>ColorButton.cpp<|end_file_name|><|fim▁begin|>/*
===========================================================================
Doom 3 GPL Source Code
Copyright (C) 1999-2011 id Software LLC, a ZeniMax Media company.
This file is part of the Doom 3 GPL Source Code (?Doom 3 Source Code?).
Doom 3 Source Code is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Doom 3 Source Code is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Doom 3 Source Code. If not, see <http://www.gnu.org/licenses/>.
In addition, the Doom 3 Source Code is also subject to certain additional terms. You should have received a copy of these additional terms immediately following the terms and conditions of the GNU General Public License which accompanied the Doom 3 Source Code. If not, please request a copy in writing from id Software at the address below.
If you have questions concerning this license or the applicable additional terms, you may contact in writing id Software LLC, c/o ZeniMax Media Inc., Suite 120, Rockville, Maryland 20850 USA.
===========================================================================
*/
#include "precompiled.h"
#pragma hdrstop
#include "ColorButton.h"
// Pixel dimensions of the drop-down arrow glyph painted on the button.
static const int ARROW_SIZE_CX = 4 ;
static const int ARROW_SIZE_CY = 2 ;
/*
================
ColorButton_SetColor
Sets the current color button color
================
*/
void ColorButton_SetColor ( HWND hWnd, COLORREF color )
{
	// Remember the color in the control's user data slot and trigger a
	// repaint so the new swatch becomes visible.
	if ( hWnd == NULL )
	{
		return;
	}
	SetWindowLong ( hWnd, GWL_USERDATA, color );
	InvalidateRect ( hWnd, NULL, FALSE );
}
void ColorButton_SetColor ( HWND hWnd, const char* color )
{
float red;
float green;
float blue;
float alpha;
if ( NULL == hWnd )<|fim▁hole|> {
return;
}
sscanf ( color, "%f,%f,%f,%f", &red, &green, &blue, &alpha );
ColorButton_SetColor ( hWnd, RGB(red*255.0f, green*255.0f, blue*255.0f) );
}
/*
================
AlphaButton_SetColor

Parses an "r,g,b,a" color string (components in [0,1]) and displays the
alpha component as a gray level on the button.
================
*/
void AlphaButton_SetColor ( HWND hWnd, const char* color )
{
	float red = 0.0f;
	float green = 0.0f;
	float blue = 0.0f;
	float alpha = 0.0f;

	if ( NULL == hWnd )
	{
		return;
	}

	// FIX: the original ignored the sscanf result, so a malformed string
	// left all four components uninitialized (undefined behavior).  All
	// four fields are required because alpha is the one actually used.
	if ( sscanf ( color, "%f,%f,%f,%f", &red, &green, &blue, &alpha ) != 4 )
	{
		return;
	}

	ColorButton_SetColor ( hWnd, RGB(alpha*255.0f, alpha*255.0f, alpha*255.0f) );
}
/*
================
ColorButton_GetColor
Retrieves the current color button color
================
*/
COLORREF ColorButton_GetColor ( HWND hWnd )
{
	// The current color is stashed in the control's user data slot by
	// ColorButton_SetColor.
	LONG stored = GetWindowLong ( hWnd, GWL_USERDATA );
	return static_cast<COLORREF>( stored );
}
/*
================
ColorButton_DrawArrow
Draws the arrow on the color button
================
*/
static void ColorButton_DrawArrow ( HDC hDC, RECT* pRect, COLORREF color )
{
	POINT ptsArrow[3];
	// Down-pointing triangle: top edge spans the rect, apex at bottom center.
	ptsArrow[0].x = pRect->left;
	ptsArrow[0].y = pRect->top;
	ptsArrow[1].x = pRect->right;
	ptsArrow[1].y = pRect->top;
	ptsArrow[2].x = (pRect->left + pRect->right)/2;
	ptsArrow[2].y = pRect->bottom;
	HBRUSH arrowBrush = CreateSolidBrush ( color );
	HPEN arrowPen = CreatePen ( PS_SOLID, 1, color );
	// Select our pen/brush, remembering the previous objects so they can be
	// restored before ours are deleted.
	HGDIOBJ oldBrush = SelectObject ( hDC, arrowBrush );
	HGDIOBJ oldPen = SelectObject ( hDC, arrowPen );
	SetPolyFillMode(hDC, WINDING);
	Polygon(hDC, ptsArrow, 3);
	SelectObject ( hDC, oldBrush );
	SelectObject ( hDC, oldPen );
	DeleteObject ( arrowBrush );
	DeleteObject ( arrowPen );
}
/*
================
ColorButton_DrawItem
Draws the actual color button as as reponse to a WM_DRAWITEM message
================
*/
void ColorButton_DrawItem ( HWND hWnd, LPDRAWITEMSTRUCT dis )
{
	assert ( dis );
	HDC hDC = dis->hDC;
	UINT state = dis->itemState;
	RECT rDraw = dis->rcItem;
	RECT rArrow;
	// Draw outer edge (pushed/inactive states mapped onto the frame flags)
	UINT uFrameState = DFCS_BUTTONPUSH|DFCS_ADJUSTRECT;
	if (state & ODS_SELECTED)
	{
		uFrameState |= DFCS_PUSHED;
	}
	if (state & ODS_DISABLED)
	{
		uFrameState |= DFCS_INACTIVE;
	}
	DrawFrameControl ( hDC, &rDraw, DFC_BUTTON, uFrameState );
	// Draw Focus (content shifts 1px down/right while pressed)
	if (state & ODS_SELECTED)
	{
		OffsetRect(&rDraw, 1,1);
	}
	if (state & ODS_FOCUS)
	{
		RECT rFocus = {rDraw.left,
		rDraw.top,
		rDraw.right - 1,
		rDraw.bottom};
		DrawFocusRect ( hDC, &rFocus );
	}
	InflateRect ( &rDraw, -GetSystemMetrics(SM_CXEDGE), -GetSystemMetrics(SM_CYEDGE) );
	// Draw the arrow, right-aligned and vertically centered; grayed when disabled
	rArrow.left = rDraw.right - ARROW_SIZE_CX - GetSystemMetrics(SM_CXEDGE) /2;
	rArrow.right = rArrow.left + ARROW_SIZE_CX;
	rArrow.top = (rDraw.bottom + rDraw.top)/2 - ARROW_SIZE_CY / 2;
	rArrow.bottom = (rDraw.bottom + rDraw.top)/2 + ARROW_SIZE_CY / 2;
	ColorButton_DrawArrow ( hDC, &rArrow, (state & ODS_DISABLED) ? ::GetSysColor(COLOR_GRAYTEXT) : RGB(0,0,0) );
	rDraw.right = rArrow.left - GetSystemMetrics(SM_CXEDGE)/2;
	// Draw separator between the color swatch and the arrow
	DrawEdge ( hDC, &rDraw, EDGE_ETCHED, BF_RIGHT);
	rDraw.right -= (GetSystemMetrics(SM_CXEDGE) * 2) + 1 ;
	// Draw Color swatch (stored in the window's user data) with a black frame
	if ((state & ODS_DISABLED) == 0)
	{
		HBRUSH color = CreateSolidBrush ( (COLORREF)GetWindowLong ( hWnd, GWL_USERDATA ) );
		FillRect ( hDC, &rDraw, color );
		FrameRect ( hDC, &rDraw, (HBRUSH)::GetStockObject(BLACK_BRUSH));
		DeleteObject( color );
	}
}<|fim▁end|> | |
<|file_name|>hostfile_windows.go<|end_file_name|><|fim▁begin|>package main
import (
"os"
)
var (
HOSTFILE = os.Getenv("SystemRoot")+"/system32/drivers/etc/hosts"<|fim▁hole|><|fim▁end|> | FLUSH_CMD []string = nil
) |
<|file_name|>jQuery-provider.js<|end_file_name|><|fim▁begin|>///<reference src="js/tempus-dominus"/>
/*global $ */
tempusDominus.jQueryInterface = function (option, argument) {
    const dispatch = tempusDominus.jQueryHandleThis;
    // A single matched element returns the handler result directly so that
    // getter-style calls work; for multiple elements each one is processed
    // and the jQuery set is returned for chaining.
    if (this.length === 1) {
        return dispatch(this, option, argument);
    }
    // "this" is jquery here
    return this.each(function () {
        dispatch(this, option, argument);
    });
};
tempusDominus.jQueryHandleThis = function (me, option, argument) {
    // Dispatch a jQuery plugin call onto the TempusDominus instance stored
    // in the element's data, creating the instance on first use.
    let data = $(me).data(tempusDominus.Namespace.dataKey);
    if (typeof option === 'object') {
        // FIX: merge user options over the defaults.  The original computed
        // this merge but discarded the result, making the statement a no-op.
        option = $.extend({}, tempusDominus.DefaultOptions, option);
    }
    if (!data) {
        data = new tempusDominus.TempusDominus($(me)[0], option);
        $(me).data(tempusDominus.Namespace.dataKey, data);
    }
    if (typeof option === 'string') {
        if (data[option] === undefined) {
            throw new Error(`No method named "${option}"`);
        }
        if (argument === undefined) {
            return data[option]();
        } else {
            // Mark programmatic date updates so change events can tell them
            // apart from user-driven ones.
            if (option === 'date') {
                data.isDateUpdateThroughDateOptionFromClientCode = true;
            }
            const ret = data[option](argument);
            data.isDateUpdateThroughDateOptionFromClientCode = false;
            return ret;
        }
    }
};
// Resolve the element a toggle control points at: prefer data-target, fall
// back to an anchor-style href ("#id"), else return the element itself.
tempusDominus.getSelectorFromElement = function ($element) {
    let selector = $element.data('target'),
        $selector;
    if (!selector) {
        selector = $element.attr('href') || '';
        // Only accept fragment selectors like "#id"
        selector = /^#[a-z]/i.test(selector) ? selector : null;
    }
    $selector = $(selector);
    if ($selector.length === 0) {
        return $element;
    }
    if (!$selector.data(tempusDominus.Namespace.dataKey)) {
        // NOTE(review): the result of this merge is discarded, and `$(this)`
        // inside this plain function is not the toggling element — this looks
        // like dead code; confirm against upstream before changing/removing.
        $.extend({}, $selector.data(), $(this).data());
    }
    return $selector;
};
/**
* ------------------------------------------------------------------------
* jQuery
* ------------------------------------------------------------------------
*/
$(document)
    // Toggle the picker when an element with data-toggle="<dataKey>" is
    // clicked, unless the click came from an input that is configured to
    // open the picker on focus instead (allowInputToggle).
    .on(
        `click${tempusDominus.Namespace.events.key}.data-api`,
        `[data-toggle="${tempusDominus.Namespace.dataKey}"]`,
        function () {
            const $originalTarget = $(this),
                $target = tempusDominus.getSelectorFromElement($originalTarget),
                config = $target.data(tempusDominus.Namespace.dataKey);
            if ($target.length === 0) {
                return;
            }
            // NOTE(review): `config` is undefined until the picker has been
            // initialized on $target — confirm initialization order.
            if (
                config._options.allowInputToggle &&
                $originalTarget.is('input[data-toggle="datetimepicker"]')
            ) {
                return;
            }
            tempusDominus.jQueryInterface.call($target, 'toggle');
        }
    )
    // Forward input change events to the picker (skipping the synthetic
    // event fired during initialization).
    .on(
        tempusDominus.Namespace.events.change,
        `.${tempusDominus.Namespace.NAME}-input`,
        function (event) {
            const $target = tempusDominus.getSelectorFromElement($(this));
            if ($target.length === 0 || event.isInit) {
                return;
            }
            tempusDominus.jQueryInterface.call($target, '_change', event);
        }
    )
    // Hide the picker when its input loses focus, unless debugging is
    // enabled (either per-instance or via the global `window.debug` flag).
    .on(
        tempusDominus.Namespace.events.blur,
        `.${tempusDominus.Namespace.NAME}-input`,
        function (event) {
            const $target = tempusDominus.getSelectorFromElement($(this)),
                config = $target.data(tempusDominus.Namespace.dataKey);
            if ($target.length === 0) {
                return;
            }
            if (config._options.debug || window.debug) {
                return;
            }
            tempusDominus.jQueryInterface.call($target, 'hide', event);
        }
    )
/*.on(tempusDominus.Namespace.Events.keydown, `.${tempusDominus.Namespace.NAME}-input`, function (event) {
const $target = tempusDominus.getSelectorFromElement($(this));
if ($target.length === 0) {
return;
}
tempusDominus.jQueryInterface.call($target, '_keydown', event);
})
.on(tempusDominus.Namespace.Events.keyup, `.${tempusDominus.Namespace.NAME}-input`, function (event) {
const $target = tempusDominus.getSelectorFromElement($(this));
if ($target.length === 0) {
return;
}
tempusDominus.jQueryInterface.call($target, '_keyup', event);
})*/<|fim▁hole|> .on(
tempusDominus.Namespace.events.focus,
`.${tempusDominus.Namespace.NAME}-input`,
function (event) {
const $target = tempusDominus.getSelectorFromElement($(this)),
config = $target.data(tempusDominus.Namespace.dataKey);
if ($target.length === 0) {
return;
}
if (!config._options.allowInputToggle) {
return;
}
tempusDominus.jQueryInterface.call($target, 'show', event);
}
);
const name = 'tempusDominus';
$.fn[name] = tempusDominus.jQueryInterface;
$.fn[name].Constructor = tempusDominus.TempusDominus;
$.fn[name].noConflict = function () {
$.fn[name] = $.fn[name];
return tempusDominus.jQueryInterface;
};<|fim▁end|> | |
<|file_name|>Display.java<|end_file_name|><|fim▁begin|>package mrdev023.opengl;
import static org.lwjgl.glfw.GLFW.*;
import static org.lwjgl.opengl.GL11.*;
import static org.lwjgl.system.MemoryUtil.*;
import java.awt.*;
import java.nio.*;
import org.lwjgl.*;
import org.lwjgl.glfw.*;
import org.lwjgl.opengl.*;
import mrdev023.exception.*;
public class Display {
	// Current window size; mutated in place by setDisplayMode().
	private static DisplayMode displayMode;
	// Window title, mirrored into GLFW by setTitle().
	private static String TITLE = "";
	// GLFW window handle (0 after destroy()).
	private static long window;
	// Set by setDisplayMode() so wasResized() also reports programmatic changes.
	private static boolean hasResized = false;
	/** Creates the GLFW window using GLFW's default context hints. */
	public static void create(String title,int width,int height){
		if ( !glfwInit() )
			throw new IllegalStateException("Unable to initialize GLFW");
		TITLE = title;
		displayMode = new DisplayMode(width,height);
		window = glfwCreateWindow(displayMode.getWidth(),displayMode.getHeight(), TITLE, NULL, NULL);
	}
<|fim▁hole|> throw new IllegalStateException("Unable to initialize GLFW");
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, major); // Nous voulons OpenGL 3.3
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, minor);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
TITLE = title;
displayMode = new DisplayMode(width,height);
window = glfwCreateWindow(displayMode.getWidth(),displayMode.getHeight(), TITLE, NULL, NULL);
}
	/** Grabs (hides and locks) or releases the mouse cursor. */
	public static void setMouseGrabbed(boolean a){
		if(a){
			glfwSetInputMode(window, GLFW_CURSOR, GLFW_CURSOR_DISABLED);
		}else{
			glfwSetInputMode(window, GLFW_CURSOR, GLFW_CURSOR_NORMAL);
		}
	}
	/** Enables (swap interval 1) or disables (interval 0) vertical sync. */
	public static void setVSync(boolean a) throws DisplayException{
		if(a)glfwSwapInterval(1);
		else glfwSwapInterval(0);
	}
	/** Creates the window with a core-profile context and multisampling. */
	public static void create(String title,int width,int height,int major,int minor,int sample){
		if ( !glfwInit() )
			throw new IllegalStateException("Unable to initialize GLFW");
		glfwWindowHint(GLFW_SAMPLES, sample); // antialiasing 4x
		glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, major); // We want OpenGL 3.3
		glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, minor);
		glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
		TITLE = title;
		displayMode = new DisplayMode(width,height);
		window = glfwCreateWindow(displayMode.getWidth(),displayMode.getHeight(), TITLE, NULL, NULL);
	}
	/** Sets the multisample hint; only affects windows created afterwards. */
	public static void setSample(int sample){
		glfwWindowHint(GLFW_SAMPLES, sample);
	}
	/** Sets the resizable hint; only affects windows created afterwards. */
	public static void setResizable(boolean a){
		if(a)glfwWindowHint(GLFW_RESIZABLE, GL_TRUE);
		else glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);
	}
	public static void setTitle(String title){
		TITLE = title;
		glfwSetWindowTitle(window, TITLE);
	}
	public static String getTitle(){
		return TITLE;
	}
	/**
	 * Polls the current framebuffer size; returns true (and refreshes
	 * displayMode) when it differs from the cached size or when a
	 * programmatic resize was flagged via setDisplayMode().
	 */
	public static boolean wasResized(){
		IntBuffer w = BufferUtils.createIntBuffer(1);
		IntBuffer h = BufferUtils.createIntBuffer(1);
		glfwGetWindowSize(window, w, h);
		int width = w.get(0);
		int height = h.get(0);
		if(Display.getDisplayMode().getWidth() != width || Display.getDisplayMode().getHeight() != height || hasResized){
			setDisplayMode(new DisplayMode(width, height));
			hasResized = false;
			return true;
		}else{
			return false;
		}
	}
	/** Prints name, resolution and refresh rate of every detected monitor. */
	public static void printMonitorsInfo(){
		PointerBuffer monitors = glfwGetMonitors();
		GLFWVidMode m;
		if(monitors == null){
			System.out.println("No monitor detected !");
			return;
		}
		for(int i = 0;i < monitors.capacity();i++){
			m = glfwGetVideoMode(monitors.get(i));
			System.out.println(glfwGetMonitorName(monitors.get(i)) + "(" + i + ") : " + m.width() + "x" + m.height() + ":" + m.refreshRate() + "Hz");
		}
	}
	public static boolean isCloseRequested(){
		return glfwWindowShouldClose(window);
	}
	/** Makes the window's GL context current and loads the GL bindings. */
	public static void createContext(){
		glfwMakeContextCurrent(window);
		GL.createCapabilities();
	}
	public static void updateEvent(){
		glfwPollEvents();
	}
	public static void updateFrame(){
		glfwSwapBuffers(window);
	}
	public static DisplayMode getDisplayMode() {
		return displayMode;
	}
	/** Copies the given mode into the cached one and flags a resize. */
	public static void setDisplayMode(DisplayMode displayMode) {
		if(Display.displayMode == null || displayMode == null)return;
		Display.displayMode.setDisplayMode(displayMode);
		hasResized = true;
	}
	public static void destroy(){
		glfwDestroyWindow(window);
		glfwTerminate();
	}
	public static long getWindow() {
		return window;
	}
}<|fim▁end|> | public static void create(String title,int width,int height,int major,int minor){
if ( !glfwInit() )
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
The MIT License
Copyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import base64
import urllib
import time
import random
import urlparse
import hmac
import binascii
import httplib2
try:
from urlparse import parse_qs
parse_qs # placate pyflakes
except ImportError:
# fall back for Python 2.5
from cgi import parse_qs
try:
from hashlib import sha1
sha = sha1
except ImportError:
# hashlib was added in Python 2.5
import sha
import _version
__version__ = _version.__version__
# Protocol version sent as the oauth_version parameter.
OAUTH_VERSION = '1.0' # Hi Blaine!
# Default HTTP method used when none is given for a request.
HTTP_METHOD = 'GET'
# Default signature method name.
SIGNATURE_METHOD = 'PLAINTEXT'
class Error(RuntimeError):
    """Generic exception class for OAuth failures."""

    def __init__(self, message='OAuth error occurred.'):
        self._message = message

    def __str__(self):
        return self.message

    @property
    def message(self):
        """A hack to get around the deprecation errors in 2.6."""
        return self._message
class MissingSignature(Error):
    """Error subclass raised for missing-signature conditions."""
    pass
def build_authenticate_header(realm=''):
    """Build the optional WWW-Authenticate header for a 401 response.

    Returns a single-entry dict mapping the header name to an OAuth
    challenge that advertises *realm*.
    """
    challenge = 'OAuth realm="%s"' % realm
    return {'WWW-Authenticate': challenge}
def build_xoauth_string(url, consumer, token=None):
    """Build an XOAUTH string for use in SMTP/IMAP authentication."""
    # Sign a GET request for *url* with HMAC-SHA1, then serialize every
    # non-None parameter as key="escaped-value", comma-joined.
    request = Request.from_consumer_and_token(consumer, token,
        "GET", url)
    signing_method = SignatureMethod_HMAC_SHA1()
    request.sign_request(signing_method, consumer, token)
    params = []
    for k, v in sorted(request.iteritems()):
        if v is not None:
            params.append('%s="%s"' % (k, escape(v)))
    # Final shape: 'GET <url> k1="v1",k2="v2",...'
    return "%s %s %s" % ("GET", url, ','.join(params))
def to_unicode(s):
    """ Convert to unicode, raise exception with instructive error
    message if s is not unicode, ascii, or utf-8. """
    # Python 2 only: distinguishes ``unicode`` objects from byte ``str``.
    if not isinstance(s, unicode):
        if not isinstance(s, str):
            raise TypeError('You are required to pass either unicode or string here, not: %r (%s)' % (type(s), s))
        try:
            # Byte strings must be valid UTF-8.
            s = s.decode('utf-8')
        except UnicodeDecodeError, le:
            raise TypeError('You are required to pass either a unicode object or a utf-8 string here. You passed a Python string object which contained non-utf-8: %r. The UnicodeDecodeError that resulted from attempting to interpret it as utf-8 was: %s' % (s, le,))
    return s
def to_utf8(s):
    # Round-trip through to_unicode so invalid input raises TypeError,
    # then re-encode as a UTF-8 byte string.
    return to_unicode(s).encode('utf-8')
def to_unicode_if_string(s):
    # Convert only string-like values; pass every other type through
    # untouched (used for mixed parameter values).
    if isinstance(s, basestring):
        return to_unicode(s)
    else:
        return s
def to_utf8_if_string(s):
    # UTF-8-encode only string-like values; non-strings pass through.
    if isinstance(s, basestring):
        return to_utf8(s)
    else:
        return s
def to_unicode_optional_iterator(x):
    """
    Raise TypeError if x is a str containing non-utf8 bytes or if x is
    an iterable which contains such a str.

    Strings are converted to unicode; iterables are converted element-wise
    into a list; non-iterable, non-string values pass through unchanged.
    """
    if isinstance(x, basestring):
        return to_unicode(x)
    try:
        l = list(x)
    except TypeError, e:
        # Only swallow the "not iterable" TypeError; anything else is a bug.
        assert 'is not iterable' in str(e)
        return x
    else:
        return [ to_unicode(e) for e in l ]
def to_utf8_optional_iterator(x):
    """
    UTF-8-encode x if it is a string, or element-wise if x is an iterable
    of strings; non-string, non-iterable values pass through unchanged.

    Raises TypeError if x (or a contained str) is not valid utf-8/unicode.
    """
    if isinstance(x, basestring):
        return to_utf8(x)
    try:
        l = list(x)
    except TypeError, e:
        # Only swallow the "not iterable" TypeError; anything else is a bug.
        assert 'is not iterable' in str(e)
        return x
    else:
        # NOTE: unlike to_unicode_optional_iterator, non-string elements are
        # passed through rather than rejected (to_utf8_if_string).
        return [ to_utf8_if_string(e) for e in l ]
def escape(s):
    """Escape a URL including any /."""
    # safe='~' matches RFC 5849 3.6: percent-encode everything except
    # unreserved characters; '~' must stay literal.
    return urllib.quote(s.encode('utf-8'), safe='~')
def generate_timestamp():
    """Return the current time as whole seconds since the epoch (UTC)."""
    now = time.time()
    return int(now)
def generate_nonce(length=8):
    """Return a pseudorandom string of *length* decimal digits.

    NOTE(review): uses the non-cryptographic ``random`` module; adequate
    for nonce uniqueness, but do not reuse for secrets.
    """
    digits = [str(random.randint(0, 9)) for _ in range(length)]
    return ''.join(digits)
def generate_verifier(length=8):
    """Return a pseudorandom verifier string of *length* decimal digits.

    NOTE(review): same non-cryptographic ``random`` source as
    generate_nonce; do not reuse for secrets.
    """
    digits = [str(random.randint(0, 9)) for _ in range(length)]
    return ''.join(digits)
class Consumer(object):
    """A consumer of OAuth-protected services.
    The OAuth consumer is a "third-party" service that wants to access
    protected resources from an OAuth service provider on behalf of an end
    user. It's kind of the OAuth client.
    Usually a consumer must be registered with the service provider by the
    developer of the consumer software. As part of that process, the service
    provider gives the consumer a *key* and a *secret* with which the consumer
    software can identify itself to the service. The consumer will include its
    key in each request to identify itself, but will use its secret only when
    signing requests, to prove that the request is from that particular
    registered consumer.
    Once registered, the consumer can then use its consumer credentials to ask
    the service provider for a request token, kicking off the OAuth
    authorization process.
    """
    # Class-level defaults; real values are assigned per instance.
    key = None
    secret = None
    def __init__(self, key, secret):
        self.key = key
        self.secret = secret
        # Both credentials are mandatory.
        if self.key is None or self.secret is None:
            raise ValueError("Key and secret must be set.")
    def __str__(self):
        # Serialize as a form-encoded query string. WARNING: includes the
        # secret, so never log or expose this representation.
        data = {'oauth_consumer_key': self.key,
            'oauth_consumer_secret': self.secret}
        return urllib.urlencode(data)
class Token(object):
    """An OAuth credential used to request authorization or a protected
    resource.
    Tokens in OAuth comprise a *key* and a *secret*. The key is included in
    requests to identify the token being used, but the secret is used only in
    the signature, to prove that the requester is who the server gave the
    token to.
    When first negotiating the authorization, the consumer asks for a *request
    token* that the live user authorizes with the service provider. The
    consumer then exchanges the request token for an *access token* that can
    be used to access protected resources.
    """
    # Class-level defaults; per-instance values are set below.
    key = None
    secret = None
    callback = None
    callback_confirmed = None
    verifier = None
    def __init__(self, key, secret):
        self.key = key
        self.secret = secret
        # Both credentials are mandatory.
        if self.key is None or self.secret is None:
            raise ValueError("Key and secret must be set.")
    def set_callback(self, callback):
        # Per OAuth 1.0a the server echoes oauth_callback_confirmed=true
        # once it has accepted the callback URL.
        self.callback = callback
        self.callback_confirmed = 'true'
    def set_verifier(self, verifier=None):
        # Accept a server-supplied verifier or generate a local one.
        if verifier is not None:
            self.verifier = verifier
        else:
            self.verifier = generate_verifier()
    def get_callback_url(self):
        """Return the callback URL with oauth_verifier appended, if any."""
        if self.callback and self.verifier:
            # Append the oauth_verifier.
            parts = urlparse.urlparse(self.callback)
            scheme, netloc, path, params, query, fragment = parts[:6]
            if query:
                query = '%s&oauth_verifier=%s' % (query, self.verifier)
            else:
                query = 'oauth_verifier=%s' % self.verifier
            return urlparse.urlunparse((scheme, netloc, path, params,
                query, fragment))
        return self.callback
    def to_string(self):
        """Returns this token as a plain string, suitable for storage.
        The resulting string includes the token's secret, so you should never
        send or store this string where a third party can read it.
        """
        data = {
            'oauth_token': self.key,
            'oauth_token_secret': self.secret,
        }
        if self.callback_confirmed is not None:
            data['oauth_callback_confirmed'] = self.callback_confirmed
        return urllib.urlencode(data)
    @staticmethod
    def from_string(s):
        """Deserializes a token from a string like one returned by
        `to_string()`."""
        if not len(s):
            raise ValueError("Invalid parameter string.")
        params = parse_qs(s, keep_blank_values=False)
        if not len(params):
            raise ValueError("Invalid parameter string.")
        try:
            key = params['oauth_token'][0]
        except Exception:
            raise ValueError("'oauth_token' not found in OAuth request.")
        try:
            secret = params['oauth_token_secret'][0]
        except Exception:
            raise ValueError("'oauth_token_secret' not found in "
                "OAuth request.")
        token = Token(key, secret)
        try:
            token.callback_confirmed = params['oauth_callback_confirmed'][0]
        except KeyError:
            pass # 1.0, no callback confirmed.
        return token
    def __str__(self):
        return self.to_string()
def setter(attr):
    """Decorator that turns a setter function into a full property.

    The generated getter reads the value from the instance ``__dict__``
    under the setter's name (raising AttributeError when unset); *attr*
    itself becomes the setter and the deleter removes the stored value.
    """
    key = attr.__name__
    def _get(self):
        if key in self.__dict__:
            return self.__dict__[key]
        raise AttributeError(key)
    def _delete(self):
        del self.__dict__[key]
    return property(_get, attr, _delete)
class Request(dict):
    """The parameters and information for an HTTP request, suitable for
    authorizing with OAuth credentials.
    When a consumer wants to access a service's protected resources, it does
    so using a signed HTTP request identifying itself (the consumer) with its
    key, and providing an access token authorized by the end user to access
    those resources.
    The dict contents are the request parameters (OAuth and otherwise).
    """
    version = OAUTH_VERSION
    def __init__(self, method=HTTP_METHOD, url=None, parameters=None,
                 body='', is_form_encoded=False):
        # All keys/values are normalized to unicode before storage.
        if url is not None:
            self.url = to_unicode(url)
        self.method = method
        if parameters is not None:
            for k, v in parameters.iteritems():
                k = to_unicode(k)
                v = to_unicode_optional_iterator(v)
                self[k] = v
        self.body = body
        self.is_form_encoded = is_form_encoded
    @setter
    def url(self, value):
        # Setting url also computes normalized_url (scheme://host/path,
        # default ports stripped) used in the signature base string.
        self.__dict__['url'] = value
        if value is not None:
            scheme, netloc, path, params, query, fragment = urlparse.urlparse(value)
            # Exclude default port numbers.
            if scheme == 'http' and netloc[-3:] == ':80':
                netloc = netloc[:-3]
            elif scheme == 'https' and netloc[-4:] == ':443':
                netloc = netloc[:-4]
            if scheme not in ('http', 'https'):
                raise ValueError("Unsupported URL %s (%s)." % (value, scheme))
            # Normalized URL excludes params, query, and fragment.
            self.normalized_url = urlparse.urlunparse((scheme, netloc, path, None, None, None))
        else:
            self.normalized_url = None
            self.__dict__['url'] = None
    @setter
    def method(self, value):
        # HTTP method is stored upper-cased for signing consistency.
        self.__dict__['method'] = value.upper()
    def _get_timestamp_nonce(self):
        # Raises KeyError if either OAuth parameter is absent.
        return self['oauth_timestamp'], self['oauth_nonce']
    def get_nonoauth_parameters(self):
        """Get any non-OAuth parameters."""
        return dict([(k, v) for k, v in self.iteritems()
                    if not k.startswith('oauth_')])
    def to_header(self, realm=''):
        """Serialize as a header for an HTTPAuth request."""
        # Only oauth_* parameters belong in the Authorization header.
        oauth_params = ((k, v) for k, v in self.items()
                            if k.startswith('oauth_'))
        stringy_params = ((k, escape(str(v))) for k, v in oauth_params)
        header_params = ('%s="%s"' % (k, v) for k, v in stringy_params)
        params_header = ', '.join(header_params)
        auth_header = 'OAuth realm="%s"' % realm
        if params_header:
            auth_header = "%s, %s" % (auth_header, params_header)
        return {'Authorization': auth_header}
    def to_postdata(self):
        """Serialize as post data for a POST request."""
        d = {}
        for k, v in self.iteritems():
            d[k.encode('utf-8')] = to_utf8_optional_iterator(v)
        # tell urlencode to deal with sequence values and map them correctly
        # to resulting querystring. for example self["k"] = ["v1", "v2"] will
        # result in 'k=v1&k=v2' and not k=%5B%27v1%27%2C+%27v2%27%5D
        return urllib.urlencode(d, True).replace('+', '%20')
    def to_url(self):
        """Serialize as a URL for a GET request."""
        base_url = urlparse.urlparse(self.url)
        try:
            query = base_url.query
        except AttributeError:
            # must be python <2.5
            query = base_url[4]
        # Merge this request's parameters into the URL's existing query.
        query = parse_qs(query)
        for k, v in self.items():
            query.setdefault(k, []).append(v)
        try:
            scheme = base_url.scheme
            netloc = base_url.netloc
            path = base_url.path
            params = base_url.params
            fragment = base_url.fragment
        except AttributeError:
            # must be python <2.5
            scheme = base_url[0]
            netloc = base_url[1]
            path = base_url[2]
            params = base_url[3]
            fragment = base_url[5]
        url = (scheme, netloc, path, params,
               urllib.urlencode(query, True), fragment)
        return urlparse.urlunparse(url)
    def get_parameter(self, parameter):
        # Like dict.get but raises the package Error when missing.
        ret = self.get(parameter)
        if ret is None:
            raise Error('Parameter not found: %s' % parameter)
        return ret
    def get_normalized_parameters(self):
        """Return a string that contains the parameters that must be signed."""
        items = []
        for key, value in self.iteritems():
            if key == 'oauth_signature':
                continue
            # 1.0a/9.1.1 states that kvp must be sorted by key, then by value,
            # so we unpack sequence values into multiple items for sorting.
            if isinstance(value, basestring):
                items.append((to_utf8_if_string(key), to_utf8(value)))
            else:
                try:
                    value = list(value)
                except TypeError, e:
                    assert 'is not iterable' in str(e)
                    items.append((to_utf8_if_string(key), to_utf8_if_string(value)))
                else:
                    items.extend((to_utf8_if_string(key), to_utf8_if_string(item)) for item in value)
        # Include any query string parameters from the provided URL
        query = urlparse.urlparse(self.url)[4]
        url_items = self._split_url_string(query).items()
        url_items = [(to_utf8(k), to_utf8(v)) for k, v in url_items if k != 'oauth_signature' ]
        items.extend(url_items)
        items.sort()
        encoded_str = urllib.urlencode(items)
        # Encode signature parameters per Oauth Core 1.0 protocol
        # spec draft 7, section 3.6
        # (http://tools.ietf.org/html/draft-hammer-oauth-07#section-3.6)
        # Spaces must be encoded with "%20" instead of "+"
        return encoded_str.replace('+', '%20').replace('%7E', '~')
    def sign_request(self, signature_method, consumer, token):
        """Set the signature parameter to the result of sign."""
        if not self.is_form_encoded:
            # according to
            # http://oauth.googlecode.com/svn/spec/ext/body_hash/1.0/oauth-bodyhash.html
            # section 4.1.1 "OAuth Consumers MUST NOT include an
            # oauth_body_hash parameter on requests with form-encoded
            # request bodies."
            self['oauth_body_hash'] = base64.b64encode(sha(self.body).digest())
        if 'oauth_consumer_key' not in self:
            self['oauth_consumer_key'] = consumer.key
        if token and 'oauth_token' not in self:
            self['oauth_token'] = token.key
        self['oauth_signature_method'] = signature_method.name
        self['oauth_signature'] = signature_method.sign(self, consumer, token)
    @classmethod
    def make_timestamp(cls):
        """Get seconds since epoch (UTC)."""
        return str(int(time.time()))
    @classmethod
    def make_nonce(cls):
        """Generate pseudorandom number."""
        return str(random.randint(0, 100000000))
    @classmethod
    def from_request(cls, http_method, http_url, headers=None, parameters=None,
            query_string=None):
        """Combines multiple parameter sources."""
        # Precedence (lowest to highest): explicit parameters, the
        # Authorization header, the query string, then URL parameters.
        if parameters is None:
            parameters = {}
        # Headers
        if headers and 'Authorization' in headers:
            auth_header = headers['Authorization']
            # Check that the authorization header is OAuth.
            if auth_header[:6] == 'OAuth ':
                auth_header = auth_header[6:]
                try:
                    # Get the parameters from the header.
                    header_params = cls._split_header(auth_header)
                    parameters.update(header_params)
                except:
                    raise Error('Unable to parse OAuth parameters from '
                        'Authorization header.')
        # GET or POST query string.
        if query_string:
            query_params = cls._split_url_string(query_string)
            parameters.update(query_params)
        # URL parameters.
        param_str = urlparse.urlparse(http_url)[4] # query
        url_params = cls._split_url_string(param_str)
        parameters.update(url_params)
        if parameters:
            return cls(http_method, http_url, parameters)
        return None
    @classmethod
    def from_consumer_and_token(cls, consumer, token=None,
            http_method=HTTP_METHOD, http_url=None, parameters=None,
            body='', is_form_encoded=False):
        # Build a request pre-populated with the standard OAuth parameters;
        # caller-supplied *parameters* override the generated defaults.
        if not parameters:
            parameters = {}
        defaults = {
            'oauth_consumer_key': consumer.key,
            'oauth_timestamp': cls.make_timestamp(),
            'oauth_nonce': cls.make_nonce(),
            'oauth_version': cls.version,
        }
        defaults.update(parameters)
        parameters = defaults
        if token:
            parameters['oauth_token'] = token.key
            if token.verifier:
                parameters['oauth_verifier'] = token.verifier
        return Request(http_method, http_url, parameters, body=body,
                       is_form_encoded=is_form_encoded)
    @classmethod
    def from_token_and_callback(cls, token, callback=None,
            http_method=HTTP_METHOD, http_url=None, parameters=None):
        # Used when redirecting the user to the authorization URL.
        if not parameters:
            parameters = {}
        parameters['oauth_token'] = token.key
        if callback:
            parameters['oauth_callback'] = callback
        return cls(http_method, http_url, parameters)
    @staticmethod
    def _split_header(header):
        """Turn Authorization: header into parameters."""
        params = {}
        parts = header.split(',')
        for param in parts:
            # Ignore realm parameter.
            if param.find('realm') > -1:
                continue
            # Remove whitespace.
            param = param.strip()
            # Split key-value.
            param_parts = param.split('=', 1)
            # Remove quotes and unescape the value.
            params[param_parts[0]] = urllib.unquote(param_parts[1].strip('\"'))
        return params
    @staticmethod
    def _split_url_string(param_str):
        """Turn URL string into parameters."""
        # Only the first value of a repeated key is kept.
        parameters = parse_qs(param_str.encode('utf-8'), keep_blank_values=True)
        for k, v in parameters.iteritems():
            parameters[k] = urllib.unquote(v[0])
        return parameters
class Client(httplib2.Http):
    """OAuthClient is a worker to attempt to execute a request.

    Wraps httplib2.Http and transparently signs every request with the
    configured consumer/token using HMAC-SHA1 by default.
    """
    def __init__(self, consumer, token=None, cache=None, timeout=None,
        proxy_info=None):
        if consumer is not None and not isinstance(consumer, Consumer):
            raise ValueError("Invalid consumer.")
        if token is not None and not isinstance(token, Token):
            raise ValueError("Invalid token.")
        self.consumer = consumer
        self.token = token
        self.method = SignatureMethod_HMAC_SHA1()
        httplib2.Http.__init__(self, cache=cache, timeout=timeout, proxy_info=proxy_info)
    def set_signature_method(self, method):
        # Swap in an alternative SignatureMethod implementation.
        if not isinstance(method, SignatureMethod):
            raise ValueError("Invalid signature method.")
        self.method = method
    def request(self, uri, method="GET", body='', headers=None,
        redirections=httplib2.DEFAULT_MAX_REDIRECTS, connection_type=None):
        """Sign and send an HTTP request; same signature as httplib2.Http.request."""
        DEFAULT_POST_CONTENT_TYPE = 'application/x-www-form-urlencoded'
        if not isinstance(headers, dict):
            headers = {}
        if method == "POST":
            headers['Content-Type'] = headers.get('Content-Type',
                DEFAULT_POST_CONTENT_TYPE)
        is_form_encoded = \
            headers.get('Content-Type') == 'application/x-www-form-urlencoded'
        # Form-encoded bodies are parsed so the fields take part in signing.
        if is_form_encoded and body:
            parameters = dict([(k,v[0]) for k,v in parse_qs(body).items()])
        else:
            parameters = None
        req = Request.from_consumer_and_token(self.consumer,
            token=self.token, http_method=method, http_url=uri,
            parameters=parameters, body=body, is_form_encoded=is_form_encoded)
        req.sign_request(self.method, self.consumer, self.token)
        # Derive the realm (scheme://host) for the Authorization header.
        schema, rest = urllib.splittype(uri)
        if rest.startswith('//'):
            hierpart = '//'
        else:
            hierpart = ''
        host, rest = urllib.splithost(rest)
        realm = schema + ':' + hierpart + host
        # Where the OAuth parameters travel depends on the request shape:
        # form-encoded POST -> body; GET -> query string; else -> header.
        if method == "POST" and is_form_encoded:
            body = req.to_postdata()
        elif method == "GET":
            uri = req.to_url()
        else:
            headers.update(req.to_header(realm=realm))
        return httplib2.Http.request(self, uri, method=method, body=body,
            headers=headers, redirections=redirections,
            connection_type=connection_type)
class Server(object):
    """A skeletal implementation of a service provider, providing protected
    resources to requests from authorized consumers.
    This class implements the logic to check requests for authorization. You
    can use it with your web server or web framework to protect certain
    resources with OAuth.
    """
    timestamp_threshold = 300 # In seconds, five minutes.
    version = OAUTH_VERSION
    signature_methods = None
    def __init__(self, signature_methods=None):
        # Map of signature-method name -> SignatureMethod instance.
        self.signature_methods = signature_methods or {}
    def add_signature_method(self, signature_method):
        # Register a SignatureMethod under its declared name.
        self.signature_methods[signature_method.name] = signature_method
        return self.signature_methods
    def verify_request(self, request, consumer, token):
        """Verifies an api call and checks all the parameters.

        Raises Error/MissingSignature on failure; returns the non-OAuth
        parameters on success.
        """
        self._check_version(request)
        self._check_signature(request, consumer, token)
        parameters = request.get_nonoauth_parameters()
        return parameters
    def build_authenticate_header(self, realm=''):
        """Optional support for the authenticate header."""
        return {'WWW-Authenticate': 'OAuth realm="%s"' % realm}
    def _check_version(self, request):
        """Verify the correct version of the request for this server."""
        version = self._get_version(request)
        if version and version != self.version:
            raise Error('OAuth version %s not supported.' % str(version))
    def _get_version(self, request):
        """Return the version of the request for this server."""
        try:
            version = request.get_parameter('oauth_version')
        except:
            # Missing oauth_version defaults to 1.0 per the spec.
            version = OAUTH_VERSION
        return version
    def _get_signature_method(self, request):
        """Figure out the signature with some defaults."""
        try:
            signature_method = request.get_parameter('oauth_signature_method')
        except:
            signature_method = SIGNATURE_METHOD
        try:
            # Get the signature method object.
            signature_method = self.signature_methods[signature_method]
        except:
            signature_method_names = ', '.join(self.signature_methods.keys())
            raise Error('Signature method %s not supported try one of the following: %s' % (signature_method, signature_method_names))
        return signature_method
    def _get_verifier(self, request):
        return request.get_parameter('oauth_verifier')
    def _check_signature(self, request, consumer, token):
        # Validates timestamp freshness first, then the signature itself.
        timestamp, nonce = request._get_timestamp_nonce()
        self._check_timestamp(timestamp)
        signature_method = self._get_signature_method(request)
        try:
            signature = request.get_parameter('oauth_signature')
        except:
            raise MissingSignature('Missing oauth_signature.')
        # Validate the signature.
        valid = signature_method.check(request, consumer, token, signature)
        if not valid:
            key, base = signature_method.signing_base(request, consumer, token)
            raise Error('Invalid signature. Expected signature base '
                'string: %s' % base)
    def _check_timestamp(self, timestamp):
        """Verify that timestamp is recentish."""
        timestamp = int(timestamp)
        now = int(time.time())
        lapsed = now - timestamp
        if lapsed > self.timestamp_threshold:
            raise Error('Expired timestamp: given %d and now %s has a '
                'greater difference than threshold %d' % (timestamp, now,
                    self.timestamp_threshold))
class SignatureMethod(object):
    """A way of signing requests.
    The OAuth protocol lets consumers and service providers pick a way to sign
    requests. This interface shows the methods expected by the other `oauth`
    modules for signing requests. Subclass it and implement its methods to
    provide a new way to sign requests.
    """
    def signing_base(self, request, consumer, token):
        """Calculates the string that needs to be signed.
        This method returns a 2-tuple containing the starting key for the
        signing and the message to be signed. The latter may be used in error
        messages to help clients debug their software.
        """
        raise NotImplementedError
    def sign(self, request, consumer, token):
        """Returns the signature for the given request, based on the consumer
        and token also provided.
        You should use your implementation of `signing_base()` to build the
        message to sign. Otherwise it may be less useful for debugging.
        """
        raise NotImplementedError
    def check(self, request, consumer, token, signature):
        """Returns whether the given signature is the correct signature for
        the given consumer and token signing the given request."""
        # Re-sign and compare; subclasses only need signing_base/sign.
        built = self.sign(request, consumer, token)
        return built == signature
class SignatureMethod_HMAC_SHA1(SignatureMethod):
    # HMAC-SHA1 signing per OAuth Core 1.0 section 9.2.
    name = 'HMAC-SHA1'
    def signing_base(self, request, consumer, token):
        """Return (key, raw) where raw is 'METHOD&url&params' and key is
        'consumer_secret&[token_secret]', both percent-escaped."""
        if not hasattr(request, 'normalized_url') or request.normalized_url is None:
            raise ValueError("Base URL for request is not set.")
        sig = (
            escape(request.method),
            escape(request.normalized_url),
            escape(request.get_normalized_parameters()),
        )
        key = '%s&' % escape(consumer.secret)
        if token:
            key += escape(token.secret)
        raw = '&'.join(sig)
        return key, raw
    def sign(self, request, consumer, token):
        """Return the base64-encoded HMAC-SHA1 signature for the request."""
        key, raw = self.signing_base(request, consumer, token)
        hashed = hmac.new(key, raw, sha)
        # Calculate the digest base 64; [:-1] strips b2a_base64's trailing
        # newline.
        return binascii.b2a_base64(hashed.digest())[:-1]
class SignatureMethod_PLAINTEXT(SignatureMethod):
    # PLAINTEXT signing per OAuth Core 1.0 section 9.4: no hashing, the
    # signature IS the key. Only safe over an encrypted channel.
    name = 'PLAINTEXT'
    def signing_base(self, request, consumer, token):
        """Concatenates the consumer key and secret with the token's
        secret."""
        sig = '%s&' % escape(consumer.secret)
        if token:
            sig = sig + escape(token.secret)
        # Key and message are identical for PLAINTEXT.
        return sig, sig
    def sign(self, request, consumer, token):
        key, raw = self.signing_base(request, consumer, token)
        return raw
<|file_name|>handler.go<|end_file_name|><|fim▁begin|>// Copyright 2017 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
package backend
import (
"errors"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/consensus"
"github.com/ethereum/go-ethereum/consensus/istanbul"
"github.com/ethereum/go-ethereum/p2p"
lru "github.com/hashicorp/golang-lru"
)
const (
istanbulMsg = 0x11
)
var (
// errDecodeFailed is returned when decode message fails
errDecodeFailed = errors.New("fail to decode istanbul message")
)
// Protocol implements consensus.Engine.Protocol
// It advertises a dedicated "istanbul" devp2p protocol with 18 message
// codes so that istanbulMsg (0x11) is within range.
func (sb *backend) Protocol() consensus.Protocol {
	return consensus.Protocol{
		Name:     "istanbul",
		Versions: []uint{64},
		Lengths:  []uint64{18},
	}
}
// HandleMsg implements consensus.Handler.HandleMsg
// It consumes istanbul consensus messages (code istanbulMsg) and returns
// (true, err) for handled messages, (false, nil) for everything else so
// the caller can fall through to other handlers.
func (sb *backend) HandleMsg(addr common.Address, msg p2p.Msg) (bool, error) {
	sb.coreMu.Lock()
	defer sb.coreMu.Unlock()
	if msg.Code == istanbulMsg {
		if !sb.coreStarted {
			return true, istanbul.ErrStoppedEngine
		}
		var data []byte
		if err := msg.Decode(&data); err != nil {
			return true, errDecodeFailed
		}
		hash := istanbul.RLPHash(data)
		// Mark peer's message: remember this payload hash against the
		// sending peer so we avoid echoing it back (per-peer ARC cache).
		ms, ok := sb.recentMessages.Get(addr)
		var m *lru.ARCCache
		if ok {
			m, _ = ms.(*lru.ARCCache)
		} else {
			m, _ = lru.NewARC(inmemoryMessages)
			sb.recentMessages.Add(addr, m)
		}
		m.Add(hash, true)
		// Mark self known message: drop duplicates we've already posted.
		if _, ok := sb.knownMessages.Get(hash); ok {
			return true, nil
		}
		sb.knownMessages.Add(hash, true)
		// Hand the payload to the core asynchronously via the event mux.
		go sb.istanbulEventMux.Post(istanbul.MessageEvent{
			Payload: data,
		})
		return true, nil
	}
	return false, nil
}
// SetBroadcaster implements consensus.Handler.SetBroadcaster
// It stores the protocol-manager broadcaster used to reach peers.
func (sb *backend) SetBroadcaster(broadcaster consensus.Broadcaster) {
	sb.broadcaster = broadcaster
}
// NewChainHead notifies the istanbul core that a new block has been
// committed to the chain. Returns ErrStoppedEngine when the core is not
// running; the event itself is posted asynchronously.
func (sb *backend) NewChainHead() error {
	sb.coreMu.RLock()
	defer sb.coreMu.RUnlock()
	if !sb.coreStarted {
		return istanbul.ErrStoppedEngine
	}
	go sb.istanbulEventMux.Post(istanbul.FinalCommittedEvent{})
	return nil
}
<|file_name|>missing-macro-use.rs<|end_file_name|><|fim▁begin|>// aux-build:two_macros.rs
extern crate two_macros;
pub fn main() {
macro_two!();
//~^ ERROR cannot find macro `macro_two` in this scope<|fim▁hole|>}<|fim▁end|> | |
<|file_name|>type_ellipsis_test.go<|end_file_name|><|fim▁begin|>package acceptance_test
import (
. "github.com/mokiat/gostub/acceptance"
"github.com/mokiat/gostub/acceptance/acceptance_stubs"
"github.com/mokiat/gostub/acceptance/external/external_dup"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
// Ginkgo spec: verifies that generated stubs support variadic (ellipsis)
// method parameters — stubbing behavior, call counting, and argument capture.
var _ = Describe("TypeEllipsis", func() {
	var stub *acceptance_stubs.EllipsisSupportStub
	var methodWasCalled bool
	var methodEllipsisArg []external.Address
	var firstAddress external.Address
	var secondAddress external.Address
	BeforeEach(func() {
		// Fresh stub and capture state for every spec.
		stub = new(acceptance_stubs.EllipsisSupportStub)
		methodWasCalled = false
		methodEllipsisArg = []external.Address{}
		firstAddress = external.Address{
			Value: 1,
		}
		secondAddress = external.Address{
			Value: 2,
		}
	})
	It("stub is assignable to interface", func() {
		_, assignable := interface{}(stub).(EllipsisSupport)
		Ω(assignable).Should(BeTrue())
	})
	It("is possible to stub the behavior", func() {
		// The variadic arguments arrive as a slice in the stub callback.
		stub.MethodStub = func(arg1 string, arg2 int, ellipsis ...external.Address) {
			methodWasCalled = true
			methodEllipsisArg = ellipsis
		}
		stub.Method("whatever", 0, firstAddress, secondAddress)
		Ω(methodWasCalled).Should(BeTrue())
		Ω(methodEllipsisArg).Should(Equal([]external.Address{firstAddress, secondAddress}))
	})
	It("is possible to get call count", func() {
		stub.Method("whatever", 0, firstAddress, secondAddress)
		stub.Method("whatever", 0, firstAddress, secondAddress)
		Ω(stub.MethodCallCount()).Should(Equal(2))
	})
	It("is possible to get arguments for call", func() {
		stub.Method("first", 1, firstAddress)
		stub.Method("second", 2, firstAddress, secondAddress)
		// ArgsForCall returns the captured variadic slice per invocation.
		_, _, argAddresses := stub.MethodArgsForCall(0)
		Ω(argAddresses).Should(Equal([]external.Address{firstAddress}))
		_, _, argAddresses = stub.MethodArgsForCall(1)
		Ω(argAddresses).Should(Equal([]external.Address{firstAddress, secondAddress}))
	})
})
<|file_name|>htmlselectelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::attr::Attr;
use dom::attr::AttrHelpers;
use dom::bindings::codegen::Bindings::HTMLSelectElementBinding;
use dom::bindings::codegen::Bindings::HTMLSelectElementBinding::HTMLSelectElementMethods;
use dom::bindings::codegen::InheritTypes::{HTMLElementCast, NodeCast};
use dom::bindings::codegen::InheritTypes::{ElementCast, HTMLSelectElementDerived, HTMLFieldSetElementDerived};
use dom::bindings::codegen::UnionTypes::HTMLElementOrLong;
use dom::bindings::codegen::UnionTypes::HTMLOptionElementOrHTMLOptGroupElement;
use dom::bindings::js::{JSRef, Temporary};
use dom::document::Document;
use dom::element::{AttributeHandlers, Element};
use dom::eventtarget::{EventTarget, EventTargetTypeId};
use dom::element::ElementTypeId;
use dom::htmlelement::{HTMLElement, HTMLElementTypeId};
use dom::node::{DisabledStateHelpers, Node, NodeHelpers, NodeTypeId, window_from_node};
use dom::validitystate::ValidityState;
use dom::virtualmethods::VirtualMethods;
use util::str::DOMString;
use string_cache::Atom;
use std::borrow::ToOwned;
// DOM <select> element; inherits all state/behavior from HTMLElement.
#[dom_struct]
pub struct HTMLSelectElement {
    htmlelement: HTMLElement
}
impl HTMLSelectElementDerived for EventTarget {
    // Downcast check: true iff this event target's concrete node type is
    // HTMLSelectElement.
    fn is_htmlselectelement(&self) -> bool {
        *self.type_id() == EventTargetTypeId::Node(NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLSelectElement)))
    }
}
impl HTMLSelectElement {
    // Construct the plain struct without reflecting it into the JS heap.
    fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>) -> HTMLSelectElement {
        HTMLSelectElement {
            htmlelement: HTMLElement::new_inherited(HTMLElementTypeId::HTMLSelectElement, localName, prefix, document)
        }
    }
    // Construct and reflect a new element node for *document*.
    #[allow(unrooted_must_root)]
    pub fn new(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>) -> Temporary<HTMLSelectElement> {
        let element = HTMLSelectElement::new_inherited(localName, prefix, document);
        Node::reflect_node(box element, document, HTMLSelectElementBinding::Wrap)
    }
}
impl<'a> HTMLSelectElementMethods for JSRef<'a, HTMLSelectElement> {
    // https://html.spec.whatwg.org/multipage/#dom-cva-validity
    fn Validity(self) -> Temporary<ValidityState> {
        let window = window_from_node(self).root();
        ValidityState::new(window.r())
    }
    // Note: this function currently only exists for test_union.html.
    fn Add(self, _element: HTMLOptionElementOrHTMLOptGroupElement, _before: Option<HTMLElementOrLong>) {
    }
    // https://www.whatwg.org/html/#dom-fe-disabled
    make_bool_getter!(Disabled);
    // https://www.whatwg.org/html/#dom-fe-disabled
    make_bool_setter!(SetDisabled, "disabled");
    // https://html.spec.whatwg.org/multipage/#dom-select-type
    // Type is derived from the presence of the `multiple` attribute.
    fn Type(self) -> DOMString {
        let elem: JSRef<Element> = ElementCast::from_ref(self);
        if elem.has_attribute(&atom!("multiple")) {
            "select-multiple".to_owned()
        } else {
            "select-one".to_owned()
        }
    }
}
impl<'a> VirtualMethods for JSRef<'a, HTMLSelectElement> {
    // Delegate virtual-method dispatch to the HTMLElement supertype.
    fn super_type<'b>(&'b self) -> Option<&'b VirtualMethods> {
        let htmlelement: &JSRef<HTMLElement> = HTMLElementCast::from_borrowed_ref(self);
        Some(htmlelement as &VirtualMethods)
    }
    // Adding the `disabled` attribute flips the node's disabled/enabled
    // state flags (after letting the supertype react first).
    fn after_set_attr(&self, attr: JSRef<Attr>) {
        if let Some(ref s) = self.super_type() {
            s.after_set_attr(attr);
        }
        match attr.local_name() {
            &atom!("disabled") => {
                let node: JSRef<Node> = NodeCast::from_ref(*self);
                node.set_disabled_state(true);
                node.set_enabled_state(false);
            },
            _ => ()
        }
    }
    // Removing `disabled` re-enables the node, then re-checks ancestors
    // (a disabled <fieldset> ancestor can still keep the control disabled).
    fn before_remove_attr(&self, attr: JSRef<Attr>) {
        if let Some(ref s) = self.super_type() {
            s.before_remove_attr(attr);
        }
        match attr.local_name() {
            &atom!("disabled") => {
                let node: JSRef<Node> = NodeCast::from_ref(*self);
                node.set_disabled_state(false);
                node.set_enabled_state(true);
                node.check_ancestors_disabled_state_for_form_control();
            },
            _ => ()
        }
    }
    // On insertion into a tree, inherit disabled state from any disabled
    // <fieldset> ancestor.
    fn bind_to_tree(&self, tree_in_doc: bool) {
        if let Some(ref s) = self.super_type() {
            s.bind_to_tree(tree_in_doc);
        }
        let node: JSRef<Node> = NodeCast::from_ref(*self);
        node.check_ancestors_disabled_state_for_form_control();
    }
fn unbind_from_tree(&self, tree_in_doc: bool) {
if let Some(ref s) = self.super_type() {
s.unbind_from_tree(tree_in_doc);
}
<|fim▁hole|> } else {
node.check_disabled_attribute();
}
}
}<|fim▁end|> | let node: JSRef<Node> = NodeCast::from_ref(*self);
if node.ancestors().any(|ancestor| ancestor.root().r().is_htmlfieldsetelement()) {
node.check_ancestors_disabled_state_for_form_control(); |
<|file_name|>ChargeServiceTest.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright (c) 2019 Intuit
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.intuit.payment.services;
import com.intuit.payment.config.RequestContext;
import com.intuit.payment.data.Capture;
import com.intuit.payment.data.Card;
import com.intuit.payment.data.Charge;
import com.intuit.payment.data.DeviceInfo;
import com.intuit.payment.data.PaymentContext;
import com.intuit.payment.data.Refund;
import com.intuit.payment.exception.BaseException;
import com.intuit.payment.http.Request;
import com.intuit.payment.http.Response;
import com.intuit.payment.services.base.ServiceBase;
import com.intuit.payment.util.JsonUtil;
import mockit.Expectations;
import mockit.Mocked;
import org.testng.Assert;
import org.testng.annotations.Test;
import java.math.BigDecimal;
import java.util.Date;
public class ChargeServiceTest {
@Mocked
private ServiceBase serviceBase;
@Test
public void testChargeServiceCreation() {
ChargeService chargeService = new ChargeService();
Assert.assertNull(chargeService.getRequestContext());
}
@Test
public void testChargeServiceRequestContext() {
RequestContext requestContext = new RequestContext();
requestContext.setBaseUrl("SomeBaseUrl");
ChargeService chargeService = new ChargeService(requestContext);
Assert.assertEquals(requestContext, chargeService.getRequestContext());
RequestContext secondRequestContext = new RequestContext();
secondRequestContext.setBaseUrl("AnotherBaseUrl");
chargeService.setRequestContext(secondRequestContext);
Assert.assertEquals(secondRequestContext, chargeService.getRequestContext());
}
@Test
public void testCreateCharge() throws BaseException {
// Build response object
Charge expectedCharge = new Charge.Builder()
.amount(new BigDecimal(1234))
.currency("SomeCurrency")
.card(new Card.Builder().name("SomeCardName").build())
.build();
// Serialize response object as JSON string
final String serializedResponseString = JsonUtil.serialize(expectedCharge);
final Response mockedResponse = new Response(200, serializedResponseString, "some_intuit_tid");
mockedResponse.setResponseObject(expectedCharge);
new Expectations() {{
serviceBase.sendRequest((Request) any); result = mockedResponse;
}};
RequestContext requestContext = new RequestContext();
requestContext.setBaseUrl("SomeBaseUrl");
// Set mock RequestContext against ChargeService
ChargeService chargeService = new ChargeService(requestContext);
Charge chargeRequest = new Charge();
Charge actualCharge = chargeService.create(chargeRequest);
Assert.assertEquals(expectedCharge.getAmount(), actualCharge.getAmount());
Assert.assertEquals(expectedCharge.getCurrency(), actualCharge.getCurrency());
Assert.assertEquals(expectedCharge.getCard().getName(), actualCharge.getCard().getName());
}
@Test(expectedExceptions = BaseException.class)
public void testCreateChargeServiceFailure() throws BaseException {
// Build response object
Charge expectedCharge = new Charge.Builder()
.amount(new BigDecimal(1234))
.currency("SomeCurrency")
.card(new Card.Builder().name("SomeCardName").build())
.build();
// Serialize response object as JSON string
final String serializedResponseString = JsonUtil.serialize(expectedCharge);
final Response mockedResponse = new Response(200, serializedResponseString, "some_intuit_tid");
mockedResponse.setResponseObject(expectedCharge);
new Expectations() {{
serviceBase.sendRequest((Request) any); result = new BaseException("Unexpected Error , service response object was null ");
}};
RequestContext requestContext = new RequestContext();
requestContext.setBaseUrl("SomeBaseUrl");
// Set mock RequestContext against ChargeService
ChargeService chargeService = new ChargeService(requestContext);
Charge chargeRequest = new Charge();
Charge actualCharge = chargeService.create(chargeRequest); // Should throw exception
}
@Test
public void testRetrieveCharge() throws BaseException {
// Build response object
Charge expectedCharge = new Charge.Builder()
.amount(new BigDecimal(1234))
.currency("SomeCurrency")
.card(new Card.Builder().name("SomeCardName").build())
.build();
// Serialize response object as JSON string
final String serializedResponseString = JsonUtil.serialize(expectedCharge);
final Response mockedResponse = new Response(200, serializedResponseString, "some_intuit_tid");
mockedResponse.setResponseObject(expectedCharge);
new Expectations() {{
serviceBase.sendRequest((Request) any); result = mockedResponse;
}};
RequestContext requestContext = new RequestContext();
requestContext.setBaseUrl("SomeBaseUrl");
// Set mock RequestContext against ChargeService
ChargeService chargeService = new ChargeService(requestContext);
Charge actualCharge = chargeService.retrieve("SomeChargeId");
Assert.assertEquals(expectedCharge.getAmount(), actualCharge.getAmount());
Assert.assertEquals(expectedCharge.getCurrency(), actualCharge.getCurrency());
Assert.assertEquals(expectedCharge.getCard().getName(), actualCharge.getCard().getName());
}
@Test(expectedExceptions = IllegalArgumentException.class)
public void testRetrieveChargeInvalidChargeId() throws BaseException {
RequestContext requestContext = new RequestContext();
requestContext.setBaseUrl("SomeBaseUrl");
// Set mock RequestContext against ChargeService
ChargeService chargeService = new ChargeService(requestContext);
// Pass in null ChargeId
Charge actualCharge = chargeService.retrieve(null);
}
@Test
public void testCaptureCharge() throws BaseException {
// Build response object
Charge expectedCharge = new Charge.Builder()
.amount(new BigDecimal(1234))
.context(new PaymentContext.Builder().mobile("false").isEcommerce("true").build())
.build();
// Serialize response object as JSON string
final String serializedResponseString = JsonUtil.serialize(expectedCharge);
final Response mockedResponse = new Response(200, serializedResponseString, "some_intuit_tid");
mockedResponse.setResponseObject(expectedCharge);
new Expectations() {{
serviceBase.sendRequest((Request) any); result = mockedResponse;
}};
RequestContext requestContext = new RequestContext();
requestContext.setBaseUrl("SomeBaseUrl");
// Set mock RequestContext against ChargeService
ChargeService chargeService = new ChargeService(requestContext);
Charge chargeRequest = new Charge();
Capture captureChargeRequest = new Capture.Builder()
.amount(expectedCharge.getAmount())
.context(expectedCharge.getContext())
.build();
Charge actualCharge = chargeService.capture("SomeChargeId", captureChargeRequest);
Assert.assertEquals(expectedCharge.getAmount(), actualCharge.getAmount());
Assert.assertEquals(expectedCharge.getContext().getMobile(), actualCharge.getContext().getMobile());
Assert.assertEquals(expectedCharge.getContext().getIsEcommerce(), actualCharge.getContext().getIsEcommerce());
}
@Test(expectedExceptions = IllegalArgumentException.class)
public void testCaptureChargeInvalidChargeId() throws BaseException {
Charge expectedCharge = new Charge.Builder()
.amount(new BigDecimal(1234))
.context(new PaymentContext.Builder().mobile("false").isEcommerce("true").build())
.build();
RequestContext requestContext = new RequestContext();
requestContext.setBaseUrl("SomeBaseUrl");
// Set mock RequestContext against ChargeService
ChargeService chargeService = new ChargeService(requestContext);
Capture captureChargeRequest = new Capture.Builder()
.amount(expectedCharge.getAmount())
.context(expectedCharge.getContext())
.build();
// Pass in null ChargeId
Charge actualCharge = chargeService.capture(null, captureChargeRequest);
}
@Test
public void testRefundCharge() throws BaseException {
// Build response object
Refund expectedRefund = new Refund.Builder()
.amount(new BigDecimal(1234))
.description("SomeDescription")
.context(
new PaymentContext.Builder()
.tax(new BigDecimal(5678))
.recurring(true)
.deviceInfo(
new DeviceInfo.Builder()
.macAddress("SomeMacAddress")
.ipAddress("SomeIpAddress")
.longitude("SomeLongitude")
.latitude("SomeLatitude")
.phoneNumber("1800-FAKE-FAKE")
.type("mobile")
.build())
.build())
.id("ThisIsAGeneratedRefundId")
.created(new Date(System.currentTimeMillis()))
.build();
// Serialize response object as JSON string
final String serializedResponseString = JsonUtil.serialize(expectedRefund);
final Response mockedResponse = new Response(200, serializedResponseString, "some_intuit_tid");
mockedResponse.setResponseObject(expectedRefund);
new Expectations() {{
serviceBase.sendRequest((Request) any); result = mockedResponse;
}};
RequestContext requestContext = new RequestContext();
requestContext.setBaseUrl("SomeBaseUrl");
// Set mock RequestContext against ChargeService
ChargeService chargeService = new ChargeService(requestContext);<|fim▁hole|> .description(expectedRefund.getDescription())
.context(expectedRefund.getContext())
.build();
Refund actualRefund = chargeService.refund("SomeChargeId", refundRequest);
Assert.assertEquals(expectedRefund.getId(), actualRefund.getId());
Assert.assertEquals(expectedRefund.getAmount(), actualRefund.getAmount());
Assert.assertEquals(expectedRefund.getDescription(), actualRefund.getDescription());
Assert.assertEquals(expectedRefund.getContext().getTax(), actualRefund.getContext().getTax());
Assert.assertEquals(expectedRefund.getContext().getRecurring(), actualRefund.getContext().getRecurring());
Assert.assertEquals(expectedRefund.getCreated(), actualRefund.getCreated());
}
@Test(expectedExceptions = IllegalArgumentException.class)
public void testRefundChargeInvalidChargeId() throws BaseException {
Charge expectedCharge = new Charge.Builder()
.amount(new BigDecimal(1234))
.context(new PaymentContext.Builder().mobile("false").isEcommerce("true").build())
.build();
RequestContext requestContext = new RequestContext();
requestContext.setBaseUrl("SomeBaseUrl");
// Set mock RequestContext against ChargeService
ChargeService chargeService = new ChargeService(requestContext);
Capture captureChargeRequest = new Capture.Builder()
.amount(expectedCharge.getAmount())
.context(expectedCharge.getContext())
.build();
// Pass in null ChargeId
Refund actualRefund = chargeService.refund(null, null);
}
@Test
public void testGetRefund() throws BaseException {
// Build response object
Refund expectedRefund = new Refund.Builder()
.amount(new BigDecimal(1234))
.description("SomeDescription")
.context(
new PaymentContext.Builder()
.tax(new BigDecimal(5678))
.recurring(true)
.deviceInfo(
new DeviceInfo.Builder()
.macAddress("SomeMacAddress")
.ipAddress("SomeIpAddress")
.longitude("SomeLongitude")
.latitude("SomeLatitude")
.phoneNumber("1800-FAKE-FAKE")
.type("mobile")
.build())
.build())
.id("ThisIsAGeneratedRefundId")
.created(new Date(System.currentTimeMillis()))
.build();
// Serialize response object as JSON string
final String serializedResponseString = JsonUtil.serialize(expectedRefund);
final Response mockedResponse = new Response(200, serializedResponseString, "some_intuit_tid");
mockedResponse.setResponseObject(expectedRefund);
new Expectations() {{
serviceBase.sendRequest((Request) any); result = mockedResponse;
}};
RequestContext requestContext = new RequestContext();
requestContext.setBaseUrl("SomeBaseUrl");
// Set mock RequestContext against ChargeService
ChargeService chargeService = new ChargeService(requestContext);
Refund actualRefund = chargeService.getRefund("SomeChargeId", "SomeRefundId");
Assert.assertEquals(expectedRefund.getId(), actualRefund.getId());
Assert.assertEquals(expectedRefund.getAmount(), actualRefund.getAmount());
Assert.assertEquals(expectedRefund.getDescription(), actualRefund.getDescription());
Assert.assertEquals(expectedRefund.getContext().getTax(), actualRefund.getContext().getTax());
Assert.assertEquals(expectedRefund.getContext().getRecurring(), actualRefund.getContext().getRecurring());
Assert.assertEquals(expectedRefund.getCreated(), actualRefund.getCreated());
}
@Test(expectedExceptions = IllegalArgumentException.class)
public void testGetRefundInvalidChargeId() throws BaseException {
RequestContext requestContext = new RequestContext();
requestContext.setBaseUrl("SomeBaseUrl");
// Set mock RequestContext against ChargeService
ChargeService chargeService = new ChargeService(requestContext);
// Pass in null ChargeId
Refund actualRefund = chargeService.getRefund(null, "SomeRefundId");
}
@Test(expectedExceptions = IllegalArgumentException.class)
public void testGetRefundInvalidRefundId() throws BaseException {
RequestContext requestContext = new RequestContext();
requestContext.setBaseUrl("SomeBaseUrl");
// Set mock RequestContext against ChargeService
ChargeService chargeService = new ChargeService(requestContext);
// Pass in null RefundId
Refund actualRefund = chargeService.getRefund("SomeChargeId", null);
}
@Test
public void testVoidTransaction() throws BaseException {
// Build response object
Refund expectedRefund = new Refund.Builder()
.amount(new BigDecimal(1234))
.description("SomeDescription")
.context(
new PaymentContext.Builder()
.tax(new BigDecimal(5678))
.recurring(true)
.deviceInfo(
new DeviceInfo.Builder()
.macAddress("SomeMacAddress")
.ipAddress("SomeIpAddress")
.longitude("SomeLongitude")
.latitude("SomeLatitude")
.phoneNumber("1800-FAKE-FAKE")
.type("mobile")
.build())
.build())
.id("ThisIsAGeneratedRefundId")
.created(new Date(System.currentTimeMillis()))
.build();
// Serialize response object as JSON string
final String serializedResponseString = JsonUtil.serialize(expectedRefund);
final Response mockedResponse = new Response(200, serializedResponseString, "some_intuit_tid");
mockedResponse.setResponseObject(expectedRefund);
new Expectations() {{
serviceBase.sendRequest((Request) any); result = mockedResponse;
}};
RequestContext requestContext = new RequestContext();
requestContext.setBaseUrl("SomeBaseUrl");
// Set mock RequestContext against ChargeService
ChargeService chargeService = new ChargeService(requestContext);
Refund actualRefund = chargeService.voidTransaction("SomeChargeRequestId");
Assert.assertEquals(expectedRefund.getId(), actualRefund.getId());
Assert.assertEquals(expectedRefund.getAmount(), actualRefund.getAmount());
Assert.assertEquals(expectedRefund.getDescription(), actualRefund.getDescription());
Assert.assertEquals(expectedRefund.getContext().getTax(), actualRefund.getContext().getTax());
Assert.assertEquals(expectedRefund.getContext().getRecurring(), actualRefund.getContext().getRecurring());
Assert.assertEquals(expectedRefund.getCreated(), actualRefund.getCreated());
}
@Test(expectedExceptions = IllegalArgumentException.class)
public void testVoidTransactionInvalidChargeId() throws BaseException {
RequestContext requestContext = new RequestContext();
requestContext.setBaseUrl("SomeBaseUrl");
// Set mock RequestContext against ChargeService
ChargeService chargeService = new ChargeService(requestContext);
// Pass in null RefundId
Refund actualRefund = chargeService.voidTransaction(null);
}
}<|fim▁end|> |
Refund refundRequest = new Refund.Builder()
.amount(expectedRefund.getAmount()) |
<|file_name|>validations.ts<|end_file_name|><|fim▁begin|>/* global getAttrs, setAttrs, getSectionIDs, generateRowID, on, removeRepeatingRow, _, getTranslationByKey */
/**
* Validations
*/
const validateTab = () => {
getAttrs(["tab", "npc"], v => {
if (v.tab === "character" && v.npc === "1") setAttrs({
tab: "npc"
});
if (v.tab === "npc" && v.npc === "0") setAttrs({
tab: "character"
});
});
};
const validateSuperTab = () => {
getAttrs(["setting_super_type", "tab_super"], v => {
const setting: {[key: string]: string} = {};
if (v.setting_super_type === "magic") setting.tab_super = "magic";
if (v.setting_super_type === "psionics") setting.tab_super = "psionics";
mySetAttrs(setting, v);
});
};
// const validateStrain = () => {
// getAttrs(["strain", "strain_permanent", "strain_max"], v => {
// const currentStrain = parseInt(v.strain) || 0,
// permanentStrain = parseInt(v.strain_permanent) || 0,
// strain = Math.min(parseInt(v.strain_max), Math.max(currentStrain, permanentStrain)) || 0;
//
// if (strain !== currentStrain) setAttrs({
// strain
// });
// });
// };
const validateWeaponSkills = (ids?: [string]) => {
// Makes sure that the select for the weapon skill is never in an invalid state.
const prefixes = (ids && ids.map(id => `repeating_weapons_${id}`)) || ["repeating_weapons"];
getAttrs(["homebrew_skill_list", ...prefixes.map(p => `${p}_weapon_skill_bonus`)], v => {
const revisedList = ["@{skill_exert}", "@{skill_punch}", "@{skill_shoot}", "@{skill_stab}",
"@{skill_telekinesis}", "0"
],
firstList = ["@{skill_combat_energy}", "@{skill_combat_gunnery}", "@{skill_combat_primitive}", "@{skill_combat_projectile}",
"@{skill_combat_psitech}", "@{skill_combat_unarmed}", "@{skill_telekinesis}", "0"
],<|fim▁hole|> prefixes.forEach(prefix => {
if (type === "revised" && !revisedList.includes(v[`${prefix}_weapon_skill_bonus`]))
setting[`${prefix}_weapon_skill_bonus`] = "@{skill_shoot}";
if (type === "first" && !firstList.includes(v[`${prefix}_weapon_skill_bonus`]))
setting[`${prefix}_weapon_skill_bonus`] = "@{skill_combat_energy}";
});
setAttrs(setting);
});
};
const validateShells = (e: EventInfo) => {
//Unchecks every shell 'active' toggle except the one toggle now.
// console.log(e.sourceAttribute);
// console.log(e.sourceAttribute.slice(17, -13));
// console.log(e.sourceAttribute.slice(-6));
// console.log(e.sourceAttribute.slice(-15, -7));
if (e.sourceAttribute.slice(-6) === "active" && e.sourceAttribute.slice(-15, -7) !== "affinity") {
console.log("Changing");
getSectionIDs("repeating_shells", idArray => {
idArray = idArray.filter(item => item !== e.sourceAttribute.slice(17, -13))
const setting: {[key: string]: string} = {}
idArray.forEach(id => setting[`repeating_shells_${id}_shell_active`] = "0");
setAttrs(setting, {}, () => {
calculateShellAttrs();
})
})
} else {
console.log("No change needed");
calculateShellAttrs();
}
}<|fim▁end|> | type = v.homebrew_skill_list,
setting: {[key: string]: string} = {}; |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-<|fim▁hole|>from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
import versioneer
__author__ = 'Chia-Jung, Yang'
__email__ = '[email protected]'
__version__ = versioneer.get_version()
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions<|fim▁end|> | |
<|file_name|>core_json.js<|end_file_name|><|fim▁begin|>/*
@author Zakai Hamilton
@component CoreJson
*/
screens.core.json = function CoreJson(me, { core, storage }) {
me.init = function () {
if (me.platform === "server") {
me.request = require("request");
}
};
me.loadComponent = async function (path, useCache = true) {
var period = path.lastIndexOf(".");
var component_name = path.substring(period + 1);
var package_name = path.substring(0, period);
var url = "/packages/code/" + package_name + "/" + package_name + "_" + component_name + ".json";
var json = await me.loadFile(url, useCache);
return json;
};
me.get = async function (url) {
if (me.platform === "server") {
return new Promise(resolve => {
me.log("requesting: " + url);
me.request.get(url, (error, response, body) => {<|fim▁hole|> else {
if (body[0] === "<") {
resolve({ error: "response is in an xml format" });
return;
}
let json = JSON.parse(body);
resolve(json);
}
});
});
}
else {
return core.message.send_server("core.json.get", url);
}
};
me.loadFile = async function (path) {
let json = {};
if (path && path.startsWith("/")) {
path = path.substring(1);
}
if (!core.util.isOnline()) {
json = await storage.local.db.get(me.id, path);
if (json) {
return json;
}
}
var info = {
method: "GET",
url: "/" + path
};
let buffer = "{}";
try {
buffer = await core.http.send(info);
}
catch (err) {
var error = "Cannot load json file: " + path + " err: " + err.message || err;
me.log_error(error);
}
if (buffer) {
json = JSON.parse(buffer);
}
await storage.local.db.set(me.id, path, json);
return json;
};
me.compare = function (source, target) {
if (typeof source !== typeof target) {
return false;
}
if (source === target) {
return true;
}
if (!source && !target) {
return true;
}
if (!source || !target) {
return false;
}
else if (Array.isArray(source)) {
var equal = source.length === target.length;
if (equal) {
target.map((item, index) => {
var sourceItem = source[index];
if (!me.compare(sourceItem, item)) {
equal = false;
}
});
}
return equal;
}
else if (typeof source === "object") {
var sourceKeys = Object.getOwnPropertyNames(source);
var targetKeys = Object.getOwnPropertyNames(target);
if (sourceKeys.length !== targetKeys.length) {
return false;
}
for (var i = 0; i < sourceKeys.length; i++) {
var propName = sourceKeys[i];
if (source[propName] !== target[propName]) {
return false;
}
}
return true;
}
else {
return false;
}
};
me.traverse = function (root, path, value) {
var item = root, parent = root, found = false, name = null;
if (root) {
item = path.split(".").reduce((node, token) => {
parent = node;
name = token;
if (!node) {
return;
}
return node[token];
}, root);
if (typeof item !== "undefined") {
value = item;
found = true;
}
}
return { parent, item, value, name, found };
};
me.value = function (root, paths, value) {
var found = false;
Object.entries(paths).forEach(([path, callback]) => {
if (found) {
return;
}
var info = me.traverse(root, path, value);
if (info.found) {
if (callback) {
var result = callback(value, path);
if (result) {
info.value = result;
found = true;
}
}
else {
value = info.value;
found = true;
}
}
});
return value;
};
me.union = function (array, property) {
return array.filter((obj, pos, arr) => {
return arr.map(mapObj => mapObj[property]).indexOf(obj[property]) === pos;
});
};
me.intersection = function (arrays, property) {
var results = [];
results.push(...arrays[0]);
for (var array of arrays) {
var keys = array.map(mapObj => mapObj[property]);
results = results.filter(mapObj => -1 !== keys.indexOf(mapObj[property]));
}
return results;
};
me.processVars = function (object, text, root) {
text = text.replace(/\${[^{}]*}/g, function (match) {
var path = match.substring(2, match.length - 1);
if (path.startsWith("@")) {
path = path.substring(1);
if (path === "date") {
return new Date().toString();
}
else {
var info = core.property.split(object, path);
let item = me.traverse(root, info.value);
if (item.found) {
return core.property.get(object, info.name, item.value);
}
return "";
}
}
let item = me.traverse(root, path);
if (item.found) {
var value = item.value;
if (typeof value === "object") {
value = JSON.stringify(value);
}
return value;
}
return "";
});
return text;
};
me.map = function (root, before, after) {
if (before) {
root = before(root);
}
if (Array.isArray(root)) {
root = Array.from(root);
}
else if (root instanceof ArrayBuffer) {
root = root.slice(0);
}
else if (root !== null && typeof root === "object") {
root = Object.assign({}, root);
}
if (typeof root !== "string") {
for (var key in root) {
root[key] = me.map(root[key], before, after);
}
}
if (after) {
root = after(root);
}
return root;
};
me.isValid = function (str) {
try {
JSON.parse(str);
} catch (e) {
return false;
}
return true;
};
};<|fim▁end|> | if (error) {
resolve({ error });
} |
<|file_name|>viewsets.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2017, Digital Reasoning
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import unicode_literals
import logging
from django.conf import settings
from django.contrib.auth.models import Group
from django.http import Http404
from guardian.shortcuts import get_groups_with_perms, get_users_with_perms, remove_perm
from rest_framework import viewsets
from rest_framework.serializers import ListField, SlugRelatedField, ValidationError
from stackdio.api.users.models import get_user_queryset
from stackdio.core import fields, mixins, serializers
from stackdio.core.config import StackdioConfigException
from stackdio.core.permissions import StackdioPermissionsModelPermissions
from stackdio.core.shortcuts import get_groups_with_model_perms, get_users_with_model_perms
try:
from django_auth_ldap.backend import LDAPBackend
except ImportError:
LDAPBackend = None
logger = logging.getLogger(__name__)
def _filter_perms(available_perms, perms):
ret = []
for perm in perms:
if perm in available_perms:
ret.append(perm)
return ret
class UserSlugRelatedField(SlugRelatedField):
def to_internal_value(self, data):
try:
return super(UserSlugRelatedField, self).to_internal_value(data)
except ValidationError:
if settings.LDAP_ENABLED:
if LDAPBackend is None:<|fim▁hole|> # Grab the ldap user and try again
user = LDAPBackend().populate_user(data)
if user is not None:
return super(UserSlugRelatedField, self).to_internal_value(data)
# Nothing worked, just re-raise the exception
raise
class StackdioBasePermissionsViewSet(mixins.BulkUpdateModelMixin, viewsets.ModelViewSet):
"""
Viewset for creating permissions endpoints
"""
user_or_group = None
model_or_object = None
lookup_value_regex = r'[\w.@+-]+'
parent_lookup_field = 'pk'
parent_lookup_url_kwarg = None
def get_model_name(self):
raise NotImplementedError('`get_model_name()` must be implemented.')
def get_app_label(self):
raise NotImplementedError('`get_app_label()` must be implemented.')
def get_serializer_class(self):
user_or_group = self.get_user_or_group()
model_or_object = self.get_model_or_object()
model_name = self.get_model_name()
app_label = self.get_app_label()
super_cls = self.switch_model_object(serializers.StackdioModelPermissionsSerializer,
serializers.StackdioObjectPermissionsSerializer)
default_parent_lookup_url_kwarg = 'parent_{}'.format(self.parent_lookup_field)
url_field_kwargs = {
'view_name': 'api:{0}:{1}-{2}-{3}-permissions-detail'.format(
app_label,
model_name,
model_or_object,
user_or_group
),
'permission_lookup_field': self.lookup_field,
'permission_lookup_url_kwarg': self.lookup_url_kwarg or self.lookup_field,
'lookup_field': self.parent_lookup_field,
'lookup_url_kwarg': self.parent_lookup_url_kwarg or default_parent_lookup_url_kwarg,
}
url_field_cls = self.switch_model_object(
fields.HyperlinkedModelPermissionsField,
fields.HyperlinkedObjectPermissionsField,
)
# Create a class
class StackdioUserPermissionsSerializer(super_cls):
user = UserSlugRelatedField(slug_field='username', queryset=get_user_queryset())
url = url_field_cls(**url_field_kwargs)
permissions = ListField()
class Meta(super_cls.Meta):
update_lookup_field = 'user'
class StackdioGroupPermissionsSerializer(super_cls):
group = SlugRelatedField(slug_field='name', queryset=Group.objects.all())
url = url_field_cls(**url_field_kwargs)
permissions = ListField()
class Meta(super_cls.Meta):
update_lookup_field = 'group'
return self.switch_user_group(StackdioUserPermissionsSerializer,
StackdioGroupPermissionsSerializer)
def get_user_or_group(self):
assert self.user_or_group in ('user', 'group'), (
"'%s' should include a `user_or_group` attribute that is one of 'user' or 'group'."
% self.__class__.__name__
)
return self.user_or_group
def switch_user_group(self, if_user, if_group):
return {
'user': if_user,
'group': if_group,
}.get(self.get_user_or_group())
def get_model_or_object(self):
assert self.model_or_object in ('model', 'object'), (
"'%s' should include a `model_or_object` attribute that is one of 'model' or 'object'."
% self.__class__.__name__
)
return self.model_or_object
def switch_model_object(self, if_model, if_object):
return {
'model': if_model,
'object': if_object,
}.get(self.get_model_or_object())
def _transform_perm(self, model_name):
def do_tranform(item):
# pylint: disable=unused-variable
perm, sep, empty = item.partition('_' + model_name)
return perm
return do_tranform
def get_object(self):
queryset = self.get_queryset()
url_kwarg = self.lookup_url_kwarg or self.lookup_field
name_attr = self.switch_user_group('username', 'name')
for obj in queryset:
auth_obj = obj[self.get_user_or_group()]
if self.kwargs[url_kwarg] == getattr(auth_obj, name_attr):
return obj
raise Http404('No permissions found for %s' % self.kwargs[url_kwarg])
class StackdioModelPermissionsViewSet(StackdioBasePermissionsViewSet):
model_cls = None
model_or_object = 'model'
permission_classes = (StackdioPermissionsModelPermissions,)
def get_model_cls(self):
assert self.model_cls, (
"'%s' should include a `model_cls` attribute or override the `get_model_cls()` method."
% self.__class__.__name__
)
return self.model_cls
def get_model_name(self):
return self.get_model_cls()._meta.model_name
def get_app_label(self):
ret = self.get_model_cls()._meta.app_label
if ret == 'auth':
# one-off thing, since users/groups are in the `users` app, not `auth`
return 'users'
return ret
def get_model_permissions(self):
return getattr(self.get_model_cls(),
'model_permissions',
getattr(self, 'model_permissions', ()))
def get_permissions(self):
"""
Instantiates and returns the list of permissions that this view requires.
"""
ret = []
for permission_cls in self.permission_classes:
permission = permission_cls()
# Inject our model_cls into the permission
if isinstance(permission, StackdioPermissionsModelPermissions) \
and permission.model_cls is None:
permission.model_cls = self.model_cls
ret.append(permission)
return ret
def get_queryset(self): # pylint: disable=method-hidden
model_cls = self.get_model_cls()
model_name = model_cls._meta.model_name
model_perms = self.get_model_permissions()
# Grab the perms for either the users or groups
perm_map_func = self.switch_user_group(
lambda: get_users_with_model_perms(model_cls, attach_perms=True,
with_group_users=False),
lambda: get_groups_with_model_perms(model_cls, attach_perms=True),
)
# Do this as a function so we don't fetch both the user AND group permissions on each
# request
perm_map = perm_map_func()
ret = []
sorted_perms = sorted(perm_map.items(), key=lambda x: getattr(x[0], self.lookup_field))
for auth_obj, perms in sorted_perms:
new_perms = [self._transform_perm(model_name)(perm) for perm in perms]
ret.append({
self.get_user_or_group(): auth_obj,
'permissions': _filter_perms(model_perms, new_perms),
})
return ret
def list(self, request, *args, **kwargs):
response = super(StackdioModelPermissionsViewSet, self).list(request, *args, **kwargs)
# add available permissions to the response
response.data['available_permissions'] = sorted(self.get_model_permissions())
return response
def perform_create(self, serializer):
serializer.save(model_cls=self.get_model_cls())
def perform_update(self, serializer):
serializer.save(model_cls=self.get_model_cls())
def perform_destroy(self, instance):
model_cls = self.get_model_cls()
app_label = model_cls._meta.app_label
model_name = model_cls._meta.model_name
for perm in instance['permissions']:
remove_perm('%s.%s_%s' % (app_label, perm, model_name),
instance[self.get_user_or_group()])
class StackdioModelUserPermissionsViewSet(StackdioModelPermissionsViewSet):
user_or_group = 'user'
lookup_field = 'username'
lookup_url_kwarg = 'username'
class StackdioModelGroupPermissionsViewSet(StackdioModelPermissionsViewSet):
user_or_group = 'group'
lookup_field = 'name'
lookup_url_kwarg = 'groupname'
class StackdioObjectPermissionsViewSet(StackdioBasePermissionsViewSet):
    """
    Viewset for creating permissions endpoints

    Lists and edits the object-level (per-instance) permissions that users or
    groups hold on a single permissioned object, as opposed to the model-wide
    permissions handled by the model permissions viewsets above.
    """
    model_or_object = 'object'

    def get_permissioned_object(self):
        """Return the single object whose permissions this viewset manages."""
        raise NotImplementedError('`get_permissioned_object()` must be implemented.')

    def get_model_name(self):
        # Django model name of the permissioned object.
        return self.get_permissioned_object()._meta.model_name

    def get_app_label(self):
        """App label of the permissioned object, with the auth/users remap applied."""
        ret = self.get_permissioned_object()._meta.app_label
        if ret == 'auth':
            # one-off thing, since users/groups are in the `users` app, not `auth`
            return 'users'
        return ret

    def get_object_permissions(self):
        """Permission verbs for the object; falls back to the view's own list."""
        return getattr(self.get_permissioned_object(),
                       'object_permissions',
                       getattr(self, 'object_permissions', ()))

    def get_queryset(self):  # pylint: disable=method-hidden
        """Build the list of ``{user_or_group, permissions}`` entries for the object."""
        obj = self.get_permissioned_object()
        model_name = obj._meta.model_name
        object_perms = self.get_object_permissions()

        # Grab the perms for either the users or groups
        perm_map_func = self.switch_user_group(
            lambda: get_users_with_perms(obj, attach_perms=True,
                                         with_superusers=False, with_group_users=False),
            lambda: get_groups_with_perms(obj, attach_perms=True),
        )

        perm_map = perm_map_func()

        ret = []

        # Sort by username / group name so the endpoint output is stable.
        sorted_perms = sorted(perm_map.items(), key=lambda x: getattr(x[0], self.lookup_field))

        for auth_obj, perms in sorted_perms:
            # Normalize each raw codename via _transform_perm (defined elsewhere
            # in this module), then keep only the verbs valid for this object.
            new_perms = [self._transform_perm(model_name)(perm) for perm in perms]

            ret.append({
                self.get_user_or_group(): auth_obj,
                'permissions': _filter_perms(object_perms, new_perms),
            })

        return ret

    def list(self, request, *args, **kwargs):
        """List object permissions, annotated with every available verb."""
        response = super(StackdioObjectPermissionsViewSet, self).list(request, *args, **kwargs)
        # add available permissions to the response
        response.data['available_permissions'] = sorted(self.get_object_permissions())
        return response

    def perform_create(self, serializer):
        # The serializer saves permissions against the permissioned object.
        serializer.save(object=self.get_permissioned_object())

    def perform_update(self, serializer):
        serializer.save(object=self.get_permissioned_object())

    def perform_destroy(self, instance):
        """Remove every permission the user/group holds on the object."""
        obj = self.get_permissioned_object()
        app_label = obj._meta.app_label
        model_name = obj._meta.model_name

        # NOTE(review): this uses the raw app_label, while get_app_label()
        # remaps 'auth' -> 'users'; confirm the raw label is intended here.
        for perm in instance['permissions']:
            remove_perm('%s.%s_%s' % (app_label, perm, model_name),
                        instance[self.get_user_or_group()],
                        obj)
# pylint: disable=abstract-method
class StackdioObjectUserPermissionsViewSet(StackdioObjectPermissionsViewSet):
    """Object-level permissions endpoint for a single user, keyed by username."""
    user_or_group = 'user'
    lookup_field = 'username'
    lookup_url_kwarg = 'username'
class StackdioObjectGroupPermissionsViewSet(StackdioObjectPermissionsViewSet):
    """Object-level permissions endpoint for a single group, keyed by group name."""
    user_or_group = 'group'
    lookup_field = 'name'
    lookup_url_kwarg = 'groupname'
'installed. Please install django_auth_ldap') |
<|file_name|>main.js<|end_file_name|><|fim▁begin|>var mySteal = require('@steal');
if (typeof window !== "undefined" && window.assert) {<|fim▁hole|> done();
} else {
console.log("Systems", mySteal.loader == steal.loader);
}<|fim▁end|> | assert.ok(mySteal.loader == steal.loader, "The steal's loader is the loader"); |
<|file_name|>ImportProcessFactory.java<|end_file_name|><|fim▁begin|>package org.adempiere.impexp.impl;
/*
* #%L
* de.metas.adempiere.adempiere.base
* %%
* Copyright (C) 2015 metas GmbH
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-2.0.html>.
* #L%
*/
import java.util.HashMap;
import java.util.Map;
import org.adempiere.exceptions.AdempiereException;
import org.adempiere.impexp.BPartnerImportProcess;
import org.adempiere.impexp.IImportProcess;
import org.adempiere.impexp.IImportProcessFactory;
import org.adempiere.impexp.ProductImportProcess;
import org.adempiere.impexp.spi.IAsyncImportProcessBuilder;
import org.adempiere.model.InterfaceWrapperHelper;
import org.adempiere.util.Check;
import org.compiere.model.I_I_BPartner;
import org.compiere.model.I_I_Product;
import com.google.common.base.Supplier;
public class ImportProcessFactory implements IImportProcessFactory
{
private final Map<Class<?>, Class<?>> modelImportClass2importProcessClasses = new HashMap<>();
private final Map<String, Class<?>> tableName2importProcessClasses = new HashMap<>();
private Supplier<IAsyncImportProcessBuilder> asyncImportProcessBuilderSupplier;
	/**
	 * Default constructor; wires the standard BPartner and Product import processes.
	 */
	public ImportProcessFactory()
	{
		// Register standard import processes
		registerImportProcess(I_I_BPartner.class, BPartnerImportProcess.class);
		registerImportProcess(I_I_Product.class, ProductImportProcess.class);
	}
	/**
	 * Registers the process class used to import records of the given import model.
	 * The mapping is indexed both by model interface and by its table name.
	 */
	@Override
	public <ImportRecordType> void registerImportProcess(final Class<ImportRecordType> modelImportClass, final Class<? extends IImportProcess<ImportRecordType>> importProcessClass)
	{
		Check.assumeNotNull(modelImportClass, "modelImportClass not null");
		Check.assumeNotNull(importProcessClass, "importProcessClass not null");
		modelImportClass2importProcessClasses.put(modelImportClass, importProcessClass);
		// Also index by table name so lookups by table name work too.
		final String tableName = InterfaceWrapperHelper.getTableName(modelImportClass);
		tableName2importProcessClasses.put(tableName, importProcessClass);
	}
	/**
	 * Returns a new import process for the given import model class;
	 * rejects (via {@code Check.assumeNotNull}) models with no registered process.
	 */
	@Override
	public <ImportRecordType> IImportProcess<ImportRecordType> newImportProcess(final Class<ImportRecordType> modelImportClass)
	{
		final IImportProcess<ImportRecordType> importProcess = newImportProcessOrNull(modelImportClass);
		Check.assumeNotNull(importProcess, "importProcess not null for {}", modelImportClass);
		return importProcess;
	}
	/**
	 * Returns a new import process for the given import model class,
	 * or <code>null</code> when none was registered for it.
	 */
	@Override
	public <ImportRecordType> IImportProcess<ImportRecordType> newImportProcessOrNull(final Class<ImportRecordType> modelImportClass)
	{
		Check.assumeNotNull(modelImportClass, "modelImportClass not null");
		final Class<?> importProcessClass = modelImportClass2importProcessClasses.get(modelImportClass);
		if (importProcessClass == null)
		{
			return null;
		}
		return newInstance(importProcessClass);
	}
private <ImportRecordType> IImportProcess<ImportRecordType> newInstance(final Class<?> importProcessClass)
{
try
{
@SuppressWarnings("unchecked")
final IImportProcess<ImportRecordType> importProcess = (IImportProcess<ImportRecordType>)importProcessClass.newInstance();
return importProcess;
}
catch (Exception e)
{
throw new AdempiereException("Failed instantiating " + importProcessClass, e);
}
}
	/**
	 * Returns a new import process for the given table name,
	 * or <code>null</code> when none was registered for it.
	 */
	@Override
	public <ImportRecordType> IImportProcess<ImportRecordType> newImportProcessForTableNameOrNull(final String tableName)
	{
		Check.assumeNotNull(tableName, "tableName not null");
		final Class<?> importProcessClass = tableName2importProcessClasses.get(tableName);
		if (importProcessClass == null)
		{
			return null;
		}
		return newInstance(importProcessClass);
	}
	/**
	 * Returns a new import process for the given table name;
	 * rejects (via {@code Check.assumeNotNull}) tables with no registered process.
	 */
	@Override
	public <ImportRecordType> IImportProcess<ImportRecordType> newImportProcessForTableName(final String tableName)
	{
		final IImportProcess<ImportRecordType> importProcess = newImportProcessForTableNameOrNull(tableName);
		Check.assumeNotNull(importProcess, "importProcess not null for {}", tableName);
		return importProcess;
	}
@Override
public IAsyncImportProcessBuilder newAsyncImportProcessBuilder()
{<|fim▁hole|> @Override
	/**
	 * Registers the supplier used to create async import process builders.
	 *
	 * @param asyncImportProcessBuilderSupplier never <code>null</code>
	 */
	public void setAsyncImportProcessBuilderSupplier(Supplier<IAsyncImportProcessBuilder> asyncImportProcessBuilderSupplier)
	{
		Check.assumeNotNull(asyncImportProcessBuilderSupplier, "asyncImportProcessBuilderSupplier not null");
		this.asyncImportProcessBuilderSupplier = asyncImportProcessBuilderSupplier;
	}
}<|fim▁end|> | Check.assumeNotNull(asyncImportProcessBuilderSupplier, "A supplier for {} shall be registered first", IAsyncImportProcessBuilder.class);
return asyncImportProcessBuilderSupplier.get();
}
|
<|file_name|>write_rss_xml.py<|end_file_name|><|fim▁begin|>import os
from lxml import etree
# write_rss_xml writes name and date data for podcast RSS feeds to XML files
# contained in the relative path ./feeds. It is currently assumed that each
# podcast will have its data stored in a separate file.
def write_rss_xml(feed, feed_url, latest_title, latest_date):
    """Write one podcast's RSS snapshot to ``feeds/<feed>_rss.xml``.

    Parameters
    ----------
    feed : str
        Podcast name; also used to build the output file name.
    feed_url : str
        URL of the podcast's RSS feed.
    latest_title, latest_date : str
        Name and date of the latest episode.

    The file is fully re-written on every call; incremental updates may be
    added in a later version.
    """
    # Storage location is currently fixed to the relative ./feeds directory.
    file_path = "feeds"
    file_name = feed + "_rss.xml"

    # Binary mode because etree.tostring() returns encoded bytes.
    with open(os.path.join(file_path, file_name), "wb") as write_file:
        root = etree.Element("podcast")

        # Exactly four children: podcast name, feed url, latest episode name,
        # and latest episode date.
        # (The previous `pageElement = etree.SubElement(...).text = value`
        # chained assignment bound the *string* to pageElement -- a misleading,
        # unused variable -- so the elements are now set directly.)
        etree.SubElement(root, "name").text = feed
        etree.SubElement(root, "url").text = feed_url
        etree.SubElement(root, "episode").text = latest_title
        etree.SubElement(root, "date").text = latest_date

        # Pretty-printed bytes with an XML declaration.
        out_xml = etree.tostring(root, xml_declaration=True, pretty_print=True)
        write_file.write(out_xml)
        # No explicit close(): the `with` block already closes the file.
public interface ResourcePool {
boolean hasFreeResources();
Resource acquire();
void release(Resource resource);
<|fim▁hole|>}<|fim▁end|> | |
<|file_name|>Gruntfile.js<|end_file_name|><|fim▁begin|>'use strict';
module.exports = function (grunt) {
// Time how long tasks take. Can help when optimizing build times
require('time-grunt')(grunt);
// Automatically load required grunt tasks
require('jit-grunt')(grunt, {
lockfile: 'grunt-lock'
});
// Configurable paths
var config = {
sass_dir: 'bundle/Resources/sass/admin',
public_dir: 'bundle/Resources/public/admin'
};
// Define the configuration for all the tasks
grunt.initConfig({
// Project settings
config: config,
//Prevent multiple grunt instances
lockfile: {
grunt: {
path: 'grunt.lock'
}
},
// Watches files for changes and runs tasks based on the changed files
watch: {
gruntfile: {
files: ['Gruntfile.js'],
options: {
reload: true
}
},
sass: {
files: ['<%= config.sass_dir %>/{,*/}*.{scss,sass}'],
tasks: ['sass', 'postcss']
}
},
// Compiles Sass to CSS and generates necessary files if requested
sass: {
options: {
sourceMap: true,
sourceMapEmbed: true,
sourceMapContents: true,
includePaths: ['.']
},
dist: {
files: [{
expand: true,
cwd: '<%= config.sass_dir %>',
src: ['*.{scss,sass}'],
dest: '.tmp/css',
ext: '.css'
}]
}
},
postcss: {
options: {
map: true,
processors: [
// Add vendor prefixed styles
require('autoprefixer')({
browsers: ['> 1%', 'last 3 versions', 'Firefox ESR', 'Opera 12.1']
})
]
},
dist: {
files: [{
expand: true,
cwd: '.tmp/css/',
src: '{,*/}*.css',
dest: '<%= config.public_dir %>/css'
}]
}
}
});
grunt.registerTask('serve', 'Start the server and preview your app', function () {<|fim▁hole|> 'lockfile',
'sass:dist',
'postcss',
'watch'
]);
});
grunt.registerTask('default', [
'serve'
]);
};<|fim▁end|> | grunt.task.run([ |
<|file_name|>system_linux.go<|end_file_name|><|fim▁begin|>package color
func init() {
KeyboardFocus = RGB(240, 119, 70)
SelectedTextBackground = KeyboardFocus<|fim▁hole|><|fim▁end|> | } |
<|file_name|>stringLiteralTypesInUnionTypes03.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>// @declaration: true
type T = number | "foo" | "bar";
var x: "foo" | "bar" | number;
var y: T = undefined;
if (x === "foo") {
let a = x;
}
else if (x !== "bar") {
let b = x || y;
}
else {
let c = x;
let d = y;
let e: (typeof x) | (typeof y) = c || d;
}
x = y;
y = x;<|fim▁end|> | |
<|file_name|>ReactDOMFiberAsync-test.js<|end_file_name|><|fim▁begin|>var React = require('react');
var ReactFeatureFlags = require('ReactFeatureFlags');
var ReactDOM;
var AsyncComponent = React.unstable_AsyncComponent;
describe('ReactDOMFiberAsync', () => {
var container;
beforeEach(() => {
container = document.createElement('div');
ReactDOM = require('react-dom');
});
it('renders synchronously by default', () => {
var ops = [];
ReactDOM.render(<div>Hi</div>, container, () => {
ops.push(container.textContent);
});
ReactDOM.render(<div>Bye</div>, container, () => {
ops.push(container.textContent);
});
expect(ops).toEqual(['Hi', 'Bye']);
});
describe('with feature flag disabled', () => {
beforeEach(() => {
jest.resetModules();
ReactFeatureFlags = require('ReactFeatureFlags');
container = document.createElement('div');
ReactFeatureFlags.enableAsyncSubtreeAPI = false;
ReactDOM = require('react-dom');
});
it('renders synchronously', () => {
ReactDOM.render(
<AsyncComponent><div>Hi</div></AsyncComponent>,
container,
);
expect(container.textContent).toEqual('Hi');
ReactDOM.render(
<AsyncComponent><div>Bye</div></AsyncComponent>,
container,
);
expect(container.textContent).toEqual('Bye');
});
});
describe('with feature flag enabled', () => {
beforeEach(() => {
jest.resetModules();
ReactFeatureFlags = require('ReactFeatureFlags');
container = document.createElement('div');
ReactFeatureFlags.enableAsyncSubtreeAPI = true;
ReactDOM = require('react-dom');
});
it('AsyncComponent at the root makes the entire tree async', () => {
ReactDOM.render(
<AsyncComponent><div>Hi</div></AsyncComponent>,
container,
);
expect(container.textContent).toEqual('');
jest.runAllTimers();
expect(container.textContent).toEqual('Hi');
ReactDOM.render(
<AsyncComponent><div>Bye</div></AsyncComponent>,
container,
);
expect(container.textContent).toEqual('Hi');
jest.runAllTimers();
expect(container.textContent).toEqual('Bye');
});
it('updates inside an async tree are async by default', () => {
let instance;
class Component extends React.Component {
state = {step: 0};
render() {
instance = this;
return <div>{this.state.step}</div>;
}
}
ReactDOM.render(
<AsyncComponent><Component /></AsyncComponent>,
container,
);
expect(container.textContent).toEqual('');
jest.runAllTimers();
expect(container.textContent).toEqual('0');
instance.setState({step: 1});
expect(container.textContent).toEqual('0');
jest.runAllTimers();
expect(container.textContent).toEqual('1');
});
it('AsyncComponent creates an async subtree', () => {
let instance;
class Component extends React.unstable_AsyncComponent {
state = {step: 0};
render() {
instance = this;
return <div>{this.state.step}</div>;
}
}
ReactDOM.render(<div><Component /></div>, container);
jest.runAllTimers();
instance.setState({step: 1});
expect(container.textContent).toEqual('0');
jest.runAllTimers();
expect(container.textContent).toEqual('1');
});
it('updates inside an async subtree are async by default', () => {
class Component extends React.unstable_AsyncComponent {
render() {
return <Child />;
}
}
let instance;
class Child extends React.Component {
state = {step: 0};
render() {
instance = this;
return <div>{this.state.step}</div>;
}
}
ReactDOM.render(<div><Component /></div>, container);
jest.runAllTimers();
instance.setState({step: 1});
expect(container.textContent).toEqual('0');
jest.runAllTimers();
expect(container.textContent).toEqual('1');
});
it('flushSync batches sync updates and flushes them at the end of the batch', () => {
let ops = [];
let instance;
class Component extends React.Component {
state = {text: ''};
push(val) {
this.setState(state => ({text: state.text + val}));
}
componentDidUpdate() {
ops.push(this.state.text);
}
render() {
instance = this;
return <span>{this.state.text}</span>;
}
}
ReactDOM.render(<Component />, container);
instance.push('A');
expect(ops).toEqual(['A']);
expect(container.textContent).toEqual('A');
ReactDOM.flushSync(() => {
instance.push('B');
instance.push('C');
// Not flushed yet
expect(container.textContent).toEqual('A');
expect(ops).toEqual(['A']);
});
expect(container.textContent).toEqual('ABC');
expect(ops).toEqual(['A', 'ABC']);
instance.push('D');
expect(container.textContent).toEqual('ABCD');
expect(ops).toEqual(['A', 'ABC', 'ABCD']);
});
it('flushSync flushes updates even if nested inside another flushSync', () => {
let ops = [];
let instance;
class Component extends React.Component {
state = {text: ''};
push(val) {
this.setState(state => ({text: state.text + val}));
}
componentDidUpdate() {
ops.push(this.state.text);
}
render() {
instance = this;
return <span>{this.state.text}</span>;
}
}
ReactDOM.render(<Component />, container);
instance.push('A');
expect(ops).toEqual(['A']);
expect(container.textContent).toEqual('A');
ReactDOM.flushSync(() => {
instance.push('B');
instance.push('C');
// Not flushed yet
expect(container.textContent).toEqual('A');
expect(ops).toEqual(['A']);
ReactDOM.flushSync(() => {
instance.push('D');
});
// The nested flushSync caused everything to flush.
expect(container.textContent).toEqual('ABCD');
expect(ops).toEqual(['A', 'ABCD']);
});
expect(container.textContent).toEqual('ABCD');
expect(ops).toEqual(['A', 'ABCD']);
});
it('flushSync throws if already performing work', () => {
class Component extends React.Component {
componentDidUpdate() {
ReactDOM.flushSync(() => {});
}
render() {
return null;
}
}
// Initial mount
ReactDOM.render(<Component />, container);
// Update
expect(() => ReactDOM.render(<Component />, container)).toThrow(
'flushSync was called from inside a lifecycle method',
);
});
it('flushSync flushes updates before end of the tick', () => {
let ops = [];
let instance;
<|fim▁hole|> class Component extends React.unstable_AsyncComponent {
state = {text: ''};
push(val) {
this.setState(state => ({text: state.text + val}));
}
componentDidUpdate() {
ops.push(this.state.text);
}
render() {
instance = this;
return <span>{this.state.text}</span>;
}
}
ReactDOM.render(<Component />, container);
jest.runAllTimers();
// Updates are async by default
instance.push('A');
expect(ops).toEqual([]);
expect(container.textContent).toEqual('');
ReactDOM.flushSync(() => {
instance.push('B');
instance.push('C');
// Not flushed yet
expect(container.textContent).toEqual('');
expect(ops).toEqual([]);
});
// Only the active updates have flushed
expect(container.textContent).toEqual('BC');
expect(ops).toEqual(['BC']);
instance.push('D');
expect(container.textContent).toEqual('BC');
expect(ops).toEqual(['BC']);
// Flush the async updates
jest.runAllTimers();
expect(container.textContent).toEqual('BCAD');
expect(ops).toEqual(['BC', 'BCAD']);
});
});
});<|fim▁end|> | |
<|file_name|>ramen.js<|end_file_name|><|fim▁begin|>"use strict";
var i = 180; //3分固定
function count(){
if(i <= 0){<|fim▁hole|> i -= 1;
}
window.onload = function(){
setInterval("count()", 1000);
};<|fim▁end|> | document.getElementById("output").innerHTML = "完成!";
}else{
document.getElementById("output").innerHTML = i + "s";
} |
<|file_name|>test_modelfield.py<|end_file_name|><|fim▁begin|>from django import forms
from example.models import OneUrlModel, ManyUrlsModel
def test_one_url(db):
    """A single-URL model form normalizes its value on save."""
    class F(forms.ModelForm):
        class Meta:
            model = OneUrlModel
            fields = '__all__'

    # Mixed-case, scheme-less input is normalized to a canonical http URL.
    form = F({'url': 'ya.RU'})
    instance = form.save()
    assert instance.url == 'http://ya.ru'
def test_many_urls(db):<|fim▁hole|>
form = F({'urls': 'ya.RU, xx.com '
'httP://zzz.ff'})
assert form.is_valid()
instance = form.save()
assert instance.urls == [
'http://xx.com',
'http://ya.ru',
'http://zzz.ff',
]
form = F(instance=instance)
assert bool(form.errors) == False
def test_model(db):
    """Direct model creation accepts both list and single-string URL input."""
    instance = ManyUrlsModel.objects.create(
        urls=['http://ya.ru', 'http://xx.com'],
    )
    assert ManyUrlsModel.objects.get(id=instance.id).urls == ['http://ya.ru', 'http://xx.com']

    # A single string round-trips as a one-element list.
    instance = ManyUrlsModel.objects.create(
        urls='http://ya.ru',
    )
    assert ManyUrlsModel.objects.get(id=instance.id).urls == ['http://ya.ru']
class Meta:
model = ManyUrlsModel
fields = '__all__' |
<|file_name|>ExplicitConvection.py<|end_file_name|><|fim▁begin|><|fim▁hole|># Author: Martin Sandve Alnes
# Date: 2008-10-03
#
from ufl import (Coefficient, TestFunction, TrialFunction, VectorElement, dot,
dx, grad, triangle)
element = VectorElement("Lagrange", triangle, 1)
u = TrialFunction(element)
v = TestFunction(element)
w = Coefficient(element)
a = dot(dot(w, grad(u)), v) * dx<|fim▁end|> | # |
<|file_name|>download.go<|end_file_name|><|fim▁begin|>// Copyright 2014 The Gogs Authors. All rights reserved.
// Copyright 2018 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package repo
import (
"fmt"
"io"
"path"
"strings"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/charset"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/lfs"
"code.gitea.io/gitea/modules/log"
)
// ServeData download file from io.Reader
func ServeData(ctx *context.Context, name string, reader io.Reader) error {
buf := make([]byte, 1024)
n, _ := reader.Read(buf)
if n >= 0 {
buf = buf[:n]
}
ctx.Resp.Header().Set("Cache-Control", "public,max-age=86400")
name = path.Base(name)
// Google Chrome dislike commas in filenames, so let's change it to a space
name = strings.Replace(name, ",", " ", -1)
if base.IsTextFile(buf) || ctx.QueryBool("render") {
cs, err := charset.DetectEncoding(buf)
if err != nil {
log.Error("Detect raw file %s charset failed: %v, using by default utf-8", name, err)
cs = "utf-8"
}
ctx.Resp.Header().Set("Content-Type", "text/plain; charset="+strings.ToLower(cs))
} else if base.IsImageFile(buf) || base.IsPDFFile(buf) {
ctx.Resp.Header().Set("Content-Disposition", fmt.Sprintf(`inline; filename="%s"`, name))
ctx.Resp.Header().Set("Access-Control-Expose-Headers", "Content-Disposition")
} else {
ctx.Resp.Header().Set("Content-Disposition", fmt.Sprintf(`attachment; filename="%s"`, name))
ctx.Resp.Header().Set("Access-Control-Expose-Headers", "Content-Disposition")
}
_, err := ctx.Resp.Write(buf)
if err != nil {
return err
}
_, err = io.Copy(ctx.Resp, reader)
return err
}
// ServeBlob download a git.Blob
func ServeBlob(ctx *context.Context, blob *git.Blob) error {
dataRc, err := blob.DataAsync()
if err != nil {
return err<|fim▁hole|> }
}()
return ServeData(ctx, ctx.Repo.TreePath, dataRc)
}
// ServeBlobOrLFS download a git.Blob redirecting to LFS if necessary
func ServeBlobOrLFS(ctx *context.Context, blob *git.Blob) error {
dataRc, err := blob.DataAsync()
if err != nil {
return err
}
defer func() {
if err = dataRc.Close(); err != nil {
log.Error("ServeBlobOrLFS: Close: %v", err)
}
}()
if meta, _ := lfs.ReadPointerFile(dataRc); meta != nil {
meta, _ = ctx.Repo.Repository.GetLFSMetaObjectByOid(meta.Oid)
if meta == nil {
return ServeBlob(ctx, blob)
}
lfsDataRc, err := lfs.ReadMetaObject(meta)
if err != nil {
return err
}
defer func() {
if err = lfsDataRc.Close(); err != nil {
log.Error("ServeBlobOrLFS: Close: %v", err)
}
}()
return ServeData(ctx, ctx.Repo.TreePath, lfsDataRc)
}
return ServeBlob(ctx, blob)
}
// SingleDownload download a file by repos path
func SingleDownload(ctx *context.Context) {
blob, err := ctx.Repo.Commit.GetBlobByPath(ctx.Repo.TreePath)
if err != nil {
if git.IsErrNotExist(err) {
ctx.NotFound("GetBlobByPath", nil)
} else {
ctx.ServerError("GetBlobByPath", err)
}
return
}
if err = ServeBlob(ctx, blob); err != nil {
ctx.ServerError("ServeBlob", err)
}
}
// SingleDownloadOrLFS download a file by repos path redirecting to LFS if necessary
func SingleDownloadOrLFS(ctx *context.Context) {
blob, err := ctx.Repo.Commit.GetBlobByPath(ctx.Repo.TreePath)
if err != nil {
if git.IsErrNotExist(err) {
ctx.NotFound("GetBlobByPath", nil)
} else {
ctx.ServerError("GetBlobByPath", err)
}
return
}
if err = ServeBlobOrLFS(ctx, blob); err != nil {
ctx.ServerError("ServeBlobOrLFS", err)
}
}
// DownloadByID download a file by sha1 ID
func DownloadByID(ctx *context.Context) {
blob, err := ctx.Repo.GitRepo.GetBlob(ctx.Params("sha"))
if err != nil {
if git.IsErrNotExist(err) {
ctx.NotFound("GetBlob", nil)
} else {
ctx.ServerError("GetBlob", err)
}
return
}
if err = ServeBlob(ctx, blob); err != nil {
ctx.ServerError("ServeBlob", err)
}
}
// DownloadByIDOrLFS download a file by sha1 ID taking account of LFS
func DownloadByIDOrLFS(ctx *context.Context) {
blob, err := ctx.Repo.GitRepo.GetBlob(ctx.Params("sha"))
if err != nil {
if git.IsErrNotExist(err) {
ctx.NotFound("GetBlob", nil)
} else {
ctx.ServerError("GetBlob", err)
}
return
}
if err = ServeBlobOrLFS(ctx, blob); err != nil {
ctx.ServerError("ServeBlob", err)
}
}<|fim▁end|> | }
defer func() {
if err = dataRc.Close(); err != nil {
log.Error("ServeBlob: Close: %v", err) |
<|file_name|>test_artificial_128_Difference_Lag1Trend_7_12_20.py<|end_file_name|><|fim▁begin|>import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art<|fim▁hole|>art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "Lag1Trend", cycle_length = 7, transform = "Difference", sigma = 0.0, exog_count = 20, ar_order = 12);<|fim▁end|> | |
<|file_name|>database.js<|end_file_name|><|fim▁begin|>/**
* Created by Samuel Schmid on 23.03.14.
*
* Class for Database Handling
*
* Containing
* - App Config
* - Database Information
*
* @type {Database}
*/
module.exports = Database;
Array.prototype.contains = function(obj) {
var i = this.length;
while (i--) {
if (this[i] === obj) {
return true;
}
}
return false;
}
String.prototype.replaceAll = function(target, replacement) {
return this.split(target).join(replacement);
};
function Database(grunt) {
this.grunt = grunt;
this.appconfig = grunt.config().appconfig;
this.db = this.appconfig.db;
}
/**
* delete Database Schemes of Docs
*
* @param docs
*/
Database.prototype.deleteSchemes = function(docs) {
var grunt = this.grunt;
grunt.log.debug("start ");
if(docs.docs.length > 0) {
var firstDoc = docs.docs[0];
var rootfolder = firstDoc.schemefolder.split("/")[0];
grunt.log.debug("Database: delete files in folder:" + rootfolder);
grunt.file.delete(rootfolder);
} else {
grunt.log.debug("Empty");
return;
}
}
/**
* create Database Schemes for Docs
*
* @param docs
*/
Database.prototype.createSchemes = function(docs) {
var grunt = this.grunt;
if(this.db.name === "mongodb") {
if(this.db.provider === "mongoose") {
grunt.log.write("start writing schemes for database " + this.db.name + " and provider "+this.db.provider + ".");
var Provider = require('./providers/mongoose/mongoose-provider.js');
var provider = new Provider(grunt);
for(var i=0;i<docs.docs.length;i++) {
var doc = docs.docs[i];
if(doc.json.type.endsWith('.abstract')) {
provider.writeAbstractScheme(doc);
}
}
for(var i=0;i<docs.docs.length;i++) {
var doc = docs.docs[i];
if(!doc.json.type.endsWith('.apidescription') && !doc.json.type.endsWith('.abstract')) {
provider.writeScheme(doc);
}
}
provider.writeLib();
} else {
<|fim▁hole|> grunt.log.write("cannot create schemes for database " + this.db.name + ", because there is no provider for it.");
}
}<|fim▁end|> | grunt.log.write("cannot create schemes for database " + this.db.name + ", because there we can't use the provider "+this.db.provider+" for it.");
}
} else { |
<|file_name|>unauthenticated.js<|end_file_name|><|fim▁begin|>/* The all pages that do not require authentication */
function UnAuthenticatedHandler() {
    "use strict";

    // Each handler simply renders its template; no auth checks are performed.
    this.displayAboutPage = function(req, res, next) {
        return res.render("about");
    };

    this.displayContactPage = function(req, res, next) {
        return res.render("contact");
    };

    this.displayHomePage = function(req, res, next) {
        return res.render("home");
    };

    // Fix: this handler previously had an empty body and rendered nothing;
    // it now renders the chat template like its siblings.
    this.displayChatPage = function(req, res, next) {
        return res.render("chat");
    };
}
module.exports = UnAuthenticatedHandler;<|fim▁end|> | |
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import subprocess
import os
import errno
def download_file(url, local_fname=None, force_write=False):
    """Download *url* to a local file and return the local path.

    Parameters
    ----------
    url : str
        Source URL.
    local_fname : str, optional
        Target path; defaults to the last path component of *url*.
    force_write : bool, optional
        Re-download even if the target file already exists.

    Returns
    -------
    str
        Path of the downloaded (or already existing) file.

    Raises
    ------
    RuntimeError
        If the server does not answer with HTTP 200.
    """
    if local_fname is None:
        local_fname = url.split('/')[-1]
    # Skip the network round-trip entirely when the file is already cached.
    if not force_write and os.path.exists(local_fname):
        return local_fname

    dir_name = os.path.dirname(local_fname)

    if dir_name != "":
        if not os.path.exists(dir_name):
            try: # try to create the directory if it doesn't exists
                os.makedirs(dir_name)
            except OSError as exc:
                # Another process may have created it in the meantime.
                if exc.errno != errno.EEXIST:
                    raise

    # requests is not default installed; import it lazily so that the cached
    # early-return path above works without it.
    import requests
    r = requests.get(url, stream=True)
    # Explicit raise instead of `assert`: asserts are stripped under `-O`.
    if r.status_code != 200:
        raise RuntimeError("failed to open %s" % url)
    with open(local_fname, 'wb') as f:
        for chunk in r.iter_content(chunk_size=1024):
            if chunk: # filter out keep-alive new chunks
                f.write(chunk)
    return local_fname
"""
try:
re = subprocess.check_output(["nvidia-smi", "-L"], universal_newlines=True)
except OSError:
return []
return range(len([i for i in re.split('\n') if 'GPU' in i]))<|fim▁end|> |
def get_gpus():
""" |
<|file_name|>comp-1144.component.spec.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { Comp1144Component } from './comp-1144.component';
describe('Comp1144Component', () => {
let component: Comp1144Component;
let fixture: ComponentFixture<Comp1144Component>;
beforeEach(async(() => {
TestBed.configureTestingModule({
declarations: [ Comp1144Component ]
})
.compileComponents();
}));
beforeEach(() => {<|fim▁hole|> });
it('should create', () => {
expect(component).toBeTruthy();
});
});<|fim▁end|> | fixture = TestBed.createComponent(Comp1144Component);
component = fixture.componentInstance;
fixture.detectChanges(); |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.