prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k)
---|---|
<|file_name|>permission.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
from rest_framework import serializers
from django.utils.translation import ugettext_lazy as _
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
from perms.models import AssetPermission, Action
__all__ = [
'AssetPermissionSerializer',
'ActionsField',
]
class ActionsField(serializers.MultipleChoiceField):
def __init__(self, *args, **kwargs):
kwargs['choices'] = Action.CHOICES
super().__init__(*args, **kwargs)
def to_representation(self, value):
return Action.value_to_choices(value)
def to_internal_value(self, data):
if data is None:
return data
return Action.choices_to_value(data)
class ActionsDisplayField(ActionsField):
def to_representation(self, value):
values = super().to_representation(value)
choices = dict(Action.CHOICES)
return [choices.get(i) for i in values]
class AssetPermissionSerializer(BulkOrgResourceModelSerializer):
actions = ActionsField(required=False, allow_null=True)
is_valid = serializers.BooleanField(read_only=True)
is_expired = serializers.BooleanField(read_only=True)
class Meta:
model = AssetPermission
mini_fields = ['id', 'name']
small_fields = mini_fields + [
'is_active', 'is_expired', 'is_valid', 'actions',
'created_by', 'date_created', 'date_expired',
'date_start', 'comment'
]
m2m_fields = [
'users', 'user_groups', 'assets', 'nodes', 'system_users',
'users_amount', 'user_groups_amount', 'assets_amount',
'nodes_amount', 'system_users_amount',
]
fields = small_fields + m2m_fields
read_only_fields = ['created_by', 'date_created']
extra_kwargs = {
'is_expired': {'label': _('Is expired')},
'is_valid': {'label': _('Is valid')},<|fim▁hole|> 'user_groups_amount': {'label': _('User groups amount')},
'assets_amount': {'label': _('Assets amount')},
'nodes_amount': {'label': _('Nodes amount')},
'system_users_amount': {'label': _('System users amount')},
}
@classmethod
def setup_eager_loading(cls, queryset):
""" Perform necessary eager loading of data. """
queryset = queryset.prefetch_related('users', 'user_groups', 'assets', 'nodes', 'system_users')
return queryset<|fim▁end|> | 'actions': {'label': _('Actions')},
'users_amount': {'label': _('Users amount')}, |
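The ActionsField above delegates to Action.value_to_choices and Action.choices_to_value, which map between the integer stored on the model and the list of choice keys exposed over the API. A minimal sketch of that round trip, assuming Action packs its choices into a bitmask (the real JumpServer values and labels may differ):

```python
class Action:
    CONNECT, UPLOAD, DOWNLOAD = 0b1, 0b10, 0b100  # assumed bit values
    CHOICES = ((CONNECT, 'Connect'), (UPLOAD, 'Upload file'), (DOWNLOAD, 'Download file'))

    @classmethod
    def choices_to_value(cls, choices):
        # OR the selected choice keys into the single integer stored on the model.
        value = 0
        for choice in choices:
            value |= choice
        return value

    @classmethod
    def value_to_choices(cls, value):
        # Expand the stored integer back into the matching choice keys.
        return [key for key, _ in cls.CHOICES if key & value]

assert Action.value_to_choices(Action.choices_to_value([0b1, 0b100])) == [0b1, 0b100]
```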
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/*
*
* apiView
*
*/
import React, { PureComponent } from 'react'
import PropTypes from 'prop-types'
import { FormattedMessage } from 'react-intl'
import { connect } from 'react-redux'
import ReadMargin from 'components/ReadMargin'
import View from 'components/View'
import P from 'components/P'
import messages from './messages'
class WorldView extends PureComponent {<|fim▁hole|> }
render() {
return (
<div>
<View left={true}>
<ReadMargin>
<P><FormattedMessage {...messages.arasaacInWorld} /></P>
</ReadMargin>
</View>
<iframe src="https://www.google.com/maps/d/u/0/embed?mid=1EBR3psLxK-G_WujU93NMWkfisTYK4HwY" width="100%" height="800"></iframe>
</div>
)
}
}
WorldView.propTypes = {
theme: PropTypes.string.isRequired
}
const mapStateToProps = (state) => ({
theme: state.get('theme')
})
export default connect(mapStateToProps)(WorldView)<|fim▁end|> | componentDidMount() {
|
<|file_name|>popup.js<|end_file_name|><|fim▁begin|>document.addEventListener("DOMContentLoaded", function (event) {
'use strict';
var paragraph, url, proxy;
paragraph = document.querySelectorAll('p.error_text');
chrome.tabs.query({ currentWindow: true, active: true }, function (tabs) {
url = tabs[0].url;
if (url.indexOf('chrome://') == 0) {
paragraph[0].innerHTML = 'Sorry, you can\'t activate Browse Google Cache on a page with a "chrome://" URL.';
} else if (url.indexOf('https://chrome.google.com/webstore') == 0) {
paragraph[0].innerHTML = 'Sorry, you can\'t activate Browse Google Cache on the Chrome Web Store.';
} else {
chrome.tabs.query({ currentWindow: true, active: true }, function (tabs) {
chrome.runtime.sendMessage({
action : 'extensionButtonClicked',
'tab': tabs[0]
});
window.close();
});
}
});<|fim▁hole|><|fim▁end|> | }); |
<|file_name|>index.js<|end_file_name|><|fim▁begin|><|fim▁hole|>exports.CLI = require(__dirname + '/lib/cli');
exports.Events = require(__dirname + '/lib/events');<|fim▁end|> | |
<|file_name|>aptpreferences.py<|end_file_name|><|fim▁begin|>import os
from twisted.python.compat import iteritems
from landscape.lib.fs import read_text_file
from landscape.constants import APT_PREFERENCES_SIZE_LIMIT
from landscape.client.monitor.plugin import DataWatcher
class AptPreferences(DataWatcher):
"""
Report the system APT preferences configuration.
"""
persist_name = "apt-preferences"
message_type = "apt-preferences"
message_key = "data"
run_interval = 900 # 15 minutes
scope = "package"
size_limit = APT_PREFERENCES_SIZE_LIMIT
def __init__(self, etc_apt_directory="/etc/apt"):
self._etc_apt_directory = etc_apt_directory
def get_data(self):
"""Return a C{dict} mapping APT preferences files to their contents.
If no APT preferences configuration is set at all on the system, then
simply return C{None}
"""<|fim▁hole|> if os.path.exists(preferences_filename):
data[preferences_filename] = read_text_file(preferences_filename)
preferences_directory = os.path.join(self._etc_apt_directory,
u"preferences.d")
if os.path.isdir(preferences_directory):
for entry in os.listdir(preferences_directory):
filename = os.path.join(preferences_directory, entry)
if os.path.isfile(filename):
data[filename] = read_text_file(filename)
if data == {}:
return None
item_size_limit = self.size_limit // len(data.keys())
for filename, contents in iteritems(data):
if len(filename) + len(contents) > item_size_limit:
truncated_contents_size = item_size_limit - len(filename)
data[filename] = data[filename][0:truncated_contents_size]
return data
def run(self):
return self.exchange(urgent=True)<|fim▁end|> | data = {}
preferences_filename = os.path.join(self._etc_apt_directory,
u"preferences") |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright 2016 Mycroft AI, Inc.
#
# This file is part of Mycroft Core.
#
# Mycroft Core is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Mycroft Core is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Mycroft Core. If not, see <http://www.gnu.org/licenses/>.
<|fim▁hole|>
from adapt.intent import IntentBuilder
from mycroft.messagebus.message import Message
from mycroft.skills.LILACS_core.question_parser import LILACSQuestionParser
from mycroft.skills.LILACS_knowledge.knowledgeservice import KnowledgeService
from mycroft.skills.core import MycroftSkill
from mycroft.util.log import getLogger
__author__ = 'jarbas'
logger = getLogger(__name__)
class LILACSChatbotSkill(MycroftSkill):
# https://github.com/ElliotTheRobot/LILACS-mycroft-core/issues/19
def __init__(self):
super(LILACSChatbotSkill, self).__init__(name="ChatbotSkill")
# initialize your variables
self.reload_skill = False
self.active = True
self.parser = None
self.service = None
self.TIMEOUT = 2
def initialize(self):
# register intents
self.parser = LILACSQuestionParser()
self.service = KnowledgeService(self.emitter)
self.build_intents()
# make thread to keep active
self.make_bump_thread()
def ping(self):
while True:
i = 0
if self.active:
self.emitter.emit(Message("recognizer_loop:utterance", {"source": "LILACS_chatbot_skill",
"utterances": [
"bump chat to active skill list"]}))
while i < 60 * self.TIMEOUT:
i += 1
sleep(1)
i = 0
def make_bump_thread(self):
timer_thread = Thread(target=self.ping)
timer_thread.setDaemon(True)
timer_thread.start()
def build_intents(self):
# build intents
deactivate_intent = IntentBuilder("DeactivateChatbotIntent") \
.require("deactivateChatBotKeyword").build()
activate_intent=IntentBuilder("ActivateChatbotIntent") \
.require("activateChatBotKeyword").build()
bump_intent = IntentBuilder("BumpChatBotSkillIntent"). \
require("bumpChatBotKeyword").build()
# register intents
self.register_intent(deactivate_intent, self.handle_deactivate_intent)
self.register_intent(activate_intent, self.handle_activate_intent)
self.register_intent(bump_intent, self.handle_set_on_top_active_list)
def handle_set_on_top_active_list(self, message=None):
# dummy intent just to bump curiosity skill to top of active skill list
# called on a timer in order to always use converse method
pass
def handle_deactivate_intent(self, message):
self.active = False
self.speak_dialog("chatbot_off")
def handle_activate_intent(self, message):
self.active = True
self.speak_dialog("chatbot_on")
def stop(self):
self.handle_deactivate_intent("global stop")
def converse(self, transcript, lang="en-us"):
# parse 1st utterance for entities
if self.active and "bump chat" not in transcript[0] and "bump curiosity" not in transcript[0]:
nodes, parents, synonims = self.parser.tag_from_dbpedia(transcript[0])
self.log.info("nodes: " + str(nodes))
self.log.info("parents: " + str(parents))
self.log.info("synonims: " + str(synonims))
# get concept net , talk
possible_responses = []
for node in nodes:
try:
dict = self.service.adquire(node, "concept net")
usages = dict["concept net"]["surfaceText"]
for usage in usages:
possible_responses.append(usage.replace("[", "").replace("]", ""))
except:
self.log.info("could not get reply for node " + node)
try:
# say something random
reply = random.choice(possible_responses)
self.speak(reply)
return True
except:
self.log.error("Could not get chatbot response for: " + transcript[0])
# don't know what to say
# TODO ask user a question and play dumb
return False
# tell intent skill you did not handle intent
return False
def create_skill():
return LILACSChatbotSkill()<|fim▁end|> | from threading import Thread
from time import sleep
import random |
<|file_name|>no_0021_merge_two_sorted_lists.rs<|end_file_name|><|fim▁begin|>// Definition for singly-linked list.
#[derive(PartialEq, Eq, Clone, Debug)]
pub struct ListNode {
pub val: i32,
pub next: Option<Box<ListNode>>,
}
impl ListNode {
#[inline]
fn new(val: i32) -> Self {
ListNode { next: None, val }
}
}
struct Solution;
impl Solution {
// from https://leetcode-cn.com/problems/merge-two-sorted-lists/solution/rust-by-tryfor_-23/
pub fn merge_two_lists(
mut l1: Option<Box<ListNode>>,
mut l2: Option<Box<ListNode>>,
) -> Option<Box<ListNode>> {
let mut res = ListNode::new(0);
let mut ptr = &mut res;
while let (Some(n1), Some(n2)) = (l1.as_ref(), l2.as_ref()) {
if n1.val <= n2.val {
// n1 is a reference, so we can't call take() through it...
// l1 = n1.next.take();
// n1 is not used again below, so its borrow ends here (RAII).
// !!! Move l1's ownership first, then give l1 a new owner at the end. !!!
ptr.next = l1;
ptr = ptr.next.as_mut().unwrap();
l1 = ptr.next.take();
} else {
ptr.next = l2;
ptr = ptr.next.as_mut().unwrap();
l2 = ptr.next.take();
}
}
// Attach the leftover list
ptr.next = if l1.is_some() { l1 } else { l2 };
res.next
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_merge_two_lists1() {
let l1 = Some(Box::new(ListNode {
val: 1,
next: Some(Box::new(ListNode {
val: 2,
next: Some(Box::new(ListNode { val: 4, next: None })),
})),
}));
let l2 = Some(Box::new(ListNode {
val: 1,
next: Some(Box::new(ListNode {
val: 3,
next: Some(Box::new(ListNode { val: 4, next: None })),
})),
}));
let want = Some(Box::new(ListNode {
val: 1,
next: Some(Box::new(ListNode {
val: 1,
next: Some(Box::new(ListNode {
val: 2,<|fim▁hole|> next: Some(Box::new(ListNode { val: 4, next: None })),
})),
})),
})),
})),
}));
assert_eq!(Solution::merge_two_lists(l1, l2), want);
}
}<|fim▁end|> | next: Some(Box::new(ListNode {
val: 3,
next: Some(Box::new(ListNode {
val: 4, |
<|file_name|>scipy.py<|end_file_name|><|fim▁begin|>##
# Copyright 2009-2021 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of<|fim▁hole|># along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for building and installing scipy, implemented as an easyblock
@author: Stijn De Weirdt (Ghent University)
@author: Dries Verdegem (Ghent University)
@author: Kenneth Hoste (Ghent University)
@author: Pieter De Baets (Ghent University)
@author: Jens Timmerman (Ghent University)
"""
from distutils.version import LooseVersion
from easybuild.easyblocks.generic.fortranpythonpackage import FortranPythonPackage
from easybuild.easyblocks.generic.pythonpackage import det_pylibdir
import easybuild.tools.toolchain as toolchain
class EB_scipy(FortranPythonPackage):
"""Support for installing the scipy Python package as part of a Python installation."""
def __init__(self, *args, **kwargs):
"""Set scipy-specific test command."""
super(EB_scipy, self).__init__(*args, **kwargs)
self.testinstall = True
self.testcmd = "cd .. && %(python)s -c 'import numpy; import scipy; scipy.test(verbose=2)'"
def configure_step(self):
"""Custom configure step for scipy: set extra installation options when needed."""
super(EB_scipy, self).configure_step()
if LooseVersion(self.version) >= LooseVersion('0.13'):
# in recent scipy versions, additional compilation is done in the install step,
# which requires unsetting $LDFLAGS
if self.toolchain.comp_family() in [toolchain.GCC, toolchain.CLANGGCC]: # @UndefinedVariable
self.cfg.update('preinstallopts', "unset LDFLAGS && ")
def sanity_check_step(self, *args, **kwargs):
"""Custom sanity check for scipy."""
# can't use self.pylibdir here, need to determine path on the fly using currently active 'python' command;
# this is important for numpy installations for multiple Python versions (via multi_deps)
custom_paths = {
'files': [],
'dirs': [det_pylibdir()],
}
return super(EB_scipy, self).sanity_check_step(custom_paths=custom_paths)<|fim▁end|> | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License |
<|file_name|>vanilla_lstm.py<|end_file_name|><|fim▁begin|>'''
Build a tweet sentiment analyzer
'''
from __future__ import print_function
import cPickle as pickle
import sys
import time
from collections import OrderedDict
import numpy
import theano
import theano.tensor as tensor
from theano import config
from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
import imdb
datasets = {'imdb': (imdb.load_data, imdb.prepare_data)}
# Set the random number generators' seeds for consistency
SEED = 123
numpy.random.seed(SEED)
def numpy_floatX(data):
return numpy.asarray(data, dtype=config.floatX)
# NOTE (bitesandbytes) : Important; set minibatch_size = 1 ?
def get_minibatches_idx(n, minibatch_size, shuffle=False):
"""
Used to shuffle the dataset at each iteration.
"""
idx_list = numpy.arange(n, dtype="int32")
if shuffle:
numpy.random.shuffle(idx_list)
minibatches = []
minibatch_start = 0
for i in range(n // minibatch_size):
minibatches.append(idx_list[minibatch_start:
minibatch_start + minibatch_size])
minibatch_start += minibatch_size
if (minibatch_start != n):
# Make a minibatch out of what is left
minibatches.append(idx_list[minibatch_start:])
return zip(range(len(minibatches)), minibatches)
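# Illustrative example (not in the original tutorial): get_minibatches_idx(5, 2)
# returns [(0, array([0, 1])), (1, array([2, 3])), (2, array([4]))] -- the
# leftover example forms a final, smaller minibatch.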
# NOTE (bitesandbytes) : Not needed.
def get_dataset(name):
return datasets[name][0], datasets[name][1]
def zipp(params, tparams):
"""
When we reload the model. Needed for the GPU stuff.
"""
for kk, vv in params.items():
tparams[kk].set_value(vv)
def unzip(zipped):
"""
When we pickle the model. Needed for the GPU stuff.
"""
new_params = OrderedDict()
for kk, vv in zipped.items():
new_params[kk] = vv.get_value()
return new_params
def dropout_layer(state_before, use_noise, trng):
proj = tensor.switch(use_noise,
(state_before *
trng.binomial(state_before.shape,
p=0.5, n=1,
dtype=state_before.dtype)),
state_before * 0.5)
return proj
def _p(pp, name):
return '%s_%s' % (pp, name)
def init_params(options):
"""
Global (not LSTM) parameter. For the embedding and the classifier.
"""
params = OrderedDict()
params = get_layer(options['encoder'])[0](options,
params,
prefix=options['encoder'])
# classifier
params['U'] = 0.01 * numpy.random.randn(options['dim_proj'],
options['ydim']).astype(config.floatX)
params['b'] = numpy.zeros((options['ydim'],)).astype(config.floatX)
return params
def load_params(path, params):
pp = numpy.load(path)
for kk, vv in params.items():
if kk not in pp:
raise Warning('%s is not in the archive' % kk)
params[kk] = pp[kk]
return params
def init_tparams(params):
tparams = OrderedDict()
for kk, pp in params.items():
tparams[kk] = theano.shared(params[kk], name=kk)
return tparams
def get_layer(name):
fns = layers[name]
return fns
def ortho_weight(ndim):
W = numpy.random.randn(ndim, ndim)
u, s, v = numpy.linalg.svd(W)
return u.astype(config.floatX)
def param_init_lstm(options, params, prefix='lstm'):
"""
Init the LSTM parameter:
:see: init_params
"""
W = numpy.concatenate([ortho_weight(options['dim_proj']),
ortho_weight(options['dim_proj']),
ortho_weight(options['dim_proj']),
ortho_weight(options['dim_proj'])], axis=1)
params[_p(prefix, 'W')] = W
U = numpy.concatenate([ortho_weight(options['dim_proj']),
ortho_weight(options['dim_proj']),
ortho_weight(options['dim_proj']),
ortho_weight(options['dim_proj'])], axis=1)
params[_p(prefix, 'U')] = U
b = numpy.zeros((4 * options['dim_proj'],))
params[_p(prefix, 'b')] = b.astype(config.floatX)
return params
def lstm_layer(tparams, state_below, options, prefix='lstm', mask=None):
nsteps = state_below.shape[0]
if state_below.ndim == 3:
n_samples = state_below.shape[1]
else:
n_samples = 1
assert mask is not None
def _slice(_x, n, dim):
if _x.ndim == 3:
return _x[:, :, n * dim:(n + 1) * dim]
return _x[:, n * dim:(n + 1) * dim]
def _step(m_, x_, h_, c_):
preact = tensor.dot(h_, tparams[_p(prefix, 'U')])
preact += x_
i = tensor.nnet.sigmoid(_slice(preact, 0, options['dim_proj']))
f = tensor.nnet.sigmoid(_slice(preact, 1, options['dim_proj']))
o = tensor.nnet.sigmoid(_slice(preact, 2, options['dim_proj']))
c = tensor.tanh(_slice(preact, 3, options['dim_proj']))
c = f * c_ + i * c
c = m_[:, None] * c + (1. - m_)[:, None] * c_
h = o * tensor.tanh(c)
h = m_[:, None] * h + (1. - m_)[:, None] * h_
return h, c
# Pre-compute the input-to-hidden projection (x . W + b) for all timesteps before the scan.
state_below = (tensor.dot(state_below, tparams[_p(prefix, 'W')]) +
tparams[_p(prefix, 'b')])
dim_proj = options['dim_proj']
rval, updates = theano.scan(_step,
sequences=[mask, state_below],
outputs_info=[tensor.alloc(numpy_floatX(0.),
n_samples,
dim_proj),
tensor.alloc(numpy_floatX(0.),
n_samples,
dim_proj)],
name=_p(prefix, '_layers'),
n_steps=nsteps)
return rval[0]
# ff: Feed Forward (normal neural net), only useful to put after lstm
# before the classifier.
layers = {'lstm': (param_init_lstm, lstm_layer)}
def sgd(lr, tparams, grads, x, mask, y, cost):
""" Stochastic Gradient Descent
:note: A more complicated version of sgd than needed. It is
written this way to mirror adadelta and rmsprop.
"""
# New set of shared variable that will contain the gradient
# for a mini-batch.
gshared = [theano.shared(p.get_value() * 0., name='%s_grad' % k)
for k, p in tparams.items()]
gsup = [(gs, g) for gs, g in zip(gshared, grads)]
# Function that computes gradients for a mini-batch, but does not
# update the weights.
f_grad_shared = theano.function([x, mask, y], cost, updates=gsup,
name='sgd_f_grad_shared')
pup = [(p, p - lr * g) for p, g in zip(tparams.values(), gshared)]
# Function that updates the weights from the previously computed
# gradient.
f_update = theano.function([lr], [], updates=pup,
name='sgd_f_update')
return f_grad_shared, f_update
def adadelta(lr, tparams, grads, x, mask, y, cost):
"""
An adaptive learning rate optimizer
Parameters
----------
lr : Theano SharedVariable
Initial learning rate
tparams: Theano SharedVariable
Model parameters
grads: Theano variable
Gradients of cost w.r.t. the parameters
x: Theano variable
Model inputs
mask: Theano variable
Sequence mask
y: Theano variable
Targets
cost: Theano variable
Objective function to minimize
Notes
-----
For more information, see [ADADELTA]_.
.. [ADADELTA] Matthew D. Zeiler, *ADADELTA: An Adaptive Learning
Rate Method*, arXiv:1212.5701.
"""
zipped_grads = [theano.shared(p.get_value() * numpy_floatX(0.),
name='%s_grad' % k)
for k, p in tparams.items()]
running_up2 = [theano.shared(p.get_value() * numpy_floatX(0.),
name='%s_rup2' % k)
for k, p in tparams.items()]
running_grads2 = [theano.shared(p.get_value() * numpy_floatX(0.),
name='%s_rgrad2' % k)
for k, p in tparams.items()]
zgup = [(zg, g) for zg, g in zip(zipped_grads, grads)]
rg2up = [(rg2, 0.95 * rg2 + 0.05 * (g ** 2))
for rg2, g in zip(running_grads2, grads)]
f_grad_shared = theano.function([x, mask, y], cost, updates=zgup + rg2up,
name='adadelta_f_grad_shared')
updir = [-tensor.sqrt(ru2 + 1e-6) / tensor.sqrt(rg2 + 1e-6) * zg
for zg, ru2, rg2 in zip(zipped_grads,
running_up2,
running_grads2)]
ru2up = [(ru2, 0.95 * ru2 + 0.05 * (ud ** 2))
for ru2, ud in zip(running_up2, updir)]
param_up = [(p, p + ud) for p, ud in zip(tparams.values(), updir)]
f_update = theano.function([lr], [], updates=ru2up + param_up,
on_unused_input='ignore',
name='adadelta_f_update')
return f_grad_shared, f_update
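# Illustrative reference (not used by the training code): the ADADELTA update
# rule that adadelta() builds as a Theano graph above, written out for a single
# parameter with plain numpy. The decay rate 0.95 and epsilon 1e-6 match the
# constants used above.
def _adadelta_reference_step(param, grad, acc_grad2, acc_delta2, rho=0.95, eps=1e-6):
    acc_grad2 = rho * acc_grad2 + (1. - rho) * grad ** 2
    step = -numpy.sqrt(acc_delta2 + eps) / numpy.sqrt(acc_grad2 + eps) * grad
    acc_delta2 = rho * acc_delta2 + (1. - rho) * step ** 2
    return param + step, acc_grad2, acc_delta2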
def rmsprop(lr, tparams, grads, x, mask, y, cost):
"""
A variant of SGD that scales the step size by running average of the
recent step norms.
Parameters
----------
lr : Theano SharedVariable
Initial learning rate
tparams: Theano SharedVariable
Model parameters
grads: Theano variable
Gradients of cost w.r.t. the parameters
x: Theano variable
Model inputs
mask: Theano variable
Sequence mask
y: Theano variable
Targets
cost: Theano variable
Objective function to minimize
Notes
-----
For more information, see [Hint2014]_.
.. [Hint2014] Geoff Hinton, *Neural Networks for Machine Learning*,
lecture 6a,
http://cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf
"""
zipped_grads = [theano.shared(p.get_value() * numpy_floatX(0.),
name='%s_grad' % k)
for k, p in tparams.items()]
running_grads = [theano.shared(p.get_value() * numpy_floatX(0.),
name='%s_rgrad' % k)
for k, p in tparams.items()]
running_grads2 = [theano.shared(p.get_value() * numpy_floatX(0.),
name='%s_rgrad2' % k)
for k, p in tparams.items()]
zgup = [(zg, g) for zg, g in zip(zipped_grads, grads)]
rgup = [(rg, 0.95 * rg + 0.05 * g) for rg, g in zip(running_grads, grads)]
rg2up = [(rg2, 0.95 * rg2 + 0.05 * (g ** 2))
for rg2, g in zip(running_grads2, grads)]
f_grad_shared = theano.function([x, mask, y], cost,
updates=zgup + rgup + rg2up,
name='rmsprop_f_grad_shared')
updir = [theano.shared(p.get_value() * numpy_floatX(0.),
name='%s_updir' % k)
for k, p in tparams.items()]
updir_new = [(ud, 0.9 * ud - 1e-4 * zg / tensor.sqrt(rg2 - rg ** 2 + 1e-4))
for ud, zg, rg, rg2 in zip(updir, zipped_grads, running_grads,
running_grads2)]
param_up = [(p, p + udn[1])
for p, udn in zip(tparams.values(), updir_new)]
f_update = theano.function([lr], [], updates=updir_new + param_up,
on_unused_input='ignore',
name='rmsprop_f_update')
return f_grad_shared, f_update
def build_model(tparams, options):
trng = RandomStreams(SEED)
# Used for dropout.
use_noise = theano.shared(numpy_floatX(0.))
x = tensor.matrix('x', dtype='int64')
mask = tensor.matrix('mask', dtype=config.floatX)
y = tensor.vector('y', dtype='int64')
n_timesteps = x.shape[0]
n_samples = x.shape[1]
proj = get_layer(options['encoder'])[1](tparams, x, options,
prefix=options['encoder'],
mask=mask)
if options['encoder'] == 'lstm':
proj = (proj * mask[:, :, None]).sum(axis=0)
proj = proj / mask.sum(axis=0)[:, None]
if options['use_dropout']:<|fim▁hole|> proj = dropout_layer(proj, use_noise, trng)
pred = tensor.nnet.softmax(tensor.dot(proj.T, tparams['U']) + tparams['b'])
f_pred_prob = theano.function([x, mask], pred, name='f_pred_prob')
f_pred = theano.function([x, mask], pred.argmax(axis=1), name='f_pred')
off = 1e-8
if pred.dtype == 'float16':
off = 1e-6
cost = -tensor.log(pred[tensor.arange(n_samples), y] + off).mean()
return use_noise, x, mask, y, f_pred_prob, f_pred, cost
def pred_probs(f_pred_prob, prepare_data, data, iterator, verbose=False):
""" If you want to use a trained model, this is useful to compute
the probabilities of new examples.
"""
n_samples = len(data[0])
probs = numpy.zeros((n_samples, 2)).astype(config.floatX)
n_done = 0
for _, valid_index in iterator:
x, mask, y = prepare_data([data[0][t] for t in valid_index],
numpy.array(data[1])[valid_index],
maxlen=None)
pred_probs = f_pred_prob(x, mask)
probs[valid_index, :] = pred_probs
n_done += len(valid_index)
if verbose:
print('%d/%d samples classified' % (n_done, n_samples))
return probs
def pred_error(f_pred, prepare_data, data, iterator, verbose=False):
"""
Just compute the error
f_pred: Theano fct computing the prediction
prepare_data: usual prepare_data for that dataset.
"""
valid_err = 0
for _, valid_index in iterator:
x, mask, y = prepare_data([data[0][t] for t in valid_index],
numpy.array(data[1])[valid_index],
maxlen=None)
preds = f_pred(x, mask)
targets = numpy.array(data[1])[valid_index]
valid_err += (preds == targets).sum()
valid_err = 1. - numpy_floatX(valid_err) / len(data[0])
return valid_err
def train_lstm(
dim_proj=128, # word embedding dimension and LSTM number of hidden units.
patience=10, # Number of epoch to wait before early stop if no progress
max_epochs=5000, # The maximum number of epoch to run
dispFreq=10, # Display to stdout the training progress every N updates
decay_c=0., # Weight decay for the classifier applied to the U weights.
lrate=0.0001, # Learning rate for sgd (not used for adadelta and rmsprop)
n_words=10000, # Vocabulary size
optimizer=adadelta,
# sgd, adadelta and rmsprop available, sgd very hard to use, not recommended (probably need momentum and decaying learning rate).
encoder='lstm', # TODO: can be removed must be lstm.
saveto='lstm_model.npz', # The best model will be saved there
validFreq=370, # Compute the validation error after this number of updates.
saveFreq=1110, # Save the parameters after every saveFreq updates
maxlen=100, # Sequences longer than this get ignored
batch_size=16, # The batch size during training.
valid_batch_size=64, # The batch size used for validation/test set.
dataset='imdb',
# Parameter for extra option
noise_std=0.,
use_dropout=True, # if False slightly faster, but worse test error
# This frequently need a bigger model.
reload_model=None, # Path to a saved model we want to start from.
test_size=-1, # If >0, we keep only this number of test example.
):
# Model options
model_options = locals().copy()
print("model options", model_options)
load_data, prepare_data = get_dataset(dataset)
print('Loading data')
train, valid, test = load_data(n_words=n_words, valid_portion=0.05,
maxlen=maxlen)
if test_size > 0:
# The test set is sorted by size, but we want to keep randomly
# sized examples. So we must select a random subset of the
# examples.
idx = numpy.arange(len(test[0]))
numpy.random.shuffle(idx)
idx = idx[:test_size]
test = ([test[0][n] for n in idx], [test[1][n] for n in idx])
ydim = numpy.max(train[1]) + 1
# TODO(bitesandbytes) : Change ydim to |num words| + 1 (0 -> no word | empty)
model_options['ydim'] = ydim
print('Building model')
# This creates the initial parameters as numpy ndarrays.
# Dict name (string) -> numpy ndarray
params = init_params(model_options)
if reload_model:
load_params('lstm_model.npz', params)
# This creates Theano Shared Variables from the parameters.
# Dict name (string) -> Theano Tensor Shared Variable
# params and tparams have different copy of the weights.
tparams = init_tparams(params)
# use_noise is for dropout
(use_noise, x, mask,
y, f_pred_prob, f_pred, cost) = build_model(tparams, model_options)
if decay_c > 0.:
decay_c = theano.shared(numpy_floatX(decay_c), name='decay_c')
weight_decay = 0.
weight_decay += (tparams['U'] ** 2).sum()
weight_decay *= decay_c
cost += weight_decay
f_cost = theano.function([x, mask, y], cost, name='f_cost')
grads = tensor.grad(cost, wrt=list(tparams.values()))
f_grad = theano.function([x, mask, y], grads, name='f_grad')
lr = tensor.scalar(name='lr')
f_grad_shared, f_update = optimizer(lr, tparams, grads,
x, mask, y, cost)
print('Optimization')
kf_valid = get_minibatches_idx(len(valid[0]), valid_batch_size)
kf_test = get_minibatches_idx(len(test[0]), valid_batch_size)
print("%d train examples" % len(train[0]))
print("%d valid examples" % len(valid[0]))
print("%d test examples" % len(test[0]))
history_errs = []
best_p = None
bad_counter = 0
if validFreq == -1:
validFreq = len(train[0]) // batch_size
if saveFreq == -1:
saveFreq = len(train[0]) // batch_size
uidx = 0 # the number of update done
estop = False # early stop
start_time = time.time()
try:
for eidx in range(max_epochs):
n_samples = 0
# Get new shuffled index for the training set.
kf = get_minibatches_idx(len(train[0]), batch_size, shuffle=True)
for _, train_index in kf:
uidx += 1
use_noise.set_value(1.)
# Select the random examples for this minibatch
y = [train[1][t] for t in train_index]
x = [train[0][t] for t in train_index]
# Get the data in numpy.ndarray format
# This swap the axis!
# Return something of shape (minibatch maxlen, n samples)
x, mask, y = prepare_data(x, y)
n_samples += x.shape[1]
cost = f_grad_shared(x, mask, y)
f_update(lrate)
if numpy.isnan(cost) or numpy.isinf(cost):
print('bad cost detected: ', cost)
return 1., 1., 1.
if numpy.mod(uidx, dispFreq) == 0:
print('Epoch ', eidx, 'Update ', uidx, 'Cost ', cost)
if saveto and numpy.mod(uidx, saveFreq) == 0:
print('Saving...')
if best_p is not None:
params = best_p
else:
params = unzip(tparams)
numpy.savez(saveto, history_errs=history_errs, **params)
pickle.dump(model_options, open('%s.pkl' % saveto, 'wb'), -1)
print('Done')
if numpy.mod(uidx, validFreq) == 0:
use_noise.set_value(0.)
train_err = pred_error(f_pred, prepare_data, train, kf)
valid_err = pred_error(f_pred, prepare_data, valid,
kf_valid)
test_err = pred_error(f_pred, prepare_data, test, kf_test)
history_errs.append([valid_err, test_err])
if (best_p is None or
valid_err <= numpy.array(history_errs)[:,
0].min()):
best_p = unzip(tparams)
bad_counter = 0
print('Train ', train_err, 'Valid ', valid_err,
'Test ', test_err)
if (len(history_errs) > patience and
valid_err >= numpy.array(history_errs)[:-patience,
0].min()):
bad_counter += 1
if bad_counter > patience:
print('Early Stop!')
estop = True
break
print('Seen %d samples' % n_samples)
if estop:
break
except KeyboardInterrupt:
print("Training interupted")
end_time = time.time()
if best_p is not None:
zipp(best_p, tparams)
else:
best_p = unzip(tparams)
use_noise.set_value(0.)
kf_train_sorted = get_minibatches_idx(len(train[0]), batch_size)
train_err = pred_error(f_pred, prepare_data, train, kf_train_sorted)
valid_err = pred_error(f_pred, prepare_data, valid, kf_valid)
test_err = pred_error(f_pred, prepare_data, test, kf_test)
print('Train ', train_err, 'Valid ', valid_err, 'Test ', test_err)
if saveto:
numpy.savez(saveto, train_err=train_err,
valid_err=valid_err, test_err=test_err,
history_errs=history_errs, **best_p)
print('The code ran for %d epochs, with %f sec/epoch' % (
(eidx + 1), (end_time - start_time) / (1. * (eidx + 1))))
print(('Training took %.1fs' %
(end_time - start_time)), file=sys.stderr)
return train_err, valid_err, test_err
if __name__ == '__main__':
# See function train_lstm for all possible parameters and their definitions.
train_lstm(
max_epochs=100,
test_size=500,
)<|fim▁end|> | |
<|file_name|>config.go<|end_file_name|><|fim▁begin|>/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package server
import (
"fmt"
"io/ioutil"
"net"
"net/http"
"os"
goruntime "runtime"
"runtime/debug"
"sort"
"strconv"
"strings"
"sync/atomic"
"time"
jsonpatch "github.com/evanphx/json-patch"
"github.com/go-openapi/spec"
"github.com/google/uuid"
corev1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/runtime/schema"
"k8s.io/apimachinery/pkg/runtime/serializer"
"k8s.io/apimachinery/pkg/util/clock"
"k8s.io/apimachinery/pkg/util/sets"
utilwaitgroup "k8s.io/apimachinery/pkg/util/waitgroup"
"k8s.io/apimachinery/pkg/version"
"k8s.io/apiserver/pkg/admission"
"k8s.io/apiserver/pkg/audit"
auditpolicy "k8s.io/apiserver/pkg/audit/policy"
"k8s.io/apiserver/pkg/authentication/authenticator"
"k8s.io/apiserver/pkg/authentication/authenticatorfactory"
authenticatorunion "k8s.io/apiserver/pkg/authentication/request/union"
"k8s.io/apiserver/pkg/authentication/user"
"k8s.io/apiserver/pkg/authorization/authorizer"
"k8s.io/apiserver/pkg/authorization/authorizerfactory"
authorizerunion "k8s.io/apiserver/pkg/authorization/union"
"k8s.io/apiserver/pkg/endpoints/discovery"
genericapifilters "k8s.io/apiserver/pkg/endpoints/filters"
apiopenapi "k8s.io/apiserver/pkg/endpoints/openapi"
apirequest "k8s.io/apiserver/pkg/endpoints/request"
"k8s.io/apiserver/pkg/features"
genericregistry "k8s.io/apiserver/pkg/registry/generic"
"k8s.io/apiserver/pkg/server/dynamiccertificates"
"k8s.io/apiserver/pkg/server/egressselector"
genericfilters "k8s.io/apiserver/pkg/server/filters"
"k8s.io/apiserver/pkg/server/healthz"
"k8s.io/apiserver/pkg/server/routes"
serverstore "k8s.io/apiserver/pkg/server/storage"
"k8s.io/apiserver/pkg/util/feature"
utilflowcontrol "k8s.io/apiserver/pkg/util/flowcontrol"
"k8s.io/client-go/informers"
"k8s.io/client-go/kubernetes"
v1 "k8s.io/client-go/kubernetes/typed/core/v1"
restclient "k8s.io/client-go/rest"
"k8s.io/component-base/logs"
"k8s.io/klog/v2"
openapicommon "k8s.io/kube-openapi/pkg/common"
utilsnet "k8s.io/utils/net"
// install apis
_ "k8s.io/apiserver/pkg/apis/apiserver/install"
)
const (
// DefaultLegacyAPIPrefix is where the legacy APIs will be located.
DefaultLegacyAPIPrefix = "/api"
// APIGroupPrefix is where non-legacy API group will be located.
APIGroupPrefix = "/apis"
)
// Config is a structure used to configure a GenericAPIServer.
// Its members are sorted roughly in order of importance for composers.
type Config struct {
// SecureServing is required to serve https
SecureServing *SecureServingInfo
// Authentication is the configuration for authentication
Authentication AuthenticationInfo
// Authorization is the configuration for authorization
Authorization AuthorizationInfo
// LoopbackClientConfig is a config for a privileged loopback connection to the API server
// This is required for proper functioning of the PostStartHooks on a GenericAPIServer
// TODO: move into SecureServing(WithLoopback) as soon as insecure serving is gone
LoopbackClientConfig *restclient.Config
// EgressSelector provides a lookup mechanism for dialing outbound connections.
// It does so based on a EgressSelectorConfiguration which was read at startup.
EgressSelector *egressselector.EgressSelector
// RuleResolver is required to get the list of rules that apply to a given user
// in a given namespace
RuleResolver authorizer.RuleResolver
// AdmissionControl performs deep inspection of a given request (including content)
// to set values and determine whether its allowed
AdmissionControl admission.Interface
CorsAllowedOriginList []string
// FlowControl, if not nil, gives priority and fairness to request handling
FlowControl utilflowcontrol.Interface
EnableIndex bool
EnableProfiling bool
EnableDiscovery bool
// Requires generic profiling enabled
EnableContentionProfiling bool
EnableMetrics bool
DisabledPostStartHooks sets.String
// done values in this map are ignored.
PostStartHooks map[string]PostStartHookConfigEntry
// Version will enable the /version endpoint if non-nil
Version *version.Info
// AuditBackend is where audit events are sent to.
AuditBackend audit.Backend
// AuditPolicyChecker makes the decision of whether and how to audit log a request.
AuditPolicyChecker auditpolicy.Checker
// ExternalAddress is the host name to use for external (public internet) facing URLs (e.g. Swagger)
// Will default to a value based on secure serving info and available ipv4 IPs.
ExternalAddress string
//===========================================================================
// Fields you probably don't care about changing
//===========================================================================
// BuildHandlerChainFunc allows you to build custom handler chains by decorating the apiHandler.
BuildHandlerChainFunc func(apiHandler http.Handler, c *Config) (secure http.Handler)
// HandlerChainWaitGroup allows you to wait for all chain handlers exit after the server shutdown.
HandlerChainWaitGroup *utilwaitgroup.SafeWaitGroup
// DiscoveryAddresses is used to build the IPs passed to discovery. If nil, the ExternalAddress is
// always reported
DiscoveryAddresses discovery.Addresses
// The default set of healthz checks. There might be more added via AddHealthChecks dynamically.
HealthzChecks []healthz.HealthChecker
// The default set of livez checks. There might be more added via AddHealthChecks dynamically.
LivezChecks []healthz.HealthChecker
// The default set of readyz-only checks. There might be more added via AddReadyzChecks dynamically.
ReadyzChecks []healthz.HealthChecker
// LegacyAPIGroupPrefixes is used to set up URL parsing for authorization and for validating requests
// to InstallLegacyAPIGroup. New API servers don't generally have legacy groups at all.
LegacyAPIGroupPrefixes sets.String
// RequestInfoResolver is used to assign attributes (used by admission and authorization) based on a request URL.
// Use-cases that are like kubelets may need to customize this.
RequestInfoResolver apirequest.RequestInfoResolver
// Serializer is required and provides the interface for serializing and converting objects to and from the wire
// The default (api.Codecs) usually works fine.
Serializer runtime.NegotiatedSerializer
// OpenAPIConfig will be used in generating OpenAPI spec. This is nil by default. Use DefaultOpenAPIConfig for "working" defaults.
OpenAPIConfig *openapicommon.Config
// RESTOptionsGetter is used to construct RESTStorage types via the generic registry.
RESTOptionsGetter genericregistry.RESTOptionsGetter
// If specified, all requests except those which match the LongRunningFunc predicate will timeout
// after this duration.
RequestTimeout time.Duration
// If specified, long running requests such as watch will be allocated a random timeout between this value, and
// twice this value. Note that it is up to the request handlers to ignore or honor this timeout. In seconds.
MinRequestTimeout int
// This represents the maximum amount of time it should take for apiserver to complete its startup
// sequence and become healthy. From apiserver's start time to when this amount of time has
// elapsed, /livez will assume that unfinished post-start hooks will complete successfully and
// therefore return true.
LivezGracePeriod time.Duration
// ShutdownDelayDuration allows to block shutdown for some time, e.g. until endpoints pointing to this API server
// have converged on all nodes. During this time, the API server keeps serving, /healthz will return 200,
// but /readyz will return failure.
ShutdownDelayDuration time.Duration
// The limit on the total size increase all "copy" operations in a json
// patch may cause.
// This affects all places that applies json patch in the binary.
JSONPatchMaxCopyBytes int64
// The limit on the request size that would be accepted and decoded in a write request
// 0 means no limit.
MaxRequestBodyBytes int64
// MaxRequestsInFlight is the maximum number of parallel non-long-running requests. Every further
// request has to wait. Applies only to non-mutating requests.
MaxRequestsInFlight int
// MaxMutatingRequestsInFlight is the maximum number of parallel mutating requests. Every further
// request has to wait.
MaxMutatingRequestsInFlight int
// Predicate which is true for paths of long-running http requests
LongRunningFunc apirequest.LongRunningRequestCheck
// GoawayChance is the probability of sending a GOAWAY to HTTP/2 clients. When a client receives
// a GOAWAY, its in-flight requests are not affected, and new requests will use
// a new TCP connection, triggering re-balancing to another server behind the load balancer.
// Defaults to 0, meaning never send GOAWAY. Max is 0.02, to avoid breaking the apiserver.
GoawayChance float64
// MergedResourceConfig indicates which group/versions are enabled and which of their resources are enabled/disabled.
// This is composed of genericapiserver defaultAPIResourceConfig and those parsed from flags.
// If none are specified in flags, then genericapiserver will only enable defaultAPIResourceConfig.
MergedResourceConfig *serverstore.ResourceConfig
// EventSink receives events about the life cycle of the API server, e.g. readiness, serving, signals and termination.
EventSink EventSink
//===========================================================================
// values below here are targets for removal
//===========================================================================
// PublicAddress is the IP address where members of the cluster (kubelet,
// kube-proxy, services, etc.) can reach the GenericAPIServer.
// If nil or 0.0.0.0, the host's default interface will be used.
PublicAddress net.IP
// EquivalentResourceRegistry provides information about resources equivalent to a given resource,
// and the kind associated with a given resource. As resources are installed, they are registered here.
EquivalentResourceRegistry runtime.EquivalentResourceRegistry
// A func that returns whether the server is terminating. This can be nil.
IsTerminating func() bool
}
// EventSink allows to create events.
type EventSink interface {
Create(event *corev1.Event) (*corev1.Event, error)
}
type RecommendedConfig struct {
Config
// SharedInformerFactory provides shared informers for Kubernetes resources. This value is set by
// RecommendedOptions.CoreAPI.ApplyTo called by RecommendedOptions.ApplyTo. It uses an in-cluster client config
// by default, or the kubeconfig given with kubeconfig command line flag.
SharedInformerFactory informers.SharedInformerFactory
// ClientConfig holds the kubernetes client configuration.
// This value is set by RecommendedOptions.CoreAPI.ApplyTo called by RecommendedOptions.ApplyTo.
// By default in-cluster client config is used.
ClientConfig *restclient.Config
}
type SecureServingInfo struct {
// Listener is the secure server network listener.
Listener net.Listener
// Cert is the main server cert which is used if SNI does not match. Cert must be non-nil and is
// allowed to be in SNICerts.
Cert dynamiccertificates.CertKeyContentProvider
// SNICerts are the TLS certificates used for SNI.
SNICerts []dynamiccertificates.SNICertKeyContentProvider
// ClientCA is the certificate bundle for all the signers that you'll recognize for incoming client certificates
ClientCA dynamiccertificates.CAContentProvider
// MinTLSVersion optionally overrides the minimum TLS version supported.
// Values are from tls package constants (https://golang.org/pkg/crypto/tls/#pkg-constants).
MinTLSVersion uint16
// CipherSuites optionally overrides the list of allowed cipher suites for the server.
// Values are from tls package constants (https://golang.org/pkg/crypto/tls/#pkg-constants).
CipherSuites []uint16
// HTTP2MaxStreamsPerConnection is the limit that the api server imposes on each client.
// A value of zero means to use the default provided by golang's HTTP/2 support.
HTTP2MaxStreamsPerConnection int
// DisableHTTP2 indicates that http2 should not be enabled.
DisableHTTP2 bool
}
type AuthenticationInfo struct {
// APIAudiences is a list of identifiers that the API identifies as. This is
// used by some authenticators to validate audience bound credentials.
APIAudiences authenticator.Audiences
// Authenticator determines which subject is making the request
Authenticator authenticator.Request
}
type AuthorizationInfo struct {
// Authorizer determines whether the subject is allowed to make the request based only
// on the RequestURI
Authorizer authorizer.Authorizer
}
// NewConfig returns a Config struct with the default values
func NewConfig(codecs serializer.CodecFactory) *Config {
defaultHealthChecks := []healthz.HealthChecker{healthz.PingHealthz, healthz.LogHealthz}
return &Config{
Serializer: codecs,
BuildHandlerChainFunc: DefaultBuildHandlerChain,
HandlerChainWaitGroup: new(utilwaitgroup.SafeWaitGroup),
LegacyAPIGroupPrefixes: sets.NewString(DefaultLegacyAPIPrefix),
DisabledPostStartHooks: sets.NewString(),
PostStartHooks: map[string]PostStartHookConfigEntry{},
HealthzChecks: append([]healthz.HealthChecker{}, defaultHealthChecks...),
ReadyzChecks: append([]healthz.HealthChecker{}, defaultHealthChecks...),
LivezChecks: append([]healthz.HealthChecker{}, defaultHealthChecks...),
EnableIndex: true,
EnableDiscovery: true,
EnableProfiling: true,
EnableMetrics: true,
MaxRequestsInFlight: 400,
MaxMutatingRequestsInFlight: 200,
RequestTimeout: time.Duration(60) * time.Second,
MinRequestTimeout: 1800,
LivezGracePeriod: time.Duration(0),
ShutdownDelayDuration: time.Duration(0),
// 1.5MB is the default client request size in bytes
// the etcd server should accept. See
// https://github.com/etcd-io/etcd/blob/release-3.4/embed/config.go#L56.
// A request body might be encoded in json, and is converted to
// proto when persisted in etcd, so we allow 2x as the largest size
// increase the "copy" operations in a json patch may cause.
JSONPatchMaxCopyBytes: int64(3 * 1024 * 1024),
// 1.5MB is the recommended client request size in bytes
// the etcd server should accept. See
// https://github.com/etcd-io/etcd/blob/release-3.4/embed/config.go#L56.
// A request body might be encoded in json, and is converted to
// proto when persisted in etcd, so we allow 2x as the largest request
// body size to be accepted and decoded in a write request.
MaxRequestBodyBytes: int64(3 * 1024 * 1024),
// Default to treating watch as a long-running operation
// Generic API servers have no inherent long-running subresources
LongRunningFunc: genericfilters.BasicLongRunningRequestCheck(sets.NewString("watch"), sets.NewString()),
}
}
// NewRecommendedConfig returns a RecommendedConfig struct with the default values
func NewRecommendedConfig(codecs serializer.CodecFactory) *RecommendedConfig {
return &RecommendedConfig{
Config: *NewConfig(codecs),
}
}
func DefaultOpenAPIConfig(getDefinitions openapicommon.GetOpenAPIDefinitions, defNamer *apiopenapi.DefinitionNamer) *openapicommon.Config {
return &openapicommon.Config{
ProtocolList: []string{"https"},
IgnorePrefixes: []string{},
Info: &spec.Info{
InfoProps: spec.InfoProps{
Title: "Generic API Server",
},
},
DefaultResponse: &spec.Response{
ResponseProps: spec.ResponseProps{
Description: "Default Response.",
},
},
GetOperationIDAndTags: apiopenapi.GetOperationIDAndTags,
GetDefinitionName: defNamer.GetDefinitionName,
GetDefinitions: getDefinitions,
}
}
func (c *AuthenticationInfo) ApplyClientCert(clientCA dynamiccertificates.CAContentProvider, servingInfo *SecureServingInfo) error {
if servingInfo == nil {
return nil
}
if clientCA == nil {
return nil
}
if servingInfo.ClientCA == nil {
servingInfo.ClientCA = clientCA
return nil
}
servingInfo.ClientCA = dynamiccertificates.NewUnionCAContentProvider(servingInfo.ClientCA, clientCA)
return nil
}
type completedConfig struct {
*Config
//===========================================================================
// values below here are filled in during completion
//===========================================================================
// SharedInformerFactory provides shared informers for resources
SharedInformerFactory informers.SharedInformerFactory
}
type CompletedConfig struct {
// Embed a private pointer that cannot be instantiated outside of this package.
*completedConfig
}
// AddHealthChecks adds a health check to our config to be exposed by the health endpoints
// of our configured apiserver. We should prefer this to adding healthChecks directly to
// the config unless we explicitly want to add a healthcheck only to a specific health endpoint.
func (c *Config) AddHealthChecks(healthChecks ...healthz.HealthChecker) {
for _, check := range healthChecks {
c.HealthzChecks = append(c.HealthzChecks, check)
c.LivezChecks = append(c.LivezChecks, check)
c.ReadyzChecks = append(c.ReadyzChecks, check)
}
}
// AddPostStartHook allows you to add a PostStartHook that will later be added to the server itself in a New call.
// Name conflicts will cause an error.
func (c *Config) AddPostStartHook(name string, hook PostStartHookFunc) error {
if len(name) == 0 {
return fmt.Errorf("missing name")
}
if hook == nil {
return fmt.Errorf("hook func may not be nil: %q", name)
}
if c.DisabledPostStartHooks.Has(name) {
klog.V(1).Infof("skipping %q because it was explicitly disabled", name)
return nil
}
if postStartHook, exists := c.PostStartHooks[name]; exists {
// this is programmer error, but it can be hard to debug
return fmt.Errorf("unable to add %q because it was already registered by: %s", name, postStartHook.originatingStack)
}
c.PostStartHooks[name] = PostStartHookConfigEntry{hook: hook, originatingStack: string(debug.Stack())}
return nil
}
// AddPostStartHookOrDie allows you to add a PostStartHook, but dies on failure.
func (c *Config) AddPostStartHookOrDie(name string, hook PostStartHookFunc) {
if err := c.AddPostStartHook(name, hook); err != nil {
klog.Fatalf("Error registering PostStartHook %q: %v", name, err)
}
}
// Complete fills in any fields not set that are required to have valid data and can be derived
// from other fields. If you're going to `ApplyOptions`, do that first. It's mutating the receiver.
func (c *Config) Complete(informers informers.SharedInformerFactory) CompletedConfig {
if len(c.ExternalAddress) == 0 && c.PublicAddress != nil {
c.ExternalAddress = c.PublicAddress.String()
}
// if there is no port, and we listen on one securely, use that one
if _, _, err := net.SplitHostPort(c.ExternalAddress); err != nil {
if c.SecureServing == nil {
klog.Fatalf("cannot derive external address port without listening on a secure port.")
}
_, port, err := c.SecureServing.HostPort()
if err != nil {
klog.Fatalf("cannot derive external address from the secure port: %v", err)
}
c.ExternalAddress = net.JoinHostPort(c.ExternalAddress, strconv.Itoa(port))
}
if c.OpenAPIConfig != nil {
if c.OpenAPIConfig.SecurityDefinitions != nil {<|fim▁hole|> keys = append(keys, k)
}
sort.Strings(keys)
for _, k := range keys {
c.OpenAPIConfig.DefaultSecurity = append(c.OpenAPIConfig.DefaultSecurity, map[string][]string{k: {}})
}
if c.OpenAPIConfig.CommonResponses == nil {
c.OpenAPIConfig.CommonResponses = map[int]spec.Response{}
}
if _, exists := c.OpenAPIConfig.CommonResponses[http.StatusUnauthorized]; !exists {
c.OpenAPIConfig.CommonResponses[http.StatusUnauthorized] = spec.Response{
ResponseProps: spec.ResponseProps{
Description: "Unauthorized",
},
}
}
}
// make sure we populate info, and info.version, if not manually set
if c.OpenAPIConfig.Info == nil {
c.OpenAPIConfig.Info = &spec.Info{}
}
if c.OpenAPIConfig.Info.Version == "" {
if c.Version != nil {
c.OpenAPIConfig.Info.Version = strings.Split(c.Version.String(), "-")[0]
} else {
c.OpenAPIConfig.Info.Version = "unversioned"
}
}
}
if c.DiscoveryAddresses == nil {
c.DiscoveryAddresses = discovery.DefaultAddresses{DefaultAddress: c.ExternalAddress}
}
if c.EventSink == nil {
c.EventSink = nullEventSink{}
}
AuthorizeClientBearerToken(c.LoopbackClientConfig, &c.Authentication, &c.Authorization)
if c.RequestInfoResolver == nil {
c.RequestInfoResolver = NewRequestInfoResolver(c)
}
if c.EquivalentResourceRegistry == nil {
if c.RESTOptionsGetter == nil {
c.EquivalentResourceRegistry = runtime.NewEquivalentResourceRegistry()
} else {
c.EquivalentResourceRegistry = runtime.NewEquivalentResourceRegistryWithIdentity(func(groupResource schema.GroupResource) string {
// use the storage prefix as the key if possible
if opts, err := c.RESTOptionsGetter.GetRESTOptions(groupResource); err == nil {
return opts.ResourcePrefix
}
// otherwise return "" to use the default key (parent GV name)
return ""
})
}
}
return CompletedConfig{&completedConfig{c, informers}}
}
// Complete fills in any fields not set that are required to have valid data and can be derived
// from other fields. If you're going to `ApplyOptions`, do that first. It's mutating the receiver.
func (c *RecommendedConfig) Complete() CompletedConfig {
if c.ClientConfig != nil {
ref, err := eventReference()
if err != nil {
klog.Warningf("Failed to derive event reference, won't create events: %v", err)
c.EventSink = nullEventSink{}
} else {
ns := ref.Namespace
if len(ns) == 0 {
ns = "default"
}
c.EventSink = &v1.EventSinkImpl{
Interface: kubernetes.NewForConfigOrDie(c.ClientConfig).CoreV1().Events(ns),
}
}
}
return c.Config.Complete(c.SharedInformerFactory)
}
func eventReference() (*corev1.ObjectReference, error) {
ns := os.Getenv("POD_NAMESPACE")
pod := os.Getenv("POD_NAME")
if len(ns) == 0 && len(pod) > 0 {
serviceAccountNamespaceFile := "/var/run/secrets/kubernetes.io/serviceaccount/namespace"
if _, err := os.Stat(serviceAccountNamespaceFile); err == nil {
bs, err := ioutil.ReadFile(serviceAccountNamespaceFile)
if err != nil {
return nil, err
}
ns = string(bs)
}
}
if len(ns) == 0 {
pod = ""
ns = "kube-system"
}
if len(pod) == 0 {
return &corev1.ObjectReference{
Kind: "Namespace",
Name: ns,
APIVersion: "v1",
}, nil
}
return &corev1.ObjectReference{
Kind: "Pod",
Namespace: ns,
Name: pod,
APIVersion: "v1",
}, nil
}
// New creates a new server which logically combines the handling chain with the passed server.
// name is used to differentiate for logging. The handler chain in particular can be difficult as it starts delegating.
// delegationTarget may not be nil.
func (c completedConfig) New(name string, delegationTarget DelegationTarget) (*GenericAPIServer, error) {
if c.Serializer == nil {
return nil, fmt.Errorf("Genericapiserver.New() called with config.Serializer == nil")
}
if c.LoopbackClientConfig == nil {
return nil, fmt.Errorf("Genericapiserver.New() called with config.LoopbackClientConfig == nil")
}
if c.EquivalentResourceRegistry == nil {
return nil, fmt.Errorf("Genericapiserver.New() called with config.EquivalentResourceRegistry == nil")
}
handlerChainBuilder := func(handler http.Handler) http.Handler {
return c.BuildHandlerChainFunc(handler, c.Config)
}
apiServerHandler := NewAPIServerHandler(name, c.Serializer, handlerChainBuilder, delegationTarget.UnprotectedHandler())
s := &GenericAPIServer{
discoveryAddresses: c.DiscoveryAddresses,
LoopbackClientConfig: c.LoopbackClientConfig,
legacyAPIGroupPrefixes: c.LegacyAPIGroupPrefixes,
admissionControl: c.AdmissionControl,
Serializer: c.Serializer,
AuditBackend: c.AuditBackend,
Authorizer: c.Authorization.Authorizer,
delegationTarget: delegationTarget,
EquivalentResourceRegistry: c.EquivalentResourceRegistry,
HandlerChainWaitGroup: c.HandlerChainWaitGroup,
minRequestTimeout: time.Duration(c.MinRequestTimeout) * time.Second,
ShutdownTimeout: c.RequestTimeout,
ShutdownDelayDuration: c.ShutdownDelayDuration,
SecureServingInfo: c.SecureServing,
ExternalAddress: c.ExternalAddress,
Handler: apiServerHandler,
listedPathProvider: apiServerHandler,
openAPIConfig: c.OpenAPIConfig,
postStartHooks: map[string]postStartHookEntry{},
preShutdownHooks: map[string]preShutdownHookEntry{},
disabledPostStartHooks: c.DisabledPostStartHooks,
healthzChecks: c.HealthzChecks,
livezChecks: c.LivezChecks,
readyzChecks: c.ReadyzChecks,
readinessStopCh: make(chan struct{}),
livezGracePeriod: c.LivezGracePeriod,
DiscoveryGroupManager: discovery.NewRootAPIsHandler(c.DiscoveryAddresses, c.Serializer),
maxRequestBodyBytes: c.MaxRequestBodyBytes,
livezClock: clock.RealClock{},
eventSink: c.EventSink,
}
ref, err := eventReference()
if err != nil {
klog.Warningf("Failed to derive event reference, won't create events: %v", err)
c.EventSink = nullEventSink{}
}
s.eventRef = ref
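	// Propagate the configured JSON patch copy-size limit into the shared jsonpatch package via a
	// CAS retry loop, keeping the smallest positive limit when several servers share the process.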
for {
if c.JSONPatchMaxCopyBytes <= 0 {
break
}
existing := atomic.LoadInt64(&jsonpatch.AccumulatedCopySizeLimit)
if existing > 0 && existing < c.JSONPatchMaxCopyBytes {
break
}
if atomic.CompareAndSwapInt64(&jsonpatch.AccumulatedCopySizeLimit, existing, c.JSONPatchMaxCopyBytes) {
break
}
}
// first add poststarthooks from delegated targets
for k, v := range delegationTarget.PostStartHooks() {
s.postStartHooks[k] = v
}
for k, v := range delegationTarget.PreShutdownHooks() {
s.preShutdownHooks[k] = v
}
// add poststarthooks that were preconfigured. Using the add method will give us an error if the same name has already been registered.
for name, preconfiguredPostStartHook := range c.PostStartHooks {
if err := s.AddPostStartHook(name, preconfiguredPostStartHook.hook); err != nil {
return nil, err
}
}
genericApiServerHookName := "generic-apiserver-start-informers"
if c.SharedInformerFactory != nil {
if !s.isPostStartHookRegistered(genericApiServerHookName) {
err := s.AddPostStartHook(genericApiServerHookName, func(context PostStartHookContext) error {
c.SharedInformerFactory.Start(context.StopCh)
return nil
})
if err != nil {
return nil, err
}
// TODO: Once we get rid of /healthz consider changing this to post-start-hook.
err = s.addReadyzChecks(healthz.NewInformerSyncHealthz(c.SharedInformerFactory))
if err != nil {
return nil, err
}
}
}
const priorityAndFairnessConfigConsumerHookName = "priority-and-fairness-config-consumer"
if s.isPostStartHookRegistered(priorityAndFairnessConfigConsumerHookName) {
} else if c.FlowControl != nil {
err := s.AddPostStartHook(priorityAndFairnessConfigConsumerHookName, func(context PostStartHookContext) error {
go c.FlowControl.Run(context.StopCh)
return nil
})
if err != nil {
return nil, err
}
// TODO(yue9944882): plumb pre-shutdown-hook for request-management system?
} else {
klog.V(3).Infof("Not requested to run hook %s", priorityAndFairnessConfigConsumerHookName)
}
for _, delegateCheck := range delegationTarget.HealthzChecks() {
skip := false
for _, existingCheck := range c.HealthzChecks {
if existingCheck.Name() == delegateCheck.Name() {
skip = true
break
}
}
if skip {
continue
}
s.AddHealthChecks(delegateCheck)
}
s.listedPathProvider = routes.ListedPathProviders{s.listedPathProvider, delegationTarget}
installAPI(s, c.Config)
	// use the UnprotectedHandler from the delegation target to ensure that we don't attempt to double authenticate, authorize,
// or some other part of the filter chain in delegation cases.
if delegationTarget.UnprotectedHandler() == nil && c.EnableIndex {
s.Handler.NonGoRestfulMux.NotFoundHandler(routes.IndexLister{
StatusCode: http.StatusNotFound,
PathProvider: s.listedPathProvider,
})
}
return s, nil
}
func DefaultBuildHandlerChain(apiHandler http.Handler, c *Config) http.Handler {
handler := genericapifilters.WithAuthorization(apiHandler, c.Authorization.Authorizer, c.Serializer)
if c.FlowControl != nil {
handler = genericfilters.WithPriorityAndFairness(handler, c.LongRunningFunc, c.FlowControl)
} else {
handler = genericfilters.WithMaxInFlightLimit(handler, c.MaxRequestsInFlight, c.MaxMutatingRequestsInFlight, c.LongRunningFunc)
}
handler = genericapifilters.WithImpersonation(handler, c.Authorization.Authorizer, c.Serializer)
handler = genericapifilters.WithAudit(handler, c.AuditBackend, c.AuditPolicyChecker, c.LongRunningFunc)
failedHandler := genericapifilters.Unauthorized(c.Serializer)
failedHandler = genericapifilters.WithFailedAuthenticationAudit(failedHandler, c.AuditBackend, c.AuditPolicyChecker)
handler = genericapifilters.WithAuthentication(handler, c.Authentication.Authenticator, failedHandler, c.Authentication.APIAudiences)
handler = genericfilters.WithCORS(handler, c.CorsAllowedOriginList, nil, nil, nil, "true")
handler = genericfilters.WithTimeoutForNonLongRunningRequests(handler, c.LongRunningFunc, c.RequestTimeout)
handler = genericfilters.WithWaitGroup(handler, c.LongRunningFunc, c.HandlerChainWaitGroup)
handler = WithLateConnectionFilter(handler)
handler = genericapifilters.WithRequestInfo(handler, c.RequestInfoResolver)
if c.SecureServing != nil && !c.SecureServing.DisableHTTP2 && c.GoawayChance > 0 {
handler = genericfilters.WithProbabilisticGoaway(handler, c.GoawayChance)
}
handler = genericapifilters.WithAuditAnnotations(handler, c.AuditBackend, c.AuditPolicyChecker)
handler = genericapifilters.WithWarningRecorder(handler)
handler = genericapifilters.WithCacheControl(handler)
handler = genericfilters.WithPanicRecovery(handler, c.IsTerminating)
return handler
}
func installAPI(s *GenericAPIServer, c *Config) {
if c.EnableIndex {
routes.Index{}.Install(s.listedPathProvider, s.Handler.NonGoRestfulMux)
}
if c.EnableProfiling {
routes.Profiling{}.Install(s.Handler.NonGoRestfulMux)
if c.EnableContentionProfiling {
goruntime.SetBlockProfileRate(1)
}
// so far, only logging related endpoints are considered valid to add for these debug flags.
routes.DebugFlags{}.Install(s.Handler.NonGoRestfulMux, "v", routes.StringFlagPutHandler(logs.GlogSetter))
}
if c.EnableMetrics {
if c.EnableProfiling {
routes.MetricsWithReset{}.Install(s.Handler.NonGoRestfulMux)
} else {
routes.DefaultMetrics{}.Install(s.Handler.NonGoRestfulMux)
}
}
routes.Version{Version: c.Version}.Install(s.Handler.GoRestfulContainer)
if c.EnableDiscovery {
s.Handler.GoRestfulContainer.Add(s.DiscoveryGroupManager.WebService())
}
if feature.DefaultFeatureGate.Enabled(features.APIPriorityAndFairness) {
c.FlowControl.Install(s.Handler.NonGoRestfulMux)
}
}
func NewRequestInfoResolver(c *Config) *apirequest.RequestInfoFactory {
apiPrefixes := sets.NewString(strings.Trim(APIGroupPrefix, "/")) // all possible API prefixes
legacyAPIPrefixes := sets.String{} // APIPrefixes that won't have groups (legacy)
for legacyAPIPrefix := range c.LegacyAPIGroupPrefixes {
apiPrefixes.Insert(strings.Trim(legacyAPIPrefix, "/"))
legacyAPIPrefixes.Insert(strings.Trim(legacyAPIPrefix, "/"))
}
return &apirequest.RequestInfoFactory{
APIPrefixes: apiPrefixes,
GrouplessAPIPrefixes: legacyAPIPrefixes,
}
}
func (s *SecureServingInfo) HostPort() (string, int, error) {
if s == nil || s.Listener == nil {
return "", 0, fmt.Errorf("no listener found")
}
addr := s.Listener.Addr().String()
host, portStr, err := net.SplitHostPort(addr)
if err != nil {
return "", 0, fmt.Errorf("failed to get port from listener address %q: %v", addr, err)
}
port, err := utilsnet.ParsePort(portStr, true)
if err != nil {
return "", 0, fmt.Errorf("invalid non-numeric port %q", portStr)
}
return host, port, nil
}
// AuthorizeClientBearerToken wraps the authenticator and authorizer in loopback authentication logic
// if the loopback client config is specified AND it has a bearer token. Note that if either authn or
// authz is nil, this function won't add a token authenticator or authorizer.
func AuthorizeClientBearerToken(loopback *restclient.Config, authn *AuthenticationInfo, authz *AuthorizationInfo) {
if loopback == nil || len(loopback.BearerToken) == 0 {
return
}
if authn == nil || authz == nil {
// prevent nil pointer panic
return
}
if authn.Authenticator == nil || authz.Authorizer == nil {
// authenticator or authorizer might be nil if we want to bypass authz/authn
// and we also do nothing in this case.
return
}
privilegedLoopbackToken := loopback.BearerToken
var uid = uuid.New().String()
tokens := make(map[string]*user.DefaultInfo)
tokens[privilegedLoopbackToken] = &user.DefaultInfo{
Name: user.APIServerUser,
UID: uid,
Groups: []string{user.SystemPrivilegedGroup},
}
tokenAuthenticator := authenticatorfactory.NewFromTokens(tokens)
authn.Authenticator = authenticatorunion.New(tokenAuthenticator, authn.Authenticator)
if !skipSystemMastersAuthorizer {
tokenAuthorizer := authorizerfactory.NewPrivilegedGroups(user.SystemPrivilegedGroup)
authz.Authorizer = authorizerunion.New(tokenAuthorizer, authz.Authorizer)
}
}
type nullEventSink struct{}
func (nullEventSink) Create(event *corev1.Event) (*corev1.Event, error) {
return nil, nil
}<|fim▁end|> | // Setup OpenAPI security: all APIs will have the same authentication for now.
c.OpenAPIConfig.DefaultSecurity = []map[string][]string{}
keys := []string{}
for k := range *c.OpenAPIConfig.SecurityDefinitions { |
<|file_name|>registry_test.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright 2017 The DLT2T Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for DLT2T.registry."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
from DLT2T.utils import modality
from DLT2T.utils import registry
from DLT2T.utils import t2t_model
import tensorflow as tf
# pylint: disable=unused-variable
class ModelRegistryTest(tf.test.TestCase):
def setUp(self):
registry._reset()
def testT2TModelRegistration(self):
@registry.register_model
class MyModel1(t2t_model.T2TModel):
pass
model = registry.model("my_model1")
self.assertTrue(model is MyModel1)
def testNamedRegistration(self):
@registry.register_model("model2")
class MyModel1(t2t_model.T2TModel):
pass
model = registry.model("model2")
self.assertTrue(model is MyModel1)
def testNonT2TModelRegistration(self):
@registry.register_model
def model_fn():
pass
model = registry.model("model_fn")
self.assertTrue(model is model_fn)
def testUnknownModel(self):
with self.assertRaisesRegexp(LookupError, "never registered"):
registry.model("not_registered")
def testDuplicateRegistration(self):
@registry.register_model
def m1():
pass
with self.assertRaisesRegexp(LookupError, "already registered"):
@registry.register_model("m1")
def m2():
pass
def testListModels(self):
@registry.register_model
def m1():
pass
@registry.register_model
def m2():
pass
self.assertSetEqual(set(["m1", "m2"]), set(registry.list_models()))
def testSnakeCase(self):
convert = registry._convert_camel_to_snake
self.assertEqual("typical_camel_case", convert("TypicalCamelCase"))
self.assertEqual("numbers_fuse2gether", convert("NumbersFuse2gether"))
self.assertEqual("numbers_fuse2_gether", convert("NumbersFuse2Gether"))
self.assertEqual("lstm_seq2_seq", convert("LSTMSeq2Seq"))
self.assertEqual("starts_lower", convert("startsLower"))
self.assertEqual("starts_lower_caps", convert("startsLowerCAPS"))
self.assertEqual("caps_fuse_together", convert("CapsFUSETogether"))
self.assertEqual("startscap", convert("Startscap"))
self.assertEqual("s_tartscap", convert("STartscap"))
class HParamRegistryTest(tf.test.TestCase):
def setUp(self):
registry._reset()
def testHParamSet(self):
@registry.register_hparams
def my_hparams_set():
pass
@registry.register_ranged_hparams
def my_hparams_range(_):
pass
self.assertTrue(registry.hparams("my_hparams_set") is my_hparams_set)
self.assertTrue(
registry.ranged_hparams("my_hparams_range") is my_hparams_range)
def testNamedRegistration(self):
@registry.register_hparams("a")
def my_hparams_set():
pass
@registry.register_ranged_hparams("a")
def my_hparams_range(_):
pass
self.assertTrue(registry.hparams("a") is my_hparams_set)
self.assertTrue(registry.ranged_hparams("a") is my_hparams_range)
def testUnknownHparams(self):
with self.assertRaisesRegexp(LookupError, "never registered"):
registry.hparams("not_registered")
with self.assertRaisesRegexp(LookupError, "never registered"):
registry.ranged_hparams("not_registered")
def testDuplicateRegistration(self):
@registry.register_hparams
def hp1():
pass
with self.assertRaisesRegexp(LookupError, "already registered"):
@registry.register_hparams("hp1")
def hp2():
pass
@registry.register_ranged_hparams
def rhp1(_):
pass
with self.assertRaisesRegexp(LookupError, "already registered"):
@registry.register_ranged_hparams("rhp1")
def rhp2(_):
pass
def testListHparams(self):
@registry.register_hparams
def hp1():
pass
@registry.register_hparams("hp2_named")
def hp2():<|fim▁hole|> pass
@registry.register_ranged_hparams("rhp2_named")
def rhp2(_):
pass
self.assertSetEqual(set(["hp1", "hp2_named"]), set(registry.list_hparams()))
self.assertSetEqual(
set(["rhp1", "rhp2_named"]), set(registry.list_ranged_hparams()))
def testRangeSignatureCheck(self):
with self.assertRaisesRegexp(ValueError, "must take a single argument"):
@registry.register_ranged_hparams
def rhp_bad():
pass
with self.assertRaisesRegexp(ValueError, "must take a single argument"):
@registry.register_ranged_hparams
def rhp_bad2(a, b): # pylint: disable=unused-argument
pass
class ModalityRegistryTest(tf.test.TestCase):
def setUp(self):
registry._reset()
def testModalityRegistration(self):
@registry.register_symbol_modality
class MySymbolModality(modality.Modality):
pass
@registry.register_audio_modality
class MyAudioModality(modality.Modality):
pass
@registry.register_image_modality
class MyImageModality(modality.Modality):
pass
@registry.register_class_label_modality
class MyClassLabelModality(modality.Modality):
pass
self.assertTrue(
registry.symbol_modality("my_symbol_modality") is MySymbolModality)
self.assertTrue(
registry.audio_modality("my_audio_modality") is MyAudioModality)
self.assertTrue(
registry.image_modality("my_image_modality") is MyImageModality)
self.assertTrue(
registry.class_label_modality("my_class_label_modality") is
MyClassLabelModality)
def testDefaultNameLookup(self):
@registry.register_symbol_modality("default")
class MyDefaultModality(modality.Modality):
pass
self.assertTrue(registry.symbol_modality() is MyDefaultModality)
def testList(self):
@registry.register_symbol_modality
class MySymbolModality(modality.Modality):
pass
@registry.register_audio_modality
class MyAudioModality(modality.Modality):
pass
@registry.register_image_modality
class MyImageModality(modality.Modality):
pass
@registry.register_class_label_modality
class MyClassLabelModality(modality.Modality):
pass
expected = [
"symbol:my_symbol_modality", "audio:my_audio_modality",
"image:my_image_modality", "class_label:my_class_label_modality"
]
self.assertSetEqual(set(registry.list_modalities()), set(expected))
if __name__ == "__main__":
tf.test.main()<|fim▁end|> | pass
@registry.register_ranged_hparams
def rhp1(_): |
<|file_name|>messages.js<|end_file_name|><|fim▁begin|>module.exports = {<|fim▁hole|> reminder_newscheduleSuccess: "A new mail has been successfully saved and scheduled",
schedule_ShdlError: "The scheduleAt should be a timestamp (like : 1411820580000) and should be in the future",
gbl_oops: "Oops something went wrong",
gbl_success: "success"
};<|fim▁end|> | schedule_inputError: "Not all required inputs are present in the request", |
<|file_name|>HSSFName.hpp<|end_file_name|><|fim▁begin|>// Generated from /POI/java/org/apache/poi/hssf/usermodel/HSSFName.java
#pragma once
#include <fwd-POI.hpp>
#include <java/lang/fwd-POI.hpp>
#include <org/apache/poi/hssf/record/fwd-POI.hpp>
#include <org/apache/poi/hssf/usermodel/fwd-POI.hpp>
#include <org/apache/poi/ss/formula/ptg/fwd-POI.hpp>
#include <java/lang/Object.hpp>
#include <org/apache/poi/ss/usermodel/Name.hpp>
template<typename ComponentType, typename... Bases> struct SubArray;
namespace poi
{
namespace ss
{
namespace formula
{
namespace ptg
{
typedef ::SubArray< ::poi::ss::formula::ptg::Ptg, ::java::lang::ObjectArray > PtgArray;<|fim▁hole|>
struct default_init_tag;
class poi::hssf::usermodel::HSSFName final
: public virtual ::java::lang::Object
, public ::poi::ss::usermodel::Name
{
public:
typedef ::java::lang::Object super;
private:
HSSFWorkbook* _book { };
::poi::hssf::record::NameRecord* _definedNameRec { };
::poi::hssf::record::NameCommentRecord* _commentRec { };
protected:
void ctor(HSSFWorkbook* book, ::poi::hssf::record::NameRecord* name);
void ctor(HSSFWorkbook* book, ::poi::hssf::record::NameRecord* name, ::poi::hssf::record::NameCommentRecord* comment);
public:
::java::lang::String* getSheetName() override;
::java::lang::String* getNameName() override;
void setNameName(::java::lang::String* nameName) override;
private:
static void validateName(::java::lang::String* name);
public:
void setRefersToFormula(::java::lang::String* formulaText) override;
::java::lang::String* getRefersToFormula() override;
public: /* package */
void setNameDefinition(::poi::ss::formula::ptg::PtgArray* ptgs);
public:
bool isDeleted() override;
bool isFunctionName() override;
::java::lang::String* toString() override;
void setSheetIndex(int32_t index) override;
int32_t getSheetIndex() override;
::java::lang::String* getComment() override;
void setComment(::java::lang::String* comment) override;
void setFunction(bool value) override;
// Generated
public: /* package */
HSSFName(HSSFWorkbook* book, ::poi::hssf::record::NameRecord* name);
HSSFName(HSSFWorkbook* book, ::poi::hssf::record::NameRecord* name, ::poi::hssf::record::NameCommentRecord* comment);
protected:
HSSFName(const ::default_init_tag&);
public:
static ::java::lang::Class *class_();
private:
virtual ::java::lang::Class* getClass0();
};<|fim▁end|> | } // ptg
} // formula
} // ss
} // poi |
<|file_name|>suntsu.py<|end_file_name|><|fim▁begin|># Natural Language Toolkit: Sun Tsu-Bot
#
# Copyright (C) 2001-2011 NLTK Project
# Author: Sam Huston 2007
# URL: <http://www.nltk.org/>
# For license information, see LICENSE.TXT
from util import *
"""
Tsu bot responds to all queries with a Sun Tsu saying
Quoted from Sun Tsu's The Art of War
Translated by LIONEL GILES, M.A. 1910
Hosted by the Gutenberg Project
http://www.gutenberg.org/
"""
pairs = (
(r'quit',
( "Good-bye.",
"Plan well",
"May victory be your future")),
(r'[^\?]*\?',
("Please consider whether you can answer your own question.",
"Ask me no questions!")),
(r'[0-9]+(.*)',
("It is the rule in war, if our forces are ten to the enemy's one, to surround him; if five to one, to attack him; if twice as numerous, to divide our army into two.",
"There are five essentials for victory")),
(r'[A-Ca-c](.*)',
("The art of war is of vital importance to the State.",
"All warfare is based on deception.",
"If your opponent is secure at all points, be prepared for him. If he is in superior strength, evade him.",
"If the campaign is protracted, the resources of the State will not be equal to the strain.",<|fim▁hole|> ("The skillful soldier does not raise a second levy, neither are his supply-wagons loaded more than twice.",
"Bring war material with you from home, but forage on the enemy.",
"In war, then, let your great object be victory, not lengthy campaigns.",
"To fight and conquer in all your battles is not supreme excellence; supreme excellence consists in breaking the enemy's resistance without fighting.")),
(r'[G-Ig-i](.*)',
("Heaven signifies night and day, cold and heat, times and seasons.",
"It is the rule in war, if our forces are ten to the enemy's one, to surround him; if five to one, to attack him; if twice as numerous, to divide our army into two.",
"The good fighters of old first put themselves beyond the possibility of defeat, and then waited for an opportunity of defeating the enemy.",
"One may know how to conquer without being able to do it.")),
(r'[J-Lj-l](.*)',
("There are three ways in which a ruler can bring misfortune upon his army.",
"By commanding the army to advance or to retreat, being ignorant of the fact that it cannot obey. This is called hobbling the army.",
"By attempting to govern an army in the same way as he administers a kingdom, being ignorant of the conditions which obtain in an army. This causes restlessness in the soldier's minds.",
"By employing the officers of his army without discrimination, through ignorance of the military principle of adaptation to circumstances. This shakes the confidence of the soldiers.",
"There are five essentials for victory",
"He will win who knows when to fight and when not to fight.",
"He will win who knows how to handle both superior and inferior forces.",
"He will win whose army is animated by the same spirit throughout all its ranks.",
"He will win who, prepared himself, waits to take the enemy unprepared.",
"He will win who has military capacity and is not interfered with by the sovereign.")),
(r'[M-Om-o](.*)',
("If you know the enemy and know yourself, you need not fear the result of a hundred battles.",
"If you know yourself but not the enemy, for every victory gained you will also suffer a defeat.",
"If you know neither the enemy nor yourself, you will succumb in every battle.",
"The control of a large force is the same principle as the control of a few men: it is merely a question of dividing up their numbers.")),
(r'[P-Rp-r](.*)',
("Security against defeat implies defensive tactics; ability to defeat the enemy means taking the offensive.",
"Standing on the defensive indicates insufficient strength; attacking, a superabundance of strength.",
"He wins his battles by making no mistakes. Making no mistakes is what establishes the certainty of victory, for it means conquering an enemy that is already defeated.",
"A victorious army opposed to a routed one, is as a pound's weight placed in the scale against a single grain.",
"The onrush of a conquering force is like the bursting of pent-up waters into a chasm a thousand fathoms deep.")),
(r'[S-Us-u](.*)',
("What the ancients called a clever fighter is one who not only wins, but excels in winning with ease.",
"Hence his victories bring him neither reputation for wisdom nor credit for courage.",
"Hence the skillful fighter puts himself into a position which makes defeat impossible, and does not miss the moment for defeating the enemy.",
"In war the victorious strategist only seeks battle after the victory has been won, whereas he who is destined to defeat first fights and afterwards looks for victory.",
"There are not more than five musical notes, yet the combinations of these five give rise to more melodies than can ever be heard.",
"Appear at points which the enemy must hasten to defend; march swiftly to places where you are not expected.")),
(r'[V-Zv-z](.*)',
("It is a matter of life and death, a road either to safety or to ruin.",
"Hold out baits to entice the enemy. Feign disorder, and crush him.",
"All men can see the tactics whereby I conquer, but what none can see is the strategy out of which victory is evolved.",
"Do not repeat the tactics which have gained you one victory, but let your methods be regulated by the infinite variety of circumstances.",
"So in war, the way is to avoid what is strong and to strike at what is weak.",
"Just as water retains no constant shape, so in warfare there are no constant conditions.")),
(r'(.*)',
( "Your statement insults me.",
""))
)
suntsu_chatbot = Chat(pairs, reflections)
def suntsu_chat():
print "Talk to the program by typing in plain English, using normal upper-"
print 'and lower-case letters and punctuation. Enter "quit" when done.'
print '='*72
print "You seek enlightenment?"
suntsu_chatbot.converse()
def demo():
suntsu_chat()
if __name__ == "__main__":
demo()<|fim▁end|> | "Attack him where he is unprepared, appear where you are not expected.",
"There is no instance of a country having benefited from prolonged warfare.")),
(r'[D-Fd-f](.*)', |
<|file_name|>mutates_in_test.py<|end_file_name|><|fim▁begin|>import mutable_attr
import unittest
class T(unittest.TestCase):
def test_foo(self):<|fim▁hole|><|fim▁end|> | mutable_attr.y = 3 |
<|file_name|>SynchronizationTest.java<|end_file_name|><|fim▁begin|>/*
* The MIT License
*
* Copyright 2019 Karus Labs.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights<|fim▁hole|> * The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.karuslabs.commons.command.synchronization;
import java.util.List;
import org.bukkit.entity.Player;
import org.bukkit.event.player.PlayerCommandSendEvent;
import org.bukkit.scheduler.BukkitScheduler;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.junit.jupiter.MockitoExtension;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.*;
@ExtendWith(MockitoExtension.class)
class SynchronizationTest {
Synchronizer synchronizer = mock(Synchronizer.class);
BukkitScheduler scheduler = mock(BukkitScheduler.class);
Synchronization synchronization = new Synchronization(synchronizer, scheduler, null);
PlayerCommandSendEvent event = mock(PlayerCommandSendEvent.class);
@Test
void add() {
synchronization.add(event);
assertTrue(synchronization.events.contains(event));
assertTrue(synchronization.running);
verify(scheduler).scheduleSyncDelayedTask(null, synchronization);
}
@Test
void add_duplicate() {
synchronization.events.add(event);
synchronization.add(event);
assertTrue(synchronization.events.contains(event));
assertFalse(synchronization.running);
verify(scheduler, times(0)).scheduleSyncDelayedTask(null, synchronization);
}
@Test
void add_running() {
synchronization.running = true;
synchronization.add(event);
assertTrue(synchronization.events.contains(event));
assertTrue(synchronization.running);
verify(scheduler, times(0)).scheduleSyncDelayedTask(null, synchronization);
}
@Test
void run() {
when(event.getPlayer()).thenReturn(mock(Player.class));
when(event.getCommands()).thenReturn(List.of("a"));
synchronization.add(event);
synchronization.run();
verify(synchronizer).synchronize(any(Player.class), any(List.class));
assertTrue(synchronization.events.isEmpty());
assertFalse(synchronization.running);
}
}<|fim▁end|> | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* |
<|file_name|>get_all_groups.rs<|end_file_name|><|fim▁begin|>extern crate philipshue;
use std::env;
use philipshue::bridge::Bridge;
mod discover;
use discover::discover;
fn main() {
let args: Vec<String> = env::args().collect();
if args.len() < 2 {
println!("Usage : {:?} <username>", args[0]);
return;
}
let bridge = Bridge::new(discover().pop().unwrap(), &*args[1]);
match bridge.get_all_groups() {
Ok(groups) => {
let name_len = groups.values().map(|l| l.name.len()).chain(Some(4)).max().unwrap();
let type_len = groups.values()
.map(|l| l.group_type.to_string().len())
.chain(Some(4))
.max()
.unwrap();
println!("id {0:1$} {2:3$} class any_on all_on bri lights",<|fim▁hole|> for (id, group) in groups.iter() {
println!("{:2} {:name_len$} {:type_len$} {:12} {:6} {:6} {:3} {:?}",
id,
group.name,
group.group_type,
Show(&group.class),
Show(&group.state.as_ref().map(|s| s.any_on)),
Show(&group.state.as_ref().map(|s| s.all_on)),
Show(&group.state.as_ref().and_then(|s| s.bri)),
group.lights,
name_len = name_len,
type_len = type_len);
}
}
Err(err) => println!("Error: {}", err),
}
}
use std::fmt::{self, Display, Debug};
struct Show<'a, T: 'a>(&'a Option<T>);
impl<'a, T: 'a + Display> Display for Show<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self.0 {
Some(ref x) => x.fmt(f),
_ => Display::fmt("N/A", f),
}
}
}
impl<'a, T: 'a + Debug> Debug for Show<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self.0 {
Some(ref x) => x.fmt(f),
_ => Display::fmt("N/A", f),
}
}
}<|fim▁end|> | "name",
name_len,
"type",
type_len); |
<|file_name|>qscodemarker.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************
**
** Copyright (C) 1992-2008 Trolltech ASA. All rights reserved.
**
** This file is part of the tools applications of the Qt Toolkit.
**
** This file may be used under the terms of the GNU General Public
** License versions 2.0 or 3.0 as published by the Free Software
** Foundation and appearing in the files LICENSE.GPL2 and LICENSE.GPL3
** included in the packaging of this file. Alternatively you may (at
** your option) use any later version of the GNU General Public
** License if such license has been publicly approved by Trolltech ASA
** (or its successors, if any) and the KDE Free Qt Foundation. In
** addition, as a special exception, Trolltech gives you certain
** additional rights. These rights are described in the Trolltech GPL
** Exception version 1.2, which can be found at
** http://www.trolltech.com/products/qt/gplexception/ and in the file
** GPL_EXCEPTION.txt in this package.
**
** Please review the following information to ensure GNU General
** Public Licensing requirements will be met:
** http://trolltech.com/products/qt/licenses/licensing/opensource/. If
** you are unsure which license is appropriate for your use, please
** review the following information:
** http://trolltech.com/products/qt/licenses/licensing/licensingoverview
** or contact the sales department at [email protected].
**
** In addition, as a special exception, Trolltech, as the sole
** copyright holder for Qt Designer, grants users of the Qt/Eclipse
** Integration plug-in the right for the Qt/Eclipse Integration to
** link to functionality provided by Qt Designer and its related
** libraries.
**
** This file is provided "AS IS" with NO WARRANTY OF ANY KIND,
** INCLUDING THE WARRANTIES OF DESIGN, MERCHANTABILITY AND FITNESS FOR
** A PARTICULAR PURPOSE. Trolltech reserves all rights not expressly
** granted herein.
**
** This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
** WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
**
****************************************************************************/
/*
qscodemarker.cpp
*/
#include "node.h"
#include "qscodemarker.h"
QsCodeMarker::QsCodeMarker()
{
}
QsCodeMarker::~QsCodeMarker()
{
}
bool QsCodeMarker::recognizeCode( const QString& /* code */ )
{
return true;
}
bool QsCodeMarker::recognizeExtension( const QString& ext )
{
return ext == "js" || ext == "qs";
}
bool QsCodeMarker::recognizeLanguage( const QString& lang )
{
return lang == "JavaScript" || lang == "Qt Script";
}
QString QsCodeMarker::plainName( const Node *node )
{
QString name = node->name();
if ( node->type() == Node::Function )
name += "()";
return name;
}
QString QsCodeMarker::plainFullName( const Node *node, const Node * /* relative */ )
{
QString fullName;
for ( ;; ) {
fullName.prepend( plainName(node) );
if ( node->parent()->name().isEmpty() )
break;
node = node->parent();
fullName.prepend(".");
}
return fullName;
}
QString QsCodeMarker::markedUpCode( const QString& code,
const Node * /* relative */,
const QString& /* dirPath */ )
{
return protect( code );
}
QString QsCodeMarker::markedUpSynopsis( const Node *node,
const Node * /* relative */,
SynopsisStyle style )
{
QString synopsis;
QStringList extras;
QString name;
name = taggedNode( node );
if ( style != Detailed )
name = linkTag( node, name );
name = "<@name>" + name + "</@name>";
if ( style == Detailed && !node->parent()->name().isEmpty() &&
node->type() != Node::Enum )
name.prepend( taggedNode(node->parent()) + "." );
switch ( node->type() ) {
case Node::Class:
synopsis = "class " + name;
break;
case Node::Function:
{
const FunctionNode *func = (const FunctionNode *) node;
synopsis = name;
if ( style == SeparateList ) {
synopsis += "()";
} else {
synopsis += " (";
if ( !func->parameters().isEmpty() ) {
synopsis += " ";
int numOptional = 0;
QList<Parameter>::ConstIterator p = func->parameters().begin();
while ( p != func->parameters().end() ) {
if ( !(*p).defaultValue().isEmpty() ) {
if ( p == func->parameters().begin() ) {
synopsis += "[ ";
} else {
synopsis += " [ , ";
}
numOptional++;
} else {
if ( p != func->parameters().begin() )
synopsis += ", ";
}
if ( !(*p).name().isEmpty() )
synopsis += "<@param>" + protect( (*p).name() ) +
"</@param> : ";
synopsis += protect( (*p).leftType() );
++p;
}
for ( int i = 0; i < numOptional; i++ )
synopsis += " ]";
synopsis += " ";
}
synopsis += ")";
}
if ( style != SeparateList && !func->returnType().isEmpty() )
synopsis += " : " + protect( func->returnType() );
if ( style == Detailed && func->metaness() == FunctionNode::Signal )
extras << "[signal]";
}
break;
case Node::Property:
{
const PropertyNode *property = (const PropertyNode *) node;
synopsis = name;
if ( style != SeparateList )
synopsis += " : " + property->dataType();
if ( style == Detailed && property->setters().isEmpty() )
extras << "[read only]";
}
break;
case Node::Enum:
{
/*
The letters A to F and X (upper- and lower-case) can
appear in a hexadecimal constant (e.g. 0x3F).
*/
QRegExp letterRegExp( "[G-WYZg-wyz_]" );
const EnumNode *enume = (const EnumNode *) node;
synopsis = name;
if ( style == Summary && !enume->items().isEmpty() ) {
synopsis += " : ";
QString comma;
QList<EnumItem>::ConstIterator it = enume->items().begin();
while ( it != enume->items().end() ) {
if ( enume->itemAccess((*it).name()) == Node::Public ) {
synopsis += comma;
synopsis += (*it).name();
if ( (*it).value().indexOf(letterRegExp) != -1 )
synopsis += " = " + (*it).value();
comma = ", ";
}
++it;
}
}
}
break;<|fim▁hole|> case Node::Namespace:
case Node::Typedef:
default:
synopsis = name;
}
if ( style == Summary ) {
if ( node->status() == Node::Preliminary ) {
extras << "(preliminary)";
} else if ( node->status() == Node::Deprecated ) {
extras << "(deprecated)";
} else if ( node->status() == Node::Obsolete ) {
extras << "(obsolete)";
}
}
QString extra;
if ( !extras.isEmpty() )
extra = "<@extra>" + extras.join(" ") + "</@extra>";
return synopsis + extra;
}
QString QsCodeMarker::markedUpName( const Node *node )
{
QString name = linkTag( node, taggedNode(node) );
if ( node->type() == Node::Function )
name += "()";
return name;
}
QString QsCodeMarker::markedUpFullName( const Node *node,
const Node * /* relative */ )
{
QString fullName;
for ( ;; ) {
fullName.prepend( markedUpName(node) );
if ( node->parent()->name().isEmpty() )
break;
node = node->parent();
fullName.prepend( "<@op>.</@op>" );
}
return fullName;
}
QString QsCodeMarker::markedUpEnumValue(const QString & /* enumValue */,
const Node * /* relative */)
{
return QString();
}
QString QsCodeMarker::markedUpIncludes( const QStringList& /* includes */ )
{
return QString();
}
QString QsCodeMarker::functionBeginRegExp( const QString& funcName )
{
return "^function[ \t].*\\b" + QRegExp::escape( funcName );
}
QString QsCodeMarker::functionEndRegExp( const QString& /* funcName */ )
{
return "^}";
}
QList<Section> QsCodeMarker::sections( const InnerNode *inner, SynopsisStyle style, Status status )
{
QList<Section> sections;
if (inner->type() != Node::Class)
return sections;
const ClassNode *classe = static_cast<const ClassNode *>(inner);
if ( style == Summary ) {
FastSection enums(classe, "Enums", "enum", "enums");
FastSection functions(classe, "Functions", "function", "functions");
FastSection readOnlyProperties(classe, "Read-Only Properties", "property", "properties");
FastSection signalz(classe, "Signals", "signal", "signals");
FastSection writableProperties(classe, "Writable Properties", "property", "properties");
QStack<const ClassNode *> stack;
stack.push( classe );
while ( !stack.isEmpty() ) {
const ClassNode *ancestorClass = stack.pop();
NodeList::ConstIterator c = ancestorClass->childNodes().begin();
while ( c != ancestorClass->childNodes().end() ) {
if ( (*c)->access() == Node::Public ) {
if ( (*c)->type() == Node::Enum ) {
insert( enums, *c, style, status );
} else if ( (*c)->type() == Node::Function ) {
const FunctionNode *func = (const FunctionNode *) *c;
if ( func->metaness() == FunctionNode::Signal ) {
insert( signalz, *c, style, status );
} else {
insert( functions, *c, style, status );
}
} else if ( (*c)->type() == Node::Property ) {
const PropertyNode *property =
(const PropertyNode *) *c;
if ( property->setters().isEmpty() ) {
insert( readOnlyProperties, *c, style, status );
} else {
insert( writableProperties, *c, style, status );
}
}
}
++c;
}
QList<RelatedClass>::ConstIterator r = ancestorClass->baseClasses().begin();
while ( r != ancestorClass->baseClasses().end() ) {
stack.prepend( (*r).node );
++r;
}
}
append( sections, enums );
append( sections, writableProperties );
append( sections, readOnlyProperties );
append( sections, functions );
append( sections, signalz );
} else if ( style == Detailed ) {
FastSection enums( classe, "Enum Documentation" );
FastSection functionsAndSignals( classe, "Function and Signal Documentation" );
FastSection properties( classe, "Property Documentation" );
NodeList::ConstIterator c = classe->childNodes().begin();
while ( c != classe->childNodes().end() ) {
if ( (*c)->access() == Node::Public ) {
if ( (*c)->type() == Node::Enum ) {
insert( enums, *c, style, status );
} else if ( (*c)->type() == Node::Function ) {
insert( functionsAndSignals, *c, style, status );
} else if ( (*c)->type() == Node::Property ) {
insert( properties, *c, style, status );
}
}
++c;
}
append( sections, enums );
append( sections, properties );
append( sections, functionsAndSignals );
} else { // ( style == SeparateList )
FastSection all( classe );
QStack<const ClassNode *> stack;
stack.push( classe );
while ( !stack.isEmpty() ) {
const ClassNode *ancestorClass = stack.pop();
NodeList::ConstIterator c = ancestorClass->childNodes().begin();
while ( c != ancestorClass->childNodes().end() ) {
if ( (*c)->access() == Node::Public )
insert( all, *c, style, status );
++c;
}
QList<RelatedClass>::ConstIterator r = ancestorClass->baseClasses().begin();
while ( r != ancestorClass->baseClasses().end() ) {
stack.prepend( (*r).node );
++r;
}
}
append( sections, all );
}
return sections;
}
const Node *QsCodeMarker::resolveTarget( const QString& /* target */,
const Tree * /* tree */,
const Node * /* relative */ )
{
return 0;
}<|fim▁end|> | |
<|file_name|>__main__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import sys
import argparse
from .audio import create_tracks
from .downloader import YouTube
from .parser import parse_tracks_file
from .prompt import wizard
from .exceptions import WizardError
def get_from_youtube(source):
yt = YouTube(source)
highest_bitrate = yt.audio_available.get('high')
return yt.download_audio(highest_bitrate)
def get_from_local(source):
return source
def generate_album(artist, album, tracks, source, input, output,
format='mp3', from_wizard=None):
"""
    Generates tracks in the output directory using the source media file (youtube|local)
"""
get_media_file_src = {'youtube': get_from_youtube,
'local': get_from_local}
media_file_src = get_media_file_src.get(source)(input)
if from_wizard is None:
audio_segments = parse_tracks_file(tracks)
else:
audio_segments = tracks
create_tracks(media_file_src, output, audio_segments,
artist, album, source_type=source, format=format)
def main():
parser = argparse.ArgumentParser(
prog='lobster',
description='Cut audio files with a single command'
)
parser.add_argument('--artist', '-ar', type=str, required=False,
help='Name of the artist of the track this will be used '\
+ 'to name the output directory')
parser.add_argument('--album', '-al', type=str, required=False,
help='Name of the album, this will be used to name '\
+ 'the output directory')
parser.add_argument('--tracks', '-t', type=str, required=False,
help='File containing the information to build the tracks')
parser.add_argument('--source', '-s', type=str, choices=['local', 'youtube'],
required=False, help='Name of the media file source')
parser.add_argument('--input', '-i', type=str, required=False,
help='Path to the source media file')
parser.add_argument('--output', '-o', type=str, required=False,
                        help='Path to the output directory')
parser.add_argument('--format', type=str, help='Input media file format',
default='mp3')
mode_help_mesage = 'Launch Lobster in Wizard or Command mode,`wizard`'\
                       ' will launch the Wizard mode, `cmd` will launch' \
' Command mode, `cmd` is the current default '
parser.add_argument('--mode', '-m', type=str,
help=mode_help_mesage,
default='cmd')
kwargs=vars(parser.parse_args())
mode = kwargs.get('mode').lower()
if mode == 'cmd':
required_fields = ["artist", "album", "tracks", "source", "input",
"output"]
should_generate = True
for req_field in required_fields:
if kwargs.get(req_field) is None:
should_generate = False
print("Missing required argument --{}".format(req_field))
if should_generate:
del kwargs['mode']
generate_album(**kwargs)
elif mode == 'wizard':<|fim▁hole|> else:
print('Invalid {} mode'.format(mode))
sys.exit(main())<|fim▁end|> | try:
generate_album(**wizard())
except WizardError:
sys.exit() |
<|file_name|>novasetup.py<|end_file_name|><|fim▁begin|># Still some problems...
import time
import shutil<|fim▁hole|>from configobj import ConfigObj
NOVA_API_CONF = "/etc/nova/api-paste.ini"
OS_API_SEC = "composite:openstack_compute_api_v2"
DR_FILTER_TARGET_KEY = "keystone_nolimit"
DR_FILTER_TARGET_KEY_VALUE = "compute_req_id faultwrap sizelimit " \
"authtoken keystonecontext drfilter " \
"osapi_compute_app_v2"
DR_SEC = "filter:drfilter"
DR_KEY = "paste.filter_factory"
DR_KEY_VALUE = "drfilter.urlforwarding:url_forwarding_factory"
# Backup /etc/nova/api-paste.ini
now = time.strftime('%Y%m%d%H%M%S')
target = NOVA_API_CONF + "." + now + ".bak"
shutil.copyfile(NOVA_API_CONF, target)
# Update /etc/nova/api-paste.ini
conf = ConfigObj(NOVA_API_CONF)
conf[OS_API_SEC][DR_FILTER_TARGET_KEY] = DR_FILTER_TARGET_KEY_VALUE
conf[DR_SEC] = {}
conf[DR_SEC][DR_KEY] = DR_KEY_VALUE
conf.write()
for sec in conf:
print(sec)
for key in conf[sec]:
print("\t" + key + " = " + conf[sec][key])<|fim▁end|> | |
<|file_name|>0016_auto_20170411_0342.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-04-11 03:42
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('students', '0007_auto_20170410_0523'),
('job', '0015_auto_20170410_0523'),
]
operations = [
migrations.CreateModel(
name='JobApplication',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now_add=True)),
],
),
migrations.CreateModel(
name='Location',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('city', models.CharField(max_length=200)),
('state', models.CharField(max_length=200)),
('country', models.CharField(max_length=200)),
],
),
migrations.RemoveField(
model_name='jobapply',
name='job_id',
),
migrations.RemoveField(
model_name='jobapply',
name='user_id',
),
migrations.RemoveField(
model_name='job',
name='location',
),
migrations.AlterField(<|fim▁hole|> name='skills',
field=models.ManyToManyField(null=True, related_name='reqskills', to='users.Skill'),
),
migrations.DeleteModel(
name='JobApply',
),
migrations.AddField(
model_name='jobapplication',
name='job',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='job.Job'),
),
migrations.AddField(
model_name='jobapplication',
name='student',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='students.Student'),
),
migrations.AddField(
model_name='job',
name='applications',
field=models.ManyToManyField(related_name='applications', through='job.JobApplication', to='students.Student'),
),
]<|fim▁end|> | model_name='job', |
<|file_name|>radiusd.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# coding=utf-8
import datetime
import os
import six
import msgpack
import toughradius
from txzmq import ZmqEndpoint, ZmqFactory, ZmqPushConnection, ZmqPullConnection
from twisted.internet import protocol
from twisted.internet import reactor
from twisted.internet.threads import deferToThread
from twisted.internet import defer
from toughlib import utils
from toughlib import mcache
from toughlib import logger,dispatch
from toughlib import db_cache as cache
from toughlib.redis_cache import CacheManager
from toughlib.dbengine import get_engine
from txradius.radius import dictionary
from txradius.radius import packet
from txradius.radius.packet import PacketError
from txradius import message
from toughlib.utils import timecast
from toughradius.common import log_trace
from toughradius.manage import models
from toughradius.manage.settings import *
from toughradius.manage.radius.plugins import mac_parse,vlan_parse, rate_process
from toughradius.manage.radius.radius_authorize import RadiusAuth
from toughradius.manage.radius.radius_acct_start import RadiusAcctStart
from toughradius.manage.radius.radius_acct_update import RadiusAcctUpdate
from toughradius.manage.radius.radius_acct_stop import RadiusAcctStop
from toughradius.manage.radius.radius_acct_onoff import RadiusAcctOnoff
class RadiusError(BaseException):
def __init__(self,error,message,*args,**kwargs):
self.error = error
self.message = message
def __str__(self):
return "<RadiusError> %s %s" % (repr(self.error),repr(self.message))
class RADIUSMaster(protocol.DatagramProtocol):
def __init__(self, config, service='auth'):
self.config = config
self.service = service
self.pusher = ZmqPushConnection(ZmqFactory(), ZmqEndpoint('bind', 'ipc:///tmp/radiusd-%s-message' % service))
self.puller = ZmqPullConnection(ZmqFactory(), ZmqEndpoint('bind', 'ipc:///tmp/radiusd-%s-result' % service))
self.puller.onPull = self.reply
logger.info("init %s master pusher : %s " % (self.service, self.pusher))
logger.info("init %s master puller : %s " % (self.service, self.puller))
def datagramReceived(self, datagram, (host, port)):
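        # pack the raw RADIUS datagram together with its source address and push it to a worker over ZeroMQ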
message = msgpack.packb([datagram, host, port])
self.pusher.push(message)
def reply(self, result):
data, host, port = msgpack.unpackb(result[0])
self.transport.write(data, (host, int(port)))
class TraceMix:
def is_trace_on(self):
def fetch_result():
table = models.TrParam.__table__
with self.db_engine.begin() as conn:
r = conn.execute(table.select().where(table.c.param_name=="radius_user_trace")).first()
return r and r.param_value or None
return int(self.mcache.aget(param_cache_key("radius_user_trace"),fetch_result, expire=3600))
def user_exists(self,username):
def fetch_result():
table = models.TrAccount.__table__
with self.db_engine.begin() as conn:
val = conn.execute(table.select().where(
table.c.account_number==username)).first()
return val and Storage(val.items()) or None
return self.mcache.aget(account_cache_key(username),fetch_result, expire=3600) is not None
def log_trace(self,host,port,req,reply=None):
if not self.is_trace_on():
return
if not self.user_exists(req.get_user_name()):
return
try:
if reply is None:
msg = message.format_packet_log(req)
logger.info(u"Radius请求来自 Nas(%s:%s) %s"%(host,port,utils.safeunicode(msg)),
trace="radius",username=req.get_user_name())
else:
msg = message.format_packet_log(reply)
logger.info(u"Radius响应至 Nas(%s:%s) %s"%(host,port,utils.safeunicode(msg)),
trace="radius",username=req.get_user_name())
except Exception as err:
logger.exception(err)
class RADIUSAuthWorker(protocol.DatagramProtocol,TraceMix):
def __init__(self, config, dbengine, radcache=None):
self.config = config
self.dict = dictionary.Dictionary(
os.path.join(os.path.dirname(toughradius.__file__), 'dictionarys/dictionary'))
self.db_engine = dbengine or get_engine(config)
self.aes = utils.AESCipher(key=self.config.system.secret)
self.mcache = radcache
self.pusher = ZmqPushConnection(ZmqFactory(), ZmqEndpoint('connect', 'ipc:///tmp/radiusd-auth-result'))
self.stat_pusher = ZmqPushConnection(ZmqFactory(), ZmqEndpoint('connect', 'ipc:///tmp/radiusd-stat-task'))
self.puller = ZmqPullConnection(ZmqFactory(), ZmqEndpoint('connect', 'ipc:///tmp/radiusd-auth-message'))
self.puller.onPull = self.process
reactor.listenUDP(0, self)
logger.info("init auth worker pusher : %s " % (self.pusher))
logger.info("init auth worker puller : %s " % (self.puller))
logger.info("init auth stat pusher : %s " % (self.stat_pusher))
def find_nas(self,ip_addr):
def fetch_result():
table = models.TrBas.__table__
with self.db_engine.begin() as conn:
return conn.execute(table.select().where(table.c.ip_addr==ip_addr)).first()
return self.mcache.aget(bas_cache_key(ip_addr),fetch_result, expire=600)
def do_stat(self,code):
try:
stat_msg = []
if code == packet.AccessRequest:
stat_msg.append('auth_req')
elif code == packet.AccessAccept:
stat_msg.append('auth_accept')
elif code == packet.AccessReject:
stat_msg.append('auth_reject')
else:
stat_msg = ['auth_drop']
self.stat_pusher.push(msgpack.packb(stat_msg))
except:
pass
def process(self, message):
datagram, host, port = msgpack.unpackb(message[0])
reply = self.processAuth(datagram, host, port)
if not reply:
return
logger.info("[Radiusd] :: Send radius response: %s" % repr(reply))
if self.config.system.debug:
logger.debug(reply.format_str())
self.pusher.push(msgpack.packb([reply.ReplyPacket(),host,port]))
# self.transport.write(reply.ReplyPacket(), (host,port))
self.do_stat(reply.code)
def createAuthPacket(self, **kwargs):
vendor_id = kwargs.pop('vendor_id',0)
auth_message = message.AuthMessage(**kwargs)
auth_message.vendor_id = vendor_id
auth_message = mac_parse.process(auth_message)
auth_message = vlan_parse.process(auth_message)
return auth_message
def processAuth(self, datagram, host, port):
try:
bas = self.find_nas(host)
if not bas:
raise PacketError('[Radiusd] :: Dropping packet from unknown host %s' % host)
secret, vendor_id = bas['bas_secret'], bas['vendor_id']
req = self.createAuthPacket(packet=datagram,
dict=self.dict, secret=six.b(str(secret)),vendor_id=vendor_id)
self.log_trace(host,port,req)
self.do_stat(req.code)
logger.info("[Radiusd] :: Received radius request: %s" % (repr(req)))
if self.config.system.debug:
logger.debug(req.format_str())
if req.code != packet.AccessRequest:
raise PacketError('non-AccessRequest packet on authentication socket')
reply = req.CreateReply()
reply.vendor_id = req.vendor_id
aaa_request = dict(
account_number=req.get_user_name(),
domain=req.get_domain(),
macaddr=req.client_mac,
nasaddr=req.get_nas_addr() or host,
vlanid1=req.vlanid1,
vlanid2=req.vlanid2
)
auth_resp = RadiusAuth(self.db_engine,self.mcache,self.aes,aaa_request).authorize()
if auth_resp['code'] > 0:
reply['Reply-Message'] = auth_resp['msg']
reply.code = packet.AccessReject
self.log_trace(host,port,req,reply)
return reply
if 'bypass' in auth_resp and int(auth_resp['bypass']) == 0:
is_pwd_ok = True
else:
is_pwd_ok = req.is_valid_pwd(auth_resp.get('passwd'))
if not is_pwd_ok:
reply['Reply-Message'] = "password not match"
reply.code = packet.AccessReject
self.log_trace(host,port,req,reply)
return reply
else:
if u"input_rate" in auth_resp and u"output_rate" in auth_resp:
reply = rate_process.process(
reply, input_rate=auth_resp['input_rate'], output_rate=auth_resp['output_rate'])
attrs = auth_resp.get("attrs") or {}
for attr_name in attrs:
try:
# todo: May have a type matching problem
reply.AddAttribute(utils.safestr(attr_name), attrs[attr_name])
except Exception as err:
errstr = "RadiusError:current radius cannot support attribute {0},{1}".format(
attr_name,utils.safestr(err.message))
logger.error(RadiusError(err,errstr))
for attr, attr_val in req.resp_attrs.iteritems():
reply[attr] = attr_val
reply['Reply-Message'] = 'success!'
reply.code = packet.AccessAccept
if not req.VerifyReply(reply):
raise PacketError('VerifyReply error')
self.log_trace(host,port,req,reply)
return reply
except Exception as err:
self.do_stat(0)
logger.exception(err)
class RADIUSAcctWorker(TraceMix):
def __init__(self, config, dbengine,radcache=None):
self.config = config
self.dict = dictionary.Dictionary(
os.path.join(os.path.dirname(toughradius.__file__), 'dictionarys/dictionary'))
self.db_engine = dbengine or get_engine(config)
self.mcache = radcache
self.pusher = ZmqPushConnection(ZmqFactory(), ZmqEndpoint('connect', 'ipc:///tmp/radiusd-acct-result'))
self.stat_pusher = ZmqPushConnection(ZmqFactory(), ZmqEndpoint('connect', 'ipc:///tmp/radiusd-stat-task'))
self.puller = ZmqPullConnection(ZmqFactory(), ZmqEndpoint('connect', 'ipc:///tmp/radiusd-acct-message'))
self.puller.onPull = self.process
logger.info("init acct worker pusher : %s " % (self.pusher))
logger.info("init acct worker puller : %s " % (self.puller))
logger.info("init auth stat pusher : %s " % (self.stat_pusher))
self.acct_class = {
STATUS_TYPE_START: RadiusAcctStart,
STATUS_TYPE_STOP: RadiusAcctStop,
STATUS_TYPE_UPDATE: RadiusAcctUpdate,
STATUS_TYPE_ACCT_ON: RadiusAcctOnoff,
STATUS_TYPE_ACCT_OFF: RadiusAcctOnoff
}
def find_nas(self,ip_addr):
def fetch_result():
table = models.TrBas.__table__
with self.db_engine.begin() as conn:
return conn.execute(table.select().where(table.c.ip_addr==ip_addr)).first()
return self.mcache.aget(bas_cache_key(ip_addr),fetch_result, expire=600)
def do_stat(self,code, status_type=0):
try:
stat_msg = ['acct_drop']
if code in (4,5):
stat_msg = []
if code == packet.AccountingRequest:
stat_msg.append('acct_req')
elif code == packet.AccountingResponse:
stat_msg.append('acct_resp')
if status_type == 1:
stat_msg.append('acct_start')
elif status_type == 2:
stat_msg.append('acct_stop')
elif status_type == 3:
stat_msg.append('acct_update')
elif status_type == 7:
stat_msg.append('acct_on')
elif status_type == 8:
stat_msg.append('acct_off')
self.stat_pusher.push(msgpack.packb(stat_msg))
except:
pass
def process(self, message):
datagram, host, port = msgpack.unpackb(message[0])
self.processAcct(datagram, host, port)
def createAcctPacket(self, **kwargs):
vendor_id = 0
if 'vendor_id' in kwargs:
vendor_id = kwargs.pop('vendor_id')
acct_message = message.AcctMessage(**kwargs)
acct_message.vendor_id = vendor_id
acct_message = mac_parse.process(acct_message)
acct_message = vlan_parse.process(acct_message)
return acct_message
def processAcct(self, datagram, host, port):
try:
bas = self.find_nas(host)
if not bas:
raise PacketError('[Radiusd] :: Dropping packet from unknown host %s' % host)
secret, vendor_id = bas['bas_secret'], bas['vendor_id']
req = self.createAcctPacket(packet=datagram,
dict=self.dict, secret=six.b(str(secret)),vendor_id=vendor_id)
self.log_trace(host,port,req)
self.do_stat(req.code, req.get_acct_status_type())
logger.info("[Radiusd] :: Received radius request: %s" % (repr(req)))
if self.config.system.debug:
logger.debug(req.format_str())
if req.code != packet.AccountingRequest:
raise PacketError('non-AccountingRequest packet on authentication socket')
if not req.VerifyAcctRequest():
raise PacketError('VerifyAcctRequest error')
reply = req.CreateReply()
self.log_trace(host,port,req,reply)
self.pusher.push(msgpack.packb([reply.ReplyPacket(),host,port]))
self.do_stat(reply.code)
logger.info("[Radiusd] :: Send radius response: %s" % repr(reply))
if self.config.system.debug:
logger.debug(reply.format_str())
status_type = req.get_acct_status_type()
if status_type in self.acct_class:
ticket = req.get_ticket()
if not ticket.get('nas_addr'):
ticket['nas_addr'] = host
acct_func = self.acct_class[status_type](
self.db_engine,self.mcache,None,ticket).acctounting
reactor.callLater(0.1,acct_func)
else:<|fim▁hole|> self.do_stat(0)
logger.exception(error)
def run_auth(config):
auth_protocol = RADIUSMaster(config, service='auth')
reactor.listenUDP(int(config.radiusd.auth_port), auth_protocol, interface=config.radiusd.host)
def run_acct(config):
acct_protocol = RADIUSMaster(config,service='acct')
reactor.listenUDP(int(config.radiusd.acct_port), acct_protocol, interface=config.radiusd.host)
def run_worker(config,dbengine,**kwargs):
_cache = kwargs.pop("cache",CacheManager(redis_conf(config),cache_name='RadiusWorkerCache-%s'%os.getpid()))
_cache.print_hit_stat(120)
# app event init
if not kwargs.get('standalone'):
logger.info("start register radiusd events")
dispatch.register(log_trace.LogTrace(redis_conf(config)),check_exists=True)
event_params= dict(dbengine=dbengine, mcache=_cache, aes=kwargs.pop('aes',None))
event_path = os.path.abspath(os.path.dirname(toughradius.manage.events.__file__))
dispatch.load_events(event_path,"toughradius.manage.events",event_params=event_params)
logger.info('start radius worker: %s' % RADIUSAuthWorker(config,dbengine,radcache=_cache))
logger.info('start radius worker: %s' % RADIUSAcctWorker(config,dbengine,radcache=_cache))<|fim▁end|> | logger.error('status_type <%s> not support' % status_type)
except Exception as err: |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Nikola documentation build configuration file, created by
# sphinx-quickstart on Sun Sep 22 17:43:37 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
from __future__ import unicode_literals
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
try:
import sphinxcontrib.gist # NOQA
extensions = ['sphinxcontrib.gist']
except ImportError:
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.txt'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Nikola'
copyright = '2012-2015, The Nikola Contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '7.6.4'
# The full version, including alpha/beta/rc tags.
release = '7.6.4'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Nikoladoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto/manual]).<|fim▁hole|>latex_documents = [
('index', 'Nikola.tex', 'Nikola Documentation',
'The Nikola Contributors', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'nikola', 'Nikola Documentation',
['The Nikola Contributors'], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Nikola', 'Nikola Documentation',
'The Nikola Contributors', 'Nikola', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
primary_domain = None<|fim▁end|> | |
<|file_name|>QueuesPage.ts<|end_file_name|><|fim▁begin|>import { ChangeDetectionStrategy, Component, OnDestroy } from '@angular/core';
import { Title } from '@angular/platform-browser';
import { BehaviorSubject, Observable, Subscription } from 'rxjs';
import { CommandQueue, ConnectionInfo, QueueEventsSubscription, QueueStatisticsSubscription } from '../../client';
import { MessageService } from '../../core/services/MessageService';
import { YamcsService } from '../../core/services/YamcsService';
@Component({
templateUrl: './QueuesPage.html',
changeDetection: ChangeDetectionStrategy.OnPush,
})
export class QueuesPage implements OnDestroy {
connectionInfo$: Observable<ConnectionInfo | null>;
cqueues$ = new BehaviorSubject<CommandQueue[]>([]);
private queueSubscription: QueueStatisticsSubscription;
private queueEventSubscription: QueueEventsSubscription;
// Regroup WebSocket updates (which are for 1 queue at a time)<|fim▁hole|> title.setTitle('Queues');
this.connectionInfo$ = yamcs.connectionInfo$;
yamcs.yamcsClient.getCommandQueues(yamcs.instance!, yamcs.processor!).then(cqueues => {
for (const cqueue of cqueues) {
this.cqueueByName[cqueue.name] = cqueue;
}
this.emitChange();
}).catch(err => messageService.showError(err));
this.queueSubscription = yamcs.yamcsClient.createQueueStatisticsSubscription({
instance: yamcs.instance!,
processor: yamcs.processor!,
}, queue => {
const existingQueue = this.cqueueByName[queue.name];
if (existingQueue) {
// Update queue (but keep already known entries)
queue.entry = existingQueue.entry;
this.cqueueByName[queue.name] = queue;
this.emitChange();
}
});
this.queueEventSubscription = yamcs.yamcsClient.createQueueEventsSubscription({
instance: yamcs.instance!,
processor: yamcs.processor!,
}, queueEvent => {
const queue = this.cqueueByName[queueEvent.data.queueName];
if (queue) {
if (queueEvent.type === 'COMMAND_ADDED') {
queue.entry = queue.entry || [];
queue.entry.push(queueEvent.data);
} else if (queueEvent.type === 'COMMAND_UPDATED') {
const idx = (queue.entry || []).findIndex(entry => {
return entry.id === queueEvent.data.id;
});
if (idx !== -1) {
queue.entry[idx] = queueEvent.data;
}
} else if (queueEvent.type === 'COMMAND_REJECTED') {
queue.entry = queue.entry || [];
queue.entry = queue.entry.filter(entry => {
return entry.id !== queueEvent.data.id;
});
} else if (queueEvent.type === 'COMMAND_SENT') {
queue.entry = queue.entry || [];
queue.entry = queue.entry.filter(entry => {
return entry.id !== queueEvent.data.id;
});
} else {
throw new Error(`Unexpected queue event ${queueEvent.type}`);
}
this.emitChange();
} else {
console.warn('Received an event for an unknown queue', queueEvent);
}
});
}
private emitChange() {
this.cqueues$.next(Object.values(this.cqueueByName));
}
ngOnDestroy() {
this.queueSubscription?.cancel();
this.queueEventSubscription?.cancel();
}
}<|fim▁end|> | private cqueueByName: { [key: string]: CommandQueue; } = {};
constructor(readonly yamcs: YamcsService, title: Title, messageService: MessageService) { |
<|file_name|>trees-test.js<|end_file_name|><|fim▁begin|>import { moduleFor, test } from 'ember-qunit';
<|fim▁hole|> // Specify the other units that are required for this test.
// needs: ['controller:foo']
});
// Replace this with your real tests.
test('it exists', function(assert) {
let controller = this.subject();
assert.ok(controller);
});<|fim▁end|> | moduleFor('controller:plants/trees', 'Unit | Controller | plants/trees', { |
<|file_name|>utils.cc<|end_file_name|><|fim▁begin|><|fim▁hole|>you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/core/grappler/clusters/utils.h"
#include "third_party/eigen3/Eigen/Core"
#if GOOGLE_CUDA
#include "cuda/include/cuda.h"
#include "cuda/include/cuda_runtime_api.h"
#include "cuda/include/cudnn.h"
#endif
#ifdef EIGEN_USE_LIBXSMM
#include "include/libxsmm.h"
#endif
#include "tensorflow/core/lib/strings/numbers.h"
#include "tensorflow/core/lib/strings/strcat.h"
#include "tensorflow/core/platform/cpu_info.h"
namespace tensorflow {
namespace grappler {
DeviceProperties GetLocalCPUInfo() {
DeviceProperties device;
device.set_type("CPU");
device.set_vendor(port::CPUVendorIDString());
// Combine cpu family and model into the model string.
device.set_model(
strings::StrCat((port::CPUFamily() << 4) + port::CPUModelNum()));
device.set_frequency(port::NominalCPUFrequency() * 1e-6);
device.set_num_cores(port::NumSchedulableCPUs());
device.set_l1_cache_size(Eigen::l1CacheSize());
device.set_l2_cache_size(Eigen::l2CacheSize());
device.set_l3_cache_size(Eigen::l3CacheSize());
(*device.mutable_environment())["cpu_instruction_set"] =
Eigen::SimdInstructionSetsInUse();
(*device.mutable_environment())["eigen"] = strings::StrCat(
EIGEN_WORLD_VERSION, ".", EIGEN_MAJOR_VERSION, ".", EIGEN_MINOR_VERSION);
#ifdef EIGEN_USE_LIBXSMM
(*device.mutable_environment())["libxsmm"] = LIBXSMM_VERSION;
#endif
return device;
}
DeviceProperties GetLocalGPUInfo(int gpu_id) {
DeviceProperties device;
device.set_type("GPU");
#if GOOGLE_CUDA
cudaDeviceProp properties;
cudaError_t error = cudaGetDeviceProperties(&properties, gpu_id);
if (error == cudaSuccess) {
device.set_vendor("NVidia");
device.set_model(properties.name);
device.set_frequency(properties.clockRate * 1e-3);
device.set_num_cores(properties.multiProcessorCount);
device.set_num_registers(properties.regsPerMultiprocessor);
// For compute capability less than 5, l1 cache size is configurable to
// either 16 KB or 48 KB. We use the initial configuration 16 KB here. For
// compute capability larger or equal to 5, l1 cache (unified with texture
// cache) size is 24 KB. This number may need to be updated for future
// compute capabilities.
device.set_l1_cache_size((properties.major < 5) ? 16 * 1024 : 24 * 1024);
device.set_l2_cache_size(properties.l2CacheSize);
device.set_l3_cache_size(0);
device.set_shared_memory_size_per_multiprocessor(
properties.sharedMemPerMultiprocessor);
device.set_memory_size(properties.totalGlobalMem);
    // 8 is the number of bits per byte. The factor of 2 accounts for
    // double data rate (DDR).
device.set_bandwidth(properties.memoryBusWidth / 8 *
properties.memoryClockRate * 2);
}
(*device.mutable_environment())["architecture"] =
strings::StrCat(properties.major, ".", properties.minor);
(*device.mutable_environment())["cuda"] = strings::StrCat(CUDA_VERSION);
(*device.mutable_environment())["cudnn"] = strings::StrCat(CUDNN_VERSION);
#endif
return device;
}
DeviceProperties GetDeviceInfo(const DeviceNameUtils::ParsedName& device) {
if (device.type == "CPU") {
return GetLocalCPUInfo();
} else if (device.type == "GPU") {
if (device.has_id) {
return GetLocalGPUInfo(device.id);
} else {
return GetLocalGPUInfo(0);
}
}
DeviceProperties result;
result.set_type("UNKNOWN");
return result;
}
} // end namespace grappler
} // end namespace tensorflow<|fim▁end|> | /* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License"); |
<|file_name|>Version.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2014-2015 Daniel Hrabovcak
*
* This file is part of Natural IDE.
*
* This program is free software: you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation, either version 3 of the License, or (at your option) any
* later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
**/
#include "Version.hpp"
#include <cstdio>
#include <cinttypes>
#ifdef __MINGW32__
#ifndef SCNu8
#define SCNu8 "hhu"
#endif
#endif
namespace natural
{
Version::Version()
: major_(0)
, minor_(0)
, patch_(0)
{}
Version::Version(uint8_t major, uint8_t minor, uint8_t patch)
: major_(major)
, minor_(minor)
, patch_(patch)
{}
Version::Version(const char *str)
	: major_(0)
	, minor_(0)
	, patch_(0)
{
	// Note: writing `Version();` or `Version(major, minor, patch);` in the body would
	// only construct unnamed temporaries and leave *this untouched, so the parsed
	// values are assigned to the members directly instead.
	uint8_t major, minor;
	uint16_t patch;
	if (sscanf(str, "%" SCNu8 ".%" SCNu8 ".%" SCNu16,
		&major, &minor, &patch) != 3)
	{
		return;
	}
	major_ = major;
	minor_ = minor;
	patch_ = patch;
}
bool Version::compatible(const Version &other)
{
return (major_ == other.major_ && (minor_ < other.minor_ ||
(minor_ == other.minor_ && patch_ <= other.patch_)));
}
bool Version::forward_compatible(const Version &other)
{
return (major_ > other.major_ || compatible(other));
}<|fim▁hole|>Version Version::version()
{
// Must be in the `.cpp` file, so it is compiled into the shared library.
return Version(NATURAL_VERSION_MAJOR, NATURAL_VERSION_MINOR,
NATURAL_VERSION_PATCH);
}
}<|fim▁end|> | |
<|file_name|>openstack_cinder.py<|end_file_name|><|fim▁begin|># Copyright (C) 2009 Red Hat, Inc., Joey Boggs <[email protected]>
# Copyright (C) 2012 Rackspace US, Inc.,
# Justin Shepherd <[email protected]>
# Copyright (C) 2013 Red Hat, Inc., Flavio Percoco <[email protected]>
# Copyright (C) 2013 Red Hat, Inc., Jeremy Agee <[email protected]>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
from sos.plugins import Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin<|fim▁hole|>
class OpenStackCinder(Plugin):
"""OpenStack cinder
"""
plugin_name = "openstack_cinder"
profiles = ('openstack', 'openstack_controller')
option_list = [("db", "gathers openstack cinder db version", "slow",
False)]
def setup(self):
if self.get_option("db"):
self.add_cmd_output(
"cinder-manage db version",
suggest_filename="cinder_db_version")
self.add_copy_spec(["/etc/cinder/"])
self.limit = self.get_option("log_size")
if self.get_option("all_logs"):
self.add_copy_spec_limit("/var/log/cinder/",
sizelimit=self.limit)
else:
self.add_copy_spec_limit("/var/log/cinder/*.log",
sizelimit=self.limit)
def postproc(self):
protect_keys = [
"admin_password", "backup_tsm_password", "chap_password",
"nas_password", "cisco_fc_fabric_password", "coraid_password",
"eqlx_chap_password", "fc_fabric_password",
"hitachi_auth_password", "hitachi_horcm_password",
"hp3par_password", "hplefthand_password", "memcache_secret_key",
"netapp_password", "netapp_sa_password", "nexenta_password",
"password", "qpid_password", "rabbit_password", "san_password",
"ssl_key_password", "vmware_host_password", "zadara_password",
"zfssa_initiator_password", "connection", "zfssa_target_password",
"os_privileged_user_password", "hmac_keys"
]
regexp = r"((?m)^\s*(%s)\s*=\s*)(.*)" % "|".join(protect_keys)
self.do_path_regex_sub("/etc/cinder/*", regexp, r"\1*********")
class DebianCinder(OpenStackCinder, DebianPlugin, UbuntuPlugin):
cinder = False
packages = (
'cinder-api',
'cinder-backup',
'cinder-common',
'cinder-scheduler',
'cinder-volume',
'python-cinder',
'python-cinderclient'
)
def check_enabled(self):
self.cinder = self.is_installed("cinder-common")
return self.cinder
def setup(self):
super(DebianCinder, self).setup()
class RedHatCinder(OpenStackCinder, RedHatPlugin):
cinder = False
packages = ('openstack-cinder',
'python-cinder',
'python-cinderclient')
def check_enabled(self):
self.cinder = self.is_installed("openstack-cinder")
return self.cinder
def setup(self):
super(RedHatCinder, self).setup()
self.add_copy_spec(["/etc/sudoers.d/cinder"])
# vim: set et ts=4 sw=4 :<|fim▁end|> | |
<|file_name|>framework.py<|end_file_name|><|fim▁begin|>import numpy as np<|fim▁hole|>
_EPSILON = 1e-6 # avoid nan
# local network for advantage actor-critic which are also know as A2C
class Framework(object):
def __init__(self, access, state_size, action_size, scope_name):
self.Access = access
self.action_size = action_size
self.action_space = list(range(action_size))
with tf.variable_scope(scope_name):
# placeholder
self.inputs = tf.placeholder(tf.float32, [None] + state_size, "states")
self.actions = tf.placeholder(tf.int32, [None], "actions")
self.targets = tf.placeholder(tf.float32, [None], "discounted_rewards")
# network interface
self.actor = Forward('actor')
self.critic = Forward('critic')
self.policy = tf.nn.softmax(self.actor(self.inputs, self.action_size))
self.value = self.critic(self.inputs, 1)
self.policy_step = tf.squeeze(self.policy, axis=0)
self.greedy_action = tf.argmax(self.policy_step)
# losses
self._build_losses()
# async framework
self._build_async_interface()
self._build_interface()
print('graph %s' % (str(scope_name)))
def _build_losses(self):
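        # Loss terms assembled below (standard A2C, as implemented in this class):
        #   advantage    A = R - V(s)                 where R are the discounted rewards fed in `targets`
        #   value loss   mean(A^2)
        #   policy loss  -mean(stop_gradient(A) * log pi(a|s))
        #   entropy term mean(sum_a pi(a|s) * log pi(a|s))   (negative entropy, scaled by ENTROPY_BETA)
        # plus the regularization terms collected from the actor/critic networks
        # (scaled by ACTOR_NORM_BETA / CRITIC_NORM_BETA).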
# value loss
targets = tf.expand_dims(self.targets, axis=1)
self.advantage = targets - self.value
self.value_loss = tf.reduce_mean(tf.square(self.advantage))
# policy loss
action_gather = tf.one_hot(self.actions, self.action_size)
policy_action = tf.reduce_sum(self.policy * action_gather,
axis=1, keep_dims=True)
log_policy_action = tf.log(policy_action + _EPSILON)
self.policy_loss = -tf.reduce_mean(
tf.stop_gradient(self.advantage) * log_policy_action)
# entropy loss
entropy_loss = tf.reduce_sum(
self.policy * tf.log(self.policy + _EPSILON),
axis=1, keep_dims=True)
self.entropy_loss = tf.reduce_mean(entropy_loss)
# regularization
self.actor_norm = tf.add_n(self.actor.get_regularization()) * ACTOR_NORM_BETA
self.critic_norm = tf.add_n(self.critic.get_regularization()) * CRITIC_NORM_BETA
# total loss
self.actor_loss = self.policy_loss + ENTROPY_BETA * self.entropy_loss + self.actor_norm
self.critic_loss = self.value_loss + self.critic_norm
# interface adjustment parameters
self.a_actor_loss = self.actor_loss
self.a_policy_mean = -tf.reduce_mean(log_policy_action)
self.a_policy_loss = self.policy_loss
self.a_entropy_loss = ENTROPY_BETA * self.entropy_loss
self.a_actor_norm = self.actor_norm
self.a_critic_loss = self.critic_loss
self.a_value_loss = self.value_loss
self.a_critic_norm = self.critic_norm
self.a_value_mean = tf.reduce_mean(self.value)
self.a_advantage = tf.reduce_mean(self.advantage)
def _build_interface(self):
self.a_interface = [self.a_actor_loss,
self.a_actor_grad,
self.a_policy_mean,
self.a_policy_loss,
self.a_entropy_loss,
self.a_actor_norm,
self.a_critic_loss,
self.a_critic_grad,
self.a_value_loss,
self.a_critic_norm,
self.a_value_mean,
self.a_advantage]
def _build_async_interface(self):
global_actor_params, global_critic_params = self.Access.get_trainable()
local_actor_params, local_critic_params = self.get_trainable()
actor_grads = tf.gradients(self.actor_loss, list(local_actor_params))
critic_grads = tf.gradients(self.critic_loss, list(local_critic_params))
# Set up optimizer with global norm clipping.
actor_grads, self.a_actor_grad = tf.clip_by_global_norm(actor_grads, MAX_GRAD_NORM)
critic_grads, self.a_critic_grad = tf.clip_by_global_norm(critic_grads, MAX_GRAD_NORM)
# update Access
actor_apply = self.Access.optimizer_actor.apply_gradients(
zip(list(actor_grads), list(global_actor_params)))
critic_apply = self.Access.optimizer_critic.apply_gradients(
zip(list(critic_grads), list(global_critic_params)))
self.update_global = [actor_apply, critic_apply]
# update ACNet
assign_list = []
for gv, lv in zip(global_actor_params, local_actor_params):
assign_list.append(tf.assign(lv, gv))
for gv, lv in zip(global_critic_params, local_critic_params):
assign_list.append(tf.assign(lv, gv))
self.update_local = assign_list
def get_trainable(self):
return [self.actor.get_variables(), self.critic.get_variables()]
def get_policy(self, sess, inputs):
return sess.run(self.policy, {self.inputs: inputs})
def get_stochastic_action(self, sess, inputs, epsilon=0.95):
# get stochastic action for train
if np.random.uniform() < epsilon:
policy = sess.run(self.policy_step,
{self.inputs: np.expand_dims(inputs, axis=0)})
return np.random.choice(self.action_space, 1, p=policy)[0]
else:
return np.random.randint(self.action_size)
def get_deterministic_policy_action(self, sess, inputs):
# get deterministic action for test
return sess.run(self.greedy_action,
{self.inputs: np.expand_dims(inputs, axis=0)})
def get_value(self, sess, inputs):
return sess.run(self.value, {self.inputs: inputs})
def train_step(self, sess, inputs, actions, targets):
feed_dict = {self.inputs: inputs,
self.actions: actions,
self.targets: targets}
sess.run(self.update_global, feed_dict)
def init_network(self, sess):
"""
init or update local network
:param sess:
:return:
"""
sess.run(self.update_local)
def get_losses(self, sess, inputs, actions, targets):
"""
get all loss functions of network
:param sess:
:param inputs:
:param actions:
:param targets:
:return:
"""
feed_dict = {self.inputs: inputs,
self.actions: actions,
self.targets: targets}
return sess.run(self.a_interface, feed_dict)<|fim▁end|> | import tensorflow as tf
from agent.forward import Forward
from config import *
|
<|file_name|>test_mirna.py<|end_file_name|><|fim▁begin|>from resolwe.flow.models import Data
from resolwe.test import tag_process, with_resolwe_host
from resolwe_bio.utils.test import KBBioProcessTestCase
class MicroRNATestCase(KBBioProcessTestCase):
@with_resolwe_host
@tag_process("workflow-mirna")
def test_mirna_workflow(self):
# Prepare data for aligning the reads with bowtie2 and annotation file for featureCounts.
with self.preparation_stage():
inputs = {
"src": "genome_rsem.fa.gz",
"species": "Homo sapiens",
"build": "fake_genome_RSEM",
}
ref_seq = self.run_process("upload-fasta-nucl", inputs)
bowtie2_index = self.run_process("bowtie2-index", {"ref_seq": ref_seq.id})
single_reads = self.prepare_reads(["reads rsem.fq.gz"])
annotation = self.prepare_annotation(
"annotation_rsem.gtf.gz",
species="Homo sapiens",
build="fake_genome_RSEM",
)
inputs = {
"preprocessing": {
"reads": single_reads.pk,
"adapters": {"down_primers_seq": ["TAATGAACAATGCAAGTTTGA"]},
"filtering": {"minlen": 15, "maxlen": 35, "error_rate": 0.2},
},
"alignment": {
"genome": bowtie2_index.pk,
"alignment_options": {
"mode": "--local",
"speed": "--very-sensitive",
"L": 8,
"rep_mode": "k",
"k_reports": 5,
},
},
"quant_options": {
"annotation": annotation.pk,
"id_attribute": "gene_id",
"feature_class": "exon",
"normalization_type": "CPM",<|fim▁hole|> "allow_multi_overlap": True,
},
"assay_type": "non_specific",
}
# Run process and assert.
self.run_process("workflow-mirna", inputs)
workflow = Data.objects.filter(process__slug="feature_counts").last()
# check featureCount summary
self.assertFile(
workflow, "rc", "mirna_featurecounts_rc.tab.gz", compression="gzip"
)
self.assertFile(
workflow, "exp", "mirna_featurecounts_cpm.tab.gz", compression="gzip"
)<|fim▁end|> | "count_multi_mapping_reads": True, |
<|file_name|>auto_suggest.js<|end_file_name|><|fim▁begin|>/**
* Plugin Name: Autocomplete for Textarea
* Author: Amir Harel
* Copyright: amir harel ([email protected])
* Twitter: @amir_harel
* Version 1.4
* Published at : http://www.amirharel.com/2011/03/07/implementing-autocomplete-jquery-plugin-for-textarea/
*/
(function($){
/**
* @param obj
 * @attr wordCount {Number} how many words, counting back from the cursor, are matched against the dictionary
 * @attr wrap {String} optional tag name; when set, the accepted suggestion is inserted wrapped as [tag]...[/tag]
 * @attr mode {String} set "outter" to show the suggestion list below the textarea instead of at the caret (inner display); forced automatically on IE
 * @attr on {Object} containing the following:
 * @attr query {Function} called with the current text and a callback; invoke the callback with an array of matching strings
*/
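	/*
	 * A minimal usage sketch (the selector, dictionary and handler below are
	 * hypothetical examples, not part of the plugin itself):
	 *
	 *   $("#editor").autocomplete({
	 *     wordCount: 1,        // match only the last word before the caret
	 *     wrap: "",            // or e.g. "b" to insert [b]suggestion[/b]
	 *     mode: "inner",       // "outter" shows the list under the textarea
	 *     on: {
	 *       query: function(text, callback) {
	 *         var dict = ["alpha", "beta", "gamma"];
	 *         callback($.grep(dict, function(w) { return w.indexOf(text) === 0; }));
	 *       }
	 *     }
	 *   });
	 */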
$.fn.autocomplete = function(obj){
if( typeof $.browser.msie != 'undefined' ) obj.mode = 'outter';
this.each(function(index,element){
if( element.nodeName == 'TEXTAREA' ){
makeAutoComplete(element,obj);
}
});
}
var browser = {isChrome: $.browser.webkit };
function getTextAreaSelectionEnd(ta) {
var textArea = ta;//document.getElementById('textarea1');
if (document.selection) { //IE
var bm = document.selection.createRange().getBookmark();
var sel = textArea.createTextRange();
sel.moveToBookmark(bm);
var sleft = textArea.createTextRange();
sleft.collapse(true);
sleft.setEndPoint("EndToStart", sel);
return sleft.text.length + sel.text.length;
}
return textArea.selectionEnd; //ff & chrome
}
function getDefaultCharArray(){
return {
'`':0,<|fim▁hole|> '1':0,
'!':0,
'2':0,
'@':0,
'3':0,
'#':0,
'4':0,
'$':0,
'5':0,
'%':0,
'6':0,
'^':0,
'7':0,
'&':0,
'8':0,
'*':0,
'9':0,
'(':0,
'0':0,
')':0,
'-':0,
'_':0,
'=':0,
'+':0,
'q':0,
'Q':0,
'w':0,
'W':0,
'e':0,
'E':0,
'r':0,
'R':0,
't':0,
'T':0,
'y':0,
'Y':0,
'u':0,
'U':0,
'i':0,
'I':0,
'o':0,
'O':0,
'p':0,
'P':0,
'[':0,
'{':0,
']':0,
'}':0,
'a':0,
'A':0,
's':0,
'S':0,
'd':0,
'D':0,
'f':0,
'F':0,
'g':0,
'G':0,
'h':0,
'H':0,
'j':0,
'J':0,
'k':0,
'K':0,
'l':0,
'L':0,
';':0,
':':0,
'\'':0,
'"':0,
'\\':0,
'|':0,
'z':0,
'Z':0,
'x':0,
'X':0,
'c':0,
'C':0,
'v':0,
'V':0,
'b':0,
'B':0,
'n':0,
'N':0,
'm':0,
'M':0,
',':0,
'<':0,
'.':0,
'>':0,
'/':0,
'?':0,
' ':0
};
}
function setCharSize(data){
for( var ch in data.chars ){
if( ch == ' ' ) $(data.clone).html("<span id='test-width_"+data.id+"' style='line-block'> </span>");
else $(data.clone).html("<span id='test-width_"+data.id+"' style='line-block'>"+ch+"</span>");
var testWidth = $("#test-width_"+data.id).width();
data.chars[ch] = testWidth;
}
}
var _data = {};
var _count = 0;
function makeAutoComplete(ta,obj){
_count++;
_data[_count] = {
id:"auto_"+_count,
ta:ta,
wordCount:obj.wordCount,
wrap: obj.wrap,
on:obj.on,
clone:null,
lineHeight:0,
list:null,
charInLines:{},
mode:obj.mode,
chars:getDefaultCharArray()};
var clone = createClone(_count);
_data[_count].clone = clone;
setCharSize(_data[_count]);
//_data[_count].lineHeight = $(ta).css("font-size");
_data[_count].list = createList(_data[_count]);
registerEvents(_data[_count]);
}
function createList(data){
var ul = document.createElement("ul");
$(ul).addClass("auto-list");
document.body.appendChild(ul);
return ul;
}
function createClone(id){
var data = _data[id];
var div = document.createElement("div");
var offset = $(data.ta).offset();
offset.top = offset.top - parseInt($(data.ta).css("margin-top"));
offset.left = offset.left - parseInt($(data.ta).css("margin-left"));
//console.log("createClone: offset.top=",offset.top," offset.left=",offset.left);
$(div).css({
position:"absolute",
top: offset.top,
left: offset.left,
"overflow-x" : "hidden",
"overflow-y" : "hidden",
"z-index" : -10
});
data.chromeWidthFix = (data.ta.clientWidth - $(data.ta).width());
data.lineHeight = $(data.ta).css("line-height");
if( isNaN(parseInt(data.lineHeight)) ) data.lineHeight = parseInt($(data.ta).css("font-size"))+2;
document.body.appendChild(div);
return div;
}
function getWords(data){
var selectionEnd = getTextAreaSelectionEnd(data.ta);//.selectionEnd;
var text = data.ta.value;
text = text.substr(0,selectionEnd);
if( text.charAt(text.length-1) == ' ' || text.charAt(text.length-1) == '\n' ) return "";
var ret = [];
var wordsFound = 0;
var pos = text.length-1;
while( wordsFound < data.wordCount && pos >= 0 && text.charAt(pos) != '\n'){
ret.unshift(text.charAt(pos));
pos--;
if( text.charAt(pos) == ' ' || pos < 0 ){
wordsFound++;
}
}
return ret.join("");
}
function showList(data,list,text){
if( !data.listVisible ){
data.listVisible = true;
var pos = getCursorPosition(data);
$(data.list).css({
left: pos.left+"px",
top: pos.top+"px",
display: "block"
});
}
var html = "";
var regEx = new RegExp("("+text+")");
var taWidth = $(data.ta).width()-5;
var width = data.mode == "outter" ? "style='width:"+taWidth+"px;'" : "";
for( var i=0; i< list.length; i++ ){
//var a = list[i].replace(regEx,"<mark>$1</mark>");
html += "<li data-value='"+list[i]+"' "+width+">"+list[i].replace(regEx,"<mark>$1</mark>")+"</li>";
}
$(data.list).html(html);
}
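	// Splits `text` into the same visual lines the textarea would render, using the
	// per-character pixel widths measured in setCharSize(), so getCursorPosition()
	// can locate the caret inside the hidden mirror element.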
function breakLines(text,data){
var lines = [];
var width = $(data.clone).width();
var line1 = "";
var line1Width = 0;
var line2Width = 0;
var line2 = "";
var chSize = data.chars;
var len = text.length;
for( var i=0; i<len; i++){
var ch = text.charAt(i);
line2 += ch.replace(" "," ");
var size = (typeof chSize[ch] == 'undefined' ) ? 0 : chSize[ch];
line2Width += size;
if( ch == ' '|| ch == '-' ){
if( line1Width + line2Width < width-1 ){
line1 = line1 + line2;
line1Width = line1Width + line2Width;
line2 = "";
line2Width = 0;
}
else{
lines.push(line1);
line1= line2;
line1Width = line2Width;
line2= "";
line2Width = 0;
}
}
if( ch == '\n'){
if( line1Width + line2Width < width-1 ){
lines.push(line1 + line2);
}
else{
lines.push(line1);
lines.push(line2);
}
line1 = "";
line2 = "";
line1Width = 0;
line2Width = 0;
}
//else{
//line2 += ch;
//}
}
if( line1Width + line2Width < width-1 ){
lines.push(line1 + line2);
}
else{
lines.push(line1);
lines.push(line2);
}
return lines;
}
function getCursorPosition(data){
if( data.mode == "outter" ){
return getOuterPosition(data);
}
//console.log("getCursorPosition: ta width=",$(data.ta).css("width")," ta clientWidth=",data.ta.clientWidth, "scrollWidth=",data.ta.scrollWidth," offsetWidth=",data.ta.offsetWidth," jquery.width=",$(data.ta).width());
if( browser.isChrome ){
$(data.clone).width(data.ta.clientWidth-data.chromeWidthFix);
}
else{
$(data.clone).width(data.ta.clientWidth);
}
var ta = data.ta;
var selectionEnd = getTextAreaSelectionEnd(data.ta);
var text = ta.value;//.replace(/ /g," ");
var subText = text.substr(0,selectionEnd);
var restText = text.substr(selectionEnd,text.length);
var lines = breakLines(subText,data);//subText.split("\n");
var miror = $(data.clone);
miror.html("");
for( var i=0; i< lines.length-1; i++){
miror.append("<div style='height:"+(parseInt(data.lineHeight))+"px"+";'>"+lines[i]+"</div>");
}
miror.append("<span id='"+data.id+"' style='display:inline-block;'>"+lines[lines.length-1]+"</span>");
miror.append("<span id='rest' style='max-width:'"+data.ta.clientWidth+"px'>"+restText.replace(/\n/g,"<br/>")+" </span>");
miror.get(0).scrollTop = ta.scrollTop;
var span = miror.children("#"+data.id);
var offset = span.offset();
return {top:offset.top+span.height(),left:offset.left+span.width()};
}
function getOuterPosition(data){
var offset = $(data.ta).offset();
return {top:offset.top+$(data.ta).height()+8,left:offset.left};
}
function hideList(data){
if( data.listVisible ){
$(data.list).css("display","none");
data.listVisible = false;
}
}
function setSelected(dir,data){
var selected = $(data.list).find("[data-selected=true]");
if( selected.length != 1 ){
if( dir > 0 ) $(data.list).find("li:first-child").attr("data-selected","true");
else $(data.list).find("li:last-child").attr("data-selected","true");
return;
}
selected.attr("data-selected","false");
if( dir > 0 ){
selected.next().attr("data-selected","true");
}
else{
selected.prev().attr("data-selected","true");
}
}
function getCurrentSelected(data){
var selected = $(data.list).find("[data-selected=true]");
if( selected.length == 1) return selected.get(0);
return null;
}
function onUserSelected(li,data){
var seletedText = $(li).attr("data-value");
var selectionEnd = getTextAreaSelectionEnd(data.ta);//.selectionEnd;
var text = data.ta.value;
text = text.substr(0,selectionEnd);
//if( text.charAt(text.length-1) == ' ' || text.charAt(text.length-1) == '\n' ) return "";
//var ret = [];
var wordsFound = 0;
var pos = text.length-1;
while( wordsFound < data.wordCount && pos >= 0 && text.charAt(pos) != '\n'){
//ret.unshift(text.charAt(pos));
pos--;
if( text.charAt(pos) == ' ' || pos < 0 ){
wordsFound++;
}
}
var a = data.ta.value.substr(0,pos+1);
var c = data.ta.value.substr(selectionEnd,data.ta.value.length);
var scrollTop = data.ta.scrollTop;
if(data.wrap.length > 0){
seletedText = "["+data.wrap+"]"+seletedText+"[/"+data.wrap+"] ";
}
data.ta.value = a+seletedText+c;
data.ta.scrollTop = scrollTop;
data.ta.selectionEnd = pos+1+seletedText.length;
hideList(data);
$(data.ta).focus();
}
function registerEvents(data){
$(data.list).delegate("li","click",function(e){
var li = this;
onUserSelected(li,data);
e.stopPropagation();
e.preventDefault();
return false;
});
$(data.ta).blur(function(e){
setTimeout(function(){
hideList(data);
},400);
});
$(data.ta).click(function(e){
hideList(data);
});
$(data.ta).keydown(function(e){
//console.log("keydown keycode="+e.keyCode);
if( data.listVisible ){
switch(e.keyCode){
case 13:
case 40:
case 38:
e.stopImmediatePropagation();
e.preventDefault();
return false;
case 27: //esc
hideList(data);
}
}
});
$(data.ta).keyup(function(e){
if( data.listVisible ){
//console.log("keCode=",e.keyCode);
if( e.keyCode == 40 ){//down key
setSelected(+1,data);
e.stopImmediatePropagation();
e.preventDefault();
return false;
}
if( e.keyCode == 38 ){//up key
setSelected(-1,data);
e.stopImmediatePropagation();
e.preventDefault();
return false;
}
if( e.keyCode == 13 ){//enter key
var li = getCurrentSelected(data);
if( li ){
e.stopImmediatePropagation();
e.preventDefault();
hideList(data);
onUserSelected(li,data);
return false;
}
hideList(data);
}
if( e.keyCode == 27 ){
e.stopImmediatePropagation();
e.preventDefault();
return false;
}
}
switch( e.keyCode ){
case 27:
return true;
}
var text = getWords(data);
//console.log("getWords return ",text);
if( text != "" ){
data.on.query(text,function(list){
//console.log("got list = ",list);
if( list.length ){
showList(data,list,text);
}
else{
hideList(data);
}
});
}
else{
hideList(data);
}
});
$(data.ta).scroll(function(e){
var ta = e.target;
var miror = $(data.clone);
miror.get(0).scrollTop = ta.scrollTop;
});
}
})(jQuery);<|fim▁end|> | '~':0, |
<|file_name|>PlayerbotAI.cpp<|end_file_name|><|fim▁begin|>#include "Common.h"
#include "Database/DatabaseEnv.h"
#include "ItemPrototype.h"
#include "World.h"
#include "SpellMgr.h"
#include "PlayerbotAI.h"
#include "PlayerbotMgr.h"
#include "PlayerbotDeathKnightAI.h"
#include "PlayerbotDruidAI.h"
#include "PlayerbotHunterAI.h"
#include "PlayerbotMageAI.h"
#include "PlayerbotPaladinAI.h"
#include "PlayerbotPriestAI.h"
#include "PlayerbotRogueAI.h"
#include "PlayerbotShamanAI.h"
#include "PlayerbotWarlockAI.h"
#include "PlayerbotWarriorAI.h"
#include "Player.h"
#include "ObjectMgr.h"
#include "Chat.h"
#include "WorldPacket.h"
#include "Spell.h"
#include "Unit.h"
#include "SpellAuras.h"
#include "SharedDefines.h"
#include "Log.h"
#include "GossipDef.h"
// returns a float in the range [low, high)
float rand_float(float low, float high)
{
return (rand() / (static_cast<float> (RAND_MAX) + 1.0)) * (high - low) + low;
}
/*
* Packets often compress the GUID (global unique identifier)
* This function extracts the guid from the packet and decompresses it.
 * The first byte is a bit mask that tells which of the GUID's eight bytes actually follow in
 * the packet and at which byte position each one belongs. For example, if the mask is
 * 01001001, only three bytes follow: they fill byte positions 0, 3 and 6 of the 64-bit GUID
 * (shifts of 0, 24 and 48 bits respectively).
*
* Possibly use ByteBuffer::readPackGUID?
*/
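// Worked example (hypothetical bytes): mask 0b00000101 means only bytes 0 and 2
// of the GUID follow; reading 0x12 then 0x34 reconstructs
// guid = 0x12 | (uint64(0x34) << 16) = 0x00340012.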
uint64 extractGuid(WorldPacket& packet)
{
uint8 mask;
packet >> mask;
uint64 guid = 0;
uint8 bit = 0;
uint8 testMask = 1;
while (true)
{
if (mask & testMask)
{
uint8 word;
packet >> word;
                guid |= (uint64(word) << (bit * 8)); // place the byte at its own byte position in the 64-bit GUID
}
if (bit == 7)
break;
++bit;
testMask <<= 1;
}
return guid;
}
// ChatHandler already implements some useful commands the master can call on bots
// These commands are protected inside the ChatHandler class so this class provides access to the commands
// we'd like to call on our bots
class PlayerbotChatHandler: protected ChatHandler
{
public:
explicit PlayerbotChatHandler(Player* pMasterPlayer) : ChatHandler(pMasterPlayer) {}
bool revive(const Player& botPlayer) { return HandleReviveCommand(botPlayer.GetName()); }
bool teleport(const Player& botPlayer) { return HandleNamegoCommand(botPlayer.GetName()); }
void sysmessage(const char *str) { SendSysMessage(str); }
bool dropQuest(const char *str) { return HandleQuestRemove(str); }
};
PlayerbotAI::PlayerbotAI(PlayerbotMgr* const mgr, Player* const bot) :
m_mgr(mgr), m_bot(bot), m_ignoreAIUpdatesUntilTime(0),
m_combatOrder(ORDERS_NONE), m_ScenarioType(SCENARIO_PVEEASY),
m_TimeDoneEating(0), m_TimeDoneDrinking(0),
m_CurrentlyCastingSpellId(0), m_spellIdCommand(0),
m_targetGuidCommand(0), m_classAI(0) {
// set bot state and needed item list
m_botState = BOTSTATE_NORMAL;
SetQuestNeedItems();
// reset some pointers
m_targetChanged = false;
m_targetType = TARGET_NORMAL;
m_targetCombat = 0;
m_targetAssist = 0;
m_targetProtect = 0;
// start following master (will also teleport bot to master)
SetMovementOrder( MOVEMENT_FOLLOW, GetMaster() );
// get class specific ai
switch (m_bot->getClass())
{
case CLASS_PRIEST:
m_combatStyle = COMBAT_RANGED;
m_classAI = (PlayerbotClassAI*) new PlayerbotPriestAI(GetMaster(), m_bot, this);
break;
case CLASS_MAGE:
m_combatStyle = COMBAT_RANGED;
m_classAI = (PlayerbotClassAI*) new PlayerbotMageAI(GetMaster(), m_bot, this);
break;
case CLASS_WARLOCK:
m_combatStyle = COMBAT_RANGED;
m_classAI = (PlayerbotClassAI*) new PlayerbotWarlockAI(GetMaster(), m_bot, this);
break;
case CLASS_WARRIOR:
m_combatStyle = COMBAT_MELEE;
m_classAI = (PlayerbotClassAI*) new PlayerbotWarriorAI(GetMaster(), m_bot, this);
break;
case CLASS_SHAMAN:
m_combatStyle = COMBAT_MELEE;
m_classAI = (PlayerbotClassAI*) new PlayerbotShamanAI(GetMaster(), m_bot, this);
break;
case CLASS_PALADIN:
m_combatStyle = COMBAT_MELEE;
m_classAI = (PlayerbotClassAI*) new PlayerbotPaladinAI(GetMaster(), m_bot, this);
break;
case CLASS_ROGUE:
m_combatStyle = COMBAT_MELEE;
m_classAI = (PlayerbotClassAI*) new PlayerbotRogueAI(GetMaster(), m_bot, this);
break;
case CLASS_DRUID:
m_combatStyle = COMBAT_MELEE;
m_classAI = (PlayerbotClassAI*) new PlayerbotDruidAI(GetMaster(), m_bot, this);
break;
case CLASS_HUNTER:
m_combatStyle = COMBAT_RANGED;
m_classAI = (PlayerbotClassAI*)new PlayerbotHunterAI(GetMaster(), m_bot, this);
break;
case CLASS_DEATH_KNIGHT:
m_combatStyle = COMBAT_MELEE;
m_classAI = (PlayerbotClassAI*)new PlayerbotDeathKnightAI(GetMaster(), m_bot, this);
break;
}
}
PlayerbotAI::~PlayerbotAI()
{
if (m_classAI) delete m_classAI;
}
Player* PlayerbotAI::GetMaster() const
{
return m_mgr->GetMaster();
}
// finds spell ID for matching substring args
// in priority of full text match, spells not taking reagents, and highest rank
uint32 PlayerbotAI::getSpellId(const char* args, bool master) const
{
if (!*args)
return 0;
std::string namepart = args;
std::wstring wnamepart;
if (!Utf8toWStr(namepart, wnamepart))
return 0;
// converting string that we try to find to lower case
wstrToLower(wnamepart);
int loc = 0;
if (master)
loc = GetMaster()->GetSession()->GetSessionDbcLocale();
else
loc = m_bot->GetSession()->GetSessionDbcLocale();
uint32 foundSpellId = 0;
bool foundExactMatch = false;
bool foundMatchUsesNoReagents = false;
for (PlayerSpellMap::iterator itr = m_bot->GetSpellMap().begin(); itr != m_bot->GetSpellMap().end(); ++itr)
{
uint32 spellId = itr->first;
if (itr->second.state == PLAYERSPELL_REMOVED || itr->second.disabled || IsPassiveSpell(spellId))
continue;
const SpellEntry* pSpellInfo = sSpellStore.LookupEntry(spellId);
if (!pSpellInfo)
continue;
const std::string name = pSpellInfo->SpellName[loc];
if (name.empty() || !Utf8FitTo(name, wnamepart))
continue;
bool isExactMatch = (name.length() == wnamepart.length()) ? true : false;
bool usesNoReagents = (pSpellInfo->Reagent[0] <= 0) ? true : false;
// if we already found a spell
bool useThisSpell = true;
if (foundSpellId > 0)
{
if (isExactMatch && !foundExactMatch) {}
else if (usesNoReagents && !foundMatchUsesNoReagents) {}
else if (spellId > foundSpellId) {}
else
useThisSpell = false;
}
if (useThisSpell)
{
foundSpellId = spellId;
foundExactMatch = isExactMatch;
foundMatchUsesNoReagents = usesNoReagents;
}
}
return foundSpellId;
}
/*
 * Send a list of the equipment in the bot's inventory that is currently unequipped.
* This is called when the master is inspecting the bot.
*/
void PlayerbotAI::SendNotEquipList(Player& player)
{
// find all unequipped items and put them in
// a vector of dynamically created lists where the vector index is from 0-18
// and the list contains Item* that can be equipped to that slot
// Note: each dynamically created list in the vector must be deleted at end
// so NO EARLY RETURNS!
// see enum EquipmentSlots in Player.h to see what equipment slot each index in vector
// is assigned to. (The first is EQUIPMENT_SLOT_HEAD=0, and last is EQUIPMENT_SLOT_TABARD=18)
std::list<Item*>* equip[19];
for (uint8 i = 0; i < 19; ++i)
equip[i] = NULL;
// list out items in main backpack
for (uint8 slot = INVENTORY_SLOT_ITEM_START; slot < INVENTORY_SLOT_ITEM_END; slot++)
{
Item* const pItem = m_bot->GetItemByPos(INVENTORY_SLOT_BAG_0, slot);
if (!pItem)
continue;
uint16 dest;
uint8 msg = m_bot->CanEquipItem(NULL_SLOT, dest, pItem, !pItem->IsBag());
if (msg != EQUIP_ERR_OK)
continue;
// the dest looks like it includes the old loc in the 8 higher bits
// so casting it to a uint8 strips them
uint8 equipSlot = uint8(dest);
if (!(equipSlot >= 0 && equipSlot < 19))
continue;
// create a list if one doesn't already exist
if (equip[equipSlot] == NULL)
equip[equipSlot] = new std::list<Item*>;
std::list<Item*>* itemListForEqSlot = equip[equipSlot];
itemListForEqSlot->push_back(pItem);
}
// list out items in other removable backpacks
for (uint8 bag = INVENTORY_SLOT_BAG_START; bag < INVENTORY_SLOT_BAG_END; ++bag)
{
const Bag* const pBag = (Bag*) m_bot->GetItemByPos(INVENTORY_SLOT_BAG_0, bag);
if (pBag)
{
for (uint8 slot = 0; slot < pBag->GetBagSize(); ++slot)
{
Item* const pItem = m_bot->GetItemByPos(bag, slot);
if (!pItem)
continue;
uint16 equipSlot;
uint8 msg = m_bot->CanEquipItem(NULL_SLOT, equipSlot, pItem, !pItem->IsBag());
if (msg != EQUIP_ERR_OK)
continue;
if (!(equipSlot >= 0 && equipSlot < 19))
continue;
// create a list if one doesn't already exist
if (equip[equipSlot] == NULL)
equip[equipSlot] = new std::list<Item*>;
std::list<Item*>* itemListForEqSlot = equip[equipSlot];
itemListForEqSlot->push_back(pItem);
}
}
}
TellMaster("Here's all the items in my inventory that I can equip.");
ChatHandler ch(GetMaster());
const std::string descr[] = { "head", "neck", "shoulders", "body", "chest",
"waist", "legs", "feet", "wrists", "hands", "finger1", "finger2",
"trinket1", "trinket2", "back", "mainhand", "offhand", "ranged",
"tabard" };
// now send client all items that can be equipped by slot
for (uint8 equipSlot = 0; equipSlot < 19; ++equipSlot)
{
if (equip[equipSlot] == NULL)
continue;
std::list<Item*>* itemListForEqSlot = equip[equipSlot];
std::ostringstream out;
out << descr[equipSlot] << ": ";
for (std::list<Item*>::iterator it = itemListForEqSlot->begin(); it != itemListForEqSlot->end(); ++it) {
const ItemPrototype* const pItemProto = (*it)->GetProto();
std::string itemName = pItemProto->Name1;
ItemLocalization(itemName, pItemProto->ItemId);
out << " |cffffffff|Hitem:" << pItemProto->ItemId
<< ":0:0:0:0:0:0:0" << "|h[" << itemName
<< "]|h|r";
}
ch.SendSysMessage(out.str().c_str());
delete itemListForEqSlot; // delete list of Item*
}
}
void PlayerbotAI::SendQuestItemList( Player& player )
{
std::ostringstream out;
for( BotNeedItem::iterator itr=m_needItemList.begin(); itr!=m_needItemList.end(); ++itr )
{
const ItemPrototype * pItemProto = sObjectMgr.GetItemPrototype( itr->first );
std::string itemName = pItemProto->Name1;
ItemLocalization(itemName, pItemProto->ItemId);
out << " " << itr->second << "x|cffffffff|Hitem:" << pItemProto->ItemId
<< ":0:0:0:0:0:0:0" << "|h[" << itemName
<< "]|h|r";
}
TellMaster( "Here's a list of all items I need for quests:" );
TellMaster( out.str().c_str() );
}
void PlayerbotAI::SendOrders( Player& player )
{
std::ostringstream out;
if( !m_combatOrder )
out << "Got no combat orders!";
else if( m_combatOrder&ORDERS_TANK )
out << "I TANK";
else if( m_combatOrder&ORDERS_ASSIST )
out << "I ASSIST " << (m_targetAssist?m_targetAssist->GetName():"unknown");
else if( m_combatOrder&ORDERS_HEAL )
out << "I HEAL";
if( (m_combatOrder&ORDERS_PRIMARY) && (m_combatOrder&ORDERS_SECONDARY) )
out << " and ";
if( m_combatOrder&ORDERS_PROTECT )
out << "I PROTECT " << (m_targetProtect?m_targetProtect->GetName():"unknown");
out << ".";
if( m_mgr->m_confDebugWhisper )
{
out << " " << (IsInCombat()?"I'm in COMBAT! ":"Not in combat. ");
out << "Current state is ";
if( m_botState == BOTSTATE_NORMAL )
out << "NORMAL";
else if( m_botState == BOTSTATE_COMBAT )
out << "COMBAT";
else if( m_botState == BOTSTATE_DEAD )
out << "DEAD";
else if( m_botState == BOTSTATE_DEADRELEASED )
out << "RELEASED";
else if( m_botState == BOTSTATE_LOOTING )
out << "LOOTING";
out << ". Movement order is ";
if( m_movementOrder == MOVEMENT_NONE )
out << "NONE";
else if( m_movementOrder == MOVEMENT_FOLLOW )
out << "FOLLOW " << (m_followTarget?m_followTarget->GetName():"unknown");
else if( m_movementOrder == MOVEMENT_STAY )
out << "STAY";
out << ". Got " << m_attackerInfo.size() << " attacker(s) in list.";
out << " Next action in " << (m_ignoreAIUpdatesUntilTime-time(0)) << "sec.";
}
TellMaster( out.str().c_str() );
}
// handle outgoing packets the server would send to the client
void PlayerbotAI::HandleBotOutgoingPacket(const WorldPacket& packet)
{
switch (packet.GetOpcode())
{
case SMSG_DUEL_WINNER:
{
m_bot->HandleEmoteCommand(EMOTE_ONESHOT_APPLAUD);
return;
}
case SMSG_DUEL_COMPLETE:
{
m_ignoreAIUpdatesUntilTime = time(0) + 4;
m_ScenarioType = SCENARIO_PVEEASY;
m_bot->GetMotionMaster()->Clear(true);
return;
}
case SMSG_DUEL_OUTOFBOUNDS:
{
m_bot->HandleEmoteCommand(EMOTE_ONESHOT_CHICKEN);
return;
}
case SMSG_DUEL_REQUESTED:
{
m_ignoreAIUpdatesUntilTime = 0;
WorldPacket p(packet);
uint64 flagGuid;
p >> flagGuid;
uint64 playerGuid;
p >> playerGuid;
Player* const pPlayer = ObjectAccessor::FindPlayer(playerGuid);
if (canObeyCommandFrom(*pPlayer))
{
m_bot->GetMotionMaster()->Clear(true);
WorldPacket* const packet = new WorldPacket(CMSG_DUEL_ACCEPTED, 8);
*packet << flagGuid;
m_bot->GetSession()->QueuePacket(packet); // queue the packet to get around race condition
// follow target in casting range
float angle = rand_float(0, M_PI_F);
float dist = rand_float(4, 10);
m_bot->GetMotionMaster()->Clear(true);
m_bot->GetMotionMaster()->MoveFollow(pPlayer, dist, angle);
m_bot->SetSelection(playerGuid);
m_ignoreAIUpdatesUntilTime = time(0) + 4;
m_ScenarioType = SCENARIO_DUEL;
}
return;
}
case SMSG_INVENTORY_CHANGE_FAILURE:
{
TellMaster("I can't use that.");
return;
}
case SMSG_SPELL_FAILURE:
{
WorldPacket p(packet);
uint64 casterGuid = extractGuid(p);
if (casterGuid != m_bot->GetGUID())
return;
uint32 spellId;
p >> spellId;
if (m_CurrentlyCastingSpellId == spellId)
{
m_ignoreAIUpdatesUntilTime = time(0) + 1;
m_CurrentlyCastingSpellId = 0;
}
return;
}
// if a change in speed was detected for the master
// make sure we have the same mount status
case SMSG_FORCE_RUN_SPEED_CHANGE:
{
WorldPacket p(packet);
uint64 guid = extractGuid(p);
if (guid != GetMaster()->GetGUID())
return;
if (GetMaster()->IsMounted() && !m_bot->IsMounted())
{
//Player Part
if (!GetMaster()->GetAurasByType(SPELL_AURA_MOUNTED).empty())
{
int32 master_speed1 = 0;
int32 master_speed2 = 0;
master_speed1 = GetMaster()->GetAurasByType(SPELL_AURA_MOUNTED).front()->GetSpellProto()->EffectBasePoints[1];
master_speed2 = GetMaster()->GetAurasByType(SPELL_AURA_MOUNTED).front()->GetSpellProto()->EffectBasePoints[2];
//Bot Part
uint32 spellMount = 0;
for(PlayerSpellMap::iterator itr = m_bot->GetSpellMap().begin(); itr != m_bot->GetSpellMap().end(); ++itr)
{
uint32 spellId = itr->first;
if(itr->second.state == PLAYERSPELL_REMOVED || itr->second.disabled || IsPassiveSpell(spellId))
continue;
const SpellEntry* pSpellInfo = sSpellStore.LookupEntry(spellId);
if (!pSpellInfo)
continue;
if(pSpellInfo->EffectApplyAuraName[0] == SPELL_AURA_MOUNTED)
{
if(pSpellInfo->EffectApplyAuraName[1] == SPELL_AURA_MOD_INCREASE_MOUNTED_SPEED)
{
if(pSpellInfo->EffectBasePoints[1] == master_speed1)
{
spellMount = spellId;
break;
}
}
else if((pSpellInfo->EffectApplyAuraName[1] == SPELL_AURA_MOD_INCREASE_MOUNTED_SPEED)
&& (pSpellInfo->EffectApplyAuraName[2] == SPELL_AURA_MOD_FLIGHT_SPEED_MOUNTED))
{
if((pSpellInfo->EffectBasePoints[1] == master_speed1)
&& (pSpellInfo->EffectBasePoints[2] == master_speed2))
{
spellMount = spellId;
break;
}
}
else if((pSpellInfo->EffectApplyAuraName[2] == SPELL_AURA_MOD_INCREASE_MOUNTED_SPEED)
&& (pSpellInfo->EffectApplyAuraName[1] == SPELL_AURA_MOD_FLIGHT_SPEED_MOUNTED))
{
if((pSpellInfo->EffectBasePoints[2] == master_speed2)
&& (pSpellInfo->EffectBasePoints[1] == master_speed1))
{
spellMount = spellId;
break;
}
}
}
}
if(spellMount > 0) m_bot->CastSpell(m_bot, spellMount, false);
}
}
else if (!GetMaster()->IsMounted() && m_bot->IsMounted())
{
WorldPacket emptyPacket;
m_bot->GetSession()->HandleCancelMountAuraOpcode(emptyPacket); //updated code
}
return;
}
// handle flying acknowledgement
case SMSG_MOVE_SET_CAN_FLY:
{
WorldPacket p(packet);
uint64 guid = extractGuid(p);
if (guid != m_bot->GetGUID())
return;
m_bot->m_movementInfo.AddMovementFlag(MOVEFLAG_FLYING);
//m_bot->SetSpeed(MOVE_RUN, GetMaster()->GetSpeed(MOVE_FLIGHT) +0.1f, true);
return;
}
// handle dismount flying acknowledgement
case SMSG_MOVE_UNSET_CAN_FLY:
{
WorldPacket p(packet);
uint64 guid = extractGuid(p);
if (guid != m_bot->GetGUID())
return;
m_bot->m_movementInfo.RemoveMovementFlag(MOVEFLAG_FLYING);
//m_bot->SetSpeed(MOVE_RUN,GetMaster()->GetSpeedRate(MOVE_RUN),true);
return;
}
// If the leader role was given to the bot automatically give it to the master
// if the master is in the group, otherwise leave group
case SMSG_GROUP_SET_LEADER:
{
WorldPacket p(packet);
std::string name;
p >> name;
if (m_bot->GetGroup() && name == m_bot->GetName())
{
if (m_bot->GetGroup()->IsMember(GetMaster()->GetGUID()))
{
p.resize(8);
p << GetMaster()->GetGUID();
m_bot->GetSession()->HandleGroupSetLeaderOpcode(p);
}
else
{
p.clear(); // not really needed
m_bot->GetSession()->HandleGroupDisbandOpcode(p); // packet not used updated code
}
}
return;
}
// If the master leaves the group, then the bot leaves too
case SMSG_PARTY_COMMAND_RESULT:
{
WorldPacket p(packet);
uint32 operation;
p >> operation;
std::string member;
p >> member;
uint32 result;
p >> result;
p.clear();
if (operation == PARTY_OP_LEAVE)
{
if (member == GetMaster()->GetName())
m_bot->GetSession()->HandleGroupDisbandOpcode(p); // packet not used updated code
}
return;
}
// Handle Group invites (auto accept if master is in group, otherwise decline & send message
case SMSG_GROUP_INVITE:
{
if (m_bot->GetGroupInvite())
{
const Group* const grp = m_bot->GetGroupInvite();
if (!grp)
return;
Player* const inviter = sObjectMgr.GetPlayer(grp->GetLeaderGUID());
if (!inviter)
return;
WorldPacket p;
if (!canObeyCommandFrom(*inviter))
{
std::string buf = "I can't accept your invite unless you first invite my master ";
buf += GetMaster()->GetName();
buf += ".";
SendWhisper(buf, *inviter);
m_bot->GetSession()->HandleGroupDeclineOpcode(p); // packet not used
}
else
m_bot->GetSession()->HandleGroupAcceptOpcode(p); // packet not used
}
return;
}
// Handle when another player opens the trade window with the bot
// also sends list of tradable items bot can trade if bot is allowed to obey commands from
case SMSG_TRADE_STATUS:
{
if (m_bot->GetTrader() == NULL)
break;
WorldPacket p(packet);
uint32 status;
p >> status;
p.resize(4);
//4 == TRADE_STATUS_TRADE_ACCEPT
if (status == 4)
m_bot->GetSession()->HandleAcceptTradeOpcode(p); // packet not used
//1 == TRADE_STATUS_BEGIN_TRADE
else if (status == 1)
{
m_bot->GetSession()->HandleBeginTradeOpcode(p); // packet not used
if (!canObeyCommandFrom(*(m_bot->GetTrader())))
{
SendWhisper("I'm not allowed to trade you any of my items, but you are free to give me money or items.", *(m_bot->GetTrader()));
return;
}
// list out items available for trade
std::ostringstream out;
// list out items in main backpack
for (uint8 slot = INVENTORY_SLOT_ITEM_START; slot < INVENTORY_SLOT_ITEM_END; slot++)
{
const Item* const pItem = m_bot->GetItemByPos(INVENTORY_SLOT_BAG_0, slot);
if (pItem && pItem->CanBeTraded())
{
const ItemPrototype* const pItemProto = pItem->GetProto();
std::string itemName = pItemProto->Name1;
ItemLocalization(itemName, pItemProto->ItemId);
out << " |cffffffff|Hitem:" << pItemProto->ItemId
<< ":0:0:0:0:0:0:0" << "|h[" << itemName << "]|h|r";
if (pItem->GetCount() > 1)
out << "x" << pItem->GetCount() << ' ';
}
}
// list out items in other removable backpacks
for (uint8 bag = INVENTORY_SLOT_BAG_START; bag < INVENTORY_SLOT_BAG_END; ++bag)
{
const Bag* const pBag = (Bag*) m_bot->GetItemByPos(INVENTORY_SLOT_BAG_0, bag);
if (pBag)
{
for (uint8 slot = 0; slot < pBag->GetBagSize(); ++slot)
{
const Item* const pItem = m_bot->GetItemByPos(bag, slot);
if (pItem && pItem->CanBeTraded())
{
const ItemPrototype* const pItemProto = pItem->GetProto();
std::string itemName = pItemProto->Name1;
ItemLocalization(itemName, pItemProto->ItemId);
// item link format: http://www.wowwiki.com/ItemString
// itemId, enchantId, jewelId1, jewelId2, jewelId3, jewelId4, suffixId, uniqueId
out << " |cffffffff|Hitem:" << pItemProto->ItemId
<< ":0:0:0:0:0:0:0" << "|h[" << itemName
<< "]|h|r";
if (pItem->GetCount() > 1)
out << "x" << pItem->GetCount() << ' ';
}
}
}
}
// calculate how much money bot has
uint32 copper = m_bot->GetMoney();
uint32 gold = uint32(copper / 10000);
copper -= (gold * 10000);
uint32 silver = uint32(copper / 100);
copper -= (silver * 100);
// send bot the message
std::ostringstream whisper;
whisper << "I have |cff00ff00" << gold
<< "|r|cfffffc00g|r|cff00ff00" << silver
<< "|r|cffcdcdcds|r|cff00ff00" << copper
<< "|r|cffffd333c|r" << " and the following items:";
SendWhisper(whisper.str().c_str(), *(m_bot->GetTrader()));
ChatHandler ch(m_bot->GetTrader());
ch.SendSysMessage(out.str().c_str());
}
return;
}
case SMSG_SPELL_GO:
{
WorldPacket p(packet);
uint64 castItemGuid = extractGuid(p);
uint64 casterGuid = extractGuid(p);
if (casterGuid != m_bot->GetGUID())
return;
uint32 spellId;
p >> spellId;
uint16 castFlags;
p >> castFlags;
uint32 msTime;
p >> msTime;
uint8 numHit;
p >> numHit;
if (m_CurrentlyCastingSpellId == spellId)
{
Spell* const pSpell = m_bot->FindCurrentSpellBySpellId(spellId);
if (!pSpell)
return;
// channeled spells: wait out the spell duration before the next AI update
if (pSpell->IsChannelActive())
m_ignoreAIUpdatesUntilTime = time(0) + (GetSpellDuration(pSpell->m_spellInfo) / 1000) + 1;
// auto repeat spells (wand / auto shot): check back after a fixed delay
else if (pSpell->IsAutoRepeat())
m_ignoreAIUpdatesUntilTime = time(0) + 6;
else
{
m_ignoreAIUpdatesUntilTime = time(0) + 1;
m_CurrentlyCastingSpellId = 0;
}
}
return;
}
/* uncomment this and your bots will tell you all their outgoing packet opcode names
case SMSG_MONSTER_MOVE:
case SMSG_UPDATE_WORLD_STATE:
case SMSG_COMPRESSED_UPDATE_OBJECT:
case MSG_MOVE_SET_FACING:
case MSG_MOVE_STOP:
case MSG_MOVE_HEARTBEAT:
case MSG_MOVE_STOP_STRAFE:
case MSG_MOVE_START_STRAFE_LEFT:
case SMSG_UPDATE_OBJECT:
case MSG_MOVE_START_FORWARD:
case MSG_MOVE_START_STRAFE_RIGHT:
case SMSG_DESTROY_OBJECT:
case MSG_MOVE_START_BACKWARD:
case SMSG_AURA_UPDATE_ALL:
case MSG_MOVE_FALL_LAND:
case MSG_MOVE_JUMP:
return;
default:
{
const char* oc = LookupOpcodeName(packet.GetOpcode());
std::ostringstream out;
out << "botout: " << oc;
sLog.outError(out.str().c_str());
//TellMaster(oc);
}
*/
}
}
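// resource helpers: health/mana are returned as a 0-100 percentage, rage/energy/runic
// power as their raw amount; the parameterless overloads operate on the bot itself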
uint8 PlayerbotAI::GetHealthPercent(const Unit& target) const
{
return (static_cast<float> (target.GetHealth()) / target.GetMaxHealth()) * 100;
}
uint8 PlayerbotAI::GetHealthPercent() const
{
return GetHealthPercent(*m_bot);
}
uint8 PlayerbotAI::GetManaPercent(const Unit& target) const
{
return (static_cast<float> (target.GetPower(POWER_MANA)) / target.GetMaxPower(POWER_MANA)) * 100;
}
uint8 PlayerbotAI::GetManaPercent() const
{
return GetManaPercent(*m_bot);
}
uint8 PlayerbotAI::GetBaseManaPercent(const Unit& target) const
{
if (target.GetPower(POWER_MANA) >= target.GetCreateMana())
return (100);
else
return (static_cast<float> (target.GetPower(POWER_MANA)) / target.GetCreateMana()) * 100;
}
uint8 PlayerbotAI::GetBaseManaPercent() const
{
return GetBaseManaPercent(*m_bot);
}
uint8 PlayerbotAI::GetRageAmount(const Unit& target) const
{
return (static_cast<float> (target.GetPower(POWER_RAGE)));
}
uint8 PlayerbotAI::GetRageAmount() const
{
return GetRageAmount(*m_bot);
}
uint8 PlayerbotAI::GetEnergyAmount(const Unit& target) const
{
return (static_cast<float> (target.GetPower(POWER_ENERGY)));
}
uint8 PlayerbotAI::GetEnergyAmount() const
{
return GetEnergyAmount(*m_bot);
}
uint8 PlayerbotAI::GetRunicPower(const Unit& target) const
{
return (static_cast<float>(target.GetPower(POWER_RUNIC_POWER)));
}
uint8 PlayerbotAI::GetRunicPower() const
{
return GetRunicPower(*m_bot);
}
//typedef std::pair<uint32, uint8> spellEffectPair;
//typedef std::multimap<spellEffectPair, Aura*> AuraMap;
bool PlayerbotAI::HasAura(uint32 spellId, const Unit& player) const
{
if (spellId == 0)
return false;
for (Unit::AuraMap::const_iterator iter = player.GetAuras().begin(); iter != player.GetAuras().end(); ++iter)
{
if (iter->second->GetId() == spellId)
return true;
}
return false;
}
bool PlayerbotAI::HasAura(const char* spellName) const
{
return HasAura(spellName, *m_bot);
}
bool PlayerbotAI::HasAura(const char* spellName, const Unit& player) const
{
uint32 spellId = getSpellId(spellName);
return (spellId) ? HasAura(spellId, player) : false;
}
// looks through all items / spells that bot could have to get a mount
Item* PlayerbotAI::FindMount(uint32 matchingRidingSkill) const
{
// list out items in main backpack
Item* partialMatch = NULL;
for (uint8 slot = INVENTORY_SLOT_ITEM_START; slot < INVENTORY_SLOT_ITEM_END; slot++)
{
Item* const pItem = m_bot->GetItemByPos(INVENTORY_SLOT_BAG_0, slot);
if (pItem)
{
const ItemPrototype* const pItemProto = pItem->GetProto();
if (!pItemProto || !m_bot->CanUseItem(pItemProto) || pItemProto->RequiredSkill != SKILL_RIDING)
continue;
if (pItemProto->RequiredSkillRank == matchingRidingSkill)
return pItem;
else if (!partialMatch || (partialMatch && partialMatch->GetProto()->RequiredSkillRank < pItemProto->RequiredSkillRank))
partialMatch = pItem;
}
}
// list out items in other removable backpacks
for (uint8 bag = INVENTORY_SLOT_BAG_START; bag < INVENTORY_SLOT_BAG_END; ++bag)
{
const Bag* const pBag = (Bag*) m_bot->GetItemByPos(INVENTORY_SLOT_BAG_0, bag);
if (pBag)
{
for (uint8 slot = 0; slot < pBag->GetBagSize(); ++slot)
{
Item* const pItem = m_bot->GetItemByPos(bag, slot);
if (pItem)
{
const ItemPrototype* const pItemProto = pItem->GetProto();
if (!pItemProto || !m_bot->CanUseItem(pItemProto) || pItemProto->RequiredSkill != SKILL_RIDING)
continue;
if (pItemProto->RequiredSkillRank == matchingRidingSkill)
return pItem;
else if (!partialMatch || (partialMatch && partialMatch->GetProto()->RequiredSkillRank < pItemProto->RequiredSkillRank))
partialMatch = pItem;
}
}
}
}
return partialMatch;
}
Item* PlayerbotAI::FindFood() const
{
// list out items in main backpack
for (uint8 slot = INVENTORY_SLOT_ITEM_START; slot < INVENTORY_SLOT_ITEM_END; slot++)
{
Item* const pItem = m_bot->GetItemByPos(INVENTORY_SLOT_BAG_0, slot);
if (pItem)
{
const ItemPrototype* const pItemProto = pItem->GetProto();
if (!pItemProto || !m_bot->CanUseItem(pItemProto))
continue;
if (pItemProto->Class == ITEM_CLASS_CONSUMABLE && pItemProto->SubClass == ITEM_SUBCLASS_FOOD)
{
// SPELL_CATEGORY_FOOD: the named enum is no longer defined in mangos, its value was 11
if (pItemProto->Spells[0].SpellCategory == 11)
return pItem;
}
}
}
// list out items in other removable backpacks
for (uint8 bag = INVENTORY_SLOT_BAG_START; bag < INVENTORY_SLOT_BAG_END; ++bag)
{
const Bag* const pBag = (Bag*) m_bot->GetItemByPos(INVENTORY_SLOT_BAG_0, bag);
if (pBag)
{
for (uint8 slot = 0; slot < pBag->GetBagSize(); ++slot)
{
Item* const pItem = m_bot->GetItemByPos(bag, slot);
if (pItem)
{
const ItemPrototype* const pItemProto = pItem->GetProto();
if (!pItemProto || !m_bot->CanUseItem(pItemProto))
continue;
if (pItemProto->Class == ITEM_CLASS_CONSUMABLE && pItemProto->SubClass == ITEM_SUBCLASS_FOOD)
{
// SPELL_CATEGORY_FOOD: the named enum is no longer defined in mangos, its value was 11
if (pItemProto->Spells[0].SpellCategory == 11)
return pItem;
}
}
}
}
}
return NULL;
}
Item* PlayerbotAI::FindDrink() const
{
// list out items in main backpack
for (uint8 slot = INVENTORY_SLOT_ITEM_START; slot < INVENTORY_SLOT_ITEM_END; slot++)
{
Item* const pItem = m_bot->GetItemByPos(INVENTORY_SLOT_BAG_0, slot);
if (pItem)
{
const ItemPrototype* const pItemProto = pItem->GetProto();
if (!pItemProto || !m_bot->CanUseItem(pItemProto))
continue;
if (pItemProto->Class == ITEM_CLASS_CONSUMABLE && pItemProto->SubClass == ITEM_SUBCLASS_FOOD)
{
// SPELL_CATEGORY_DRINK: the named enum is no longer defined in mangos, its value was 59
if (pItemProto->Spells[0].SpellCategory == 59)
return pItem;
}
}
}
// list out items in other removable backpacks
for (uint8 bag = INVENTORY_SLOT_BAG_START; bag < INVENTORY_SLOT_BAG_END; ++bag)
{
const Bag* const pBag = (Bag*) m_bot->GetItemByPos(INVENTORY_SLOT_BAG_0, bag);
if (pBag)
{
for (uint8 slot = 0; slot < pBag->GetBagSize(); ++slot)
{
Item* const pItem = m_bot->GetItemByPos(bag, slot);
if (pItem)
{
const ItemPrototype* const pItemProto = pItem->GetProto();
if (!pItemProto || !m_bot->CanUseItem(pItemProto))
continue;
if (pItemProto->Class == ITEM_CLASS_CONSUMABLE && pItemProto->SubClass == ITEM_SUBCLASS_FOOD)
{
// SPELL_CATEGORY_DRINK: the named enum is no longer defined in mangos, its value was 59
if (pItemProto->Spells[0].SpellCategory == 59)
return pItem;
}
}
}
}
}
return NULL;
}
Item* PlayerbotAI::FindBandage() const
{
// list out items in main backpack
for (uint8 slot = INVENTORY_SLOT_ITEM_START; slot < INVENTORY_SLOT_ITEM_END; slot++)
{
Item* const pItem = m_bot->GetItemByPos(INVENTORY_SLOT_BAG_0, slot);
if (pItem)
{
const ItemPrototype* const pItemProto = pItem->GetProto();
if (!pItemProto || !m_bot->CanUseItem(pItemProto))
continue;
if (pItemProto->Class == ITEM_CLASS_CONSUMABLE && pItemProto->SubClass == ITEM_SUBCLASS_BANDAGE)
return pItem;
}
}
// list out items in other removable backpacks
for (uint8 bag = INVENTORY_SLOT_BAG_START; bag < INVENTORY_SLOT_BAG_END; ++bag)
{
const Bag* const pBag = (Bag*) m_bot->GetItemByPos(INVENTORY_SLOT_BAG_0, bag);
if (pBag)
{
for (uint8 slot = 0; slot < pBag->GetBagSize(); ++slot)
{
Item* const pItem = m_bot->GetItemByPos(bag, slot);
if (pItem)
{
const ItemPrototype* const pItemProto = pItem->GetProto();
if (!pItemProto || !m_bot->CanUseItem(pItemProto))
continue;
if (pItemProto->Class == ITEM_CLASS_CONSUMABLE && pItemProto->SubClass == ITEM_SUBCLASS_BANDAGE)
return pItem;
}
}
}
}
return NULL;
}
//Find Poison ...Natsukawa
Item* PlayerbotAI::FindPoison() const
{
// list out items in main backpack
for (uint8 slot = INVENTORY_SLOT_ITEM_START; slot < INVENTORY_SLOT_ITEM_END; slot++)
{
Item* const pItem = m_bot->GetItemByPos(INVENTORY_SLOT_BAG_0, slot);
if (pItem)
{
const ItemPrototype* const pItemProto = pItem->GetProto();
if (!pItemProto || !m_bot->CanUseItem(pItemProto))
continue;
if (pItemProto->Class == ITEM_CLASS_CONSUMABLE && pItemProto->SubClass == 6)
return pItem;
}
}
// list out items in other removable backpacks
for (uint8 bag = INVENTORY_SLOT_BAG_START; bag < INVENTORY_SLOT_BAG_END; ++bag)
{
const Bag* const pBag = (Bag*) m_bot->GetItemByPos(INVENTORY_SLOT_BAG_0, bag);
if (pBag)
{
for (uint8 slot = 0; slot < pBag->GetBagSize(); ++slot)
{
Item* const pItem = m_bot->GetItemByPos(bag, slot);
if (pItem)
{
const ItemPrototype* const pItemProto = pItem->GetProto();
if (!pItemProto || !m_bot->CanUseItem(pItemProto))
continue;
if (pItemProto->Class == ITEM_CLASS_CONSUMABLE && pItemProto->SubClass == 6)
return pItem;
}
}
}
}
return NULL;
}
void PlayerbotAI::InterruptCurrentCastingSpell()
{
//TellMaster("I'm interrupting my current spell!");
WorldPacket* const packet = new WorldPacket(CMSG_CANCEL_CAST, 5); //changed from thetourist suggestion
*packet << m_CurrentlyCastingSpellId;
*packet << m_targetGuidCommand; //changed from thetourist suggestion
m_CurrentlyCastingSpellId = 0;
m_bot->GetSession()->QueuePacket(packet);
}
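// sit and eat/drink until health and mana are back above ~80%, then stand up again;
// whispers the master if the bot has run out of food or water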
void PlayerbotAI::Feast()
{
// stand up if we are done feasting
if (!(m_bot->GetHealth() < m_bot->GetMaxHealth() || (m_bot->getPowerType() == POWER_MANA && m_bot->GetPower(POWER_MANA) < m_bot->GetMaxPower(POWER_MANA))))
{
m_bot->SetStandState(UNIT_STAND_STATE_STAND);
return;
}
// wait 3 seconds before checking if we need to drink more or eat more
time_t currentTime = time(0);
m_ignoreAIUpdatesUntilTime = currentTime + 3;
// should we drink another
if (m_bot->getPowerType() == POWER_MANA && currentTime > m_TimeDoneDrinking
&& ((static_cast<float> (m_bot->GetPower(POWER_MANA)) / m_bot->GetMaxPower(POWER_MANA)) < 0.8))
{
Item* pItem = FindDrink();
if (pItem != NULL)
{
UseItem(*pItem);
m_TimeDoneDrinking = currentTime + 30;
return;
}
TellMaster("I need water.");
}
// should we eat another
if (currentTime > m_TimeDoneEating && ((static_cast<float> (m_bot->GetHealth()) / m_bot->GetMaxHealth()) < 0.8))
{
Item* pItem = FindFood();
if (pItem != NULL)
{
//TellMaster("eating now...");
UseItem(*pItem);
m_TimeDoneEating = currentTime + 30;
return;
}
TellMaster("I need food.");
}
// if we are no longer eating or drinking
// because we are out of items or we are above 80% in both stats
if (currentTime > m_TimeDoneEating && currentTime > m_TimeDoneDrinking)
{
TellMaster("done feasting!");
m_bot->SetStandState(UNIT_STAND_STATE_STAND);
}
}
// selects the bot's combat target, honouring protect/assist orders and forced targets,
// and switches the bot into combat state
void PlayerbotAI::GetCombatTarget( Unit* forcedTarget )
{
// set combat state, and clear looting, etc...
if( m_botState != BOTSTATE_COMBAT )
{
SetState( BOTSTATE_COMBAT );
SetQuestNeedItems();
m_lootCreature.clear();
m_lootCurrent = 0;
m_targetCombat = 0;
}
// update attacker info now
UpdateAttackerInfo();
// check for attackers on protected unit, and make it a forcedTarget if any
if( !forcedTarget && (m_combatOrder&ORDERS_PROTECT) && m_targetProtect!=0 )
{
Unit *newTarget = FindAttacker( (ATTACKERINFOTYPE)(AIT_VICTIMNOTSELF|AIT_HIGHESTTHREAT), m_targetProtect );
if( newTarget && newTarget!=m_targetCombat )
{
forcedTarget = newTarget;
m_targetType = TARGET_THREATEN;
if( m_mgr->m_confDebugWhisper )
TellMaster( "Changing target to %s to protect %s", forcedTarget->GetName(), m_targetProtect->GetName() );
}
} else if( forcedTarget )
{
if( m_mgr->m_confDebugWhisper )
TellMaster( "Changing target to %s by force!", forcedTarget->GetName() );
m_targetType = (m_combatOrder==ORDERS_TANK ? TARGET_THREATEN : TARGET_NORMAL);
}
// we already have a target and we are not forced to change it
if( m_targetCombat && !forcedTarget )
return;
// are we forced on a target?
if( forcedTarget )
{
m_targetCombat = forcedTarget;
m_targetChanged = true;
}
// do we have to assist someone?
if( !m_targetCombat && (m_combatOrder&ORDERS_ASSIST) && m_targetAssist!=0 )
{
m_targetCombat = FindAttacker( (ATTACKERINFOTYPE)(AIT_VICTIMNOTSELF|AIT_LOWESTTHREAT), m_targetAssist );
if( m_mgr->m_confDebugWhisper && m_targetCombat )
TellMaster( "Attacking %s to assist %s", m_targetCombat->GetName(), m_targetAssist->GetName() );
m_targetType = (m_combatOrder==ORDERS_TANK ? TARGET_THREATEN : TARGET_NORMAL);
m_targetChanged = true;
}
// are there any other attackers?
if( !m_targetCombat )
{
m_targetCombat = FindAttacker();
m_targetType = (m_combatOrder==ORDERS_TANK ? TARGET_THREATEN : TARGET_NORMAL);
m_targetChanged = true;
}
// no attacker found anyway
if (!m_targetCombat)
{
m_targetType = TARGET_NORMAL;
m_targetChanged = false;
return;
}
// if thing to attack is in a duel, then ignore and don't call updateAI for 6 seconds
// this method never gets called when the bot is in a duel and this code
// prevents bot from helping
if (m_targetCombat->GetTypeId() == TYPEID_PLAYER && dynamic_cast<Player*> (m_targetCombat)->duel)
{
m_ignoreAIUpdatesUntilTime = time(0) + 6;
return;
}
m_bot->SetSelection(m_targetCombat->GetGUID());
m_ignoreAIUpdatesUntilTime = time(0) + 1;
if (m_bot->getStandState() != UNIT_STAND_STATE_STAND)
m_bot->SetStandState(UNIT_STAND_STATE_STAND);
m_bot->Attack(m_targetCombat, true);
// add thingToAttack to loot list
m_lootCreature.push_back( m_targetCombat->GetGUID() );
// set movement generators for combat movement
MovementClear();
return;
}
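// one step of the combat loop: refresh the target, drop targets that died or became
// invalid, run the class AI's opening move on a fresh target, then do combat movement
// and the class AI's next combat action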
void PlayerbotAI::DoNextCombatManeuver()
{
// check for new targets
GetCombatTarget();
// check if we have a target - fixes crash reported by rrtn (kill hunter's pet bug)
// if current target for attacks doesn't make sense anymore
// clear our orders so we can get orders in next update
if( !m_targetCombat || m_targetCombat->isDead() || !m_targetCombat->IsInWorld() || !m_bot->IsHostileTo(m_targetCombat) )
{
m_bot->AttackStop();
m_bot->SetSelection(0);
MovementReset();
m_bot->InterruptNonMeleeSpells(true);
m_targetCombat = 0;
m_targetChanged = false;
m_targetType = TARGET_NORMAL;
return;
}
// do opening moves, if we changed target
if( m_targetChanged )
{
if( GetClassAI() )
m_targetChanged = GetClassAI()->DoFirstCombatManeuver( m_targetCombat );
else
m_targetChanged = false;
}
// do normal combat movement
DoCombatMovement();
if (GetClassAI() && !m_targetChanged )
(GetClassAI())->DoNextCombatManeuver( m_targetCombat );
}
void PlayerbotAI::DoCombatMovement() {
if( !m_targetCombat ) return;
float targetDist = m_bot->GetDistance( m_targetCombat );
if( m_combatStyle==COMBAT_MELEE && !m_bot->hasUnitState( UNIT_STAT_CHASE ) && ( (m_movementOrder==MOVEMENT_STAY && targetDist<=ATTACK_DISTANCE) || (m_movementOrder!=MOVEMENT_STAY) ) )
{
// melee combat - chase target if in range or if we are not forced to stay
m_bot->GetMotionMaster()->MoveChase( m_targetCombat );
}
else if( m_combatStyle==COMBAT_RANGED && m_movementOrder!=MOVEMENT_STAY )
{
// ranged combat - just move within spell range
// TODO: just follow in spell range! how to determine bots spell range?
if( targetDist>25.0f ) {
m_bot->GetMotionMaster()->MoveChase( m_targetCombat );
} else {
MovementClear();
}
}
}
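// rebuild m_needItemList from all incomplete quests: maps each required item id to
// how many of that item the bot still needs to collect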
void PlayerbotAI::SetQuestNeedItems()
{
// reset values first
m_needItemList.clear();
m_lootCreature.clear();
m_lootCurrent = 0;
// run through accepted quests, get quest info and data
for( QuestStatusMap::iterator iter=m_bot->getQuestStatusMap().begin(); iter!=m_bot->getQuestStatusMap().end(); ++iter )
{
const Quest *qInfo = sObjectMgr.GetQuestTemplate( iter->first );
if( !qInfo )
continue;
QuestStatusData *qData = &iter->second;
// only check quest if it is incomplete
if( qData->m_status != QUEST_STATUS_INCOMPLETE )
continue;
// check for items we not have enough of
for( int i=0; i<QUEST_OBJECTIVES_COUNT; i++ )
{
if( !qInfo->ReqItemCount[i] || (qInfo->ReqItemCount[i]-qData->m_itemcount[i])<=0 )
continue;
m_needItemList[qInfo->ReqItemId[i]] = (qInfo->ReqItemCount[i]-qData->m_itemcount[i]);
}
}
}
void PlayerbotAI::SetState( BotState state )
{
//sLog.outDebug( "[PlayerbotAI]: %s switch state %d to %d", m_bot->GetName(), m_botState, state );
m_botState = state;
}
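// two-phase looting: pick the next corpse from m_lootCreature and move to it, then
// loot only the items recorded in m_needItemList and release the corpse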
void PlayerbotAI::DoLoot()
{
if( !m_lootCurrent && m_lootCreature.empty() )
{
//sLog.outDebug( "[PlayerbotAI]: %s reset loot list / go back to idle", m_bot->GetName() );
m_botState = BOTSTATE_NORMAL;
SetQuestNeedItems();
return;
}
if( !m_lootCurrent )
{
m_lootCurrent = m_lootCreature.front();
m_lootCreature.pop_front();
Creature *c = m_bot->GetMap()->GetCreature( m_lootCurrent );
// check if we got a creature and if it is still a corpse, otherwise bot runs to spawn point
if( !c || c->getDeathState()!=CORPSE || GetMaster()->GetDistance( c )>BOTLOOT_DISTANCE )
{
m_lootCurrent = 0;
return;
}
m_bot->GetMotionMaster()->MovePoint( c->GetMapId(), c->GetPositionX(), c->GetPositionY(), c->GetPositionZ() );
//sLog.outDebug( "[PlayerbotAI]: %s is going to loot '%s' deathState=%d", m_bot->GetName(), c->GetName(), c->getDeathState() );
}
else
{
Creature *c = m_bot->GetMap()->GetCreature( m_lootCurrent );
if( !c || c->getDeathState()!=CORPSE || GetMaster()->GetDistance( c )>BOTLOOT_DISTANCE )
{
m_lootCurrent = 0;
return;
}
if( m_bot->IsWithinDistInMap( c, INTERACTION_DISTANCE ) )
{
// check for needed items
m_bot->SendLoot( m_lootCurrent, LOOT_CORPSE );
Loot *loot = &c->loot;
uint32 lootNum = loot->GetMaxSlotInLootFor( m_bot );
//sLog.outDebug( "[PlayerbotAI]: %s looting: '%s' got %d items", m_bot->GetName(), c->GetName(), loot->GetMaxSlotInLootFor( m_bot ) );
for( uint32 l=0; l<lootNum; l++ )
{
QuestItem *qitem=0, *ffaitem=0, *conditem=0;
LootItem *item = loot->LootItemInSlot( l, m_bot, &qitem, &ffaitem, &conditem );
if( !item )
continue;
if( !qitem && item->is_blocked )
{
m_bot->SendLootRelease( m_bot->GetLootGUID() );
continue;
}
if( m_needItemList[item->itemid]>0 )
{
//sLog.outDebug( "[PlayerbotAI]: %s looting: needed item '%s'", m_bot->GetName(), sObjectMgr.GetItemLocale(item->itemid)->Name );
ItemPosCountVec dest;
if( m_bot->CanStoreNewItem( NULL_BAG, NULL_SLOT, dest, item->itemid, item->count ) == EQUIP_ERR_OK )
{
Item * newitem = m_bot->StoreNewItem( dest, item->itemid, true, item->randomPropertyId);
if( qitem )
{
qitem->is_looted = true;
if( item->freeforall || loot->GetPlayerQuestItems().size() == 1 )
m_bot->SendNotifyLootItemRemoved( l );
else
loot->NotifyQuestItemRemoved( qitem->index );
}
else
{
if( ffaitem )
{
ffaitem->is_looted=true;
m_bot->SendNotifyLootItemRemoved( l );
}
else
{
if( conditem )
conditem->is_looted=true;
loot->NotifyItemRemoved( l );
}
}
if (!item->freeforall)
item->is_looted = true;
--loot->unlootedCount;
m_bot->SendNewItem( newitem, uint32(item->count), false, false, true );
m_bot->GetAchievementMgr().UpdateAchievementCriteria( ACHIEVEMENT_CRITERIA_TYPE_LOOT_ITEM, item->itemid, item->count );
}
}
}
// release loot
// if( uint64 lguid = m_bot->GetLootGUID() && m_bot->GetSession() )
m_bot->GetSession()->DoLootRelease( m_lootCurrent );
//else if( !m_bot->GetSession() )
// sLog.outDebug( "[PlayerbotAI]: %s has no session. Cannot release loot!", m_bot->GetName() );
// clear movement target, take next target on next update
m_bot->GetMotionMaster()->Clear();
m_bot->GetMotionMaster()->MoveIdle();
SetQuestNeedItems();
//sLog.outDebug( "[PlayerbotAI]: %s looted target 0x%08X", m_bot->GetName(), m_lootCurrent );
}
}
}
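// accept a quest shared or offered by pGiver, complete it immediately if its
// objectives are already fulfilled, and cast the quest's source spell if it has one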
void PlayerbotAI::AcceptQuest( Quest const *qInfo, Player *pGiver )
{
if( !qInfo || !pGiver )
return;
uint32 quest = qInfo->GetQuestId();
if( !pGiver->CanShareQuest( qInfo->GetQuestId() ) )
{
// giver can't share quest
m_bot->SetDivider( 0 );
return;
}
if( !m_bot->CanTakeQuest( qInfo, false ) )
{
// can't take quest
m_bot->SetDivider( 0 );
return;
}
if( m_bot->GetDivider() != 0 )
{
// send msg to quest giving player
pGiver->SendPushToPartyResponse( m_bot, QUEST_PARTY_MSG_ACCEPT_QUEST );
m_bot->SetDivider( 0 );
}
if( m_bot->CanAddQuest( qInfo, false ) )
{
m_bot->AddQuest( qInfo, pGiver );
if( m_bot->CanCompleteQuest( quest ) )
m_bot->CompleteQuest( quest );
// Runsttren: did not add typeid switch from WorldSession::HandleQuestgiverAcceptQuestOpcode!
// I think it's not needed, cause typeid should be TYPEID_PLAYER - and this one is not handled
// there and there is no default case also.
if( qInfo->GetSrcSpell() > 0 )
m_bot->CastSpell( m_bot, qInfo->GetSrcSpell(), true );
}
}
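// turn in every completed quest this questgiver offers: the reward is taken
// automatically when there is no choice or only one choice, otherwise the bot asks
// the master which reward to pick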
void PlayerbotAI::TurnInQuests( WorldObject *questgiver )
{
uint64 giverGUID = questgiver->GetGUID();
if( !m_bot->IsInMap( questgiver ) )
TellMaster("hey you are turning in quests without me!");
else
{
m_bot->SetSelection( giverGUID );
// auto complete every completed quest this NPC has
m_bot->PrepareQuestMenu( giverGUID );
QuestMenu& questMenu = m_bot->PlayerTalkClass->GetQuestMenu();
for (uint32 iI = 0; iI < questMenu.MenuItemCount(); ++iI)
{
QuestMenuItem const& qItem = questMenu.GetItem(iI);
uint32 questID = qItem.m_qId;
Quest const* pQuest = sObjectMgr.GetQuestTemplate(questID);
if (!pQuest)
continue;
std::ostringstream out;
std::string questTitle = pQuest->GetTitle();
QuestLocalization(questTitle, questID);
QuestStatus status = m_bot->GetQuestStatus(questID);
// if quest is complete, turn it in
if (status == QUEST_STATUS_COMPLETE)
{
// if bot hasn't already turned quest in
if (! m_bot->GetQuestRewardStatus(questID))
{
// auto reward quest if no choice in reward
if (pQuest->GetRewChoiceItemsCount() == 0)
{
if (m_bot->CanRewardQuest(pQuest, false))
{
m_bot->RewardQuest(pQuest, 0, questgiver, false);
out << "Quest complete: |cff808080|Hquest:" << questID << ':' << pQuest->GetQuestLevel() << "|h[" << questTitle << "]|h|r";
}
else
{
out << "|cffff0000Unable to turn quest in:|r |cff808080|Hquest:" << questID << ':' << pQuest->GetQuestLevel() << "|h[" << questTitle << "]|h|r";
}
}
// auto reward quest if one item as reward
else if (pQuest->GetRewChoiceItemsCount() == 1)
{
int rewardIdx = 0;
ItemPrototype const *pRewardItem = sObjectMgr.GetItemPrototype(pQuest->RewChoiceItemId[rewardIdx]);
std::string itemName = pRewardItem->Name1;
ItemLocalization(itemName, pRewardItem->ItemId);
if (m_bot->CanRewardQuest(pQuest, rewardIdx, false))
{
m_bot->RewardQuest(pQuest, rewardIdx, questgiver, true);
out << "Quest complete: "
<< " |cff808080|Hquest:" << questID << ':' << pQuest->GetQuestLevel()
<< "|h[" << questTitle << "]|h|r reward: |cffffffff|Hitem:"
<< pRewardItem->ItemId << ":0:0:0:0:0:0:0" << "|h[" << itemName << "]|h|r";
}
else
{
out << "|cffff0000Unable to turn quest in:|r "
<< "|cff808080|Hquest:" << questID << ':'
<< pQuest->GetQuestLevel() << "|h[" << questTitle << "]|h|r"
<< " reward: |cffffffff|Hitem:"
<< pRewardItem->ItemId << ":0:0:0:0:0:0:0" << "|h[" << itemName << "]|h|r";
}
}
// else multiple rewards - let master pick
else {
out << "What reward should I take for |cff808080|Hquest:" << questID << ':' << pQuest->GetQuestLevel()
<< "|h[" << questTitle << "]|h|r? ";
for (uint8 i=0; i < pQuest->GetRewChoiceItemsCount(); ++i)
{
ItemPrototype const * const pRewardItem = sObjectMgr.GetItemPrototype(pQuest->RewChoiceItemId[i]);
std::string itemName = pRewardItem->Name1;
ItemLocalization(itemName, pRewardItem->ItemId);
out << "|cffffffff|Hitem:" << pRewardItem->ItemId << ":0:0:0:0:0:0:0" << "|h[" << itemName << "]|h|r";
}
}
}
}
else if (status == QUEST_STATUS_INCOMPLETE) {
out << "|cffff0000Quest incomplete:|r "
<< " |cff808080|Hquest:" << questID << ':' << pQuest->GetQuestLevel() << "|h[" << questTitle << "]|h|r";
}
else if (status == QUEST_STATUS_AVAILABLE){
out << "|cff00ff00Quest available:|r "
<< " |cff808080|Hquest:" << questID << ':' << pQuest->GetQuestLevel() << "|h[" << questTitle << "]|h|r";
}
if (! out.str().empty())
TellMaster(out.str());
}
}
}
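// true if the bot, the master or any member of the bot's group is in combat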
bool PlayerbotAI::IsInCombat()
{
bool inCombat = false;
inCombat |= m_bot->isInCombat();
inCombat |= GetMaster()->isInCombat();
if( m_bot->GetGroup() )
{
GroupReference *ref = m_bot->GetGroup()->GetFirstMember();
while( ref )
{
inCombat |= ref->getSource()->isInCombat();
ref = ref->next();
}
}
return inCombat;
}
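// rebuild m_attackerInfo from the hostile reference lists of the bot, the master and
// all group members; stores each attacker's victim, its threat against the bot, and
// the highest threat it holds against anyone other than the bot (threat2)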
void PlayerbotAI::UpdateAttackerInfo()
{
// clear old list
m_attackerInfo.clear();
// check own attackers
HostileReference *ref = m_bot->getHostileRefManager().getFirst();
while( ref )
{
ThreatManager *target = ref->getSource();
uint64 guid = target->getOwner()->GetGUID();
m_attackerInfo[guid].attacker = target->getOwner();
m_attackerInfo[guid].victim = target->getOwner()->getVictim();
m_attackerInfo[guid].threat = target->getThreat( m_bot );
m_attackerInfo[guid].count = 1;
m_attackerInfo[guid].source = 1;
ref = ref->next();
}
// check master's attackers
ref = GetMaster()->getHostileRefManager().getFirst();
while( ref )
{
ThreatManager *target = ref->getSource();
uint64 guid = target->getOwner()->GetGUID();
if( m_attackerInfo.find( guid ) == m_attackerInfo.end() )
{
m_attackerInfo[guid].attacker = target->getOwner();
m_attackerInfo[guid].victim = target->getOwner()->getVictim();
m_attackerInfo[guid].count = 0;
m_attackerInfo[guid].source = 2;
}
m_attackerInfo[guid].threat = target->getThreat( m_bot );
m_attackerInfo[guid].count++;
ref = ref->next();
}
// check all group members now
if( m_bot->GetGroup() )
{
GroupReference *gref = m_bot->GetGroup()->GetFirstMember();
while( gref )
{
if( gref->getSource() == m_bot || gref->getSource() == GetMaster() )
{
gref = gref->next();
continue;
}
ref = gref->getSource()->getHostileRefManager().getFirst();
while( ref )
{
ThreatManager *target = ref->getSource();
uint64 guid = target->getOwner()->GetGUID();
if( m_attackerInfo.find( guid ) == m_attackerInfo.end() )
{
m_attackerInfo[guid].attacker = target->getOwner();
m_attackerInfo[guid].victim = target->getOwner()->getVictim();
m_attackerInfo[guid].count = 0;
m_attackerInfo[guid].source = 3;
}
m_attackerInfo[guid].threat = target->getThreat( m_bot );
m_attackerInfo[guid].count++;
ref = ref->next();
}
gref = gref->next();
}
}
// get highest threat not caused by bot for every entry in AttackerInfoList...
for( AttackerInfoList::iterator itr=m_attackerInfo.begin(); itr!=m_attackerInfo.end(); ++itr )
{
if( !itr->second.attacker )
continue;
Unit *a = itr->second.attacker;
float t = 0.00;
std::list<HostileReference*>::const_iterator i=a->getThreatManager().getThreatList().begin();
for( ; i!=a->getThreatManager().getThreatList().end(); ++i )
{
if( (*i)->getThreat() > t && (*i)->getTarget() != m_bot )
t = (*i)->getThreat();
}
m_attackerInfo[itr->first].threat2 = t;
}
// DEBUG: output attacker info
//sLog.outBasic( "[PlayerbotAI]: %s m_attackerInfo = {", m_bot->GetName() );
//for( AttackerInfoList::iterator i=m_attackerInfo.begin(); i!=m_attackerInfo.end(); ++i )
// sLog.outBasic( "[PlayerbotAI]: [%016I64X] { %08X, %08X, %.2f, %.2f, %d, %d }",
// i->first,
// (i->second.attacker?i->second.attacker->GetGUIDLow():0),
// (i->second.victim?i->second.victim->GetGUIDLow():0),
// i->second.threat,
// i->second.threat2,
// i->second.count,
// i->second.source );
//sLog.outBasic( "[PlayerbotAI]: };" );
}
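// estimate the total repair cost of everything the bot carries: equipped items,
// backpack, equipped bags and their contents (bank, buyback and keys are skipped)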
uint32 PlayerbotAI::EstRepairAll()
{
uint32 TotalCost = 0;
// equipped, backpack, bags itself
for(int i = EQUIPMENT_SLOT_START; i < INVENTORY_SLOT_ITEM_END; ++i)
TotalCost += EstRepair(( (INVENTORY_SLOT_BAG_0 << 8) | i ));
// bank, buyback and keys not repaired
// items in inventory bags
for(int j = INVENTORY_SLOT_BAG_START; j < INVENTORY_SLOT_BAG_END; ++j)
for(int i = 0; i < MAX_BAG_SIZE; ++i)
TotalCost += EstRepair(( (j << 8) | i ));
return TotalCost;
}
uint32 PlayerbotAI::EstRepair(uint16 pos)
{
Item* item = m_bot->GetItemByPos(pos);
uint32 TotalCost = 0;
if(!item)
return TotalCost;
uint32 maxDurability = item->GetUInt32Value(ITEM_FIELD_MAXDURABILITY);
if(!maxDurability)
return TotalCost;
uint32 curDurability = item->GetUInt32Value(ITEM_FIELD_DURABILITY);
uint32 LostDurability = maxDurability - curDurability;
if(LostDurability>0)
{
ItemPrototype const *ditemProto = item->GetProto();
DurabilityCostsEntry const *dcost = sDurabilityCostsStore.LookupEntry(ditemProto->ItemLevel);
if(!dcost)
{
sLog.outError("RepairDurability: Wrong item lvl %u", ditemProto->ItemLevel);
return TotalCost;
}
uint32 dQualitymodEntryId = (ditemProto->Quality+1)*2;
DurabilityQualityEntry const *dQualitymodEntry = sDurabilityQualityStore.LookupEntry(dQualitymodEntryId);
if(!dQualitymodEntry)
{
sLog.outError("RepairDurability: Wrong dQualityModEntry %u", dQualitymodEntryId);
return TotalCost;
}
uint32 dmultiplier = dcost->multiplier[ItemSubClassToDurabilityMultiplierId(ditemProto->Class,ditemProto->SubClass)];
uint32 costs = uint32(LostDurability*dmultiplier*double(dQualitymodEntry->quality_mod));
if (costs==0) //fix for ITEM_QUALITY_ARTIFACT
costs = 1;
TotalCost = costs;
}
return TotalCost;
}
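// pick an attacker from m_attackerInfo according to the ATTACKERINFOTYPE flags:
// filter by victim (self / not self / a specific unit) and optionally take the
// lowest or highest threat entry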
Unit *PlayerbotAI::FindAttacker( ATTACKERINFOTYPE ait, Unit *victim )
{
// list empty? why are we here?
if( m_attackerInfo.empty() )
return 0;
// not searching something specific - return first in list
if( !ait )
return (m_attackerInfo.begin())->second.attacker;
float t = ( (ait & AIT_HIGHESTTHREAT) ? 0.00 : 9999.00 );
Unit *a = 0;
AttackerInfoList::iterator itr = m_attackerInfo.begin();
for( ; itr != m_attackerInfo.end(); ++itr )
{
if( (ait & AIT_VICTIMSELF) && !(ait & AIT_VICTIMNOTSELF) && itr->second.victim != m_bot )
continue;
if( !(ait & AIT_VICTIMSELF) && (ait & AIT_VICTIMNOTSELF) && itr->second.victim == m_bot )
continue;
if( (ait & AIT_VICTIMNOTSELF) && victim && itr->second.victim != victim )
continue;
if( !(ait & (AIT_LOWESTTHREAT|AIT_HIGHESTTHREAT)) )
{
// no threat ordering requested - take the first attacker that matches the victim filters
a = itr->second.attacker;
break;
}
else
{
if( (ait & AIT_HIGHESTTHREAT) && /*(itr->second.victim==m_bot) &&*/ itr->second.threat>=t )
{
t = itr->second.threat;
a = itr->second.attacker;
}
else if( (ait & AIT_LOWESTTHREAT) && /*(itr->second.victim==m_bot) &&*/ itr->second.threat<=t )
{
t = itr->second.threat;
a = itr->second.attacker;
}
}
}
return a;
}
void PlayerbotAI::SetCombatOrderByStr( std::string str, Unit *target ) {
CombatOrderType co;
if( str == "tank" ) co = ORDERS_TANK;
else if( str == "assist" ) co = ORDERS_ASSIST;
else if( str == "heal" ) co = ORDERS_HEAL;
else if( str == "protect" ) co = ORDERS_PROTECT;
else co = ORDERS_RESET;
SetCombatOrder( co, target );
}
void PlayerbotAI::SetCombatOrder( CombatOrderType co, Unit *target ) {
if( (co == ORDERS_ASSIST || co == ORDERS_PROTECT) && !target )
return;
if( co == ORDERS_RESET ) {
m_combatOrder = ORDERS_NONE;
m_targetAssist = 0;
m_targetProtect = 0;
return;
}
if( co == ORDERS_PROTECT )
m_targetProtect = target;
else if( co == ORDERS_ASSIST )
m_targetAssist = target;
if( (co&ORDERS_PRIMARY) )
m_combatOrder = (CombatOrderType)(((uint32)m_combatOrder&(uint32)ORDERS_SECONDARY)|(uint32)co);
else
m_combatOrder = (CombatOrderType)(((uint32)m_combatOrder&(uint32)ORDERS_PRIMARY)|(uint32)co);
}
void PlayerbotAI::SetMovementOrder( MovementOrderType mo, Unit *followTarget ) {
m_movementOrder = mo;
m_followTarget = followTarget;
MovementReset();
}
void PlayerbotAI::MovementReset() {
// stop moving...
MovementClear();
if( m_movementOrder == MOVEMENT_FOLLOW )
{
if( !m_followTarget ) return;
// target player is teleporting...
if( m_followTarget->GetTypeId()==TYPEID_PLAYER && ((Player*)m_followTarget)->IsBeingTeleported() )
return;
// check if bot needs to teleport to reach target...
if( !m_bot->isInCombat() )
{
if( m_followTarget->GetTypeId()==TYPEID_PLAYER && ((Player*)m_followTarget)->GetCorpse() )
{
if( !FollowCheckTeleport( *((Player*)m_followTarget)->GetCorpse() ) ) return;
}
else
{
if( !FollowCheckTeleport( *m_followTarget ) ) return;
}
}
if( m_bot->isAlive() )
{
float angle = rand_float(0, M_PI_F);
float dist = rand_float( m_mgr->m_confFollowDistance[0], m_mgr->m_confFollowDistance[1] );
m_bot->GetMotionMaster()->MoveFollow( m_followTarget, dist, angle );
}
}
}
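// send a heartbeat for the bot and refresh its position so movement state,
// exploration and related updates are processed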
void PlayerbotAI::MovementUpdate()
{
// send heartbeats to world
WorldPacket data;
m_bot->BuildHeartBeatMsg( &data );
m_bot->SendMessageToSet( &data, false );
// call set position (updates states, exploration, etc.)
m_bot->SetPosition( m_bot->GetPositionX(), m_bot->GetPositionY(), m_bot->GetPositionZ(), m_bot->GetOrientation(), false );
}
void PlayerbotAI::MovementClear()
{
// stop...
m_bot->GetMotionMaster()->Clear( true );
m_bot->clearUnitState( UNIT_STAT_CHASE );
m_bot->clearUnitState( UNIT_STAT_FOLLOW );
// stand up...
if (!m_bot->IsStandState())
m_bot->SetStandState(UNIT_STAND_STATE_STAND);
}
bool PlayerbotAI::IsMoving()
{
return m_bot->GetMotionMaster()->GetCurrentMovementGeneratorType() != IDLE_MOTION_TYPE;
}
void PlayerbotAI::SetInFront( const Unit* obj )
{
// removed SendUpdateToPlayer (is not updating movement/orientation)
if( !m_bot->HasInArc( M_PI_F, obj ) )
m_bot->SetInFront( obj );
}
// some possible things to use in AI
//GetRandomContactPoint
//GetPower, GetMaxPower
// HasSpellCooldown
// IsAffectedBySpellmod
// isMoving
// hasUnitState(FLAG) FLAG like: UNIT_STAT_ROOT, UNIT_STAT_CONFUSED, UNIT_STAT_STUNNED
// hasAuraType
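// main AI tick, throttled by m_ignoreAIUpdatesUntilTime: handles the dead / ghost /
// resurrect state machine first, otherwise finishes or interrupts the current cast,
// runs combat, loots after combat, or falls back to following the master and the
// class AI's non-combat actions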
void PlayerbotAI::UpdateAI(const uint32 p_time)
{
if (m_bot->IsBeingTeleported() || m_bot->GetTrader())
return;
time_t currentTime = time(0);
if (currentTime < m_ignoreAIUpdatesUntilTime)
return;
// default updates occur every two seconds
m_ignoreAIUpdatesUntilTime = time(0) + 2;
// send heartbeat
MovementUpdate();
if( !m_bot->isAlive() )
{
if( m_botState != BOTSTATE_DEAD && m_botState != BOTSTATE_DEADRELEASED )
{
//sLog.outDebug( "[PlayerbotAI]: %s died and is not in correct state...", m_bot->GetName() );
// clear loot list on death
m_lootCreature.clear();
m_lootCurrent = 0;
// clear combat orders
m_bot->SetSelection(0);
m_bot->GetMotionMaster()->Clear(true);
// set state to dead
SetState( BOTSTATE_DEAD );
// wait 30sec
m_ignoreAIUpdatesUntilTime = time(0) + 30;
}
else if( m_botState == BOTSTATE_DEAD )
{
// become ghost
if( m_bot->GetCorpse() ){
//sLog.outDebug( "[PlayerbotAI]: %s already has a corpse...", m_bot->GetName() );
SetState( BOTSTATE_DEADRELEASED );
return;
}
m_bot->SetBotDeathTimer();
m_bot->BuildPlayerRepop();
// relocate ghost
WorldLocation loc;
Corpse *corpse = m_bot->GetCorpse();
corpse->GetPosition( loc );
m_bot->TeleportTo( loc.mapid, loc.coord_x, loc.coord_y, loc.coord_z, m_bot->GetOrientation() );
// set state to released
SetState( BOTSTATE_DEADRELEASED );
}
else if( m_botState == BOTSTATE_DEADRELEASED )
{
// get bot's corpse
Corpse *corpse = m_bot->GetCorpse();
if( !corpse )
{
//sLog.outDebug( "[PlayerbotAI]: %s has no corpse!", m_bot->GetName() );
return;
}
// teleport ghost from graveyard to corpse
//sLog.outDebug( "[PlayerbotAI]: Teleport %s to corpse...", m_bot->GetName() );
FollowCheckTeleport( *corpse );
// check if we are allowed to resurrect now
if( corpse->GetGhostTime() + m_bot->GetCorpseReclaimDelay( corpse->GetType()==CORPSE_RESURRECTABLE_PVP ) > time(0) )
{
m_ignoreAIUpdatesUntilTime = corpse->GetGhostTime() + m_bot->GetCorpseReclaimDelay( corpse->GetType()==CORPSE_RESURRECTABLE_PVP );
//sLog.outDebug( "[PlayerbotAI]: %s has to wait for %d seconds to revive...", m_bot->GetName(), m_ignoreAIUpdatesUntilTime-time(0) );
return;
}
// resurrect now
//sLog.outDebug( "[PlayerbotAI]: Reviving %s to corpse...", m_bot->GetName() );
m_ignoreAIUpdatesUntilTime = time(0) + 6;
PlayerbotChatHandler ch(GetMaster());
if (! ch.revive(*m_bot))
{
ch.sysmessage(".. could not be revived ..");
return;
}
// set back to normal
SetState( BOTSTATE_NORMAL );
}
}
else
{
// if we are casting a spell then interrupt it
// make sure any actions that cast a spell set a proper m_ignoreAIUpdatesUntilTime!
Spell* const pSpell = GetCurrentSpell();
if (pSpell && !(pSpell->IsChannelActive() || pSpell->IsAutoRepeat()))
InterruptCurrentCastingSpell();
// direct cast command from master
else if (m_spellIdCommand != 0)
{
Unit* pTarget = ObjectAccessor::GetUnit(*m_bot, m_targetGuidCommand);
if (pTarget != NULL)
CastSpell(m_spellIdCommand, *pTarget);
m_spellIdCommand = 0;
m_targetGuidCommand = 0;
}
// handle combat (either self/master/group in combat, or combat state and valid target)
else if ( IsInCombat() || (m_botState == BOTSTATE_COMBAT && m_targetCombat) )
DoNextCombatManeuver();
// bot was in combat recently - loot now
else if (m_botState == BOTSTATE_COMBAT)
{
SetState( BOTSTATE_LOOTING );
m_attackerInfo.clear();
}
else if (m_botState == BOTSTATE_LOOTING)
DoLoot();
/*
// are we sitting, if so feast if possible
else if (m_bot->getStandState() == UNIT_STAND_STATE_SIT)
Feast();
*/
// if commanded to follow master and not already following master then follow master
else if (!m_bot->isInCombat() && !IsMoving() )
MovementReset();
// do class specific non combat actions
else if (GetClassAI())
(GetClassAI())->DoNonCombatActions();
}
}
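// returns the spell the bot is currently casting (tracked via m_CurrentlyCastingSpellId), or NULL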
Spell* PlayerbotAI::GetCurrentSpell() const
{
if (m_CurrentlyCastingSpellId == 0)
return NULL;
Spell* const pSpell = m_bot->FindCurrentSpellBySpellId(m_CurrentlyCastingSpellId);
return pSpell;
}
void PlayerbotAI::TellMaster(const std::string& text) const
{
SendWhisper(text, *GetMaster());
}
void PlayerbotAI::TellMaster( const char *fmt, ... ) const
{
char temp_buf[1024];
va_list ap;
va_start( ap, fmt );
vsnprintf( temp_buf, 1024, fmt, ap );
va_end( ap );
std::string str = temp_buf;
TellMaster( str );
}
void PlayerbotAI::SendWhisper(const std::string& text, Player& player) const
{
WorldPacket data(SMSG_MESSAGECHAT, 200);
m_bot->BuildPlayerChat(&data, CHAT_MSG_WHISPER, text, LANG_UNIVERSAL);
player.GetSession()->SendPacket(&data);
}
bool PlayerbotAI::canObeyCommandFrom(const Player& player) const
{
return player.GetSession()->GetAccountId() == GetMaster()->GetSession()->GetAccountId();
}
bool PlayerbotAI::CastSpell(const char* args)
{
uint32 spellId = getSpellId(args);
return (spellId) ? CastSpell(spellId) : false;
}
bool PlayerbotAI::CastSpell(uint32 spellId, Unit& target)
{
uint64 oldSel = m_bot->GetSelection();
m_bot->SetSelection(target.GetGUID());
bool rv = CastSpell(spellId);
m_bot->SetSelection(oldSel);
return rv;
}
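// cast a spell by id on the bot's current selection: picks a sensible target,
// refuses friendly fire and redundant auras, stops movement so the cast is not
// interrupted, and records the cast so UpdateAI can track it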
bool PlayerbotAI::CastSpell(uint32 spellId)
{
// some AIs don't check if the bot doesn't have spell before using it
// so just return false when this happens
if (spellId == 0)
return false;
// check spell cooldown
if( m_bot->HasSpellCooldown( spellId ) )
return false;
// see Creature.cpp 1738 for reference
// don't allow bot to cast damage spells on friends
const SpellEntry* const pSpellInfo = sSpellStore.LookupEntry(spellId);
if (!pSpellInfo)
{
TellMaster("missing spell entry in CastSpell.");
return false;
}
// set target
uint64 targetGUID = m_bot->GetSelection();
Unit* pTarget = ObjectAccessor::GetUnit(*m_bot, targetGUID);
if (IsPositiveSpell(spellId))
{
// beneficial spells fall back to the bot itself if there is no valid friendly target
if (!pTarget || !m_bot->IsFriendlyTo(pTarget))
pTarget = m_bot;
}
else
{
// never cast harmful spells without a target or on a friendly unit
if (!pTarget || m_bot->IsFriendlyTo(pTarget))
return false;
// search for Creature::reachWithSpellAttack to also see some examples of spell distance usage
if (!m_bot->isInFrontInMap(pTarget, 10))
{
m_bot->SetInFront(pTarget);
MovementUpdate();
}
}
if (HasAura(spellId, *pTarget))
return false;
// stop movement to prevent cancel spell casting
MovementClear();
// actually cast spell
m_bot->CastSpell(pTarget, pSpellInfo, false);
Spell* const pSpell = m_bot->FindCurrentSpellBySpellId(spellId);
if (!pSpell)
return false;
m_CurrentlyCastingSpellId = spellId;
m_ignoreAIUpdatesUntilTime = time(0) + (int32)((float)pSpell->GetCastTime()/1000.0f) + 1;
// if this caused the caster to move (blink) update the position
// I think this is normally done on the client
// this should be done on spell success
/*
if (name == "Blink") {
float x,y,z;
m_bot->GetPosition(x,y,z);
m_bot->GetNearPoint(m_bot, x, y, z, 1, 5, 0);
m_bot->Relocate(x,y,z);
WorldPacket data;
m_bot->BuildHeartBeatMsg(&data);
m_bot->SendMessageToSet(&data,true);
}
*/
return true;
}
Item* PlayerbotAI::FindItem(uint32 ItemId)
{
// list out items in main backpack
//INVENTORY_SLOT_ITEM_START = 23
//INVENTORY_SLOT_ITEM_END = 39
for (uint8 slot = INVENTORY_SLOT_ITEM_START; slot < INVENTORY_SLOT_ITEM_END; slot++)
{
// sLog.outDebug("[%s's]backpack slot = %u",m_bot->GetName(),slot); // 23 to 38 = 16
Item* const pItem = m_bot->GetItemByPos(INVENTORY_SLOT_BAG_0, slot); // 255, 23 to 38
if (pItem)
{
const ItemPrototype* const pItemProto = pItem->GetProto();
if (!pItemProto )
continue;
if( pItemProto->ItemId == ItemId) // have required item
return pItem;
}
}
// list out items in other removable backpacks
//INVENTORY_SLOT_BAG_START = 19
//INVENTORY_SLOT_BAG_END = 23
for (uint8 bag = INVENTORY_SLOT_BAG_START; bag < INVENTORY_SLOT_BAG_END; ++bag) // 20 to 23 = 4
{
const Bag* const pBag = (Bag*) m_bot->GetItemByPos(INVENTORY_SLOT_BAG_0, bag); // 255, 20 to 23
if (pBag)
{
for (uint8 slot = 0; slot < pBag->GetBagSize(); ++slot)
{
sLog.outDebug("[%s's]bag[%u] slot = %u",m_bot->GetName(),bag,slot); // 1 to bagsize = ?
Item* const pItem = m_bot->GetItemByPos(bag, slot); // 20 to 23, 1 to bagsize
if (pItem)
{
const ItemPrototype* const pItemProto = pItem->GetProto();
if (!pItemProto )
continue;
if( pItemProto->ItemId == ItemId ) // have required item
return pItem;
}
}
}
}
return NULL;
}
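// returns true if the bot carries a mining pick; each equipped or bagged item's
// TotemCategory is looked up in item_template (165 and 167 per the checks below)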
bool PlayerbotAI::HasPick()
{
QueryResult *result;
// list out equiped items
for( uint8 slot = EQUIPMENT_SLOT_START; slot < EQUIPMENT_SLOT_END; slot++)
{
Item* const pItem = m_bot->GetItemByPos( INVENTORY_SLOT_BAG_0, slot );
if (pItem )
{
const ItemPrototype* const pItemProto = pItem->GetProto();
if (!pItemProto )
continue;
result = WorldDatabase.PQuery("SELECT TotemCategory FROM item_template WHERE entry = '%i'", pItemProto->ItemId);
if (result)
{
Field *fields = result->Fetch();
uint32 tc = fields[0].GetUInt32();
// sLog.outDebug("HasPick %u",tc);
if(tc == 165 || tc == 167) // pick = 165, hammer = 162 or hammer pick = 167
return true;
}
}
}
// list out items in backpack
for (uint8 slot = INVENTORY_SLOT_ITEM_START; slot < INVENTORY_SLOT_ITEM_END; slot++)
{
// sLog.outDebug("[%s's]backpack slot = %u",m_bot->GetName(),slot); // 23 to 38 = 16
Item* const pItem = m_bot->GetItemByPos(INVENTORY_SLOT_BAG_0, slot); // 255, 23 to 38
if (pItem)
{
const ItemPrototype* const pItemProto = pItem->GetProto();
if (!pItemProto )
continue;
result = WorldDatabase.PQuery("SELECT TotemCategory FROM item_template WHERE entry = '%i'", pItemProto->ItemId);
if (result)
{
Field *fields = result->Fetch();
uint32 tc = fields[0].GetUInt32();
// sLog.outDebug("HasPick %u",tc);
if(tc == 165 || tc == 167) // pick = 165, hammer = 162 or hammer pick = 167
return true;
}
}
}
// list out items in other removable backpacks
for (uint8 bag = INVENTORY_SLOT_BAG_START; bag < INVENTORY_SLOT_BAG_END; ++bag) // 20 to 23 = 4
{
const Bag* const pBag = (Bag*) m_bot->GetItemByPos(INVENTORY_SLOT_BAG_0, bag); // 255, 20 to 23
if (pBag)
{
for (uint8 slot = 0; slot < pBag->GetBagSize(); ++slot)
{
// sLog.outDebug("[%s's]bag[%u] slot = %u",m_bot->GetName(),bag,slot); // 1 to bagsize = ?
Item* const pItem = m_bot->GetItemByPos(bag, slot); // 20 to 23, 1 to bagsize
if (pItem)
{
const ItemPrototype* const pItemProto = pItem->GetProto();
if (!pItemProto )
continue;
result = WorldDatabase.PQuery("SELECT TotemCategory FROM item_template WHERE entry = '%i'", pItemProto->ItemId);
if (result)
{
Field *fields = result->Fetch();
uint32 tc = fields[0].GetUInt32();
// sLog.outDebug("HasPick %u",tc);
if(tc == 165 || tc == 167)
return true;
}
}
}
}
}
std::ostringstream out;
out << "|cffffffffI do not have a pick!";
TellMaster( out.str().c_str() );
return false;
}
// extracts all item ids in format below
// I decided to roll my own extractor rather than use the one in ChatHandler
// because this one works on a const string, and it handles multiple links
// |color|linkType:key:something1:...:somethingN|h[name]|h|r
void PlayerbotAI::extractItemIds(const std::string& text, std::list<uint32>& itemIds) const
{
std::string::size_type pos = 0;
while (true)
{
std::string::size_type i = text.find("Hitem:", pos);
if (i == std::string::npos)
break;
pos = i + 6;
std::string::size_type endPos = text.find(':', pos);
if (endPos == std::string::npos)
break;
std::string idC = text.substr(pos, endPos - pos);
uint32 id = atol(idC.c_str());
pos = endPos;
if (id)
itemIds.push_back(id);
}
}
bool PlayerbotAI::extractGOinfo(const std::string& text, uint32 &guid, uint32 &entry, int &mapid, float &x, float &y, float &z) const
{
// Link format
// |cFFFFFF00|Hfound:" << guid << ':' << entry << ':' << x << ':' << y << ':' << z << ':' << mapid << ':' << "|h[" << gInfo->name << "]|h|r";
// |cFFFFFF00|Hfound:5093:1731:-9295:-270:81.874:0:|h[Copper Vein]|h|r
uint8 pos = 0;
// extract GO guid
int i = text.find("Hfound:", pos); // base H = 11
if (i == -1) // break if error
return false;
pos = i + 7; //start of window in text 11 + 7 = 18
int endPos = text.find(':', pos); // end of window in text 22
if (endPos == -1) //break if error
return false;
std::string guidC = text.substr(pos, endPos - pos); // get string within window i.e guid 22 - 18 = 4
guid = atol(guidC.c_str()); // convert ascii to long int
// extract GO entry
pos = endPos + 1;
endPos = text.find(':', pos); // end of window in text
if (endPos == -1) //break if error
return false;
std::string entryC = text.substr(pos, endPos - pos); // get string within window i.e entry
entry = atol(entryC.c_str()); // convert ascii to float
// extract GO x
pos = endPos + 1;
endPos = text.find(':', pos); // end of window in text
if (endPos == -1) //break if error
return false;
std::string xC = text.substr(pos, endPos - pos); // get string within window i.e x
x = atof(xC.c_str()); // convert ascii to float
// extract GO y
pos = endPos + 1;
endPos = text.find(':', pos); // end of window in text
if (endPos == -1) //break if error
return false;
std::string yC = text.substr(pos, endPos - pos); // get string within window i.e y
y = atof(yC.c_str()); // convert ascii to float
// extract GO z
pos = endPos + 1;
endPos = text.find(':', pos); // end of window in text
if (endPos == -1) //break if error
return false;
std::string zC = text.substr(pos, endPos - pos); // get string within window i.e z
z = atof(zC.c_str()); // convert ascii to float
//extract GO mapid
pos = endPos + 1;
endPos = text.find(':', pos); // end of window in text
if (endPos == -1) //break if error
return false;
std::string mapidC = text.substr(pos, endPos - pos); // get string within window i.e mapid
mapid = atoi(mapidC.c_str()); // convert ascii to int
pos = endPos; // end
return true;
}
// extracts currency in #g#s#c format
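// e.g. "1g20s5c" -> 1*10000 + 20*100 + 5 = 12005 copper; a space stops parsing and
// any other unexpected character makes the whole result 0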
uint32 PlayerbotAI::extractMoney(const std::string& text) const
{
// if user specified money in ##g##s##c format
std::string acum = "";
uint32 copper = 0;
for (std::string::size_type i = 0; i < text.length(); i++)
{
if (text[i] == 'g')
{
copper += (atol(acum.c_str()) * 100 * 100);
acum = "";
}
else if (text[i] == 'c')
{
copper += atol(acum.c_str());
acum = "";
}
else if (text[i] == 's')
{
copper += (atol(acum.c_str()) * 100);
acum = "";
}
else if (text[i] == ' ')
break;
else if (text[i] >= 48 && text[i] <= 57)
acum += text[i];
else
{
copper = 0;
break;
}
}
return copper;
}
// finds items in equipment and adds Item* to foundItemList
// also removes found item IDs from itemIdSearchList when found
void PlayerbotAI::findItemsInEquip(std::list<uint32>& itemIdSearchList, std::list<Item*>& foundItemList) const
{
for( uint8 slot=EQUIPMENT_SLOT_START; itemIdSearchList.size()>0 && slot<EQUIPMENT_SLOT_END; slot++ ) {
Item* const pItem = m_bot->GetItemByPos( INVENTORY_SLOT_BAG_0, slot );
if( !pItem )
continue;
for (std::list<uint32>::iterator it = itemIdSearchList.begin(); it != itemIdSearchList.end(); ++it)
{
if (pItem->GetProto()->ItemId != *it)
continue;
foundItemList.push_back(pItem);
itemIdSearchList.erase(it);
break;
}
}
}
// finds items in inventory and adds Item* to foundItemList
// also removes found item IDs from itemIdSearchList when found
void PlayerbotAI::findItemsInInv(std::list<uint32>& itemIdSearchList, std::list<Item*>& foundItemList) const
{
// look for items in main bag
for (uint8 slot = INVENTORY_SLOT_ITEM_START; itemIdSearchList.size() > 0 && slot < INVENTORY_SLOT_ITEM_END; ++slot)
{
Item* const pItem = m_bot->GetItemByPos(INVENTORY_SLOT_BAG_0, slot);
if (!pItem)
continue;
for (std::list<uint32>::iterator it = itemIdSearchList.begin(); it != itemIdSearchList.end(); ++it)
{
if (pItem->GetProto()->ItemId != *it)
continue;
foundItemList.push_back(pItem);
itemIdSearchList.erase(it);
break;
}
}
// for all for items in other bags
for (uint8 bag = INVENTORY_SLOT_BAG_START; itemIdSearchList.size() > 0 && bag < INVENTORY_SLOT_BAG_END; ++bag)
{
Bag* const pBag = (Bag*) m_bot->GetItemByPos(INVENTORY_SLOT_BAG_0, bag);
if (!pBag)
continue;
for (uint8 slot = 0; itemIdSearchList.size() > 0 && slot < pBag->GetBagSize(); ++slot)
{
Item* const pItem = m_bot->GetItemByPos(bag, slot);
if (!pItem)
continue;
for (std::list<uint32>::iterator it = itemIdSearchList.begin(); it != itemIdSearchList.end(); ++it)
{
if (pItem->GetProto()->ItemId != *it)
continue;
foundItemList.push_back(pItem);
itemIdSearchList.erase(it);
break;
}
}
}
}
// submits packet to use an item
void PlayerbotAI::UseItem(Item& item)
{
uint8 bagIndex = item.GetBagSlot();
uint8 slot = item.GetSlot();
uint8 cast_count = 1;
uint32 spellid = 0; // only used in combat
uint64 item_guid = item.GetGUID();
uint32 glyphIndex = 0; // ??
uint8 unk_flags = 0; // not 0x02
// create target data
// note other targets are possible but not supported at the moment
// see SpellCastTargets::read in Spell.cpp to see other options
// for setting target
uint32 target = TARGET_FLAG_SELF;
WorldPacket* const packet = new WorldPacket(CMSG_USE_ITEM, 1 + 1 + 1 + 4 + 8 + 4 + 1);
*packet << bagIndex << slot << cast_count << spellid << item_guid
<< glyphIndex << unk_flags << target;
m_bot->GetSession()->QueuePacket(packet); // queue the packet to get around race condition
// certain items cause player to sit (food,drink)
// tell bot to stop following if this is the case
// (doesn't work since we queued the packet!)
// maybe it's not needed?
//if (! m_bot->IsStandState())
// m_bot->GetMotionMaster()->Clear();
}
// submits packet to use an item
void PlayerbotAI::EquipItem(Item& item)
{
uint8 bagIndex = item.GetBagSlot();
uint8 slot = item.GetSlot();
WorldPacket* const packet = new WorldPacket(CMSG_AUTOEQUIP_ITEM, 2);
*packet << bagIndex << slot;
m_bot->GetSession()->QueuePacket(packet);
}
// submits packet to trade an item (trade window must already be open)
// default slot is -1 which means trade slots 0 to 5. if slot is set
// to TRADE_SLOT_NONTRADED (which is slot 6) item will be shown in the
// 'Will not be traded' slot.
bool PlayerbotAI::TradeItem(const Item& item, int8 slot)
{
sLog.outDebug( "[PlayerbotAI::TradeItem]: slot=%d, hasTrader=%d, itemInTrade=%d, itemTradeable=%d",
slot,
(m_bot->GetTrader()?1:0),
(item.IsInTrade()?1:0),
(item.CanBeTraded()?1:0)
);
if (!m_bot->GetTrader() || item.IsInTrade() || (!item.CanBeTraded() && slot!=TRADE_SLOT_NONTRADED) )
return false;
int8 tradeSlot = -1;
TradeData* pTrade = m_bot->GetTradeData();
if( (slot>=0 && slot<TRADE_SLOT_COUNT) && pTrade->GetTraderData()->GetItem(TradeSlots(slot)) == NULL )
tradeSlot = slot;
else
{
for( uint8 i=0; i<TRADE_SLOT_TRADED_COUNT && tradeSlot==-1; i++ )
{
if( pTrade->GetTraderData()->GetItem(TradeSlots(i)) == NULL )
tradeSlot = i;
}
}
if( tradeSlot == -1 ) return false;
WorldPacket* const packet = new WorldPacket(CMSG_SET_TRADE_ITEM, 3);
*packet << (uint8) tradeSlot << (uint8) item.GetBagSlot()
<< (uint8) item.GetSlot();
m_bot->GetSession()->QueuePacket(packet);
return true;
}
// submits packet to trade copper (trade window must be open)
bool PlayerbotAI::TradeCopper(uint32 copper)
{
if (copper > 0)
{
WorldPacket* const packet = new WorldPacket(CMSG_SET_TRADE_GOLD, 4);
*packet << copper;
m_bot->GetSession()->QueuePacket(packet);
return true;
}
return false;
}
/*void PlayerbotAI::Stay()
{
m_IsFollowingMaster = false;
m_bot->GetMotionMaster()->Clear(true);
m_bot->HandleEmoteCommand(EMOTE_ONESHOT_SALUTE);
}*/
/*bool PlayerbotAI::Follow(Player& player)
{
if (GetMaster()->IsBeingTeleported())
return false;
m_IsFollowingMaster = true;
if (!m_bot->IsStandState())
m_bot->SetStandState(UNIT_STAND_STATE_STAND);
if (!m_bot->isInCombat())
{
// follow player or his corpse if dead (stops bot from running to graveyard if player repops...)
if( player.GetCorpse() )
{
if( !FollowCheckTeleport( *player.GetCorpse() ) )
return false;
}
else
{
if( !FollowCheckTeleport( player ) )
return false;
}
}
if (m_bot->isAlive())
{
float angle = rand_float(0, M_PI);
float dist = rand_float(0.5f, 1.0f);
m_bot->GetMotionMaster()->Clear(true);
m_bot->GetMotionMaster()->MoveFollow(&player, dist, angle);
return true;
}
return false;
}*/
bool PlayerbotAI::FollowCheckTeleport( WorldObject &obj )
{
// if bot has strayed too far from the master, teleport bot
if (!m_bot->IsWithinDistInMap( &obj, 50, true ) && GetMaster()->isAlive())
{
m_ignoreAIUpdatesUntilTime = time(0) + 6;
PlayerbotChatHandler ch(GetMaster());
if (! ch.teleport(*m_bot))
{
ch.sysmessage(".. could not be teleported ..");
//sLog.outDebug( "[PlayerbotAI]: %s failed to teleport", m_bot->GetName() );
return false;
}
}
return true;
}
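// acknowledge a pending near or far teleport so the bot actually arrives at the
// destination, and pause AI updates for a few seconds while it does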
void PlayerbotAI::HandleTeleportAck()
{
m_ignoreAIUpdatesUntilTime = time(0) + 6;
m_bot->GetMotionMaster()->Clear(true);
if (m_bot->IsBeingTeleportedNear())
{
WorldPacket p = WorldPacket(MSG_MOVE_TELEPORT_ACK, 8 + 4 + 4);
p.appendPackGUID(m_bot->GetGUID());
p << (uint32) 0; // supposed to be flags? not used currently
p << (uint32) time(0); // time - not currently used
m_bot->GetSession()->HandleMoveTeleportAck(p);
}
else if (m_bot->IsBeingTeleportedFar())
m_bot->GetSession()->HandleMoveWorldportAckOpcode();
}
// Localization support
void PlayerbotAI::ItemLocalization(std::string& itemName, const uint32 itemID) const
{
uint32 loc = GetMaster()->GetSession()->GetSessionDbLocaleIndex();
std::wstring wnamepart;
ItemLocale const *pItemInfo = sObjectMgr.GetItemLocale(itemID);
if (pItemInfo)
{
if (pItemInfo->Name.size() > loc && !pItemInfo->Name[loc].empty())
{
const std::string name = pItemInfo->Name[loc];
if (Utf8FitTo(name, wnamepart))
itemName = name.c_str();
}
}
}
void PlayerbotAI::QuestLocalization(std::string& questTitle, const uint32 questID) const
{
uint32 loc = GetMaster()->GetSession()->GetSessionDbLocaleIndex();
std::wstring wnamepart;
QuestLocale const *pQuestInfo = sObjectMgr.GetQuestLocale(questID);
if (pQuestInfo)
{
if (pQuestInfo->Title.size() > loc && !pQuestInfo->Title[loc].empty())
{
const std::string title = pQuestInfo->Title[loc];
if (Utf8FitTo(title, wnamepart))
questTitle = title.c_str();
}
}
}
// handle commands sent through chat channels
void PlayerbotAI::HandleCommand(const std::string& text, Player& fromPlayer)
{
// ignore any messages from Addons
if (text.empty() ||
        text.find("X-Perl") != std::string::npos ||
        text.find("HealBot") != std::string::npos ||
        text.find("LOOT_OPENED") != std::string::npos ||
        text.find("CTRA") != std::string::npos)
return;
    // if the message is not from a player on the master's account, auto-reply and ignore it
if (!canObeyCommandFrom(fromPlayer))
{
std::string msg = "I can't talk to you. Please speak to my master ";
msg += GetMaster()->GetName();
SendWhisper(msg, fromPlayer);
m_bot->HandleEmoteCommand(EMOTE_ONESHOT_NO);
}
// if in the middle of a trade, and player asks for an item/money
else if (m_bot->GetTrader() && m_bot->GetTrader()->GetGUID() == fromPlayer.GetGUID())
{
uint32 copper = extractMoney(text);
if (copper > 0)
TradeCopper(copper);
std::list<uint32> itemIds;
extractItemIds(text, itemIds);
if (itemIds.size() == 0)
SendWhisper("Show me what item you want by shift clicking the item in the chat window.", fromPlayer);
else if( !strncmp( text.c_str(), "nt ", 3 ) )
{
if( itemIds.size() > 1 )
SendWhisper( "There is only one 'Will not be traded' slot. Shift-click just one item, please!", fromPlayer );
else
{
std::list<Item*> itemList;
findItemsInEquip( itemIds, itemList );
findItemsInInv( itemIds, itemList );
if( itemList.size()>0 )
TradeItem( (**itemList.begin()), TRADE_SLOT_NONTRADED );
else
SendWhisper( "I do not have this item equipped or in my bags!", fromPlayer );
}
}
else
{
std::list<Item*> itemList;
findItemsInInv(itemIds, itemList);
for (std::list<Item*>::iterator it = itemList.begin(); it != itemList.end(); ++it)
TradeItem(**it);
}
}
// if we are turning in a quest
else if (text == "reset")
{
SetState( BOTSTATE_NORMAL );
MovementReset();
SetQuestNeedItems();
UpdateAttackerInfo();
m_lootCreature.clear();
m_lootCurrent = 0;
m_targetCombat = 0;
// do we want to reset all states on this command?
// m_combatOrder = ORDERS_NONE;
// m_targetCombat = 0;
// m_targetAssisst = 0;
// m_targetProtect = 0;
}
else if (text == "report")
SendQuestItemList( *GetMaster() );
else if (text == "orders")
SendOrders( *GetMaster() );
else if (text == "follow" || text == "come")
SetMovementOrder( MOVEMENT_FOLLOW, GetMaster() );
else if (text == "stay" || text == "stop")
SetMovementOrder( MOVEMENT_STAY );
else if (text == "attack")
{
uint64 attackOnGuid = fromPlayer.GetSelection();
if (attackOnGuid)
{
Unit* thingToAttack = ObjectAccessor::GetUnit(*m_bot, attackOnGuid);
if (!m_bot->IsFriendlyTo(thingToAttack) && m_bot->IsWithinLOSInMap(thingToAttack))
GetCombatTarget( thingToAttack );
}
else
{
TellMaster("No target is selected.");
m_bot->HandleEmoteCommand(EMOTE_ONESHOT_TALK);
}
}
// handle cast command
    else if ((text.size() > 2 && text.substr(0, 2) == "c ") || (text.size() > 5 && text.substr(0, 5) == "cast "))
{
std::string spellStr = text.substr(text.find(" ") + 1);
uint32 spellId = (uint32) atol(spellStr.c_str());
// try and get spell ID by name
if (spellId == 0)
spellId = getSpellId(spellStr.c_str(), true);
uint64 castOnGuid = fromPlayer.GetSelection();
if (spellId != 0 && castOnGuid != 0 && m_bot->HasSpell(spellId))
{
m_spellIdCommand = spellId;
m_targetGuidCommand = castOnGuid;
}
}
// use items
    else if ((text.size() > 2 && text.substr(0, 2) == "u ") || (text.size() > 4 && text.substr(0, 4) == "use "))
{
std::list<uint32> itemIds;
std::list<Item*> itemList;
extractItemIds(text, itemIds);
findItemsInInv(itemIds, itemList);
for (std::list<Item*>::iterator it = itemList.begin(); it != itemList.end(); ++it)
UseItem(**it);
}
// equip items
    else if ((text.size() > 2 && text.substr(0, 2) == "e ") || (text.size() > 6 && text.substr(0, 6) == "equip "))
{
std::list<uint32> itemIds;
std::list<Item*> itemList;
extractItemIds(text, itemIds);
findItemsInInv(itemIds, itemList);
for (std::list<Item*>::iterator it = itemList.begin(); it != itemList.end(); ++it)
EquipItem(**it);
}
// find item in world
    else if ((text.size() > 2 && text.substr(0, 2) == "f ") || (text.size() > 5 && text.substr(0, 5) == "find "))
{
uint32 guid;
float x,y,z;
uint32 entry;
int mapid;
if(extractGOinfo(text, guid, entry, mapid, x, y, z))
{ // sLog.outDebug("find: guid : %u entry : %u x : (%f) y : (%f) z : (%f) mapid : %d",guid, entry, x, y, z, mapid);
m_bot->UpdateGroundPositionZ(x,y,z);
SetMovementOrder( MOVEMENT_STAY );
m_bot->GetMotionMaster()->MovePoint( mapid, x, y, z );
}
else
SendWhisper("I have no info on that object", fromPlayer);
}
// get project: 18:50 03/05/10 rev.3 allows bots to retrieve all lootable & quest items from gameobjects
    else if ((text.size() > 2 && text.substr(0, 2) == "g ") || (text.size() > 4 && text.substr(0, 4) == "get "))
{
uint32 guid;
float x,y,z;
uint32 entry;
int mapid;
bool looted = false;
if (extractGOinfo(text, guid, entry, mapid, x, y, z))
{
//sLog.outDebug("find: guid : %u entry : %u x : (%f) y : (%f) z : (%f) mapid : %d",guid, entry, x, y, z, mapid);
m_lootCurrent = MAKE_NEW_GUID(guid, entry, HIGHGUID_GAMEOBJECT);
GameObject *go = m_bot->GetMap()->GetGameObject(m_lootCurrent);
if (!go)
{
m_lootCurrent = 0;
return;
}
if ( !go->isSpawned() )
return;
m_bot->UpdateGroundPositionZ(x,y,z);
m_bot->GetMotionMaster()->MovePoint( mapid, x, y, z );
m_bot->SetPosition(x, y, z, m_bot->GetOrientation());
m_bot->SendLoot( m_lootCurrent, LOOT_CORPSE );
Loot *loot = &go->loot;
uint32 lootNum = loot->GetMaxSlotInLootFor( m_bot );
// sLog.outDebug( "[PlayerbotAI]: GetGOType %u - %s looting: '%s' got %d items", go->GetGoType(), m_bot->GetName(), go->GetGOInfo()->name, loot->GetMaxSlotInLootFor( m_bot ));
if(lootNum == 0) // Handle opening gameobjects that contain no items
{
uint32 lockId = go->GetGOInfo()->GetLockId();
LockEntry const *lockInfo = sLockStore.LookupEntry(lockId);
if(lockInfo)
{
for(int i = 0; i < 8; ++i)
{
uint32 skillId = SkillByLockType(LockType(lockInfo->Index[i]));
if(skillId > 0)
{
if (m_bot->HasSkill(skillId)) // Has skill
{
uint32 reqSkillValue = lockInfo->Skill[i];
uint32 SkillValue = m_bot->GetPureSkillValue(skillId);
if (SkillValue >= reqSkillValue)
{
// sLog.outDebug("[PlayerbotAI]i: skillId : %u SkillValue : %u reqSkillValue : %u",skillId,SkillValue,reqSkillValue);
m_bot->UpdateGatherSkill(skillId, SkillValue, reqSkillValue);
looted = true;
}
}
break;
}
}
}
}
for ( uint32 l=0; l<lootNum; l++ )
{
// sLog.outDebug("[PlayerbotAI]: lootNum : %u",lootNum);
QuestItem *qitem=0, *ffaitem=0, *conditem=0;
LootItem *item = loot->LootItemInSlot( l, m_bot, &qitem, &ffaitem, &conditem );
if ( !item )
continue;
if ( !qitem && item->is_blocked )
{
m_bot->SendLootRelease( m_lootCurrent );
continue;
}
if ( m_needItemList[item->itemid]>0 )
{
ItemPosCountVec dest;
if ( m_bot->CanStoreNewItem( NULL_BAG, NULL_SLOT, dest, item->itemid, item->count ) == EQUIP_ERR_OK )
{
Item * newitem = m_bot->StoreNewItem( dest, item->itemid, true, item->randomPropertyId);
if ( qitem )
{<|fim▁hole|> qitem->is_looted = true;
if ( item->freeforall || loot->GetPlayerQuestItems().size() == 1 )
m_bot->SendNotifyLootItemRemoved( l );
else
loot->NotifyQuestItemRemoved( qitem->index );
}
else
{
if ( ffaitem )
{
ffaitem->is_looted=true;
m_bot->SendNotifyLootItemRemoved( l );
}
else
{
if ( conditem )
conditem->is_looted=true;
loot->NotifyItemRemoved( l );
}
}
if (!item->freeforall)
item->is_looted = true;
--loot->unlootedCount;
m_bot->SendNewItem( newitem, uint32(item->count), false, false, true );
m_bot->GetAchievementMgr().UpdateAchievementCriteria( ACHIEVEMENT_CRITERIA_TYPE_LOOT_ITEM, item->itemid, item->count );
looted = true;
}
continue;
}
uint32 lockId = go->GetGOInfo()->GetLockId();
LockEntry const *lockInfo = sLockStore.LookupEntry(lockId);
if(lockInfo)
{
uint32 skillId = 0;
uint32 reqSkillValue = 0;
for(int i = 0; i < 8; ++i)
{
skillId = SkillByLockType(LockType(lockInfo->Index[i]));
if(skillId > 0)
{
reqSkillValue = lockInfo->Skill[i];
break;
}
}
if (m_bot->HasSkill(skillId) || skillId == SKILL_NONE) // Has skill or skill not required
{
if((skillId == SKILL_MINING) && !HasPick())
continue;
ItemPosCountVec dest;
if ( m_bot->CanStoreNewItem( NULL_BAG, NULL_SLOT, dest, item->itemid, item->count) == EQUIP_ERR_OK )
{
Item* pItem = m_bot->StoreNewItem (dest,item->itemid,true,item->randomPropertyId);
uint32 SkillValue = m_bot->GetPureSkillValue(skillId);
if (SkillValue >= reqSkillValue)
{
m_bot->SendNewItem(pItem, uint32(item->count), false, false, true);
m_bot->UpdateGatherSkill(skillId, SkillValue, reqSkillValue);
--loot->unlootedCount;
looted = true;
}
}
}
}
}
// release loot
if(looted)
m_bot->GetSession()->DoLootRelease( m_lootCurrent );
else
m_bot->RemoveFlag(UNIT_FIELD_FLAGS, UNIT_FLAG_LOOTING);
// sLog.outDebug( "[PlayerbotAI]: %s looted target 0x%08X", m_bot->GetName(), m_lootCurrent );
SetQuestNeedItems();
}
else
SendWhisper("I have no info on that object", fromPlayer);
}
else if (text == "quests")
{
bool hasIncompleteQuests = false;
std::ostringstream incomout;
incomout << "my incomplete quests are:";
bool hasCompleteQuests = false;
std::ostringstream comout;
comout << "my complete quests are:";
for (uint16 slot = 0; slot < MAX_QUEST_LOG_SIZE; ++slot)
{
if(uint32 questId = m_bot->GetQuestSlotQuestId(slot))
{
Quest const* pQuest = sObjectMgr.GetQuestTemplate(questId);
std::string questTitle = pQuest->GetTitle();
m_bot->GetPlayerbotAI()->QuestLocalization(questTitle, questId);
if (m_bot->GetQuestStatus(questId) == QUEST_STATUS_COMPLETE) {
hasCompleteQuests = true;
comout << " |cFFFFFF00|Hquest:" << questId << ':' << pQuest->GetQuestLevel() << "|h[" << questTitle << "]|h|r";
}
else {
hasIncompleteQuests = true;
incomout << " |cFFFFFF00|Hquest:" << questId << ':' << pQuest->GetQuestLevel() << "|h[" << questTitle << "]|h|r";
}
}
}
if (hasCompleteQuests)
SendWhisper(comout.str(), fromPlayer);
if (hasIncompleteQuests)
SendWhisper(incomout.str(), fromPlayer);
if (! hasCompleteQuests && ! hasIncompleteQuests)
SendWhisper("I have no quests!", fromPlayer);
}
// drop a quest
else if (text.size() > 5 && text.substr(0, 5) == "drop ")
{
uint64 oldSelectionGUID = 0;
if (fromPlayer.GetSelection() != m_bot->GetGUID()) {
oldSelectionGUID = m_bot->GetGUID();
fromPlayer.SetSelection(m_bot->GetGUID());
}
PlayerbotChatHandler ch(GetMaster());
if (! ch.dropQuest(text.substr(5).c_str()))
ch.sysmessage("ERROR: could not drop quest");
if (oldSelectionGUID)
fromPlayer.SetSelection(oldSelectionGUID);
}
else if (text == "spells")
{
int loc = GetMaster()->GetSession()->GetSessionDbcLocale();
std::ostringstream posOut;
std::ostringstream negOut;
const std::string ignoreList = ",Opening,Closing,Stuck,Remove Insignia,Opening - No Text,Grovel,Duel,Honorless Target,";
std::string alreadySeenList = ",";
for (PlayerSpellMap::iterator itr = m_bot->GetSpellMap().begin(); itr != m_bot->GetSpellMap().end(); ++itr) {
const uint32 spellId = itr->first;
if (itr->second.state == PLAYERSPELL_REMOVED || itr->second.disabled || IsPassiveSpell(spellId))
continue;
const SpellEntry* const pSpellInfo = sSpellStore.LookupEntry(spellId);
if (!pSpellInfo)
continue;
//|| name.find("Teleport") != -1
std::string comp = ",";
comp.append(pSpellInfo->SpellName[loc]);
comp.append(",");
if (!(ignoreList.find(comp) == std::string::npos && alreadySeenList.find(comp) == std::string::npos))
continue;
alreadySeenList += pSpellInfo->SpellName[loc];
alreadySeenList += ",";
if (IsPositiveSpell(spellId))
posOut << " |cffffffff|Hspell:" << spellId << "|h["
<< pSpellInfo->SpellName[loc] << "]|h|r";
else
negOut << " |cffffffff|Hspell:" << spellId << "|h["
<< pSpellInfo->SpellName[loc] << "]|h|r";
}
ChatHandler ch(&fromPlayer);
SendWhisper("here's my non-attack spells:", fromPlayer);
ch.SendSysMessage(posOut.str().c_str());
SendWhisper("and here's my attack spells:", fromPlayer);
ch.SendSysMessage(negOut.str().c_str());
}
// survey project: 18:30 29/04/10 rev.3 filter out event triggered objects & now updates list
else if (text == "survey")
{
uint32 count = 0;
std::ostringstream detectout;
QueryResult *result;
GameEventMgr::ActiveEvents const& activeEventsList = sGameEventMgr.GetActiveEventList();
std::ostringstream eventFilter;
eventFilter << " AND (event IS NULL ";
bool initString = true;
for (GameEventMgr::ActiveEvents::const_iterator itr = activeEventsList.begin(); itr != activeEventsList.end(); ++itr)
{
if (initString)
{
eventFilter << "OR event IN (" <<*itr;
initString =false;
}
else
eventFilter << "," << *itr;
}
if (!initString)
eventFilter << "))";
else
eventFilter << ")";
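        // Illustrative result: with active events 1 and 12 the filter string is
        // " AND (event IS NULL OR event IN (1,12))"; with no active events it is
        // just " AND (event IS NULL )".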
result = WorldDatabase.PQuery("SELECT gameobject.guid, id, position_x, position_y, position_z, map, "
"(POW(position_x - %f, 2) + POW(position_y - %f, 2) + POW(position_z - %f, 2)) AS order_ FROM gameobject "
"LEFT OUTER JOIN game_event_gameobject on gameobject.guid=game_event_gameobject.guid WHERE map = '%i' %s ORDER BY order_ ASC LIMIT 10",
m_bot->GetPositionX(), m_bot->GetPositionY(), m_bot->GetPositionZ(), m_bot->GetMapId(),eventFilter.str().c_str());
if (result)
{
do
{
Field *fields = result->Fetch();
uint32 guid = fields[0].GetUInt32();
uint32 entry = fields[1].GetUInt32();
float x = fields[2].GetFloat();
float y = fields[3].GetFloat();
float z = fields[4].GetFloat();
int mapid = fields[5].GetUInt16();
GameObject *go = m_bot->GetMap()->GetGameObject(MAKE_NEW_GUID(guid, entry, HIGHGUID_GAMEOBJECT));
if (!go)
continue;
if ( !go->isSpawned() )
continue;
detectout << "|cFFFFFF00|Hfound:" << guid << ":" << entry << ":" << x << ":" << y << ":" << z << ":" << mapid << ":" << "|h[" << go->GetGOInfo()->name << "]|h|r";
++count;
} while (result->NextRow());
delete result;
}
SendWhisper(detectout.str().c_str(), fromPlayer);
}
// stats project: 10:00 19/04/10 rev.1 display bot statistics
else if (text == "stats")
{
std::ostringstream out;
uint32 totalused = 0;
// list out items in main backpack
for (uint8 slot = INVENTORY_SLOT_ITEM_START; slot < INVENTORY_SLOT_ITEM_END; slot++)
{
const Item* const pItem = m_bot->GetItemByPos(INVENTORY_SLOT_BAG_0, slot);
if (pItem)
totalused++;
}
uint32 totalfree = 16 - totalused;
// list out items in other removable backpacks
for (uint8 bag = INVENTORY_SLOT_BAG_START; bag < INVENTORY_SLOT_BAG_END; ++bag)
{
const Bag* const pBag = (Bag*) m_bot->GetItemByPos(INVENTORY_SLOT_BAG_0, bag);
if (pBag)
{
ItemPrototype const* pBagProto = pBag->GetProto();
if (pBagProto->Class == ITEM_CLASS_CONTAINER && pBagProto->SubClass == ITEM_SUBCLASS_CONTAINER)
totalfree = totalfree + pBag->GetFreeSlots();
}
}
// calculate how much money bot has
uint32 copper = m_bot->GetMoney();
uint32 gold = uint32(copper / 10000);
copper -= (gold * 10000);
uint32 silver = uint32(copper / 100);
copper -= (silver * 100);
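        // Worked example (illustrative): 123456 copper -> gold = 12, silver = 34,
        // copper = 56, shown below as 12g 34s 56c.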
out << "|cffffffff[|h|cff00ffff" << m_bot->GetName() << "|h|cffffffff]" << " has |r|cff00ff00" << gold
<< "|r|cfffffc00g|r|cff00ff00" << silver
<< "|r|cffcdcdcds|r|cff00ff00" << copper
<< "|r|cffffd333c" << "|h|cffffffff bag slots |h|cff00ff00" << totalfree;
// estimate how much item damage the bot has
copper = EstRepairAll();
gold = uint32(copper / 10000);
copper -= (gold * 10000);
silver = uint32(copper / 100);
copper -= (silver * 100);
out << "|h|cffffffff & item damage cost " << "|r|cff00ff00" << gold
<< "|r|cfffffc00g|r|cff00ff00" << silver
<< "|r|cffcdcdcds|r|cff00ff00" << copper
<< "|r|cffffd333c";
ChatHandler ch(&fromPlayer);
ch.SendSysMessage(out.str().c_str());
}
else
{
        // if this looks like an item link, reward that item if it is a reward choice for a completed quest being turned in to the selected NPC
std::list<uint32> itemIds;
extractItemIds(text, itemIds);
if (!itemIds.empty()) {
uint32 itemId = itemIds.front();
bool wasRewarded = false;
uint64 questRewarderGUID = m_bot->GetSelection();
Object* const pNpc = (WorldObject*) m_bot->GetObjectByTypeMask(questRewarderGUID, TYPEMASK_CREATURE_OR_GAMEOBJECT);
if (!pNpc)
return;
QuestMenu& questMenu = m_bot->PlayerTalkClass->GetQuestMenu();
for (uint32 iI = 0; !wasRewarded && iI < questMenu.MenuItemCount(); ++iI)
{
QuestMenuItem const& qItem = questMenu.GetItem(iI);
uint32 questID = qItem.m_qId;
Quest const* pQuest = sObjectMgr.GetQuestTemplate(questID);
QuestStatus status = m_bot->GetQuestStatus(questID);
// if quest is complete, turn it in
if (status == QUEST_STATUS_COMPLETE &&
! m_bot->GetQuestRewardStatus(questID) &&
pQuest->GetRewChoiceItemsCount() > 1 &&
m_bot->CanRewardQuest(pQuest, false))
{
for (uint8 rewardIdx=0; !wasRewarded && rewardIdx < pQuest->GetRewChoiceItemsCount(); ++rewardIdx)
{
ItemPrototype const * const pRewardItem = sObjectMgr.GetItemPrototype(pQuest->RewChoiceItemId[rewardIdx]);
if (itemId == pRewardItem->ItemId)
{
m_bot->RewardQuest(pQuest, rewardIdx, pNpc, false);
std::string questTitle = pQuest->GetTitle();
m_bot->GetPlayerbotAI()->QuestLocalization(questTitle, questID);
std::string itemName = pRewardItem->Name1;
m_bot->GetPlayerbotAI()->ItemLocalization(itemName, pRewardItem->ItemId);
std::ostringstream out;
out << "|cffffffff|Hitem:" << pRewardItem->ItemId << ":0:0:0:0:0:0:0" << "|h[" << itemName << "]|h|r rewarded";
SendWhisper(out.str(), fromPlayer);
wasRewarded = true;
}
}
}
}
}
else {
std::string msg = "What? follow, stay, (c)ast <spellname>, spells, (e)quip <itemlink>, (u)se <itemlink>, drop <questlink>, report, quests, stats";
SendWhisper(msg, fromPlayer);
m_bot->HandleEmoteCommand(EMOTE_ONESHOT_TALK);
}
}
}<|fim▁end|> | |
<|file_name|>visualization_cellsystem.py<|end_file_name|><|fim▁begin|># Copyright (C) 2010-2019 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Visualize the system cells and MPI domains. Run ESPResSo in parallel
to color particles by node. With OpenMPI, this can be achieved using
``mpiexec -n 4 ./pypresso ../samples/visualization_cellsystem.py``.
Set property ``system.cell_system.node_grid = [i, j, k]`` (with ``i * j * k``
equal to the number of MPI ranks) to change the way the cellsystem is
partitioned. Only the domain of MPI rank 0 will be shown in wireframe.
"""
import espressomd
import espressomd.visualization_opengl
import numpy as np
required_features = ["LENNARD_JONES"]
espressomd.assert_features(required_features)
<|fim▁hole|> system,
window_size=[800, 800],
background_color=[0, 0, 0],
camera_position=[20, 15, 80],
particle_coloring='node',
draw_nodes=True,
draw_cells=True)
system.time_step = 0.0005
system.cell_system.set_regular_decomposition(use_verlet_lists=True)
system.cell_system.skin = 0.4
#system.cell_system.node_grid = [i, j, k]
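# Illustrative choice (assuming the 4 MPI ranks from the mpiexec example above):
# system.cell_system.node_grid = [2, 2, 1]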
for i in range(100):
system.part.add(pos=box * np.random.random(3))
system.non_bonded_inter[0, 0].lennard_jones.set_params(
epsilon=100.0, sigma=1.0, cutoff=3.0, shift="auto")
energy = system.analysis.energy()
print(f"Before Minimization: E_total = {energy['total']:.2e}")
system.integrator.set_steepest_descent(f_max=50, gamma=30.0,
max_displacement=0.001)
system.integrator.run(10000)
system.integrator.set_vv()
energy = system.analysis.energy()
print(f"After Minimization: E_total = {energy['total']:.2e}")
print("Tune skin")
system.cell_system.tune_skin(0.1, 4.0, 1e-1, 1000)
print(system.cell_system.get_state())
system.thermostat.set_langevin(kT=1, gamma=1, seed=42)
visualizer.run(1)<|fim▁end|> | box = [40, 30, 20]
system = espressomd.System(box_l=box)
visualizer = espressomd.visualization_opengl.openGLLive( |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.shortcuts import render
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.shortcuts import redirect
from main.models import Link
from main.models import Tag<|fim▁hole|># Create your views here.
def index(request):
context = RequestContext(request)
links = Link.objects.all()
return render_to_response('main/index.html', {'links': links}, context)
def tags(request):
context = RequestContext(request)
tags = Tag.objects.all()
return render_to_response('main/tags.html', {'tags': tags}, context)
def tag(request, tag_name):
context = RequestContext(request)
the_tag = Tag.objects.get(name=tag_name)
links=the_tag.link_set.all()
return render_to_response('main/index.html',{'links':links, 'tag_name': '#' + tag_name}, context)
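# add_link below creates (or reuses) a Link from the POSTed url/title and
# attaches each comma-separated tag, creating missing Tag rows on the fly.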
def add_link(request):
context = RequestContext(request)
if request.method == 'POST':
url = request.POST.get("url","")
tags = request.POST.get("tags","")
title = request.POST.get("title","")
tags = tags.split(',')
l = Link.objects.get_or_create(title=title, url=url)[0]
for x in tags:
l.tags.add(Tag.objects.get_or_create(name=x)[0])
return redirect(index)<|fim▁end|> | |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from distutils.core import setup
setup(
name="kafka-python",
version="0.1-alpha",<|fim▁hole|> packages=["kafka"],
license="Copyright 2012, David Arthur under Apache License, v2.0",
description="Pure Python client for Apache Kafka",
long_description=open("README.md").read(),
)<|fim▁end|> | author="David Arthur",
author_email="[email protected]",
url="https://github.com/mumrah/kafka-python", |
<|file_name|>ConfigurationPanel.java<|end_file_name|><|fim▁begin|>package es.uvigo.esei.sing.bdbm.gui.configuration;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import java.util.EventListener;
import java.util.EventObject;
import java.util.concurrent.Callable;
import javax.swing.AbstractAction;
import javax.swing.GroupLayout;
import javax.swing.GroupLayout.Alignment;
import javax.swing.JButton;
import javax.swing.JFileChooser;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;<|fim▁hole|>import javax.swing.JTextField;
import es.uvigo.esei.sing.bdbm.gui.BDBMGUIController;
public class ConfigurationPanel extends JPanel {
private static final long serialVersionUID = 1L;
private final BDBMGUIController controller;
private final JTextField txtRepository;
private final JTextField txtBLAST;
private final JTextField txtEMBOSS;
private final JTextField txtBedTools;
private final JTextField txtSplign;
private final JTextField txtCompart;
private final JButton btnBuildRepository;
public ConfigurationPanel(BDBMGUIController controller) {
super();
this.controller = controller;
// this.setPreferredSize(new Dimension(600, 140));
final GroupLayout layout = new GroupLayout(this);
layout.setAutoCreateContainerGaps(true);
layout.setAutoCreateGaps(true);
this.setLayout(layout);
final JLabel lblRepository = new JLabel("Repository Path");
final JLabel lblBLAST = new JLabel("BLAST Path");
final JLabel lblEMBOSS = new JLabel("EMBOSS Path");
final JLabel lblBedTools = new JLabel("BedTools Path");
final JLabel lblSplign = new JLabel("Splign Path");
final JLabel lblCompart = new JLabel("Compart Path");
final File repositoryPath = this.controller.getEnvironment()
.getRepositoryPaths().getBaseDirectory();
final File blastBD = this.controller.getEnvironment()
.getBLASTBinaries().getBaseDirectory();
final File embossBD = this.controller.getEnvironment()
.getEMBOSSBinaries().getBaseDirectory();
final File bedToolsBD = this.controller.getEnvironment()
.getBedToolsBinaries().getBaseDirectory();
final File splignBD = this.controller.getEnvironment()
.getSplignBinaries().getBaseDirectory();
final File compartBD = this.controller.getEnvironment()
.getCompartBinaries().getBaseDirectory();
this.txtRepository = new JTextField(repositoryPath.getAbsolutePath());
this.txtBLAST = new JTextField(blastBD == null ? "" : blastBD.getAbsolutePath());
this.txtEMBOSS = new JTextField(embossBD == null ? "" : embossBD.getAbsolutePath());
this.txtBedTools = new JTextField(bedToolsBD == null ? "" : bedToolsBD.getAbsolutePath());
this.txtSplign = new JTextField(splignBD == null ? "" : splignBD.getAbsolutePath());
this.txtCompart = new JTextField(compartBD == null ? "" : compartBD.getAbsolutePath());
this.txtRepository.setEditable(false);
this.txtBLAST.setEditable(false);
this.txtEMBOSS.setEditable(false);
this.txtBedTools.setEditable(false);
this.txtSplign.setEditable(false);
this.txtCompart.setEditable(false);
final JButton btnRepository = new JButton("Select...");
final JButton btnBLASTSelect = new JButton("Select...");
final JButton btnEMBOSSSelect = new JButton("Select...");
final JButton btnBedToolsSelect = new JButton("Select...");
final JButton btnSplignSelect = new JButton("Select...");
final JButton btnCompartSelect = new JButton("Select...");
final JButton btnBLASTInPath = new JButton("In system path");
final JButton btnEMBOSSInPath = new JButton("In system path");
final JButton btnBedToolsInPath = new JButton("In system path");
final JButton btnSplignInPath = new JButton("In system path");
final JButton btnCompartInPath = new JButton("In system path");
this.btnBuildRepository = new JButton(new AbstractAction("Build") {
private static final long serialVersionUID = 1L;
@Override
public void actionPerformed(ActionEvent e) {
ConfigurationPanel.this.buildRepository();
}
});
this.btnBuildRepository.setEnabled(false);
layout.setVerticalGroup(layout.createSequentialGroup()
.addGroup(layout.createParallelGroup()
.addComponent(lblRepository, Alignment.CENTER)
.addComponent(this.txtRepository)
.addComponent(btnRepository)
.addComponent(this.btnBuildRepository)
)
.addGroup(layout.createParallelGroup()
.addComponent(lblBLAST, Alignment.CENTER)
.addComponent(this.txtBLAST)
.addComponent(btnBLASTSelect)
.addComponent(btnBLASTInPath)
)
.addGroup(layout.createParallelGroup()
.addComponent(lblEMBOSS, Alignment.CENTER)
.addComponent(this.txtEMBOSS)
.addComponent(btnEMBOSSSelect)
.addComponent(btnEMBOSSInPath)
)
.addGroup(layout.createParallelGroup()
.addComponent(lblBedTools, Alignment.CENTER)
.addComponent(this.txtBedTools)
.addComponent(btnBedToolsSelect)
.addComponent(btnBedToolsInPath)
)
.addGroup(layout.createParallelGroup()
.addComponent(lblSplign, Alignment.CENTER)
.addComponent(this.txtSplign)
.addComponent(btnSplignSelect)
.addComponent(btnSplignInPath)
)
.addGroup(layout.createParallelGroup()
.addComponent(lblCompart, Alignment.CENTER)
.addComponent(this.txtCompart)
.addComponent(btnCompartSelect)
.addComponent(btnCompartInPath)
)
);
layout.setHorizontalGroup(layout.createSequentialGroup()
.addGroup(layout.createParallelGroup()
.addComponent(lblRepository)
.addComponent(lblBLAST)
.addComponent(lblEMBOSS)
.addComponent(lblBedTools)
.addComponent(lblSplign)
.addComponent(lblCompart)
)
.addGroup(layout.createParallelGroup()
.addComponent(this.txtRepository)
.addComponent(this.txtBLAST)
.addComponent(this.txtEMBOSS)
.addComponent(this.txtBedTools)
.addComponent(this.txtSplign)
.addComponent(this.txtCompart)
)
.addGroup(layout.createParallelGroup()
.addComponent(btnRepository)
.addComponent(btnBLASTSelect)
.addComponent(btnEMBOSSSelect)
.addComponent(btnBedToolsSelect)
.addComponent(btnSplignSelect)
.addComponent(btnCompartSelect)
)
.addGroup(layout.createParallelGroup()
.addComponent(this.btnBuildRepository)
.addComponent(btnBLASTInPath)
.addComponent(btnEMBOSSInPath)
.addComponent(btnBedToolsInPath)
.addComponent(btnSplignInPath)
.addComponent(btnCompartInPath)
)
);
final Callable<Boolean> callbackRepositorySelection = new Callable<Boolean>() {
@Override
public Boolean call() {
if (ConfigurationPanel.this.isValidRepositoryPath()) {
btnBuildRepository.setEnabled(false);
} else {
btnBuildRepository.setEnabled(true);
if (JOptionPane.showConfirmDialog(
ConfigurationPanel.this,
"Repository path does not exist or its structure is incomplete. Do you wish to build repository structure?",
"Invalid Repository",
JOptionPane.YES_NO_OPTION,
JOptionPane.WARNING_MESSAGE
) == JOptionPane.YES_OPTION) {
ConfigurationPanel.this.buildRepository();
}
}
return true;
}
};
btnRepository.addActionListener(
new PathSelectionActionListener(this.txtRepository, callbackRepositorySelection)
);
final Callable<Boolean> callbackCheckBLAST = new Callable<Boolean>() {
@Override
public Boolean call() {
if (ConfigurationPanel.this.isValidBLASTPath()) {
return true;
} else {
JOptionPane.showMessageDialog(
ConfigurationPanel.this,
"Invalid BLAST binaries path. Please, change the selected path",
"Invalid Path",
JOptionPane.ERROR_MESSAGE
);
return false;
}
}
};
btnBLASTSelect.addActionListener(
new PathSelectionActionListener(this.txtBLAST, callbackCheckBLAST)
);
btnBLASTInPath.addActionListener(
new SystemPathSelectionActionListener(this.txtBLAST, callbackCheckBLAST)
);
final Callable<Boolean> callbackCheckEMBOSS = new Callable<Boolean>() {
@Override
public Boolean call() throws Exception {
if (ConfigurationPanel.this.isValidEMBOSSPath()) {
return true;
} else {
JOptionPane.showMessageDialog(
ConfigurationPanel.this,
"Invalid EMBOSS binaries path. Please, change the selected path",
"Invalid Path",
JOptionPane.ERROR_MESSAGE
);
return false;
}
}
};
btnEMBOSSSelect.addActionListener(
new PathSelectionActionListener(this.txtEMBOSS, callbackCheckEMBOSS)
);
btnEMBOSSInPath.addActionListener(
new SystemPathSelectionActionListener(this.txtEMBOSS, callbackCheckEMBOSS)
);
final Callable<Boolean> callbackCheckBedTools = new Callable<Boolean>() {
@Override
public Boolean call() throws Exception {
if (ConfigurationPanel.this.isValidBedToolsPath()) {
return true;
} else {
JOptionPane.showMessageDialog(
ConfigurationPanel.this,
"Invalid bedtools binaries path. Please, change the selected path",
"Invalid Path",
JOptionPane.ERROR_MESSAGE
);
return false;
}
}
};
btnBedToolsSelect.addActionListener(
new PathSelectionActionListener(this.txtBedTools, callbackCheckBedTools)
);
btnBedToolsInPath.addActionListener(
new SystemPathSelectionActionListener(this.txtBedTools, callbackCheckBedTools)
);
final Callable<Boolean> callbackCheckSplign = new Callable<Boolean>() {
@Override
public Boolean call() throws Exception {
if (ConfigurationPanel.this.isValidSplignPath()) {
return true;
} else {
JOptionPane.showMessageDialog(
ConfigurationPanel.this,
"Invalid splign binaries path. Please, change the selected path",
"Invalid Path",
JOptionPane.ERROR_MESSAGE
);
return false;
}
}
};
btnSplignSelect.addActionListener(
new PathSelectionActionListener(this.txtSplign, callbackCheckSplign)
);
btnSplignInPath.addActionListener(
new SystemPathSelectionActionListener(this.txtSplign, callbackCheckSplign)
);
final Callable<Boolean> callbackCheckCompart = new Callable<Boolean>() {
@Override
public Boolean call() throws Exception {
if (ConfigurationPanel.this.isValidCompartPath()) {
return true;
} else {
JOptionPane.showMessageDialog(
ConfigurationPanel.this,
"Invalid compart binaries path. Please, change the selected path",
"Invalid Path",
JOptionPane.ERROR_MESSAGE
);
return false;
}
}
};
btnCompartSelect.addActionListener(
new PathSelectionActionListener(this.txtCompart, callbackCheckCompart)
);
btnCompartInPath.addActionListener(
new SystemPathSelectionActionListener(this.txtCompart, callbackCheckCompart)
);
}
public void addConfigurationChangeListener(ConfigurationChangeEventListener listener) {
this.listenerList.add(ConfigurationChangeEventListener.class, listener);
}
public void removeConfigurationChangeListener(ConfigurationChangeEventListener listener) {
this.listenerList.remove(ConfigurationChangeEventListener.class, listener);
}
protected void fireChangeEvent(ConfigurationChangeEvent event) {
final ConfigurationChangeEventListener[] listeners =
this.listenerList.getListeners(ConfigurationChangeEventListener.class);
for (ConfigurationChangeEventListener listener : listeners) {
listener.configurationChanged(event);
}
}
protected void fireChange() {
this.fireChangeEvent(new ConfigurationChangeEvent(this));
}
protected File getRepositoryDirectory() {
return new File(this.txtRepository.getText());
}
protected String getBLASTPath() {
return this.txtBLAST.getText().isEmpty() ?
null : new File(this.txtBLAST.getText()).getAbsolutePath();
}
protected String getEMBOSSPath() {
return this.txtEMBOSS.getText().isEmpty() ?
null : new File(this.txtEMBOSS.getText()).getAbsolutePath();
}
protected String getBedToolsPath() {
return this.txtBedTools.getText().isEmpty() ?
null : new File(this.txtBedTools.getText()).getAbsolutePath();
}
protected String getSplignPath() {
return this.txtSplign.getText().isEmpty() ?
null : new File(this.txtSplign.getText()).getAbsolutePath();
}
protected String getCompartPath() {
return this.txtCompart.getText().isEmpty() ?
null : new File(this.txtCompart.getText()).getAbsolutePath();
}
public boolean isValidRepositoryPath() {
return this.controller.getEnvironment()
.getRepositoryPaths()
.checkBaseDirectory(getRepositoryDirectory());
}
public boolean isValidBLASTPath() {
return this.controller.getManager().checkBLASTPath(getBLASTPath());
}
protected boolean isValidEMBOSSPath() {
return this.controller.getManager().checkEMBOSSPath(getEMBOSSPath());
}
protected boolean isValidBedToolsPath() {
return this.controller.getManager().checkBedToolsPath(getBedToolsPath());
}
protected boolean isValidSplignPath() {
return this.controller.getManager().checkSplignPath(getSplignPath());
}
protected boolean isValidCompartPath() {
return this.controller.getManager().checkCompartPath(getCompartPath());
}
protected void buildRepository() {
try {
this.controller.getEnvironment()
.getRepositoryPaths()
.buildBaseDirectory(this.getRepositoryDirectory());
this.btnBuildRepository.setEnabled(false);
JOptionPane.showMessageDialog(
ConfigurationPanel.this,
"Repository structure was correctly built.",
"Repository Built",
JOptionPane.INFORMATION_MESSAGE
);
this.fireChange();
} catch (Exception e) {
this.btnBuildRepository.setEnabled(true);
JOptionPane.showMessageDialog(
ConfigurationPanel.this,
"Error building repository. Please, check path and press 'Build' or change path",
"Repository Building Error",
JOptionPane.ERROR_MESSAGE
);
}
}
public PathsConfiguration getConfiguration() {
if (this.isValidRepositoryPath() && this.isValidBLASTPath()) {
final String blastPath = this.getBLASTPath();
final String embossPath = this.getEMBOSSPath();
final String bedToolsPath = this.getBedToolsPath();
final String splignPath = this.getSplignPath();
final String compartPath = this.getCompartPath();
return new PathsConfiguration(
this.getRepositoryDirectory(),
blastPath == null ? null : new File(blastPath),
embossPath == null ? null : new File(embossPath),
bedToolsPath == null ? null : new File(bedToolsPath),
splignPath == null ? null : new File(splignPath),
compartPath == null ? null : new File(compartPath)
);
} else {
return null;
}
}
private final class SystemPathSelectionActionListener implements
ActionListener {
private final JTextField txtAssociated;
private final Callable<Boolean> callback;
private SystemPathSelectionActionListener(JTextField txtAssociated, Callable<Boolean> callback) {
this.txtAssociated = txtAssociated;
this.callback = callback;
}
@Override
public void actionPerformed(ActionEvent e) {
final String previousPath = this.txtAssociated.getText();
this.txtAssociated.setText("");
try {
if (this.callback.call()) {
ConfigurationPanel.this.fireChange();
} else {
txtAssociated.setText(previousPath);
}
} catch (Exception ex) {
throw new RuntimeException(ex);
}
}
}
private final class PathSelectionActionListener implements ActionListener {
private final JTextField txtAssociated;
private final Callable<Boolean> callback;
private PathSelectionActionListener(JTextField txtAssociated, Callable<Boolean> callback) {
this.txtAssociated = txtAssociated;
this.callback = callback;
}
@Override
public void actionPerformed(ActionEvent e) {
final JFileChooser chooser = new JFileChooser(
new File(txtAssociated.getText())
);
chooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
chooser.setMultiSelectionEnabled(false);
if (chooser.showOpenDialog(ConfigurationPanel.this) == JFileChooser.APPROVE_OPTION) {
final String previousPath = txtAssociated.getText();
txtAssociated.setText(chooser.getSelectedFile().getAbsolutePath());
try {
if (this.callback.call()) {
ConfigurationPanel.this.fireChange();
} else {
txtAssociated.setText(previousPath);
}
} catch (Exception e1) {
throw new RuntimeException(e1);
}
}
}
}
public static class ConfigurationChangeEvent extends EventObject {
private static final long serialVersionUID = 1L;
private final PathsConfiguration configuration;
protected ConfigurationChangeEvent(ConfigurationPanel panel) {
this(panel, panel.getConfiguration());
}
public ConfigurationChangeEvent(Object source, PathsConfiguration configuration) {
super(source);
this.configuration = configuration;
}
public PathsConfiguration getConfiguration() {
return configuration;
}
}
public static interface ConfigurationChangeEventListener extends EventListener {
public void configurationChanged(ConfigurationChangeEvent event);
}
}<|fim▁end|> | |
<|file_name|>shader.cpp<|end_file_name|><|fim▁begin|>#include "shader.h"
#include <iostream>
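// Thin wrapper around a compiled GL program: use() binds it, the setters cache
// camera/view/projection/model state, and subclasses override set_material()
// and draw().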
Shader::Shader(GLuint shader_id)
: shader_id(shader_id) {}
void Shader::use()
{
glUseProgram(shader_id);
}
void Shader::send_cam_pos(glm::vec3 cam_pos)
{
this->cam_pos = cam_pos;
}<|fim▁hole|>{
this->V = V;
this->P = P;
}
void Shader::send_mesh_model(glm::mat4 mesh_model)
{
this->mesh_model = mesh_model;
}
void Shader::set_material(Material m) {}
void Shader::draw(Geometry *g, glm::mat4 to_world) {}<|fim▁end|> |
void Shader::set_VP(glm::mat4 V, glm::mat4 P) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""Charm Helpers saltstack - declare the state of your machines.
This helper enables you to declare your machine state, rather than
program it procedurally (and have to test each change to your procedures).
Your install hook can be as simple as:
{{{
from charmhelpers.contrib.saltstack import (
install_salt_support,
update_machine_state,
)
def install():
install_salt_support()
update_machine_state('machine_states/dependencies.yaml')
update_machine_state('machine_states/installed.yaml')
}}}
and won't need to change (nor will its tests) when you change the machine
state.
It uses a python package called salt-minion, which allows various formats for
specifying resources, such as:
{{{
/srv/{{ basedir }}:<|fim▁hole|> file.directory:
- group: ubunet
- user: ubunet
- require:
- user: ubunet
- recurse:
- user
- group
ubunet:
group.present:
- gid: 1500
user.present:
- uid: 1500
- gid: 1500
- createhome: False
- require:
- group: ubunet
}}}
The docs for all the different state definitions are at:
http://docs.saltstack.com/ref/states/all/
TODO:
* Add test helpers which will ensure that machine state definitions
are functionally (but not necessarily logically) correct (ie. getting
salt to parse all state defs.
* Add a link to a public bootstrap charm example / blogpost.
* Find a way to obviate the need to use the grains['charm_dir'] syntax
in templates.
"""
# Copyright 2013 Canonical Ltd.
#
# Authors:
# Charm Helpers Developers <[email protected]>
import subprocess
import charmhelpers.contrib.templating.contexts
import charmhelpers.core.host
import charmhelpers.core.hookenv
salt_grains_path = '/etc/salt/grains'
def install_salt_support(from_ppa=True):
"""Installs the salt-minion helper for machine state.
By default the salt-minion package is installed from
the saltstack PPA. If from_ppa is False you must ensure
that the salt-minion package is available in the apt cache.
"""
if from_ppa:
subprocess.check_call([
'/usr/bin/add-apt-repository',
'--yes',
'ppa:saltstack/salt',
])
subprocess.check_call(['/usr/bin/apt-get', 'update'])
# We install salt-common as salt-minion would run the salt-minion
# daemon.
charmhelpers.fetch.apt_install('salt-common')
def update_machine_state(state_path):
"""Update the machine state using the provided state declaration."""
charmhelpers.contrib.templating.contexts.juju_state_to_yaml(
salt_grains_path)
subprocess.check_call([
'salt-call',
'--local',
'state.template',
state_path,
])<|fim▁end|> | |
<|file_name|>server.py<|end_file_name|><|fim▁begin|># Copyright 2018 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,<|fim▁hole|># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Server to run Google Data Loss Prevention API DeID.
For now, no authentication is implemented, to be run on localhost.
Requires Apache Beam client, Flask, Google Python API Client:
pip install --upgrade apache_beam
pip install --upgrade flask
pip install --upgrade google-api-python-client
"""
from __future__ import absolute_import
from datetime import datetime
import json
import logging
import posixpath
from apiclient import discovery
import enum
import flask
from common import gcsutil
from common import unicodecsv
from deid_app.backend import config
from deid_app.backend import model
from dlp import run_deid_lib
from eval import run_pipeline_lib as eval_lib
import jsonschema
from google.cloud import bigquery
from google.cloud import storage
from google.cloud.exceptions import exceptions
logging.getLogger().setLevel(logging.INFO)
app = flask.Flask(__name__)
app.config.from_object(config.Config)
bq_client = bigquery.Client(app.config['PROJECT_ID'])
with app.app_context():
model.init_app(app)
PATIENT_ID = 'patient_id'
RECORD_NUM = 'record_number'
NOTE = 'note'
FINDINGS = 'findings'
EXPECTED_CSV_SCHEMA = [RECORD_NUM, PATIENT_ID, NOTE]
EXPECTED_FINDINGS_SCHEMA = [RECORD_NUM, PATIENT_ID, FINDINGS,
run_deid_lib.DLP_FINDINGS_TIMESTAMP]
EXPECTED_OUTPUT_SCHEMA = (EXPECTED_CSV_SCHEMA +
[run_deid_lib.DLP_DEID_TIMESTAMP])
CSV_FIELD_TYPE = {
RECORD_NUM: 'INT64',
PATIENT_ID: 'STRING',
NOTE: 'STRING',
}
deid_schema = {
'type':
'object',
'properties': {
'name': {
'type': 'string'
},
'inputMethod': {
'type': 'string'
},
'inputInfo': {
'type': 'string'
},
'outputMethod': {
'type': 'string'
},
'outputInfo': {
'type': 'string'
},
'findingsTable': {
'type': 'string'
},
'maeTable': {
'type': 'string'
},
'maeDir': {
'type': 'string'
},
'batchSize': {
'type': 'number'
},
},
'required': [
'name',
'inputMethod',
'inputInfo',
'outputMethod',
'outputInfo',
],
}
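# Illustrative deidentify request body (hypothetical table names) that
# validates against deid_schema and uses the inputMethod/outputMethod values
# handled by deidentify() below:
# {
#     "name": "demo-run",
#     "inputMethod": "input_table",
#     "inputInfo": "my_dataset.notes",
#     "outputMethod": "deid_table",
#     "outputInfo": "my_dataset.notes_deid",
#     "findingsTable": "my_dataset.findings",
#     "batchSize": 10
# }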
eval_pipeline_shema = {
'type': 'object',
'properties': {
'name': {'type': 'string'},
'input': {
'type': 'object',
'properties': {
'gcs': {
'type': 'object',
'properties': {
'pattern': {'type': 'string'},
'golden': {'type': 'string'},
},
'required': [
'pattern',
'golden',
],
},
'bigquery': {
'type': 'object',
'properties': {
'query': {'type': 'string'},
'golden': {'type': 'string'},
},
'required': [
'query',
'golden',
],
},
},
'oneOf': [
{'required': ['gcs']},
{'required': ['bigquery']},
],
},
'output': {
'type': 'object',
'properties': {
'gcs': {
'type': 'object',
'properties': {
'dir': {'type': 'string'},
'debug': {'type': 'boolean'},
},
'required': [
'dir',
'debug',
],
},
'bigquery': {
'type': 'object',
'properties': {
'stats': {'type': 'string'},
'debug': {'type': 'string'},
'perNote': {'type': 'string'},
},
'required': [
'stats',
'debug',
],
},
},
'anyOf': [
{'required': ['gcs']},
{'required': ['bigquery']},
],
},
'ignoreTypes': {
'type': 'array',
'items': {'type': 'string'},
},
},
'required': [
'name',
'input',
'output',
],
}
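# Illustrative evaluate request body (hypothetical table names) accepted by the
# schema above:
# {
#     "name": "eval-run",
#     "input": {"bigquery": {"query": "my_dataset.findings",
#                            "golden": "my_dataset.goldens"}},
#     "output": {"bigquery": {"stats": "my_dataset.eval_stats",
#                             "debug": "my_dataset.eval_debug"}},
#     "ignoreTypes": ["AGE"]
# }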
dlp_image_demo_schema = {
'type': 'object',
'properties': {
'type': {
'type': 'string',
'enum': ['image/jpeg',
'image/bmp',
'image/png',
'image/svg',
'text/plain',
]
},
'data': {'type': 'string'},
},
'required': [
'data',
'type'
],
}
bq_table_schema = {
'type': 'array',
'items': {
'type': 'object',
'properties': {
'mode': {
'type': 'string',
'enum': ['NULLABLE',
'REQUIRED',
'REPEATED',
]
},
'name': {'type': 'string'},
'type': {'type': 'string'},
},
'required': [
'name',
'type',
],
},
}
dlp_image_redaction_configs = [{
'redactionColor': {
'blue': 0.1,
'green': 0.1,
'red': 0.8
},
'redactAllText': 'true'
}]
def get_bq_dataset(dataset_id):
"""Returns a dataset instance from BigQuery."""
dataset_ref = bq_client.dataset(dataset_id)
try:
dataset = bq_client.get_dataset(dataset_ref)
except exceptions.NotFound as e:
raise e
return dataset
def append_project(table_name):
"""formats a table name to 'project:dataset.table'."""
return '{}:{}'.format(app.config['PROJECT_ID'], table_name)
def get_bq_table(dataset_id, table_id):
"""Return a table instance from BigQuery."""
dataset_ref = bq_client.dataset(dataset_id)
table_ref = dataset_ref.table(table_id)
try:
return bq_client.get_table(table_ref)
except exceptions.NotFound as e:
raise e
def get_bq_rows(query):
"""Returns a BigQuery query as a list of rows."""
query_job = bq_client.query(query)
res = query_job.result() # blocks until query is done.
return [dict(list(row.items())) for row in res]
def verify_bq_table(dataset_id, table_id, expected_schema):
"""Verifies that a table exists and has an expected schema.
Args:
dataset_id: The name of the BigQuery dataset.
table_id: The name of the BigQuery table.
expected_schema: A list of the expected names of columns.
Raises:
exceptions.NotFound: If the table does not exist in BigQuery.
Returns:
A boolean of the verification status.
"""
table = get_bq_table(dataset_id, table_id)
table_headers = [col.name for col in table.schema]
return set(table_headers) == set(expected_schema)
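# Illustrative usage (hypothetical names): verify_bq_table('notes_ds', 'notes',
# EXPECTED_CSV_SCHEMA) returns True only when the column names are exactly
# record_number, patient_id and note; a missing table raises NotFound instead.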
def verify_gcs_path(path):
"""Verifies that a GCS path exists.
Args:
path: A string that represents the target path.
Returns:
A boolean of the verification status.
"""
storage_client = storage.Client()
path_info = gcsutil.GcsFileName.from_path(path)
try:
bucket = storage_client.get_bucket(path_info.bucket)
except exceptions.NotFound:
return False
return storage.Blob(bucket=bucket,
name=path_info.blob).exists(storage_client)
@app.route('/')
@app.route('/index')
@app.route('/api')
def index():
return flask.jsonify(data='Deid backend server', status=200), 200
@app.route('/api/project')
def get_project():
return flask.jsonify(project=app.config['PROJECT_ID']), 200
@app.route('/api/datasets')
def get_datasets():
datasets = list(bq_client.list_datasets())
dataset_ids = [dataset.dataset_id for dataset in datasets]
return flask.jsonify(datasets=dataset_ids), 200
@app.route('/api/datasets/<dataset_id>', methods=['POST', 'DELETE'])
def manage_dataset(dataset_id):
"""Create and delete datasets from BigQuery."""
dataset_ref = bq_client.dataset(dataset_id)
method = flask.request.method
if method == 'POST':
dataset = bigquery.Dataset(dataset_ref)
dataset.location = 'US'
payload = flask.request.json
if payload:
dataset.location = payload.get('location') or dataset.location
dataset.description = payload.get('description') or ''
try:
dataset = bq_client.create_dataset(dataset)
except exceptions.Conflict as e:
error_msg = 'There already exists a dataset with this name'
return flask.jsonify(error=e.code, text=error_msg), e.code
return flask.jsonify(result='success'), 200
elif method == 'DELETE':
try:
bq_client.delete_dataset(dataset_ref, delete_contents=True)
except exceptions.NotFound as e:
error_msg = 'Dataset Does not exist'
return flask.jsonify(error=e.code, text=error_msg), e.code
return flask.jsonify(result='success'), 200
@app.route('/api/datasets/<dataset_id>/tables')
def get_tables(dataset_id):
"""Get table names for a provided dataset."""
try:
get_bq_dataset(dataset_id)
except exceptions.NotFound as e:
return flask.jsonify(error=e.code, text=e.message), e.code
dataset_ref = bq_client.dataset(dataset_id)
tables = list(bq_client.list_tables(dataset_ref))
table_ids = [table.table_id for table in tables]
return flask.jsonify(dataset=dataset_id, tables=table_ids), 200
@app.route('/api/datasets/<dataset_id>/tables/<table_id>',
methods=['POST'])
def manage_tables(dataset_id, table_id):
"""Create tables in datasets in BigQuery."""
try:
get_bq_dataset(dataset_id)
except exceptions.NotFound as e:
return flask.jsonify(error=e.code, text=e.message), e.code
table_ref = bq_client.dataset(dataset_id).table(table_id)
try:
jsonschema.validate(flask.request.json, bq_table_schema)
except jsonschema.ValidationError:
error_msg = 'unable to validate provided payload.'
return flask.jsonify(error=400, text=error_msg), 400
schema = [bigquery.SchemaField(field['name'], field['type'],
field.get('mode') or 'NULLABLE')
for field in flask.request.json]
table = bigquery.Table(table_ref, schema=schema)
try:
table = bq_client.create_table(table)
except exceptions.GoogleAPIError as e:
return flask.jsonify(error=e.message), 400
return flask.jsonify(result='success'), 200
@app.route('/api/deidentify/<job_id>/metadata')
def get_job_metadata(job_id):
"""Gets the list of patient_id, record_num for a given job."""
job = model.DeidJobTable.query.get(job_id)
if not job:
error_msg = 'Job does not exist'
return flask.jsonify(text=error_msg, error=404), 404
try:
orig_data = get_bq_rows(job.original_query)
except exceptions.NotFound as e:
return flask.jsonify(text=e.message, error=e.code), e.code
# The metadata is only the patient_id and record_number
metadata = [{
'patientId': row[PATIENT_ID],
'recordNumber': row[RECORD_NUM],
} for row in orig_data]
return flask.jsonify(notesMetadata=metadata), 200
class NoteAnnotation(enum.Enum):
HIGHLIGHTED = 0
UNHIGHLIGHTED = 1
class NoteHighlight(object):
"""Represents a chunk of a note that was deidentified and its metadata.
A note is split into a list of NoteHighlight objects. Each NoteHighlight can
indicate that the note is highlighted. In that case, the NoteHighlight should
contain a replacement and color information for the chunk that should be
highlighted.
Attributes:
annotation: A string representation of a NoteAnnotation that indicates
whether this range is highlighted or not.
quote: A string that represents the original chunk of the note.
replacement: a string that indicates the value to replace a highlighted
chunk with.
begin: An integer of the index of this chunk compared to the rest of the
note.
length: An integer with the length of the chunk.
color: A string that represents the color to be associated with a
highlighted chunk.
"""
def __init__(self, annotation, quote, replacement, begin, length, color):
"""Initializes a NoteHighlight object with all attributed."""
self.annotation = annotation
self.quote = quote
self.replacement = replacement
self.begin = begin
self.length = length
self.color = color
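# Illustrative output (hypothetical note "Seen by John today" with one finding
# covering "John" at codepoints [8, 12)): get_note_highlights() below emits an
# UNHIGHLIGHTED chunk for "Seen by ", a HIGHLIGHTED chunk for "John" with
# replacement "PERSON_NAME", and a final UNHIGHLIGHTED chunk for " today".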
@app.route('/api/deidentify/<job_id>/note/<record_number>')
def get_note_highlights(job_id, record_number):
"""returns a list of ranges to highlight."""
job = model.DeidJobTable.query.get(job_id)
if not job:
error_msg = 'Job does not exist'
return flask.jsonify(text=error_msg, error=404), 404
orig_query = job.original_query + ' where {}={}'.format(
RECORD_NUM, record_number)
findings_query = 'select findings from {} where {}=\'{}\' and {}={}'.format(
job.findings_table, run_deid_lib.DLP_FINDINGS_TIMESTAMP, job.timestamp,
RECORD_NUM, record_number)
try:
orig_row = get_bq_rows(orig_query)
findings_data = get_bq_rows(findings_query)
except exceptions.NotFound as e:
return flask.jsonify(text=e.message, error=e.code), e.code
if len(findings_data) != 1 or len(orig_row) != 1:
error_msg = 'Selected record number does not exist or is not unique'
return flask.jsonify(text=error_msg, error=400), 400
findings = json.loads(findings_data[0]['findings'])['findings']
note = orig_row[0][NOTE]
res = []
findings.sort(key=lambda x: int(x['location']['codepointRange']['start']))
# Assumption:
# The location attribute always has a codepointRange field that indicates
# the offset of the identified string in unicode format.
# The original text is always replaced with its detected info type.
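  # Illustrative finding (hypothetical values) in the shape this loop expects:
  #   {"infoType": {"name": "PERSON_NAME"},
  #    "location": {"codepointRange": {"start": "8", "end": "12"}}}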
offset = 0
for finding in findings:
location = finding['location']['codepointRange']
start, end = int(location['start']), int(location['end'])
# This check handles overlapping findings. For now, this ensures that the
# code doesn't crash in that case.
if start < offset:
continue
color = 'Bisque'
# For every detected text, there is 2 chunks that can be created: the one
# preceding the detected text (unhighlighted) and the highlighted one (the
# detected text).
# The unhighlighted chunk
first_quote = note[offset:start]
first_replacement = first_quote
first_annotation = NoteAnnotation.UNHIGHLIGHTED
    first_length = start - offset
first_chunk = NoteHighlight(first_annotation.name, first_quote,
first_replacement, offset, first_length, color)
res.append(first_chunk.__dict__) # dict is json serializable.
# The highlighted chunk
second_quote = note[start:end]
second_replacement = finding['infoType']['name']
second_annotation = NoteAnnotation.HIGHLIGHTED
second_length = end - start
second_chunk = NoteHighlight(second_annotation.name, second_quote,
second_replacement, start, second_length,
color)
res.append(second_chunk.__dict__)
offset = end
# If the last info type isn't at the end of the note, then there is some
# leftover unhighlighted chunk.
final_chunk = NoteHighlight(NoteAnnotation.UNHIGHLIGHTED.name, note[offset:],
'', offset, len(note) - offset, '')
res.append(final_chunk.__dict__)
return flask.jsonify(data=res), 200
@app.route('/api/deidentify', methods=['GET', 'POST'])
def deidentify():
"""run dlp pipeline."""
if flask.request.method == 'GET':
jobs, offset = model.get_list(model.DeidJobTable)
result = [{
'id': job['id'],
'name': job['name'],
'originalQuery': job['original_query'],
'deidTable': job['deid_table'],
'status': job['status'],
'logTrace': job['log_trace'],
'timestamp': job['timestamp'],
} for job in jobs]
return flask.jsonify(jobs=result, offset=offset), 200
try:
jsonschema.validate(flask.request.json, deid_schema)
except jsonschema.ValidationError:
error_msg = 'unable to validate provided payload.'
return flask.jsonify(error=400, text=error_msg), 400
job_data = {
'name': flask.request.json['name'],
'timestamp': datetime.utcnow(),
}
(input_query, input_table, deid_table, findings_table, mae_dir, mae_table,
mae_task_name, batch_size, dtd_dir, input_csv, output_csv) = (
None, None, None, None, None,
None, None, None, None, None, None)
request = flask.request
# determine input
input_method, input_info = (request.json['inputMethod'],
request.json['inputInfo'])
if input_method == 'input_table':
input_table = input_info
try:
dataset, table = input_table.split('.')
if not verify_bq_table(dataset, table, EXPECTED_CSV_SCHEMA):
error_msg = ('input table schema does not match the expected one. '
'Expecting: {}'.format(', '.join(EXPECTED_CSV_SCHEMA)))
return flask.jsonify(error=400, text=error_msg), 400
except exceptions.NotFound:
return flask.jsonify(error=400, text='unable to locate input data'), 400
job_data['original_query'] = 'SELECT * FROM {}'.format(input_table)
elif input_method == 'input_query':
input_query = input_info
job_data['original_query'] = input_query
try:
get_bq_rows(input_query)
except exceptions.BadRequest:
error_msg = 'invalid input query'
return flask.jsonify(error=400, text=error_msg), 400
elif input_method == 'input_csv':
input_csv = input_info
else:
error_msg = 'wrong input method provided'
return flask.jsonify(error=400, text=error_msg), 400
# Determine output
output_method, output_info = (request.json['outputMethod'],
request.json['outputInfo'])
job_data['deid_table'] = output_info
if output_method == 'deid_table':
deid_table = output_info
dataset, table = deid_table.split('.')
try:
if not verify_bq_table(dataset, table, EXPECTED_OUTPUT_SCHEMA):
error_msg = ('output table schema does not match the expected one. '
'Expecting: {}'.format(', '.join(EXPECTED_OUTPUT_SCHEMA)))
return flask.jsonify(error=400, text=error_msg), 400
except exceptions.NotFound:
# if table not found, a new one will be created
pass
elif output_method == 'output_csv':
output_csv = output_info
else:
error_msg = 'wrong output method provided'
return flask.jsonify(error=400, text=error_msg), 400
deid_config_json = run_deid_lib.parse_config_file(
app.config['DEID_CONFIG_FILE'])
findings_table = request.json.get('findingsTable')
job_data['findings_table'] = findings_table
try:
dataset, table = findings_table.split('.')
if not verify_bq_table(dataset, table, EXPECTED_FINDINGS_SCHEMA):
error_msg = ('findings table schema does not match the expected one. '
'Expecting: {}'.format(', '.join(EXPECTED_FINDINGS_SCHEMA)))
return flask.jsonify(error=400, text=error_msg), 400
except exceptions.NotFound:
# if table not found, a new one will be created
pass
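# Optional MAE output settings; batchSize falls back to 1 when not supplied.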
mae_table = request.json.get('maeTable')
mae_dir = request.json.get('maeDir')
batch_size = request.json.get('batchSize') or 1
pipeline_args = ['--project', app.config['PROJECT_ID']]
deid_job = model.create(model.DeidJobTable, job_data)
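# The pipeline runs synchronously; any error string it returns is stored on the
# job record and reported back to the caller.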
errors = run_deid_lib.run_pipeline(
input_query, input_table, deid_table, findings_table, mae_dir, mae_table,
deid_config_json, mae_task_name, app.config['PROJECT_ID'], storage.Client,
bq_client, bigquery.job.QueryJobConfig, app.config['DLP_API_NAME'],
batch_size, dtd_dir, input_csv, output_csv, deid_job.timestamp,
pipeline_args)
if errors:
deid_job.update(status=400, log_trace=errors)
return flask.jsonify(error=400, text=errors), 400
deid_job.update(status=200)
return flask.jsonify(result='success'), 200
@app.route('/api/eval', methods=['GET', 'POST'])
def evaluate():
"""Run evaluation pipeline."""
if flask.request.method == 'GET':
jobs, offset = model.get_list(model.EvalJobTable)
return flask.jsonify(jobs=jobs, offset=offset), 200
# Process POST requests.
try:
jsonschema.validate(flask.request.json, eval_pipeline_shema)
except jsonschema.ValidationError:
error_msg = 'unable to validate provided payload.'
return flask.jsonify(error=400, text=error_msg), 400
(mae_input_pattern, mae_golden_dir, results_dir, mae_input_query,
mae_golden_table, write_per_note_stats_to_gcs, results_table,
per_note_results_table, debug_output_table, types_to_ignore) = (
None, None, None, None, None, None, None, None, None, None)
job_data = {
'name': flask.request.json['name'],
'timestamp': datetime.utcnow(),
}
# Get input info
input_json = flask.request.json['input']
gcs_input, bq_input = input_json.get('gcs'), input_json.get('bigquery')
if gcs_input:
mae_input_pattern = job_data['findings'] = gcs_input['pattern'] + '*.xml'
mae_golden_dir = job_data['goldens'] = gcs_input['golden']
if bq_input:
job_data['findings'] = bq_input['query']
mae_input_query = append_project(job_data['findings'])
job_data['goldens'] = bq_input['golden']
mae_golden_table = append_project(job_data['goldens'])
try:
findings_dataset, findings_table = job_data['findings'].split('.')
get_bq_table(findings_dataset, findings_table)
golden_dataset, golden_table = job_data['goldens'].split('.')
get_bq_table(golden_dataset, golden_table)
except exceptions.NotFound:
error_msg = 'unable to locate input BigQuery tables'
return flask.jsonify(error=400, text=error_msg), 400
# Get output info
output_json = flask.request.json['output']
gcs_output, bq_output = output_json.get('gcs'), output_json.get('bigquery')
if gcs_output:
results_dir = job_data['stats'] = gcs_output['dir']
write_per_note_stats_to_gcs = gcs_output['debug']
if write_per_note_stats_to_gcs:
job_data['debug'] = gcs_output['dir']
if bq_output:
job_data['stats'] = bq_output['stats']
results_table = append_project(job_data['stats'])
job_data['debug'] = bq_output['debug']
debug_output_table = append_project(job_data['debug'])
if bq_output.get('perNote'):
per_note_results_table = append_project(bq_output.get('perNote'))
# Get types to ignore
types_to_ignore = flask.request.json.get('ignoreTypes') or []
# Get pipeline args
pipeline_args = []
eval_job = model.create(model.EvalJobTable, job_data)
errors = eval_lib.run_pipeline(mae_input_pattern, mae_golden_dir, results_dir,
mae_input_query, mae_golden_table,
write_per_note_stats_to_gcs, results_table,
per_note_results_table, debug_output_table,
types_to_ignore, eval_job.timestamp,
pipeline_args)
if errors:
eval_job.update(status=400, log_trace=errors)
return flask.jsonify(error=400, text=errors), 400
eval_job.update(status=200)
return flask.jsonify(result='success'), 200
@app.route('/api/eval/stats/<job_id>', methods=['GET'])
def get_eval_stats(job_id):
"""Returns the evaluation statistics of an EvalJob."""
job = model.EvalJobTable.query.get(job_id)
if not job:
error_msg = 'evaluation job does not exist'
return flask.jsonify(text=error_msg, error=404), 404
if job.status != 200:
error_msg = 'selected job did not finish successfully'
return flask.jsonify(text=error_msg, error=400), 400
stats = job.stats
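# Stats are read either from GCS (paths starting with gs://) or from the
# BigQuery table recorded on the job.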
if stats.startswith('gs://'):
st_client = storage.Client()
filename = gcsutil.GcsFileName.from_path(
posixpath.join(stats, 'aggregate_results.txt'))
bucket = st_client.lookup_bucket(filename.bucket)
if not bucket:
error_msg = 'stats bucket was not found'
return flask.jsonify(error=404, text=error_msg), 404
blob = bucket.blob(filename.blob)
contents = blob.download_as_string()
stats_rows = eval_lib.format_aggregate_text_for_bq(contents,
str(job.timestamp))
else:
query = 'SELECT * FROM {} where timestamp = \'{}\''.format(job.stats,
job.timestamp)
try:
stats_rows = get_bq_rows(query)
except exceptions.NotFound as e:
return flask.jsonify(error=e.code, text=e.message), e.code
# Change the key format from snake_case into camelCase and remove any keys
# with None values
result = [
dict( # pylint: disable=g-complex-comprehension
(k, v) for (k, v) in {
'infoType': stat.get('info_type'),
'recall': stat.get('recall'),
'precision': stat.get('precision'),
'fScore': stat.get('f_score'),
'truePositives': stat.get('true_positives'),
'falsePositives': stat.get('false_positives'),
'falseNegatives': stat.get('false_negatives'),
'timestamp': stat.get('timestamp'),
}.items() if v is not None) for stat in stats_rows
]
return flask.jsonify(stats=result), 200
@app.route('/api/deidentify/upload/table', methods=['POST'])
def upload_dlp_csv():
"""Uploads a csv table to BigQuery dataset.
The table is expected to have the config schema:
[RECORD_NUM, PATIENT_ID, NOTE].
Returns:
A flask response indicating the result of the operation.
"""
csv_file = flask.request.files.get('csv')
if not csv_file:
return flask.jsonify(error=400, text='no file provided'), 400
form = flask.request.form
dataset_id, table_id = form.get('dataset'), form.get('table')
if not dataset_id or not table_id:
return flask.jsonify(error=400, text='table or dataset not provided'), 400
csv_iter = unicodecsv.UnicodeReader(csv_file)
try:
headers = csv_iter.next()
except StopIteration:
return flask.jsonify(error=400, text='file is empty'), 400
if set(headers) != set(EXPECTED_CSV_SCHEMA):
return flask.jsonify(
error=400, text='expected table schema is: {}'.format(
', '.join(EXPECTED_CSV_SCHEMA))), 400
try:
if not verify_bq_table(dataset_id, table_id, EXPECTED_CSV_SCHEMA):
error_msg = ('selected table schema does not match the expected one. '
'Expecting: {}'.format(', '.join(EXPECTED_CSV_SCHEMA)))
return flask.jsonify(error=400, text=error_msg), 400
# The table already exists with the expected schema: fetch it so that
# insert_rows_json below writes into it instead of referencing an
# undefined variable.
table = bq_client.get_table(bq_client.dataset(dataset_id).table(table_id))
except exceptions.NotFound:
# Table not found, create it
dataset_ref = bq_client.dataset(dataset_id)
table_ref = dataset_ref.table(table_id)
schema = [bigquery.schema.SchemaField(
name=col, field_type=CSV_FIELD_TYPE[col])
for col in headers]
table = bq_client.create_table(bigquery.table.Table(table_ref, schema))
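# Build one JSON row per remaining CSV line, keyed by the header names, and
# stream the rows into the table.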
rows = [
{header: entry for header, entry in zip(headers, row)}
for row in csv_iter]
if not rows:
return flask.jsonify(error=400, text='no rows provided'), 400
bq_client.insert_rows_json(table, rows)
return flask.jsonify(res='success'), 200
@app.route('/api/demo/image', methods=['POST'])
def deid_image():
"""redact all text from provided image."""
request = flask.request
try:
jsonschema.validate(request.json, dlp_image_demo_schema)
except jsonschema.ValidationError:
error_msg = 'unable to validate provided parameter'
return flask.jsonify(error=400, text=error_msg), 400
dlp = discovery.build(app.config['DLP_API_NAME'], 'v2',
cache_discovery=False)
def get_image_type(req_type):
"""change image type format to match what's expected from dlp."""
if req_type == 'image/jpeg':
return 'IMAGE_JPEG'
elif req_type == 'image/bmp':
return 'IMAGE_BMP'
elif req_type == 'image/png':
return 'IMAGE_PNG'
elif req_type == 'image/svg':
return 'IMAGE_SVG'
else:
return None
byte_item = {
'type': get_image_type(request.json['type']),
'data': request.json['data'],
}
body = {
'byteItem': byte_item,
'imageRedactionConfigs': dlp_image_redaction_configs,
}
projects = dlp.projects()
image = projects.image()
parent = 'projects/{0}'.format(app.config['PROJECT_ID'])
response = image.redact(body=body, parent=parent).execute()
return flask.jsonify(redactedByteStream=response['redactedImage'], status=200)
if __name__ == '__main__':
app.run(threaded=True)<|fim▁end|> | |
<|file_name|>alert.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2011-2015 libbitcoin developers (see AUTHORS)
* Copyright (c) 2016-2017 metaverse core developers (see MVS-AUTHORS)
*
* This file is part of metaverse.
*
* metaverse is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License with
* additional permissions to the one published by the Free Software
* Foundation, either version 3 of the License, or (at your option)
* any later version. For more information see LICENSE.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <metaverse/bitcoin/message/alert.hpp>
#include <boost/iostreams/stream.hpp>
#include <metaverse/bitcoin/message/version.hpp>
#include <metaverse/bitcoin/utility/assert.hpp>
#include <metaverse/bitcoin/utility/container_sink.hpp>
#include <metaverse/bitcoin/utility/container_source.hpp>
#include <metaverse/bitcoin/utility/istream_reader.hpp>
#include <metaverse/bitcoin/utility/ostream_writer.hpp>
namespace libbitcoin {
namespace message {
const std::string alert::command = "alert";
const uint32_t alert::version_minimum = version::level::minimum;
const uint32_t alert::version_maximum = version::level::maximum;
alert alert::factory_from_data(uint32_t version, const data_chunk& data)
{
alert instance;
instance.from_data(version, data);
return instance;
}
alert alert::factory_from_data(uint32_t version, std::istream& stream)
{
alert instance;
instance.from_data(version, stream);
return instance;
}
alert alert::factory_from_data(uint32_t version, reader& source)
{
alert instance;
instance.from_data(version, source);
return instance;
}
bool alert::is_valid() const
{
return !payload.empty() || !signature.empty();
}
void alert::reset()
{
payload.clear();
payload.shrink_to_fit();
signature.clear();
signature.shrink_to_fit();<|fim▁hole|>
bool alert::from_data(uint32_t version, const data_chunk& data)
{
boost::iostreams::stream<byte_source<data_chunk>> istream(data);
return from_data(version, istream);
}
bool alert::from_data(uint32_t version, std::istream& stream)
{
istream_reader source(stream);
return from_data(version, source);
}
bool alert::from_data(uint32_t version, reader& source)
{
reset();
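// Wire format: varint payload size, payload bytes, varint signature size, signature bytes.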
auto size = source.read_variable_uint_little_endian();
BITCOIN_ASSERT(size <= bc::max_size_t);
const auto payload_size = static_cast<size_t>(size);
size_t signature_size = 0;
auto result = static_cast<bool>(source);
if (result)
{
payload = source.read_data(payload_size);
result = source && (payload.size() == payload_size);
}
if (result)
{
size = source.read_variable_uint_little_endian();
BITCOIN_ASSERT(size <= bc::max_size_t);
signature_size = static_cast<size_t>(size);
result = source;
}
if (result)
{
signature = source.read_data(signature_size);
result = source && (signature.size() == signature_size);
}
if (!result)
reset();
return result;
}
data_chunk alert::to_data(uint32_t version) const
{
data_chunk data;
boost::iostreams::stream<byte_sink<data_chunk>> ostream(data);
to_data(version, ostream);
ostream.flush();
BITCOIN_ASSERT(data.size() == serialized_size(version));
return data;
}
void alert::to_data(uint32_t version, std::ostream& stream) const
{
ostream_writer sink(stream);
to_data(version, sink);
}
void alert::to_data(uint32_t version, writer& sink) const
{
sink.write_variable_uint_little_endian(payload.size());
sink.write_data(payload);
sink.write_variable_uint_little_endian(signature.size());
sink.write_data(signature);
}
uint64_t alert::serialized_size(uint32_t version) const
{
return variable_uint_size(payload.size()) + payload.size() +
variable_uint_size(signature.size()) + signature.size();
}
bool operator==(const alert& left, const alert& right)
{
bool result = (left.payload.size() == right.payload.size()) &&
(left.signature.size() == right.signature.size());
for (size_t i = 0; i < left.payload.size() && result; i++)
result = (left.payload[i] == right.payload[i]);
for (size_t i = 0; i < left.signature.size() && result; i++)
result = (left.signature[i] == right.signature[i]);
return result;
}
bool operator!=(const alert& left, const alert& right)
{
return !(left == right);
}
} // end message
} // end libbitcoin<|fim▁end|> | } |
<|file_name|>FTPRemoteClient.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2005-2010 Ignis Software Tools Ltd. All rights reserved.
*/
package com.aqua.filetransfer.ftp;
import java.io.File;
import java.io.FileInputStream;
import java.util.Properties;
import jsystem.framework.JSystemProperties;
import jsystem.framework.system.SystemObjectImpl;
import jsystem.utils.FileUtils;
import jsystem.utils.ReflectionUtils;
import jsystem.utils.StringUtils;
import systemobject.terminal.Cli;
import systemobject.terminal.Prompt;
import com.aqua.sysobj.conn.CliConnection;
import com.aqua.sysobj.conn.CliConnectionImpl;
import com.aqua.sysobj.conn.CliFactory;
/**
* <b>SystemObject for running FTP client on a remote machine.</b><br>
* The main purpose of this system object is to enable file transfer
* without assuming an FTP server is running on the remote machine.<br>
* In a typical usage of this SystemObject, an embedded FTP server
* will be activated on the local machine.
* A {@link Cli} session is opened with the remote client; the session
* activates the FTP client on the remote machine. <br>
*
* <u>Using FTPRemoteClient</u><br>
 * The SystemObject can be instantiated from the SUT file or directly in code.
 * Once initialized, the copy operations can be used.
 * Each copy operation identifies whether a connection is already open; if not,
 * a connection is opened.<br>
 * In many cases the remote server (telnet/ssh) limits the number of connections;
 * use {@link #closeFTPSession()} to close the connection when needed.<br>
*
 * Passivation: since TAS 4.9 the sys object supports passivation. Please note that passivation
 * is only supported when the remote client is a Linux machine.
* In case the built-in prompts are not enough to open an FTP session
 * with the FTP server you are using, the system object also supports adding additional FTP prompts.
 * To do that, write a property file called {@link #FILE_TRANSFER_PROPERTIES_FILE_NAME}
 * (in the run directory) and add to it the following properties:
 * {@link #FTP_LOGIN_PROMPTS} - comma-separated prompts which indicate that
 * the FTP server is waiting for the user to enter the login user name
 *
 * {@link #FTP_PASSWORD_PROMPTS} - comma-separated prompts which indicate that
 * the FTP server is waiting for the user to enter the password
 *
 * {@link #FTP_PROMPTS} - comma-separated prompts which indicate that
 * the FTP server is waiting for an FTP command
*
 * Since TAS 4.9, CLI connectivity parameters can be set using a CliConnection.
* This can be done either by passing a CliConnection to the FtpRemoteClient constructor
* or setting the <code>cliConnection</code> member through the SUT file.
 * When connectivity parameters are set using a CliConnection, the other connectivity
 * parameters are ignored (host, operatingSystem, protocol, port, user, password).
*
* FTP Server address:
* -------------------
 * The FTP server address is fetched as follows:
 * If the user gave a value to the member {@link #ftpServerHostName} through the SUT file
 * or by activating its setter, this will be the server to which the remote FTP client will
 * try to connect.
* Next, when connecting, the system object will try to fetch the property {@value #LOCAL_HOST_ADDRESS_PROPERTY}<|fim▁hole|>public class FTPRemoteClient extends SystemObjectImpl {
public static final String FILE_TRANSFER_PROPERTIES_FILE_NAME = "filetransfer.properties";
public static final String FTP_PROMPTS = "ftp.prompts";
public static final String FTP_LOGIN_PROMPTS = "ftp.login.prompts";
public static final String FTP_PASSWORD_PROMPTS = "ftp.password.prompts";
public static final String LOCAL_HOST_ADDRESS_PROPERTY = "local.host.external.name";
public CliConnection cliConnection;
private Cli cli;
private String host;
private String operatingSystem = CliFactory.OPERATING_SYSTEM_WINDOWS;
private String protocol = "telnet";
private int port = 23;
private String user;
private String password;
private String ftpServerHostName;
private String ftpUserName="aqua";
private String ftpPassword="aqua";
private boolean ascii ;
private Prompt[] ftpGeneralPrompts;
private Prompt[] ftpLoginPrompts;
private Prompt[] ftpPasswordPrompts;
private java.net.InetAddress localMachine;
private boolean promptOn = true;
/**
*/
public FTPRemoteClient(CliConnection cliConn,String ftpServerHostName) throws Exception{
cliConnection = cliConn;
setFtpServerHostName(ftpServerHostName);
}
/**
* Constructs a FTPRemoteClient for working on local machine as the remote machine.<br>
* Used for testing purposes.
*/
public FTPRemoteClient() throws Exception{
localMachine = java.net.InetAddress.getLocalHost();
setHost(localMachine.getHostName());
}
/**
 * Constructs a FTPRemoteClient where the remote machine is this machine.
* The FTPRemoteClient assumes Aqua's embedded FTP server is running on
* this machine.
*/
public FTPRemoteClient(String user,String password) throws Exception {
this();
setUser(user);
setPassword(password);
}
/**
 * Constructs a FTPRemoteClient where the remote machine is <code>host</code>.
* The FTPRemoteClient assumes Aqua's embedded FTP server is running on
* this machine.
*/
public FTPRemoteClient(String host,String telnetUser,String telnetPassword,String ftpServerHostName) throws Exception{
this(telnetUser,telnetPassword);
setHost(host);
setFtpServerHostName(ftpServerHostName);
}
/**
* Initializes {@link FTPRemoteClient} members and verifies that
* a telnet connection can be opened to the remote client and
* that the remote client can open a FTP connection to the server.<br>
* All connections are closed when initialization is done.
* @see SystemObjectImpl#init()
*/
public void init() throws Exception {
super.init();
initPrompts();
}
/**
* Closes connection to remote machine.
*/
public void closeFTPSession(){
closeFtp();
closeCli();
}
/**
 * Copies a file from the FTP server machine (in most cases it will be the local machine)
* to the remote client.<br>
* Source file path should be relative to FTP user home directory and not absolute
* file path.
* Destination can be either absolute destination path or relative to client's
* user directory.<br>
*/
public void copyFileFromLocalMachineToRemoteClient(String source, String destination) throws Exception {
StringBuffer stringbuffer = new StringBuffer("get ");
destination = adjustPath(destination);
stringbuffer.append(source);
stringbuffer.append(" ");
stringbuffer.append(destination);
copyFileViaFTP(stringbuffer.toString());
}
/**
 * Copies all files from the FTP server machine (in most cases it will be the local machine)
* to the remote client.<br>
*
* @param filesPath - String Array (String...) of full file path.<br>
* @throws Exception
*/
public void copyAllFilesFromLocalMachineToLocalRemote(String... filesPath) throws Exception{
copyAllFilesViaFTP("mget ", filesPath);
}
/**
 * Copies a file from the remote client to the FTP server machine (in most cases it will be
* the local machine)
*
* Source file path can be either absolute destination path or relative to client's
* user directory.
* Destination should be relative to FTP user home directory and not absolute
* file path.
*/
public void copyFileFromRemoteClientToLocalMachine(String source, String destination) throws Exception {
source = adjustPath(source);
StringBuffer stringbuffer = new StringBuffer("put ");
stringbuffer.append(source);
stringbuffer.append(" ");
stringbuffer.append(destination);
copyFileViaFTP(stringbuffer.toString());
}
/**
 * Copies all files from the remote client to the FTP server machine (in most cases it will be
* the local machine).<br>
*
* @param filesPath - String Array (String...) of full file path.<br>
* @throws Exception
*/
public void copyAllFilesFromRemoteMachineToLocalMachine(String... filesPath) throws Exception{
copyAllFilesViaFTP("mput ", filesPath);
}
private void copyFileViaFTP(String command) throws Exception {
openFTPSession();
setAsciiMode(isAscii());
setPromptMode(isPromptOn());
runCliCommand(command);
}
private void copyAllFilesViaFTP(String command, String... filesPath) throws Exception {
StringBuffer stringBuffer = new StringBuffer(command);
openFTPSession();
setAsciiMode(isAscii());
setPromptMode(isPromptOn());
for(String currentFilePath : filesPath){
String source = adjustPath(currentFilePath);
stringBuffer.append(source);
stringBuffer.append(" ");
}
runCliCommand(stringBuffer.toString());
}
private void runCliCommand(String command) throws Exception{
cli.command(command , 1000 *60 * 5,true,false,null,ftpGeneralPrompts);
if (cli.getResult().indexOf("226") < 0){
throw new Exception("Failed in files transfer");
}
}
/**
* Changes ftp session mode to passive
*/
public void passivate(boolean isPassive) throws Exception {
openFTPSession();
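// The 'passive' command toggles the mode, so issue it up to twice until the reported state matches the requested one.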
for (int i = 0; i < 2;i++){
cli.command("passive",1000*60,true,false,null,ftpGeneralPrompts);
String result = cli.getResult().toLowerCase();
boolean on = result.indexOf("on") >= 0;
boolean off = result.indexOf("off")>= 0;
boolean notSupported = result.indexOf("invalid")>= 0;
if (notSupported){
throw new Exception("Passivation not supported");
}
if ((isPassive && on) ||(!isPassive && off) ){
break;
}
}
}
/**
* Terminates FTPRemoteClient.
*/
public void close() {
closeFTPSession();
super.close();
}
/**
* Opens FTP session
*/
private void openFTPSession() throws Exception {
initCli();
ftpLogin();
}
/**
*/
private void initCli() throws Exception {
if (cli == null){
if (cliConnection != null){
initCliFromCliConnectionImpl();
return;
}
Prompt p = new Prompt();
p.setPrompt(">");
p.setCommandEnd(true);
cli =
CliFactory.createCli(getHost(),getOperatingSystem(), getProtocol(),getUser(),getPassword(),new Prompt[]{p});
}
}
private void initCliFromCliConnectionImpl() throws Exception{
if (!cliConnection.isConnected()){
cliConnection.connect();
}
cli = (Cli)ReflectionUtils.getField("cli", CliConnectionImpl.class).get(cliConnection);
}
/**
*/
private void closeFtp(){
try {
cli.command("bye", 1000 *2 ,true,false,null,new Prompt[]{new Prompt("bye.",true)});
if (cli.getResult().indexOf("221") < 0){
report.report("Did not find success code 221");
}
}catch (Exception e){
report.report("Could not find prompt after closing session. " + e.getMessage());
}
}
/**
*/
private void closeCli(){
if (cli != null){
try {
if (cliConnection != null){
closeCliConnectionImpl();
}
cli.close();
}catch (Exception e){
report.report("Failed closing telnet connection",e);
}
}
cli=null;
}
private void closeCliConnectionImpl() throws Exception{
if (cliConnection.isConnected()){
cliConnection.disconnect();
}
}
/**
* Starts FTP client and performs login.
*/
private void ftpLogin() throws Exception{
cli.command("");
String result = cli.getResult();
for (String ftpPrompt:promptsToStringArray(ftpGeneralPrompts)){
if (result.indexOf(ftpPrompt) >=0 ){
//we are already logged in
return;
}
}
String serverAddress = getFTPServerAddress();
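// Launch the FTP client and walk through the login handshake, checking the standard reply codes (220 connected, 331 user accepted, 230 logged in).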
cli.command("ftp " + serverAddress, 1000*60,true,false,null,ftpLoginPrompts);
if (cli.getResult().indexOf("220") < 0){
throw new Exception("Failed connecting to FTP server.("+serverAddress+"). Please verify that there is a ping between the remote client to the runner machine");
}
cli.command(getFtpUserName(),1000*60,true,false,null,ftpPasswordPrompts);
if (cli.getResult().indexOf("331") < 0){
throw new Exception("Failed in login process");
}
cli.command(getFtpPassword(),1000*60,true,false,null,ftpGeneralPrompts);
if (cli.getResult().indexOf("230") < 0){
throw new Exception("User not authorized to login");
}
}
/**
* Changes ftp session mode (ascii/binary)
*/
private void setAsciiMode(boolean isAscii) throws Exception {
String command = "binary";
if (isAscii){
command="ascii";
}
cli.command(command,1000*60,true,false,null,ftpGeneralPrompts);
if (cli.getResult().indexOf("200") < 0){
throw new Exception("Failed changing to binary mode");
}
}
/**
 * Changes the FTP prompt mode (on/off)
* @param promptOn
* @throws Exception
*/
private void setPromptMode(boolean promptOn) throws Exception{
String command = "prompt off";
if (promptOn){
command="prompt on";
}
cli.command(command,1000*60,true,false,null,ftpGeneralPrompts);
if (cli.getResult().indexOf("Interactive") < 0){
throw new Exception("Failed changing prompt mode");
}
}
public boolean isPromptOn() {
return promptOn;
}
public void setPromptOn(boolean promptOn) {
this.promptOn = promptOn;
}
/**
* Adjusts file path to operating system.
*/
private String adjustPath(String path) {
if (CliFactory.OPERATING_SYSTEM_WINDOWS.equals(getOperatingSystem())){
String toReturn = FileUtils.convertToWindowsPath(path);
if (!toReturn.startsWith("\"")){
toReturn = "\""+toReturn+"\"";
}
return toReturn;
}else {
return FileUtils.replaceSeparator(path);
}
}
/**
*
*/
private void initPrompts() throws Exception {
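// Built-in prompts are always available; prompts listed in filetransfer.properties (if the file exists) are merged in below.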
String[] defaultFTPPrompts = new String[]{"ftp>"};
String[] defaultLoginPrompts = new String[]{"):"};
String[] defaultPasswordPrompts = new String[]{"for "+getFtpUserName(),"Password:"};
if (!new File(FILE_TRANSFER_PROPERTIES_FILE_NAME).exists()){
ftpGeneralPrompts = stringArrayToPrompts(defaultFTPPrompts);
ftpLoginPrompts = stringArrayToPrompts(defaultLoginPrompts);
ftpPasswordPrompts = stringArrayToPrompts(defaultPasswordPrompts);
return;
}
Properties props = new Properties();
FileInputStream stream = new FileInputStream(FILE_TRANSFER_PROPERTIES_FILE_NAME);
try {
props.load(stream);
}finally{
try{stream.close();}catch(Exception e){};
}
String ftpPrompts = props.getProperty(FTP_PROMPTS);
String[] ftpPromptsAsStringArray = StringUtils.split(ftpPrompts, ";, ");
ftpPromptsAsStringArray = StringUtils.mergeStringArrays(new String[][]{ftpPromptsAsStringArray,defaultFTPPrompts});
ftpGeneralPrompts = stringArrayToPrompts(ftpPromptsAsStringArray);
String _ftpLoginPrompts = props.getProperty(FTP_LOGIN_PROMPTS);
String[] ftpLoginPromptsAsStringArray = StringUtils.split(_ftpLoginPrompts, ";, ");
ftpLoginPromptsAsStringArray = StringUtils.mergeStringArrays(new String[][]{ftpLoginPromptsAsStringArray,defaultLoginPrompts});
ftpLoginPrompts = stringArrayToPrompts(ftpLoginPromptsAsStringArray);
String _ftpPasswordPrompts = props.getProperty(FTP_PASSWORD_PROMPTS);
String[] ftpPasswordPromptsAsStringArray = StringUtils.split(_ftpPasswordPrompts, ";, ");
ftpPasswordPromptsAsStringArray = StringUtils.mergeStringArrays(new String[][]{ftpPasswordPromptsAsStringArray,defaultPasswordPrompts});
ftpPasswordPrompts = stringArrayToPrompts(ftpPasswordPromptsAsStringArray);
}
private String[] promptsToStringArray(Prompt[] prompts){
if (prompts == null){
return new String[0];
}
String[] res = new String[prompts.length];
int i=0;
for (Prompt p:prompts){
res[i]=p.getPrompt();
i++;
}
return res;
}
private Prompt[] stringArrayToPrompts(String[] promptsAsString){
if (promptsAsString == null){
return new Prompt[0];
}
Prompt[] res = new Prompt[promptsAsString.length];
int i=0;
for (String s:promptsAsString){
res[i]=new Prompt(s,false);
res[i].setCommandEnd(true);
i++;
}
return res;
}
private String getFTPServerAddress(){
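// Resolution order: explicit ftpServerHostName, then the local.host.external.name preference from jsystem.properties, then the local host name.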
if (!StringUtils.isEmpty(getFtpServerHostName())){
return getFtpServerHostName();
}
if (!StringUtils.isEmpty(JSystemProperties.getInstance().getPreference(LOCAL_HOST_ADDRESS_PROPERTY))){
return JSystemProperties.getInstance().getPreference(LOCAL_HOST_ADDRESS_PROPERTY);
}
return localMachine.getHostName();
}
/**********************************************************************
* FTPRemoteClient setters and getters
*********************************************************************/
public String getHost() {
return host;
}
public String getOperatingSystem() {
return operatingSystem;
}
public void setOperatingSystem(String operatingSystem) {
this.operatingSystem = operatingSystem;
}
public String getProtocol() {
return protocol;
}
public void setProtocol(String protocol) {
this.protocol = protocol;
}
public void setHost(String remoteHost) {
this.host = remoteHost;
}
public String getPassword() {
return password;
}
public void setPassword(String telnetPassword) {
this.password = telnetPassword;
}
public int getPort() {
return port;
}
public void setPort(int telnetPort) {
this.port = telnetPort;
}
public String getUser() {
return user;
}
public void setUser(String telnetUser) {
this.user = telnetUser;
}
public String getFtpServerHostName() {
return ftpServerHostName;
}
public void setFtpServerHostName(String ftpServerHostName) {
this.ftpServerHostName = ftpServerHostName;
}
public String getFtpUserName() {
return ftpUserName;
}
public void setFtpUserName(String ftpUserName) {
this.ftpUserName = ftpUserName;
}
public String getFtpPassword() {
return ftpPassword;
}
public void setFtpPassword(String ftpPassword) {
this.ftpPassword = ftpPassword;
}
public boolean isAscii() {
return ascii;
}
public void setAscii(boolean ascii) {
this.ascii = ascii;
}
}<|fim▁end|> | * from the jsystem.properties file, if the property was set it will use it as server address
* otherwise, the system object uses java API to get local machine host name and uses it as server address.
*/ |
<|file_name|>test_edit_video.py<|end_file_name|><|fim▁begin|>from django.test import TestCase
from django.core.urlresolvers import reverse
from working_waterfronts.working_waterfronts_api.models import Video
from django.contrib.auth.models import User
class EditVideoTestCase(TestCase):
"""
Test that the Edit Video page works as expected.
Things tested:
URLs reverse correctly
The outputted page has the correct form fields
POSTing "correct" data will result in the update of the video
object with the specified ID
"""
fixtures = ['test_fixtures']
def setUp(self):
user = User.objects.create_user(
'temporary', '[email protected]', 'temporary')
user.save()
response = self.client.login(
username='temporary', password='temporary')
self.assertEqual(response, True)
def test_not_logged_in(self):
self.client.logout()
response = self.client.get(
reverse('edit-video', kwargs={'id': '1'}))
self.assertRedirects(response, '/login?next=/entry/videos/1')
def test_url_endpoint(self):
url = reverse('edit-video', kwargs={'id': '1'})
self.assertEqual(url, '/entry/videos/1')
def test_successful_video_update(self):
"""
POST a proper "update video" command to the server, and see if
the update appears in the database
"""
# Data that we'll post to the server to get the existing video updated
new_video = {
'caption': "A thrilling display of utmost might",
'name': "You won't believe number 3!",
'video': 'http://www.youtube.com/watch?v=dQw4w9WgXcQ'}
self.client.post(
reverse('edit-video', kwargs={'id': '1'}),
new_video)
video = Video.objects.get(id=1)
for field in new_video:
self.assertEqual(
getattr(video, field), new_video[field])
def test_form_fields(self):
"""
Tests to see if the form contains all of the right fields
"""
response = self.client.get(
reverse('edit-video', kwargs={'id': '1'}))
fields = {
'name': 'A Starship',
'caption': "Traveling at the speed of light!",
'video': 'http://www.youtube.com/watch?v=efgDdSWDg0g'
}
form = response.context['video_form']
for field in fields:<|fim▁hole|> Tests that DELETing entry/videos/<id> deletes the item
"""
response = self.client.delete(
reverse('edit-video', kwargs={'id': '2'}))
self.assertEqual(response.status_code, 200)
with self.assertRaises(Video.DoesNotExist):
Video.objects.get(id=2)
response = self.client.delete(
reverse('edit-video', kwargs={'id': '2'}))
self.assertEqual(response.status_code, 404)<|fim▁end|> | self.assertEqual(fields[field], form[field].value())
def test_delete_video(self):
""" |
<|file_name|>expr-match-unique.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your<|fim▁hole|>#![feature(box_syntax)]
// Tests for match as expressions resulting in boxed types
fn test_box() {
let res: Box<_> = match true { true => { box 100 }, _ => panic!() };
assert_eq!(*res, 100);
}
pub fn main() { test_box(); }<|fim▁end|> | // option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass |
<|file_name|>account_move_line.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import sys
import time
from datetime import datetime
from operator import itemgetter
from lxml import etree
from openerp import netsvc
from openerp.osv import fields, osv, orm
from openerp.tools.translate import _
import openerp.addons.decimal_precision as dp
from openerp import tools
class account_move_line(osv.osv):
_name = "account.move.line"
_description = "Journal Items"
def _query_get(self, cr, uid, obj='l', context=None):
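# Builds the SQL restriction applied to journal items, based on the fiscal year,
# periods, dates, journals and company found in the context.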
fiscalyear_obj = self.pool.get('account.fiscalyear')
fiscalperiod_obj = self.pool.get('account.period')
account_obj = self.pool.get('account.account')
fiscalyear_ids = []
if context is None:
context = {}
initial_bal = context.get('initial_bal', False)
company_clause = " "
if context.get('company_id', False):
company_clause = " AND " +obj+".company_id = %s" % context.get('company_id', False)
if not context.get('fiscalyear', False):
if context.get('all_fiscalyear', False):
#this option is needed by the aged balance report because otherwise, if we search only the draft ones, an open invoice of a closed fiscalyear won't be displayed
fiscalyear_ids = fiscalyear_obj.search(cr, uid, [])
else:
fiscalyear_ids = fiscalyear_obj.search(cr, uid, [('state', '=', 'draft')])
else:
#for initial balance as well as for normal query, we check only the selected FY because the best practice is to generate the FY opening entries
fiscalyear_ids = [context['fiscalyear']]
fiscalyear_clause = (','.join([str(x) for x in fiscalyear_ids])) or '0'
state = context.get('state', False)
where_move_state = ''
where_move_lines_by_date = ''
if context.get('date_from', False) and context.get('date_to', False):
if initial_bal:
where_move_lines_by_date = " AND " +obj+".move_id IN (SELECT id FROM account_move WHERE date < '" +context['date_from']+"')"
else:
where_move_lines_by_date = " AND " +obj+".move_id IN (SELECT id FROM account_move WHERE date >= '" +context['date_from']+"' AND date <= '"+context['date_to']+"')"
if state:
if state.lower() not in ['all']:
where_move_state= " AND "+obj+".move_id IN (SELECT id FROM account_move WHERE account_move.state = '"+state+"')"
if context.get('period_from', False) and context.get('period_to', False) and not context.get('periods', False):
if initial_bal:
period_company_id = fiscalperiod_obj.browse(cr, uid, context['period_from'], context=context).company_id.id
first_period = fiscalperiod_obj.search(cr, uid, [('company_id', '=', period_company_id)], order='date_start', limit=1)[0]
context['periods'] = fiscalperiod_obj.build_ctx_periods(cr, uid, first_period, context['period_from'])
else:
context['periods'] = fiscalperiod_obj.build_ctx_periods(cr, uid, context['period_from'], context['period_to'])
if context.get('periods', False):
if initial_bal:
query = obj+".state <> 'draft' AND "+obj+".period_id IN (SELECT id FROM account_period WHERE fiscalyear_id IN (%s)) %s %s" % (fiscalyear_clause, where_move_state, where_move_lines_by_date)
period_ids = fiscalperiod_obj.search(cr, uid, [('id', 'in', context['periods'])], order='date_start', limit=1)
if period_ids and period_ids[0]:
first_period = fiscalperiod_obj.browse(cr, uid, period_ids[0], context=context)
ids = ','.join([str(x) for x in context['periods']])
query = obj+".state <> 'draft' AND "+obj+".period_id IN (SELECT id FROM account_period WHERE fiscalyear_id IN (%s) AND date_start <= '%s' AND id NOT IN (%s)) %s %s" % (fiscalyear_clause, first_period.date_start, ids, where_move_state, where_move_lines_by_date)
else:
ids = ','.join([str(x) for x in context['periods']])
query = obj+".state <> 'draft' AND "+obj+".period_id IN (SELECT id FROM account_period WHERE fiscalyear_id IN (%s) AND id IN (%s)) %s %s" % (fiscalyear_clause, ids, where_move_state, where_move_lines_by_date)
else:
query = obj+".state <> 'draft' AND "+obj+".period_id IN (SELECT id FROM account_period WHERE fiscalyear_id IN (%s)) %s %s" % (fiscalyear_clause, where_move_state, where_move_lines_by_date)
if initial_bal and not context.get('periods', False) and not where_move_lines_by_date:
#we didn't pass any filter in the context, and the initial balance can't be computed using only the fiscalyear otherwise entries will be summed twice
#so we have to invalidate this query
raise osv.except_osv(_('Warning!'),_("You have not supplied enough arguments to compute the initial balance, please select a period and a journal in the context."))
if context.get('journal_ids', False):
query += ' AND '+obj+'.journal_id IN (%s)' % ','.join(map(str, context['journal_ids']))
if context.get('chart_account_id', False):
child_ids = account_obj._get_children_and_consol(cr, uid, [context['chart_account_id']], context=context)
query += ' AND '+obj+'.account_id IN (%s)' % ','.join(map(str, child_ids))
query += company_clause
return query
def _amount_residual(self, cr, uid, ids, field_names, args, context=None):
"""
This function returns the residual amount on a receivable or payable account.move.line.
By default, it returns an amount in the currency of this journal entry (maybe different
of the company currency), but if you pass 'residual_in_company_currency' = True in the
context then the returned amount will be in company currency.
"""
res = {}
if context is None:
context = {}
cur_obj = self.pool.get('res.currency')
for move_line in self.browse(cr, uid, ids, context=context):
res[move_line.id] = {
'amount_residual': 0.0,
'amount_residual_currency': 0.0,
}
if move_line.reconcile_id:
continue
if not move_line.account_id.type in ('payable', 'receivable'):
#this function does not suport to be used on move lines not related to payable or receivable accounts
continue
if move_line.currency_id:
move_line_total = move_line.amount_currency
sign = move_line.amount_currency < 0 and -1 or 1
else:
move_line_total = move_line.debit - move_line.credit
sign = (move_line.debit - move_line.credit) < 0 and -1 or 1
line_total_in_company_currency = move_line.debit - move_line.credit
context_unreconciled = context.copy()
if move_line.reconcile_partial_id:
for payment_line in move_line.reconcile_partial_id.line_partial_ids:
if payment_line.id == move_line.id:
continue
if payment_line.currency_id and move_line.currency_id and payment_line.currency_id.id == move_line.currency_id.id:
move_line_total += payment_line.amount_currency
else:
if move_line.currency_id:
context_unreconciled.update({'date': payment_line.date})
amount_in_foreign_currency = cur_obj.compute(cr, uid, move_line.company_id.currency_id.id, move_line.currency_id.id, (payment_line.debit - payment_line.credit), round=False, context=context_unreconciled)
move_line_total += amount_in_foreign_currency
else:
move_line_total += (payment_line.debit - payment_line.credit)
line_total_in_company_currency += (payment_line.debit - payment_line.credit)
result = move_line_total
res[move_line.id]['amount_residual_currency'] = sign * (move_line.currency_id and self.pool.get('res.currency').round(cr, uid, move_line.currency_id, result) or result)
res[move_line.id]['amount_residual'] = sign * line_total_in_company_currency
return res
def default_get(self, cr, uid, fields, context=None):
data = self._default_get(cr, uid, fields, context=context)
for f in data.keys():
if f not in fields:
del data[f]
return data
def _prepare_analytic_line(self, cr, uid, obj_line, context=None):
"""
Prepare the values given at the create() of account.analytic.line upon the validation of a journal item having
an analytic account. This method is intended to be extended in other modules.
:param obj_line: browse record of the account.move.line that triggered the analytic line creation
"""
return {'name': obj_line.name,
'date': obj_line.date,
'account_id': obj_line.analytic_account_id.id,
'unit_amount': obj_line.quantity,
'product_id': obj_line.product_id and obj_line.product_id.id or False,
'product_uom_id': obj_line.product_uom_id and obj_line.product_uom_id.id or False,
'amount': (obj_line.credit or 0.0) - (obj_line.debit or 0.0),
'general_account_id': obj_line.account_id.id,
'journal_id': obj_line.journal_id.analytic_journal_id.id,
'ref': obj_line.ref,
'move_id': obj_line.id,
'user_id': uid,
}
def create_analytic_lines(self, cr, uid, ids, context=None):
acc_ana_line_obj = self.pool.get('account.analytic.line')
for obj_line in self.browse(cr, uid, ids, context=context):
if obj_line.analytic_account_id:
if not obj_line.journal_id.analytic_journal_id:
raise osv.except_osv(_('No Analytic Journal !'),_("You have to define an analytic journal on the '%s' journal!") % (obj_line.journal_id.name, ))
vals_line = self._prepare_analytic_line(cr, uid, obj_line, context=context)
acc_ana_line_obj.create(cr, uid, vals_line)
return True
def _default_get_move_form_hook(self, cursor, user, data):
'''Called in the end of default_get method for manual entry in account_move form'''
if data.has_key('analytic_account_id'):
del(data['analytic_account_id'])
if data.has_key('account_tax_id'):
del(data['account_tax_id'])
return data
def convert_to_period(self, cr, uid, context=None):
if context is None:
context = {}
period_obj = self.pool.get('account.period')
#check if the period_id changed in the context from client side
if context.get('period_id', False):
period_id = context.get('period_id')
if type(period_id) == str:
ids = period_obj.search(cr, uid, [('name', 'ilike', period_id)])
context.update({
'period_id': ids and ids[0] or False
})
return context
def _default_get(self, cr, uid, fields, context=None):
#default_get should only do the following:
# -propose the next amount in debit/credit in order to balance the move
# -propose the next account from the journal (default debit/credit account) accordingly
if context is None:
context = {}
account_obj = self.pool.get('account.account')
period_obj = self.pool.get('account.period')
journal_obj = self.pool.get('account.journal')
move_obj = self.pool.get('account.move')
tax_obj = self.pool.get('account.tax')
fiscal_pos_obj = self.pool.get('account.fiscal.position')
partner_obj = self.pool.get('res.partner')
currency_obj = self.pool.get('res.currency')
if not context.get('journal_id', False):
context['journal_id'] = context.get('search_default_journal_id', False)
if not context.get('period_id', False):
context['period_id'] = context.get('search_default_period_id', False)
context = self.convert_to_period(cr, uid, context)
# Compute simple values
data = super(account_move_line, self).default_get(cr, uid, fields, context=context)
if context.get('journal_id'):
total = 0.0
#in account.move form view, it is not possible to compute total debit and credit using
#a browse record. So we must use the context to pass the whole one2many field and compute the total
if context.get('line_id'):
for move_line_dict in move_obj.resolve_2many_commands(cr, uid, 'line_id', context.get('line_id'), context=context):
data['name'] = data.get('name') or move_line_dict.get('name')
data['partner_id'] = data.get('partner_id') or move_line_dict.get('partner_id')
total += move_line_dict.get('debit', 0.0) - move_line_dict.get('credit', 0.0)
elif context.get('period_id'):
#find the date and the ID of the last unbalanced account.move encoded by the current user in that journal and period
move_id = False
cr.execute('''SELECT move_id, date FROM account_move_line
WHERE journal_id = %s AND period_id = %s AND create_uid = %s AND state = %s
ORDER BY id DESC limit 1''', (context['journal_id'], context['period_id'], uid, 'draft'))
res = cr.fetchone()
move_id = res and res[0] or False
data['date'] = res and res[1] or period_obj.browse(cr, uid, context['period_id'], context=context).date_start
data['move_id'] = move_id
if move_id:
#if there exist some unbalanced accounting entries that match the journal and the period,
#we propose to continue the same move by copying the ref, the name, the partner...
move = move_obj.browse(cr, uid, move_id, context=context)
data.setdefault('name', move.line_id[-1].name)
for l in move.line_id:
data['partner_id'] = data.get('partner_id') or l.partner_id.id
data['ref'] = data.get('ref') or l.ref
total += (l.debit or 0.0) - (l.credit or 0.0)
#compute the total of current move
data['debit'] = total < 0 and -total or 0.0
data['credit'] = total > 0 and total or 0.0
#pick the good account on the journal accordingly if the next proposed line will be a debit or a credit
journal_data = journal_obj.browse(cr, uid, context['journal_id'], context=context)
account = total > 0 and journal_data.default_credit_account_id or journal_data.default_debit_account_id
#map the account using the fiscal position of the partner, if needed
part = data.get('partner_id') and partner_obj.browse(cr, uid, data['partner_id'], context=context) or False
if account and data.get('partner_id'):
account = fiscal_pos_obj.map_account(cr, uid, part and part.property_account_position or False, account.id)
account = account_obj.browse(cr, uid, account, context=context)
data['account_id'] = account and account.id or False
#compute the amount in secondary currency of the account, if needed
if account and account.currency_id:
data['currency_id'] = account.currency_id.id
#set the context for the multi currency change
compute_ctx = context.copy()
compute_ctx.update({
#the following 2 parameters are used to choose the currency rate, in case where the account
#doesn't work with an outgoing currency rate method 'at date' but 'average'
'res.currency.compute.account': account,
'res.currency.compute.account_invert': True,
})
if data.get('date'):
compute_ctx.update({'date': data['date']})
data['amount_currency'] = currency_obj.compute(cr, uid, account.company_id.currency_id.id, data['currency_id'], -total, context=compute_ctx)
data = self._default_get_move_form_hook(cr, uid, data)
return data
def on_create_write(self, cr, uid, id, context=None):
if not id:
return []
ml = self.browse(cr, uid, id, context=context)
return map(lambda x: x.id, ml.move_id.line_id)
def _balance(self, cr, uid, ids, name, arg, context=None):
if context is None:
context = {}
c = context.copy()
c['initital_bal'] = True
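# Cumulative balance: for each requested line, sum debit-credit over all lines
# of the same account with a lower or equal id that match the query filters.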
sql = """SELECT l2.id, SUM(l1.debit-l1.credit)
FROM account_move_line l1, account_move_line l2
WHERE l2.account_id = l1.account_id
AND l1.id <= l2.id
AND l2.id IN %s AND """ + \
self._query_get(cr, uid, obj='l1', context=c) + \
" GROUP BY l2.id"
cr.execute(sql, [tuple(ids)])
return dict(cr.fetchall())
def _invoice(self, cursor, user, ids, name, arg, context=None):
invoice_obj = self.pool.get('account.invoice')
res = {}
for line_id in ids:
res[line_id] = False
cursor.execute('SELECT l.id, i.id ' \
'FROM account_move_line l, account_invoice i ' \
'WHERE l.move_id = i.move_id ' \
'AND l.id IN %s',
(tuple(ids),))
invoice_ids = []
for line_id, invoice_id in cursor.fetchall():
res[line_id] = invoice_id
invoice_ids.append(invoice_id)
invoice_names = {False: ''}
for invoice_id, name in invoice_obj.name_get(cursor, user, invoice_ids, context=context):
invoice_names[invoice_id] = name
for line_id in res.keys():
invoice_id = res[line_id]
res[line_id] = (invoice_id, invoice_names[invoice_id])
return res
def name_get(self, cr, uid, ids, context=None):
if not ids:
return []
result = []
for line in self.browse(cr, uid, ids, context=context):
if line.ref:
result.append((line.id, (line.move_id.name or '')+' ('+line.ref+')'))
else:
result.append((line.id, line.move_id.name))
return result
def _balance_search(self, cursor, user, obj, name, args, domain=None, context=None):
if context is None:
context = {}
if not args:
return []
where = ' AND '.join(map(lambda x: '(abs(sum(debit-credit))'+x[1]+str(x[2])+')',args))
cursor.execute('SELECT id, SUM(debit-credit) FROM account_move_line \
GROUP BY id, debit, credit having '+where)
res = cursor.fetchall()
if not res:
return [('id', '=', '0')]
return [('id', 'in', [x[0] for x in res])]
def _invoice_search(self, cursor, user, obj, name, args, context=None):
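# Translates a domain expressed on the related invoice into a list of matching
# move line ids by joining account_invoice through move_id.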
if not args:
return []
invoice_obj = self.pool.get('account.invoice')
i = 0
while i < len(args):
fargs = args[i][0].split('.', 1)
if len(fargs) > 1:
args[i] = (fargs[0], 'in', invoice_obj.search(cursor, user,
[(fargs[1], args[i][1], args[i][2])]))
i += 1
continue
if isinstance(args[i][2], basestring):
res_ids = invoice_obj.name_search(cursor, user, args[i][2], [],
args[i][1])
args[i] = (args[i][0], 'in', [x[0] for x in res_ids])
i += 1
qu1, qu2 = [], []
for x in args:
if x[1] != 'in':
if (x[2] is False) and (x[1] == '='):
qu1.append('(i.id IS NULL)')
elif (x[2] is False) and (x[1] == '<>' or x[1] == '!='):
qu1.append('(i.id IS NOT NULL)')
else:
qu1.append('(i.id %s %s)' % (x[1], '%s'))
qu2.append(x[2])
elif x[1] == 'in':
if len(x[2]) > 0:
qu1.append('(i.id IN (%s))' % (','.join(['%s'] * len(x[2]))))
qu2 += x[2]
else:
qu1.append(' (False)')
if qu1:
qu1 = ' AND' + ' AND'.join(qu1)
else:
qu1 = ''
cursor.execute('SELECT l.id ' \
'FROM account_move_line l, account_invoice i ' \
'WHERE l.move_id = i.move_id ' + qu1, qu2)
res = cursor.fetchall()
if not res:
return [('id', '=', '0')]
return [('id', 'in', [x[0] for x in res])]
def _get_move_lines(self, cr, uid, ids, context=None):
result = []
for move in self.pool.get('account.move').browse(cr, uid, ids, context=context):
for line in move.line_id:
result.append(line.id)
return result
def _get_reconcile(self, cr, uid, ids,name, unknow_none, context=None):
res = dict.fromkeys(ids, False)
for line in self.browse(cr, uid, ids, context=context):
if line.reconcile_id:
res[line.id] = str(line.reconcile_id.name)
elif line.reconcile_partial_id:
res[line.id] = str(line.reconcile_partial_id.name)
return res
_columns = {
'name': fields.char('Name', size=64, required=True),
'quantity': fields.float('Quantity', digits=(16,2), help="The optional quantity expressed by this line, eg: number of product sold. The quantity is not a legal requirement but is very useful for some reports."),
'product_uom_id': fields.many2one('product.uom', 'Unit of Measure'),
'product_id': fields.many2one('product.product', 'Product'),
'debit': fields.float('Debit', digits_compute=dp.get_precision('Account')),
'credit': fields.float('Credit', digits_compute=dp.get_precision('Account')),
'account_id': fields.many2one('account.account', 'Account', required=True, ondelete="cascade", domain=[('type','<>','view'), ('type', '<>', 'closed')], select=2),
'move_id': fields.many2one('account.move', 'Journal Entry', ondelete="cascade", help="The move of this entry line.", select=2, required=True),
'narration': fields.related('move_id','narration', type='text', relation='account.move', string='Internal Note'),
'ref': fields.related('move_id', 'ref', string='Reference', type='char', size=64, store=True),
'statement_id': fields.many2one('account.bank.statement', 'Statement', help="The bank statement used for bank reconciliation", select=1),
'reconcile_id': fields.many2one('account.move.reconcile', 'Reconcile', readonly=True, ondelete='set null', select=2),
'reconcile_partial_id': fields.many2one('account.move.reconcile', 'Partial Reconcile', readonly=True, ondelete='set null', select=2),
'reconcile': fields.function(_get_reconcile, type='char', string='Reconcile Ref'),
'amount_currency': fields.float('Amount Currency', help="The amount expressed in an optional other currency if it is a multi-currency entry.", digits_compute=dp.get_precision('Account')),
'amount_residual_currency': fields.function(_amount_residual, string='Residual Amount in Currency', multi="residual", help="The residual amount on a receivable or payable of a journal entry expressed in its currency (maybe different of the company currency)."),
'amount_residual': fields.function(_amount_residual, string='Residual Amount', multi="residual", help="The residual amount on a receivable or payable of a journal entry expressed in the company currency."),
'currency_id': fields.many2one('res.currency', 'Currency', help="The optional other currency if it is a multi-currency entry."),
'journal_id': fields.related('move_id', 'journal_id', string='Journal', type='many2one', relation='account.journal', required=True, select=True,
store = {
'account.move': (_get_move_lines, ['journal_id'], 20)
}),
'period_id': fields.related('move_id', 'period_id', string='Period', type='many2one', relation='account.period', required=True, select=True,
store = {
'account.move': (_get_move_lines, ['period_id'], 20)
}),
'blocked': fields.boolean('No Follow-up', help="You can check this box to mark this journal item as a litigation with the associated partner"),
'partner_id': fields.many2one('res.partner', 'Partner', select=1, ondelete='restrict'),
'date_maturity': fields.date('Due date', select=True ,help="This field is used for payable and receivable journal entries. You can put the limit date for the payment of this line."),
'date': fields.related('move_id','date', string='Effective date', type='date', required=True, select=True,
store = {
'account.move': (_get_move_lines, ['date'], 20)
}),
'date_created': fields.date('Creation date', select=True),
'analytic_lines': fields.one2many('account.analytic.line', 'move_id', 'Analytic lines'),
'centralisation': fields.selection([('normal','Normal'),('credit','Credit Centralisation'),('debit','Debit Centralisation'),('currency','Currency Adjustment')], 'Centralisation', size=8),
'balance': fields.function(_balance, fnct_search=_balance_search, string='Balance'),
'state': fields.selection([('draft','Unbalanced'), ('valid','Balanced')], 'Status', readonly=True),
'tax_code_id': fields.many2one('account.tax.code', 'Tax Account', help="The Account can either be a base tax code or a tax code account."),
'tax_amount': fields.float('Tax/Base Amount', digits_compute=dp.get_precision('Account'), select=True, help="If the Tax account is a tax code account, this field will contain the taxed amount.If the tax account is base tax code, "\
"this field will contain the basic amount(without tax)."),
'invoice': fields.function(_invoice, string='Invoice',
type='many2one', relation='account.invoice', fnct_search=_invoice_search),
'account_tax_id':fields.many2one('account.tax', 'Tax'),
'analytic_account_id': fields.many2one('account.analytic.account', 'Analytic Account'),
'company_id': fields.related('account_id', 'company_id', type='many2one', relation='res.company',
string='Company', store=True, readonly=True)
}
def _get_date(self, cr, uid, context=None):
if context is None:
context = {}
period_obj = self.pool.get('account.period')
dt = time.strftime('%Y-%m-%d')
if context.get('journal_id') and context.get('period_id'):
cr.execute('SELECT date FROM account_move_line ' \
'WHERE journal_id = %s AND period_id = %s ' \
'ORDER BY id DESC limit 1',
(context['journal_id'], context['period_id']))
res = cr.fetchone()
if res:
dt = res[0]
else:
period = period_obj.browse(cr, uid, context['period_id'], context=context)
dt = period.date_start
return dt
def _get_currency(self, cr, uid, context=None):
if context is None:
context = {}
if not context.get('journal_id', False):
return False
cur = self.pool.get('account.journal').browse(cr, uid, context['journal_id']).currency
return cur and cur.id or False
def _get_period(self, cr, uid, context=None):
"""
Return default account period value
"""
context = context or {}
if context.get('period_id', False):
return context['period_id']
account_period_obj = self.pool.get('account.period')
ids = account_period_obj.find(cr, uid, context=context)
period_id = False
if ids:
period_id = ids[0]
return period_id
def _get_journal(self, cr, uid, context=None):
"""
Return journal based on the journal type
"""
context = context or {}
if context.get('journal_id', False):
return context['journal_id']
journal_id = False
journal_pool = self.pool.get('account.journal')
if context.get('journal_type', False):
jids = journal_pool.search(cr, uid, [('type','=', context.get('journal_type'))])
if not jids:
raise osv.except_osv(_('Configuration Error!'), _('Cannot find any account journal of %s type for this company.\n\nYou can create one in the menu: \nConfiguration/Journals/Journals.') % context.get('journal_type'))
journal_id = jids[0]
return journal_id
_defaults = {
'blocked': False,
'centralisation': 'normal',
'date': _get_date,
'date_created': fields.date.context_today,
'state': 'draft',
'currency_id': _get_currency,
'journal_id': _get_journal,
'credit': 0.0,
'debit': 0.0,
'amount_currency': 0.0,
'account_id': lambda self, cr, uid, c: c.get('account_id', False),
'period_id': _get_period,
'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'account.move.line', context=c)
}
_order = "date desc, id desc"
_sql_constraints = [
('credit_debit1', 'CHECK (credit*debit=0)', 'Wrong credit or debit value in accounting entry !'),
('credit_debit2', 'CHECK (credit+debit>=0)', 'Wrong credit or debit value in accounting entry !'),
]
def _auto_init(self, cr, context=None):
super(account_move_line, self)._auto_init(cr, context=context)
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'account_move_line_journal_id_period_id_index\'')
if not cr.fetchone():
cr.execute('CREATE INDEX account_move_line_journal_id_period_id_index ON account_move_line (journal_id, period_id)')
def _check_no_view(self, cr, uid, ids, context=None):
lines = self.browse(cr, uid, ids, context=context)
for l in lines:
if l.account_id.type == 'view':
return False
return True
def _check_no_closed(self, cr, uid, ids, context=None):
lines = self.browse(cr, uid, ids, context=context)
for l in lines:
if l.account_id.type == 'closed':
raise osv.except_osv(_('Error!'), _('You cannot create journal items on a closed account %s %s.') % (l.account_id.code, l.account_id.name))
return True
def _check_company_id(self, cr, uid, ids, context=None):
lines = self.browse(cr, uid, ids, context=context)
for l in lines:
if l.company_id != l.account_id.company_id or l.company_id != l.period_id.company_id:
return False
return True
def _check_date(self, cr, uid, ids, context=None):
for l in self.browse(cr, uid, ids, context=context):
if l.journal_id.allow_date:
if not time.strptime(l.date[:10],'%Y-%m-%d') >= time.strptime(l.period_id.date_start, '%Y-%m-%d') or not time.strptime(l.date[:10], '%Y-%m-%d') <= time.strptime(l.period_id.date_stop, '%Y-%m-%d'):
return False
return True
def _check_currency(self, cr, uid, ids, context=None):
for l in self.browse(cr, uid, ids, context=context):
if l.account_id.currency_id:
if not l.currency_id or not l.currency_id.id == l.account_id.currency_id.id:
return False
return True
def _check_currency_and_amount(self, cr, uid, ids, context=None):
for l in self.browse(cr, uid, ids, context=context):
if (l.amount_currency and not l.currency_id):
return False
return True
def _check_currency_amount(self, cr, uid, ids, context=None):
for l in self.browse(cr, uid, ids, context=context):
if l.amount_currency:
if (l.amount_currency > 0.0 and l.credit > 0.0) or (l.amount_currency < 0.0 and l.debit > 0.0):
return False
return True
def _check_currency_company(self, cr, uid, ids, context=None):
for l in self.browse(cr, uid, ids, context=context):
if l.currency_id.id == l.company_id.currency_id.id:
return False
return True
_constraints = [
(_check_no_view, 'You cannot create journal items on an account of type view.', ['account_id']),
        (_check_no_closed, 'You cannot create journal items on a closed account.', ['account_id']),
(_check_company_id, 'Account and Period must belong to the same company.', ['company_id']),
(_check_date, 'The date of your Journal Entry is not in the defined period! You should change the date or remove this constraint from the journal.', ['date']),
        (_check_currency, 'The selected account of your Journal Entry forces you to provide a secondary currency. You should remove the secondary currency on the account or select a multi-currency view on the journal.', ['currency_id']),
(_check_currency_and_amount, "You cannot create journal items with a secondary currency without recording both 'currency' and 'amount currency' field.", ['currency_id','amount_currency']),
        (_check_currency_amount, 'The amount expressed in the secondary currency must be positive when the journal item is a debit and negative when the journal item is a credit.', ['amount_currency']),
        (_check_currency_company, "You cannot provide a secondary currency if it is the same as the company one.", ['currency_id']),
]
#TODO: ONCHANGE_ACCOUNT_ID: set account_tax_id
def onchange_currency(self, cr, uid, ids, account_id, amount, currency_id, date=False, journal=False, context=None):
if context is None:
context = {}
account_obj = self.pool.get('account.account')
journal_obj = self.pool.get('account.journal')
currency_obj = self.pool.get('res.currency')
if (not currency_id) or (not account_id):
return {}
result = {}
acc = account_obj.browse(cr, uid, account_id, context=context)
if (amount>0) and journal:
x = journal_obj.browse(cr, uid, journal).default_credit_account_id
if x: acc = x
context.update({
'date': date,
'res.currency.compute.account': acc,
})
v = currency_obj.compute(cr, uid, currency_id, acc.company_id.currency_id.id, amount, context=context)
result['value'] = {
'debit': v > 0 and v or 0.0,
'credit': v < 0 and -v or 0.0
}
return result
def onchange_account_id(self, cr, uid, ids, account_id, context=None):
res = {'value': {}}
if account_id:
res['value']['account_tax_id'] = [x.id for x in self.pool.get('account.account').browse(cr, uid, account_id, context=context).tax_ids]
return res
def onchange_partner_id(self, cr, uid, ids, move_id, partner_id, account_id=None, debit=0, credit=0, date=False, journal=False):
partner_obj = self.pool.get('res.partner')
payment_term_obj = self.pool.get('account.payment.term')
journal_obj = self.pool.get('account.journal')
fiscal_pos_obj = self.pool.get('account.fiscal.position')
val = {}
val['date_maturity'] = False
if not partner_id:
return {'value':val}
if not date:
date = datetime.now().strftime('%Y-%m-%d')
jt = False
if journal:
jt = journal_obj.browse(cr, uid, journal).type
part = partner_obj.browse(cr, uid, partner_id)
payment_term_id = False
if jt and jt in ('purchase', 'purchase_refund') and part.property_supplier_payment_term:
payment_term_id = part.property_supplier_payment_term.id
elif jt and part.property_payment_term:
payment_term_id = part.property_payment_term.id
if payment_term_id:
res = payment_term_obj.compute(cr, uid, payment_term_id, 100, date)
if res:
val['date_maturity'] = res[0][0]
if not account_id:
id1 = part.property_account_payable.id
id2 = part.property_account_receivable.id
if jt:
if jt in ('sale', 'purchase_refund'):
val['account_id'] = fiscal_pos_obj.map_account(cr, uid, part and part.property_account_position or False, id2)
elif jt in ('purchase', 'sale_refund'):
val['account_id'] = fiscal_pos_obj.map_account(cr, uid, part and part.property_account_position or False, id1)
elif jt in ('general', 'bank', 'cash'):
if part.customer:
val['account_id'] = fiscal_pos_obj.map_account(cr, uid, part and part.property_account_position or False, id2)
elif part.supplier:
val['account_id'] = fiscal_pos_obj.map_account(cr, uid, part and part.property_account_position or False, id1)
if val.get('account_id', False):
d = self.onchange_account_id(cr, uid, ids, val['account_id'])
val.update(d['value'])
return {'value':val}
def onchange_account_id(self, cr, uid, ids, account_id=False, partner_id=False):
account_obj = self.pool.get('account.account')
partner_obj = self.pool.get('res.partner')
fiscal_pos_obj = self.pool.get('account.fiscal.position')
val = {}
if account_id:
res = account_obj.browse(cr, uid, account_id)
tax_ids = res.tax_ids
if tax_ids and partner_id:<|fim▁hole|> else:
tax_id = tax_ids and tax_ids[0].id or False
val['account_tax_id'] = tax_id
return {'value': val}
#
    # type: the type of reconciliation (no logic behind this field, for info)
    #
    # writeoff: entry generated for the difference between the lines
#
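    # Illustrative sketch (the ids, write-off account, period and journal below are
    # hypothetical, not taken from this module): a set of journal items can be
    # matched with reconcile_partial(), or with reconcile() when a write-off entry
    # for the remaining difference is needed.
    #
    #     move_line_obj = self.pool.get('account.move.line')
    #     move_line_obj.reconcile_partial(cr, uid, line_ids, type='manual', context=context)
    #     move_line_obj.reconcile(cr, uid, line_ids, type='manual',
    #         writeoff_acc_id=writeoff_account_id,
    #         writeoff_period_id=period_id,
    #         writeoff_journal_id=journal_id, context=context)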
def search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False):
if context is None:
context = {}
if context and context.get('next_partner_only', False):
if not context.get('partner_id', False):
partner = self.list_partners_to_reconcile(cr, uid, context=context)
if partner:
partner = partner[0]
else:
partner = context.get('partner_id', False)
if not partner:
return []
args.append(('partner_id', '=', partner[0]))
return super(account_move_line, self).search(cr, uid, args, offset, limit, order, context, count)
def list_partners_to_reconcile(self, cr, uid, context=None):
cr.execute(
"""
SELECT partner_id
FROM (
SELECT l.partner_id, p.last_reconciliation_date, SUM(l.debit) AS debit, SUM(l.credit) AS credit
FROM account_move_line l
RIGHT JOIN account_account a ON (a.id = l.account_id)
RIGHT JOIN res_partner p ON (l.partner_id = p.id)
WHERE a.reconcile IS TRUE
AND l.reconcile_id IS NULL
AND (p.last_reconciliation_date IS NULL OR l.date > p.last_reconciliation_date)
AND l.state <> 'draft'
GROUP BY l.partner_id, p.last_reconciliation_date
) AS s
WHERE debit > 0 AND credit > 0
ORDER BY last_reconciliation_date""")
ids = cr.fetchall()
ids = len(ids) and [x[0] for x in ids] or []
return self.pool.get('res.partner').name_get(cr, uid, ids, context=context)
def reconcile_partial(self, cr, uid, ids, type='auto', context=None, writeoff_acc_id=False, writeoff_period_id=False, writeoff_journal_id=False):
move_rec_obj = self.pool.get('account.move.reconcile')
merges = []
unmerge = []
total = 0.0
merges_rec = []
company_list = []
if context is None:
context = {}
for line in self.browse(cr, uid, ids, context=context):
if company_list and not line.company_id.id in company_list:
                raise osv.except_osv(_('Warning!'), _('To reconcile the entries, the company should be the same for all entries.'))
company_list.append(line.company_id.id)
for line in self.browse(cr, uid, ids, context=context):
if line.account_id.currency_id:
currency_id = line.account_id.currency_id
else:
currency_id = line.company_id.currency_id
if line.reconcile_id:
raise osv.except_osv(_('Warning'), _("Journal Item '%s' (id: %s), Move '%s' is already reconciled!") % (line.name, line.id, line.move_id.name))
if line.reconcile_partial_id:
for line2 in line.reconcile_partial_id.line_partial_ids:
if not line2.reconcile_id:
if line2.id not in merges:
merges.append(line2.id)
if line2.account_id.currency_id:
total += line2.amount_currency
else:
total += (line2.debit or 0.0) - (line2.credit or 0.0)
merges_rec.append(line.reconcile_partial_id.id)
else:
unmerge.append(line.id)
if line.account_id.currency_id:
total += line.amount_currency
else:
total += (line.debit or 0.0) - (line.credit or 0.0)
if self.pool.get('res.currency').is_zero(cr, uid, currency_id, total):
res = self.reconcile(cr, uid, merges+unmerge, context=context, writeoff_acc_id=writeoff_acc_id, writeoff_period_id=writeoff_period_id, writeoff_journal_id=writeoff_journal_id)
return res
r_id = move_rec_obj.create(cr, uid, {
'type': type,
'line_partial_ids': map(lambda x: (4,x,False), merges+unmerge)
})
move_rec_obj.reconcile_partial_check(cr, uid, [r_id] + merges_rec, context=context)
return True
def reconcile(self, cr, uid, ids, type='auto', writeoff_acc_id=False, writeoff_period_id=False, writeoff_journal_id=False, context=None):
account_obj = self.pool.get('account.account')
move_obj = self.pool.get('account.move')
move_rec_obj = self.pool.get('account.move.reconcile')
partner_obj = self.pool.get('res.partner')
currency_obj = self.pool.get('res.currency')
lines = self.browse(cr, uid, ids, context=context)
unrec_lines = filter(lambda x: not x['reconcile_id'], lines)
credit = debit = 0.0
currency = 0.0
account_id = False
partner_id = False
if context is None:
context = {}
company_list = []
for line in self.browse(cr, uid, ids, context=context):
if company_list and not line.company_id.id in company_list:
                raise osv.except_osv(_('Warning!'), _('To reconcile the entries, the company should be the same for all entries.'))
company_list.append(line.company_id.id)
for line in unrec_lines:
if line.state <> 'valid':
raise osv.except_osv(_('Error!'),
_('Entry "%s" is not valid !') % line.name)
credit += line['credit']
debit += line['debit']
currency += line['amount_currency'] or 0.0
account_id = line['account_id']['id']
partner_id = (line['partner_id'] and line['partner_id']['id']) or False
writeoff = debit - credit
        # If date_p is in context => take this date
if context.has_key('date_p') and context['date_p']:
date=context['date_p']
else:
date = time.strftime('%Y-%m-%d')
cr.execute('SELECT account_id, reconcile_id '\
'FROM account_move_line '\
'WHERE id IN %s '\
'GROUP BY account_id,reconcile_id',
(tuple(ids), ))
r = cr.fetchall()
#TODO: move this check to a constraint in the account_move_reconcile object
if not unrec_lines:
raise osv.except_osv(_('Error!'), _('Entry is already reconciled.'))
account = account_obj.browse(cr, uid, account_id, context=context)
if r[0][1] != None:
raise osv.except_osv(_('Error!'), _('Some entries are already reconciled.'))
if context.get('fy_closing'):
# We don't want to generate any write-off when being called from the
# wizard used to close a fiscal year (and it doesn't give us any
# writeoff_acc_id).
pass
elif (not currency_obj.is_zero(cr, uid, account.company_id.currency_id, writeoff)) or \
(account.currency_id and (not currency_obj.is_zero(cr, uid, account.currency_id, currency))):
if not writeoff_acc_id:
raise osv.except_osv(_('Warning!'), _('You have to provide an account for the write off/exchange difference entry.'))
if writeoff > 0:
debit = writeoff
credit = 0.0
self_credit = writeoff
self_debit = 0.0
else:
debit = 0.0
credit = -writeoff
self_credit = 0.0
self_debit = -writeoff
            # If a comment exists in context, take it
if 'comment' in context and context['comment']:
libelle = context['comment']
else:
libelle = _('Write-Off')
cur_obj = self.pool.get('res.currency')
cur_id = False
amount_currency_writeoff = 0.0
if context.get('company_currency_id',False) != context.get('currency_id',False):
cur_id = context.get('currency_id',False)
for line in unrec_lines:
if line.currency_id and line.currency_id.id == context.get('currency_id',False):
amount_currency_writeoff += line.amount_currency
else:
tmp_amount = cur_obj.compute(cr, uid, line.account_id.company_id.currency_id.id, context.get('currency_id',False), abs(line.debit-line.credit), context={'date': line.date})
amount_currency_writeoff += (line.debit > 0) and tmp_amount or -tmp_amount
writeoff_lines = [
(0, 0, {
'name': libelle,
'debit': self_debit,
'credit': self_credit,
'account_id': account_id,
'date': date,
'partner_id': partner_id,
'currency_id': cur_id or (account.currency_id.id or False),
'amount_currency': amount_currency_writeoff and -1 * amount_currency_writeoff or (account.currency_id.id and -1 * currency or 0.0)
}),
(0, 0, {
'name': libelle,
'debit': debit,
'credit': credit,
'account_id': writeoff_acc_id,
'analytic_account_id': context.get('analytic_id', False),
'date': date,
'partner_id': partner_id,
'currency_id': cur_id or (account.currency_id.id or False),
'amount_currency': amount_currency_writeoff and amount_currency_writeoff or (account.currency_id.id and currency or 0.0)
})
]
writeoff_move_id = move_obj.create(cr, uid, {
'period_id': writeoff_period_id,
'journal_id': writeoff_journal_id,
'date':date,
'state': 'draft',
'line_id': writeoff_lines
})
writeoff_line_ids = self.search(cr, uid, [('move_id', '=', writeoff_move_id), ('account_id', '=', account_id)])
if account_id == writeoff_acc_id:
writeoff_line_ids = [writeoff_line_ids[1]]
ids += writeoff_line_ids
r_id = move_rec_obj.create(cr, uid, {
'type': type,
'line_id': map(lambda x: (4, x, False), ids),
'line_partial_ids': map(lambda x: (3, x, False), ids)
})
wf_service = netsvc.LocalService("workflow")
# the id of the move.reconcile is written in the move.line (self) by the create method above
        # because of the way line_id is defined: (4, x, False)
for id in ids:
wf_service.trg_trigger(uid, 'account.move.line', id, cr)
if lines and lines[0]:
partner_id = lines[0].partner_id and lines[0].partner_id.id or False
if partner_id and not partner_obj.has_something_to_reconcile(cr, uid, partner_id, context=context):
partner_obj.mark_as_reconciled(cr, uid, [partner_id], context=context)
return r_id
def view_header_get(self, cr, user, view_id, view_type, context=None):
if context is None:
context = {}
context = self.convert_to_period(cr, user, context=context)
if context.get('account_id', False):
cr.execute('SELECT code FROM account_account WHERE id = %s', (context['account_id'], ))
res = cr.fetchone()
if res:
res = _('Entries: ')+ (res[0] or '')
return res
if (not context.get('journal_id', False)) or (not context.get('period_id', False)):
return False
if context.get('search_default_journal_id', False):
context['journal_id'] = context.get('search_default_journal_id')
cr.execute('SELECT code FROM account_journal WHERE id = %s', (context['journal_id'], ))
j = cr.fetchone()[0] or ''
cr.execute('SELECT code FROM account_period WHERE id = %s', (context['period_id'], ))
p = cr.fetchone()[0] or ''
if j or p:
return j + (p and (':' + p) or '')
return False
def onchange_date(self, cr, user, ids, date, context=None):
"""
Returns a dict that contains new values and context
@param cr: A database cursor
@param user: ID of the user currently logged in
@param date: latest value from user input for field date
@param args: other arguments
@param context: context arguments, like lang, time zone
@return: Returns a dict which contains new values, and context
"""
res = {}
if context is None:
context = {}
period_pool = self.pool.get('account.period')
ctx = dict(context, account_period_prefer_normal=True)
pids = period_pool.find(cr, user, date, context=ctx)
if pids:
res.update({
'period_id':pids[0]
})
context.update({
'period_id':pids[0]
})
return {
'value':res,
'context':context,
}
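    # Illustrative call (values are hypothetical, not taken from this module):
    #
    #     res = self.onchange_date(cr, uid, [], '2014-01-15', context={})
    #     # res['value']['period_id'] -> id of the period covering 2014-01-15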
def _check_moves(self, cr, uid, context=None):
# use the first move ever created for this journal and period
if context is None:
context = {}
cr.execute('SELECT id, state, name FROM account_move WHERE journal_id = %s AND period_id = %s ORDER BY id limit 1', (context['journal_id'],context['period_id']))
res = cr.fetchone()
if res:
if res[1] != 'draft':
raise osv.except_osv(_('User Error!'),
_('The account move (%s) for centralisation ' \
'has been confirmed.') % res[2])
return res
def _remove_move_reconcile(self, cr, uid, move_ids=None, opening_reconciliation=False, context=None):
        # Remove the move reconcile ids related to the given moves
obj_move_line = self.pool.get('account.move.line')
obj_move_rec = self.pool.get('account.move.reconcile')
unlink_ids = []
if not move_ids:
return True
recs = obj_move_line.read(cr, uid, move_ids, ['reconcile_id', 'reconcile_partial_id'])
full_recs = filter(lambda x: x['reconcile_id'], recs)
rec_ids = [rec['reconcile_id'][0] for rec in full_recs]
part_recs = filter(lambda x: x['reconcile_partial_id'], recs)
part_rec_ids = [rec['reconcile_partial_id'][0] for rec in part_recs]
unlink_ids += rec_ids
unlink_ids += part_rec_ids
if unlink_ids:
if opening_reconciliation:
obj_move_rec.write(cr, uid, unlink_ids, {'opening_reconciliation': False})
obj_move_rec.unlink(cr, uid, unlink_ids)
return True
def unlink(self, cr, uid, ids, context=None, check=True):
if context is None:
context = {}
move_obj = self.pool.get('account.move')
self._update_check(cr, uid, ids, context)
result = False
move_ids = set()
for line in self.browse(cr, uid, ids, context=context):
move_ids.add(line.move_id.id)
context['journal_id'] = line.journal_id.id
context['period_id'] = line.period_id.id
result = super(account_move_line, self).unlink(cr, uid, [line.id], context=context)
move_ids = list(move_ids)
if check and move_ids:
move_obj.validate(cr, uid, move_ids, context=context)
return result
def write(self, cr, uid, ids, vals, context=None, check=True, update_check=True):
if context is None:
context={}
move_obj = self.pool.get('account.move')
account_obj = self.pool.get('account.account')
journal_obj = self.pool.get('account.journal')
if isinstance(ids, (int, long)):
ids = [ids]
if vals.get('account_tax_id', False):
raise osv.except_osv(_('Unable to change tax!'), _('You cannot change the tax, you should remove and recreate lines.'))
if ('account_id' in vals) and not account_obj.read(cr, uid, vals['account_id'], ['active'])['active']:
raise osv.except_osv(_('Bad Account!'), _('You cannot use an inactive account.'))
if update_check:
if ('account_id' in vals) or ('journal_id' in vals) or ('period_id' in vals) or ('move_id' in vals) or ('debit' in vals) or ('credit' in vals) or ('date' in vals):
self._update_check(cr, uid, ids, context)
todo_date = None
if vals.get('date', False):
todo_date = vals['date']
del vals['date']
for line in self.browse(cr, uid, ids, context=context):
ctx = context.copy()
if ('journal_id' not in ctx):
if line.move_id:
ctx['journal_id'] = line.move_id.journal_id.id
else:
ctx['journal_id'] = line.journal_id.id
if ('period_id' not in ctx):
if line.move_id:
ctx['period_id'] = line.move_id.period_id.id
else:
ctx['period_id'] = line.period_id.id
#Check for centralisation
journal = journal_obj.browse(cr, uid, ctx['journal_id'], context=ctx)
if journal.centralisation:
self._check_moves(cr, uid, context=ctx)
result = super(account_move_line, self).write(cr, uid, ids, vals, context)
if check:
done = []
for line in self.browse(cr, uid, ids):
if line.move_id.id not in done:
done.append(line.move_id.id)
move_obj.validate(cr, uid, [line.move_id.id], context)
if todo_date:
move_obj.write(cr, uid, [line.move_id.id], {'date': todo_date}, context=context)
return result
def _update_journal_check(self, cr, uid, journal_id, period_id, context=None):
journal_obj = self.pool.get('account.journal')
period_obj = self.pool.get('account.period')
jour_period_obj = self.pool.get('account.journal.period')
cr.execute('SELECT state FROM account_journal_period WHERE journal_id = %s AND period_id = %s', (journal_id, period_id))
result = cr.fetchall()
journal = journal_obj.browse(cr, uid, journal_id, context=context)
period = period_obj.browse(cr, uid, period_id, context=context)
for (state,) in result:
if state == 'done':
                raise osv.except_osv(_('Error !'), _('You cannot add/modify entries in a closed period %s of journal %s.') % (period.name, journal.name))
if not result:
jour_period_obj.create(cr, uid, {
'name': (journal.code or journal.name)+':'+(period.name or ''),
'journal_id': journal.id,
'period_id': period.id
})
return True
def _update_check(self, cr, uid, ids, context=None):
done = {}
for line in self.browse(cr, uid, ids, context=context):
err_msg = _('Move name (id): %s (%s)') % (line.move_id.name, str(line.move_id.id))
if line.move_id.state <> 'draft' and (not line.journal_id.entry_posted):
                raise osv.except_osv(_('Error!'), _('You cannot do this modification on a confirmed entry. You can just change some non-legal fields or you must unconfirm the journal entry first.\n%s.') % err_msg)
            if line.reconcile_id:
                raise osv.except_osv(_('Error!'), _('You cannot do this modification on a reconciled entry. You can just change some non-legal fields or you must unreconcile first.\n%s.') % err_msg)
t = (line.journal_id.id, line.period_id.id)
if t not in done:
self._update_journal_check(cr, uid, line.journal_id.id, line.period_id.id, context)
done[t] = True
return True
def create(self, cr, uid, vals, context=None, check=True):
account_obj = self.pool.get('account.account')
tax_obj = self.pool.get('account.tax')
move_obj = self.pool.get('account.move')
cur_obj = self.pool.get('res.currency')
journal_obj = self.pool.get('account.journal')
if context is None:
context = {}
if vals.get('move_id', False):
move = self.pool.get('account.move').browse(cr, uid, vals['move_id'], context=context)
if move.company_id:
vals['company_id'] = move.company_id.id
if move.date and not vals.get('date'):
vals['date'] = move.date
if ('account_id' in vals) and not account_obj.read(cr, uid, vals['account_id'], ['active'])['active']:
raise osv.except_osv(_('Bad Account!'), _('You cannot use an inactive account.'))
if 'journal_id' in vals and vals['journal_id']:
context['journal_id'] = vals['journal_id']
if 'period_id' in vals and vals['period_id']:
context['period_id'] = vals['period_id']
if ('journal_id' not in context) and ('move_id' in vals) and vals['move_id']:
m = move_obj.browse(cr, uid, vals['move_id'])
context['journal_id'] = m.journal_id.id
context['period_id'] = m.period_id.id
#we need to treat the case where a value is given in the context for period_id as a string
if 'period_id' in context and not isinstance(context.get('period_id', ''), (int, long)):
period_candidate_ids = self.pool.get('account.period').name_search(cr, uid, name=context.get('period_id',''))
if len(period_candidate_ids) != 1:
raise osv.except_osv(_('Error!'), _('No period found or more than one period found for the given date.'))
context['period_id'] = period_candidate_ids[0][0]
if not context.get('journal_id', False) and context.get('search_default_journal_id', False):
context['journal_id'] = context.get('search_default_journal_id')
self._update_journal_check(cr, uid, context['journal_id'], context['period_id'], context)
move_id = vals.get('move_id', False)
journal = journal_obj.browse(cr, uid, context['journal_id'], context=context)
vals['journal_id'] = vals.get('journal_id') or context.get('journal_id')
vals['period_id'] = vals.get('period_id') or context.get('period_id')
vals['date'] = vals.get('date') or context.get('date')
if not move_id:
if journal.centralisation:
#Check for centralisation
res = self._check_moves(cr, uid, context)
if res:
vals['move_id'] = res[0]
if not vals.get('move_id', False):
if journal.sequence_id:
#name = self.pool.get('ir.sequence').next_by_id(cr, uid, journal.sequence_id.id)
v = {
'date': vals.get('date', time.strftime('%Y-%m-%d')),
'period_id': context['period_id'],
'journal_id': context['journal_id']
}
if vals.get('ref', ''):
v.update({'ref': vals['ref']})
move_id = move_obj.create(cr, uid, v, context)
vals['move_id'] = move_id
else:
raise osv.except_osv(_('No piece number !'), _('Cannot create an automatic sequence for this piece.\nPut a sequence in the journal definition for automatic numbering or create a sequence manually for this piece.'))
ok = not (journal.type_control_ids or journal.account_control_ids)
if ('account_id' in vals):
account = account_obj.browse(cr, uid, vals['account_id'], context=context)
if journal.type_control_ids:
type = account.user_type
for t in journal.type_control_ids:
if type.code == t.code:
ok = True
break
if journal.account_control_ids and not ok:
for a in journal.account_control_ids:
if a.id == vals['account_id']:
ok = True
break
# Automatically convert in the account's secondary currency if there is one and
# the provided values were not already multi-currency
if account.currency_id and (vals.get('amount_currency', False) is False) and account.currency_id.id != account.company_id.currency_id.id:
vals['currency_id'] = account.currency_id.id
ctx = {}
if 'date' in vals:
ctx['date'] = vals['date']
vals['amount_currency'] = cur_obj.compute(cr, uid, account.company_id.currency_id.id,
account.currency_id.id, vals.get('debit', 0.0)-vals.get('credit', 0.0), context=ctx)
if not ok:
raise osv.except_osv(_('Bad Account!'), _('You cannot use this general account in this journal, check the tab \'Entry Controls\' on the related journal.'))
if vals.get('analytic_account_id',False):
if journal.analytic_journal_id:
vals['analytic_lines'] = [(0,0, {
'name': vals['name'],
'date': vals.get('date', time.strftime('%Y-%m-%d')),
'account_id': vals.get('analytic_account_id', False),
'unit_amount': vals.get('quantity', 1.0),
'amount': vals.get('debit', 0.0) or vals.get('credit', 0.0),
'general_account_id': vals.get('account_id', False),
'journal_id': journal.analytic_journal_id.id,
'ref': vals.get('ref', False),
'user_id': uid
})]
result = super(account_move_line, self).create(cr, uid, vals, context=context)
# CREATE Taxes
if vals.get('account_tax_id', False):
tax_id = tax_obj.browse(cr, uid, vals['account_tax_id'])
total = vals['debit'] - vals['credit']
if journal.type in ('purchase_refund', 'sale_refund'):
base_code = 'ref_base_code_id'
tax_code = 'ref_tax_code_id'
account_id = 'account_paid_id'
base_sign = 'ref_base_sign'
tax_sign = 'ref_tax_sign'
else:
base_code = 'base_code_id'
tax_code = 'tax_code_id'
account_id = 'account_collected_id'
base_sign = 'base_sign'
tax_sign = 'tax_sign'
tmp_cnt = 0
for tax in tax_obj.compute_all(cr, uid, [tax_id], total, 1.00, force_excluded=True).get('taxes'):
#create the base movement
if tmp_cnt == 0:
if tax[base_code]:
tmp_cnt += 1
self.write(cr, uid,[result], {
'tax_code_id': tax[base_code],
'tax_amount': tax[base_sign] * abs(total)
})
else:
data = {
'move_id': vals['move_id'],
'name': tools.ustr(vals['name'] or '') + ' ' + tools.ustr(tax['name'] or ''),
'date': vals['date'],
'partner_id': vals.get('partner_id',False),
'ref': vals.get('ref',False),
'account_tax_id': False,
'tax_code_id': tax[base_code],
'tax_amount': tax[base_sign] * abs(total),
'account_id': vals['account_id'],
'credit': 0.0,
'debit': 0.0,
}
if data['tax_code_id']:
self.create(cr, uid, data, context)
#create the Tax movement
data = {
'move_id': vals['move_id'],
'name': tools.ustr(vals['name'] or '') + ' ' + tools.ustr(tax['name'] or ''),
'date': vals['date'],
'partner_id': vals.get('partner_id',False),
'ref': vals.get('ref',False),
'account_tax_id': False,
'tax_code_id': tax[tax_code],
'tax_amount': tax[tax_sign] * abs(tax['amount']),
'account_id': tax[account_id] or vals['account_id'],
'credit': tax['amount']<0 and -tax['amount'] or 0.0,
'debit': tax['amount']>0 and tax['amount'] or 0.0,
}
if data['tax_code_id']:
self.create(cr, uid, data, context)
del vals['account_tax_id']
if check and ((not context.get('no_store_function')) or journal.entry_posted):
tmp = move_obj.validate(cr, uid, [vals['move_id']], context)
if journal.entry_posted and tmp:
move_obj.button_validate(cr,uid, [vals['move_id']], context)
return result
def list_periods(self, cr, uid, context=None):
ids = self.pool.get('account.period').search(cr,uid,[])
return self.pool.get('account.period').name_get(cr, uid, ids, context=context)
def list_journals(self, cr, uid, context=None):
ng = dict(self.pool.get('account.journal').name_search(cr,uid,'',[]))
ids = ng.keys()
result = []
for journal in self.pool.get('account.journal').browse(cr, uid, ids, context=context):
result.append((journal.id,ng[journal.id],journal.type,
bool(journal.currency),bool(journal.analytic_journal_id)))
return result
account_move_line()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|> | part = partner_obj.browse(cr, uid, partner_id)
tax_id = fiscal_pos_obj.map_tax(cr, uid, part and part.property_account_position or False, tax_ids)[0] |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""Support for Peewee ORM (https://github.com/coleifer/peewee)."""
from __future__ import annotations
import typing as t
import marshmallow as ma
import muffin
import peewee as pw
from apispec.ext.marshmallow import MarshmallowPlugin
from marshmallow_peewee import ForeignKey, ModelSchema
from muffin.typing import JSONType
from peewee_aio import Manager, Model
from muffin_rest.errors import APIError
from muffin_rest.handler import RESTBase, RESTOptions
from muffin_rest.peewee.filters import PWFilters
from muffin_rest.peewee.openapi import PeeweeOpenAPIMixin
from muffin_rest.peewee.sorting import PWSorting
# XXX: Patch apispec.MarshmallowPlugin to support ForeignKeyField
MarshmallowPlugin.Converter.field_mapping[ForeignKey] = ("integer", None)
class PWRESTOptions(RESTOptions):
"""Support Peewee."""
model: t.Type[pw.Model]
model_pk: t.Optional[pw.Field] = None
manager: Manager
# Base filters class
filters_cls: t.Type[PWFilters] = PWFilters
# Base sorting class
sorting_cls: t.Type[PWSorting] = PWSorting
Schema: t.Type[ModelSchema]
# Schema auto generation params
schema_base: t.Type[ModelSchema] = ModelSchema
# Recursive delete
delete_recursive = False
base_property: str = "model"
def setup(self, cls):
"""Prepare meta options."""
self.name = self.name or self.model._meta.table_name.lower()
self.model_pk = self.model_pk or self.model._meta.primary_key
manager = getattr(self, "manager", getattr(self.model, "_manager", None))
if manager is None:
raise RuntimeError("Peewee-AIO ORM Manager is not available")
self.manager = manager
super().setup(cls)
def setup_schema_meta(self, _):
"""Prepare a schema."""
return type(
"Meta",
(object,),
dict(
{"unknown": self.schema_unknown, "model": self.model},
**self.schema_meta,
),
)
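# A minimal usage sketch (illustrative only, not part of this module): `Note` is a
# hypothetical peewee-aio model already registered with a Manager; a handler binds
# it through the Meta options processed by setup() above.
#
#     class NoteResource(PWRESTHandler):
#         class Meta:
#             model = Note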
class PWRESTBase(RESTBase):
"""Support Peeweee."""
collection: pw.Query
resource: pw.Model
meta: PWRESTOptions
meta_class: t.Type[PWRESTOptions] = PWRESTOptions
async def prepare_collection(self, _: muffin.Request) -> pw.Query:
"""Initialize Peeewee QuerySet for a binded to the resource model."""
return self.meta.model.select()
async def prepare_resource(self, request: muffin.Request) -> t.Optional[pw.Model]:
"""Load a resource."""
pk = request["path_params"].get(self.meta.name_id)
if not pk:
return None
meta = self.meta
resource = await meta.manager.fetchone(
self.collection.where(meta.model_pk == pk)
)
if resource is None:
raise APIError.NOT_FOUND("Resource not found")
return resource
async def paginate(
self, _: muffin.Request, *, limit: int = 0, offset: int = 0
) -> t.Tuple[pw.Query, int]:
"""Paginate the collection."""
cqs: pw.Select = self.collection.order_by() # type: ignore
if cqs._group_by:
cqs._returning = cqs._group_by
count = await self.meta.manager.count(cqs)
return self.collection.offset(offset).limit(limit), count # type: ignore
async def get(self, request, *, resource=None) -> JSONType:
"""Get resource or collection of resources."""
if resource is not None and resource != "":
return await self.dump(request, resource, many=False)
resources = await self.meta.manager.fetchall(self.collection)
return await self.dump(request, resources, many=True)
async def save(self, _: muffin.Request, resource: pw.Model) -> pw.Model:
"""Save the given resource."""
meta = self.meta
if issubclass(meta.model, Model):
await resource.save()
else:
await meta.manager.save(resource)
return resource
async def remove(self, request: muffin.Request, *, resource: pw.Model = None):
"""Remove the given resource."""
meta = self.meta
if resource:
resources = [resource]
<|fim▁hole|> data = await request.data()
if not data:
return
model_pk = t.cast(pw.Field, meta.model_pk)
resources = await meta.manager.fetchall(
self.collection.where(model_pk << data)
)
if not resources:
raise APIError.NOT_FOUND()
delete_instance = meta.manager.delete_instance
if issubclass(meta.model, Model):
delete_instance = lambda m: m.delete_instance(recursive=meta.delete_recursive) # type: ignore # noqa
for res in resources:
await delete_instance(res)
delete = remove # noqa
async def get_schema(
self, request: muffin.Request, resource=None, **_
) -> ma.Schema:
"""Initialize marshmallow schema for serialization/deserialization."""
return self.meta.Schema(
instance=resource,
only=request.url.query.get("schema_only"),
exclude=request.url.query.get("schema_exclude", ()),
)
class PWRESTHandler(PWRESTBase, PeeweeOpenAPIMixin): # type: ignore
"""Support peewee."""
pass<|fim▁end|> | else: |
<|file_name|>linked_tap.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
import numpy as np
from bokeh.client import push_session
from bokeh.io import curdoc
from bokeh.models import (ColumnDataSource, DataRange1d, Plot, Circle, WidgetBox,
Row, Button, TapTool)
N = 9
x = np.linspace(-2, 2, N)
y = x**2
source1 = ColumnDataSource(dict(x = x, y = y, radius = [0.1]*N))
xdr1 = DataRange1d()
ydr1 = DataRange1d()
plot1 = Plot(x_range=xdr1, y_range=ydr1, plot_width=400, plot_height=400)
plot1.title.text = "Plot1"
plot1.tools.append(TapTool(plot=plot1))
plot1.add_glyph(source1, Circle(x="x", y="y", radius="radius", fill_color="red"))
source2 = ColumnDataSource(dict(x = x, y = y, color = ["blue"]*N))
xdr2 = DataRange1d()<|fim▁hole|>plot2.add_glyph(source2, Circle(x="x", y="y", radius=0.1, fill_color="color"))
def on_selection_change1(attr, _, inds):
color = ["blue"]*N
if inds['1d']['indices']:
indices = inds['1d']['indices']
for i in indices:
color[i] = "red"
source2.data["color"] = color
source1.on_change('selected', on_selection_change1)
def on_selection_change2(attr, _, inds):
inds = inds['1d']['indices']
if inds:
[index] = inds
radius = [0.1]*N
radius[index] = 0.2
else:
radius = [0.1]*N
source1.data["radius"] = radius
source2.on_change('selected', on_selection_change2)
reset = Button(label="Reset")
def on_reset_click():
source1.selected = {
'0d': {'flag': False, 'indices': []},
'1d': {'indices': []},
'2d': {'indices': {}}
}
source2.selected = {
'0d': {'flag': False, 'indices': []},
'1d': {'indices': []},
'2d': {'indices': {}}
}
reset.on_click(on_reset_click)
widgetBox = WidgetBox(children=[reset], width=150)
row = Row(children=[widgetBox, plot1, plot2])
document = curdoc()
document.add_root(row)
if __name__ == "__main__":
print("\npress ctrl-C to exit")
session = push_session(document)
session.show()
session.loop_until_closed()<|fim▁end|> | ydr2 = DataRange1d()
plot2 = Plot(x_range=xdr2, y_range=ydr2, plot_width=400, plot_height=400)
plot2.title.text = "Plot2"
plot2.tools.append(TapTool(plot=plot2)) |
<|file_name|>favoritesPages_script.js<|end_file_name|><|fim▁begin|>Template.favoritesPages.onCreated(function () {
var template = this;
template.subscribe('FavoritesPages');
});
Template.favoritesPages.helpers({
favoritesPages: function () {
if (Meteor.userId()) {
return Pages.find(
{ $or: [{ isPublic: true }, { owners: Meteor.userId() }, { users: Meteor.userId() }], favorites: Meteor.userId() },
{ sort: { updatedAt: -1 } }
);
}
}<|fim▁hole|>
Template.favoritesPages.onRendered(function () {
});<|fim▁end|> | });
Template.favoritesPages.events({
}); |
<|file_name|>ItemIdKeyProvider.java<|end_file_name|><|fim▁begin|>package it.niedermann.owncloud.notes.main.items.selection;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.recyclerview.selection.ItemKeyProvider;
import androidx.recyclerview.widget.RecyclerView;
import static androidx.recyclerview.widget.RecyclerView.NO_POSITION;
public class ItemIdKeyProvider extends ItemKeyProvider<Long> {
private final RecyclerView recyclerView;<|fim▁hole|> }
@Nullable
@Override
public Long getKey(int position) {
final RecyclerView.Adapter<?> adapter = recyclerView.getAdapter();
if (adapter == null) {
throw new IllegalStateException("RecyclerView adapter is not set!");
}
return adapter.getItemId(position);
}
@Override
public int getPosition(@NonNull Long key) {
final RecyclerView.ViewHolder viewHolder = recyclerView.findViewHolderForItemId(key);
return viewHolder == null ? NO_POSITION : viewHolder.getLayoutPosition();
}
}<|fim▁end|> |
public ItemIdKeyProvider(RecyclerView recyclerView) {
super(SCOPE_MAPPED);
this.recyclerView = recyclerView; |
<|file_name|>dateandtime.py<|end_file_name|><|fim▁begin|>#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2019, Ilya Etingof <[email protected]>
# License: http://snmplabs.com/pyasn1/license.html
#
import time
from datetime import datetime
from sys import version_info
__all__ = ['strptime']
if version_info[:2] <= (2, 4):
def strptime(text, dateFormat):
return datetime(*(time.strptime(text, dateFormat)[0:6]))
<|fim▁hole|>else:
def strptime(text, dateFormat):
return datetime.strptime(text, dateFormat)<|fim▁end|> | |
<|file_name|>wavesurfer.js<|end_file_name|><|fim▁begin|>(function (root, factory) {
if (root === undefined && window !== undefined) root = window;
if (typeof define === 'function' && define.amd) {
// AMD. Register as an anonymous module unless amdModuleId is set
define('wavesurfer', [], function () {
return (root['WaveSurfer'] = factory());
});
} else if (typeof module === 'object' && module.exports) {
// Node. Does not work with strict CommonJS, but
// only CommonJS-like environments that support module.exports,
// like Node.
module.exports = factory();
} else {
root['WaveSurfer'] = factory();
}
}(this, function () {
'use strict';
var WaveSurfer = {
defaultParams: {
audioContext : null,
audioRate : 1,
autoCenter : true,
backend : 'WebAudio',
barHeight : 1,
closeAudioContext: false,
container : null,
cursorColor : '#333',
cursorWidth : 1,
dragSelection : true,
fillParent : true,
forceDecode : false,
height : 128,
hideScrollbar : false,
interact : true,
loopSelection : true,
mediaContainer: null,
mediaControls : false,
mediaType : 'audio',
minPxPerSec : 20,
partialRender : false,
pixelRatio : window.devicePixelRatio || screen.deviceXDPI / screen.logicalXDPI,
progressColor : '#555',
normalize : false,
renderer : 'MultiCanvas',
scrollParent : false,
skipLength : 2,
splitChannels : false,
waveColor : '#999',
},
init: function (params) {
// Extract relevant parameters (or defaults)
this.params = WaveSurfer.util.extend({}, this.defaultParams, params);
this.container = 'string' == typeof params.container ?
document.querySelector(this.params.container) :
this.params.container;
if (!this.container) {
throw new Error('Container element not found');
}
if (this.params.mediaContainer == null) {
this.mediaContainer = this.container;
} else if (typeof this.params.mediaContainer == 'string') {
this.mediaContainer = document.querySelector(this.params.mediaContainer);
} else {
this.mediaContainer = this.params.mediaContainer;
}
if (!this.mediaContainer) {
throw new Error('Media Container element not found');
}
// Used to save the current volume when muting so we can
// restore once unmuted
this.savedVolume = 0;
// The current muted state
this.isMuted = false;
// Will hold a list of event descriptors that need to be
// cancelled on subsequent loads of audio
this.tmpEvents = [];
// Holds any running audio downloads
this.currentAjax = null;
this.createDrawer();
this.createBackend();
this.createPeakCache();
this.isDestroyed = false;
},
createDrawer: function () {
var my = this;
this.drawer = Object.create(WaveSurfer.Drawer[this.params.renderer]);
this.drawer.init(this.container, this.params);
this.drawer.on('redraw', function () {
my.drawBuffer();
my.drawer.progress(my.backend.getPlayedPercents());
});
// Click-to-seek
this.drawer.on('click', function (e, progress) {
setTimeout(function () {
my.seekTo(progress);
}, 0);
});
// Relay the scroll event from the drawer
this.drawer.on('scroll', function (e) {
if (my.params.partialRender) {
my.drawBuffer();
}
my.fireEvent('scroll', e);
});
},
createBackend: function () {
var my = this;
if (this.backend) {
this.backend.destroy();
}
// Back compat
if (this.params.backend == 'AudioElement') {
this.params.backend = 'MediaElement';
}
if (this.params.backend == 'WebAudio' && !WaveSurfer.WebAudio.supportsWebAudio()) {
this.params.backend = 'MediaElement';
}
this.backend = Object.create(WaveSurfer[this.params.backend]);
this.backend.init(this.params);
this.backend.on('finish', function () { my.fireEvent('finish'); });
this.backend.on('play', function () { my.fireEvent('play'); });
this.backend.on('pause', function () { my.fireEvent('pause'); });
this.backend.on('audioprocess', function (time) {
my.drawer.progress(my.backend.getPlayedPercents());
my.fireEvent('audioprocess', time);
});
},
createPeakCache: function() {
if (this.params.partialRender) {
this.peakCache = Object.create(WaveSurfer.PeakCache);
this.peakCache.init();
}
},
getDuration: function () {
return this.backend.getDuration();
},
getCurrentTime: function () {
return this.backend.getCurrentTime();
},
play: function (start, end) {
this.fireEvent('interaction', this.play.bind(this, start, end));
this.backend.play(start, end);
},
pause: function () {
this.backend.isPaused() || this.backend.pause();
},
playPause: function () {
this.backend.isPaused() ? this.play() : this.pause();
},
isPlaying: function () {
return !this.backend.isPaused();
},
skipBackward: function (seconds) {
this.skip(-seconds || -this.params.skipLength);
},
skipForward: function (seconds) {
this.skip(seconds || this.params.skipLength);
},
skip: function (offset) {
var position = this.getCurrentTime() || 0;
var duration = this.getDuration() || 1;
position = Math.max(0, Math.min(duration, position + (offset || 0)));
this.seekAndCenter(position / duration);
},
seekAndCenter: function (progress) {
this.seekTo(progress);
this.drawer.recenter(progress);
},
seekTo: function (progress) {
this.fireEvent('interaction', this.seekTo.bind(this, progress));
var paused = this.backend.isPaused();
// avoid draw wrong position while playing backward seeking
if (!paused) {
this.backend.pause();
}
// avoid small scrolls while paused seeking
var oldScrollParent = this.params.scrollParent;
this.params.scrollParent = false;
this.backend.seekTo(progress * this.getDuration());
this.drawer.progress(this.backend.getPlayedPercents());
if (!paused) {
this.backend.play();
}
this.params.scrollParent = oldScrollParent;
this.fireEvent('seek', progress);
},
stop: function () {
this.pause();
this.seekTo(0);
this.drawer.progress(0);
},
/**
* Set the playback volume.
*
* @param {Number} newVolume A value between 0 and 1, 0 being no
* volume and 1 being full volume.
*/
setVolume: function (newVolume) {
this.backend.setVolume(newVolume);
},
/**
* Get the playback volume.
*/
getVolume: function () {
return this.backend.getVolume();
},
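    // Usage sketch (illustrative): volume is a 0..1 float shared with the backend.
    //
    //     wavesurfer.setVolume(0.5);          // half volume
    //     var level = wavesurfer.getVolume(); // -> 0.5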
/**
* Set the playback rate.
*
* @param {Number} rate A positive number. E.g. 0.5 means half the
* normal speed, 2 means double speed and so on.
*/
setPlaybackRate: function (rate) {
this.backend.setPlaybackRate(rate);
},
/**
* Get the playback rate.
*/
getPlaybackRate: function () {
return this.backend.getPlaybackRate();
},
/**
     * Toggle the volume on and off. If not currently muted it will
     * save the current volume value and turn the volume off.
     * If currently muted then it will restore the volume to the saved
     * value, and then reset the saved value.
*/
toggleMute: function () {
this.setMute(!this.isMuted);
},
setMute: function (mute) {
// ignore all muting requests if the audio is already in that state
if (mute === this.isMuted) {
return;
}
if (mute) {
// If currently not muted then save current volume,
// turn off the volume and update the mute properties
this.savedVolume = this.backend.getVolume();
this.backend.setVolume(0);
this.isMuted = true;
} else {
// If currently muted then restore to the saved volume
// and update the mute properties
this.backend.setVolume(this.savedVolume);
this.isMuted = false;
}
},
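    // Usage sketch (illustrative): muting remembers the current volume so it can
    // be restored later; calling setMute twice with the same flag is a no-op.
    //
    //     wavesurfer.setMute(true); // volume -> 0, previous value saved
    //     wavesurfer.toggleMute();  // saved volume restored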
/**
* Get the current mute status.
*/
getMute: function () {
return this.isMuted;
},
/**
     * Get the list of currently set filters as an array.
     *
     * Filters must be set with the setFilters method first.
*/
getFilters: function() {
return this.backend.filters || [];
},
toggleScroll: function () {
this.params.scrollParent = !this.params.scrollParent;
this.drawBuffer();
},
toggleInteraction: function () {
this.params.interact = !this.params.interact;
},
drawBuffer: function () {
var nominalWidth = Math.round(
this.getDuration() * this.params.minPxPerSec * this.params.pixelRatio
);
var parentWidth = this.drawer.getWidth();
var width = nominalWidth;
var start = this.drawer.getScrollX();
var end = Math.min(start + parentWidth, width);
// Fill container
if (this.params.fillParent && (!this.params.scrollParent || nominalWidth < parentWidth)) {
width = parentWidth;
start = 0;
end = width;
}
if (this.params.partialRender) {
var newRanges = this.peakCache.addRangeToPeakCache(width, start, end);
for (var i = 0; i < newRanges.length; i++) {
var peaks = this.backend.getPeaks(width, newRanges[i][0], newRanges[i][1]);
this.drawer.drawPeaks(peaks, width, newRanges[i][0], newRanges[i][1]);
}
} else {
start = 0;
end = width;
var peaks = this.backend.getPeaks(width, start, end);
this.drawer.drawPeaks(peaks, width, start, end);
}
this.fireEvent('redraw', peaks, width);
},
zoom: function (pxPerSec) {
this.params.minPxPerSec = pxPerSec;
this.params.scrollParent = true;
this.drawBuffer();
this.drawer.progress(this.backend.getPlayedPercents());
this.drawer.recenter(
this.getCurrentTime() / this.getDuration()
);
this.fireEvent('zoom', pxPerSec);
},
/**
* Internal method.
*/
loadArrayBuffer: function (arraybuffer) {
this.decodeArrayBuffer(arraybuffer, function (data) {
if (!this.isDestroyed) {
this.loadDecodedBuffer(data);
}
}.bind(this));
},
/**
* Directly load an externally decoded AudioBuffer.
*/
loadDecodedBuffer: function (buffer) {
this.backend.load(buffer);
this.drawBuffer();
this.fireEvent('ready');
},
/**
* Loads audio data from a Blob or File object.
*
* @param {Blob|File} blob Audio data.
*/
loadBlob: function (blob) {
var my = this;
// Create file reader
var reader = new FileReader();
reader.addEventListener('progress', function (e) {
my.onProgress(e);
});
reader.addEventListener('load', function (e) {
my.loadArrayBuffer(e.target.result);
});
reader.addEventListener('error', function () {
my.fireEvent('error', 'Error reading file');
});
reader.readAsArrayBuffer(blob);
this.empty();
},
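    // Usage sketch (the file input element is hypothetical): decode a local file
    // picked by the user.
    //
    //     var input = document.querySelector('input[type="file"]');
    //     input.addEventListener('change', function () {
    //         wavesurfer.loadBlob(input.files[0]);
    //     });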
/**
* Loads audio and re-renders the waveform.
*/
load: function (url, peaks, preload) {
this.empty();
this.isMuted = false;
switch (this.params.backend) {
case 'WebAudio': return this.loadBuffer(url, peaks);
case 'MediaElement': return this.loadMediaElement(url, peaks, preload);
}
},
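    // Usage sketch (URL and peaks values are illustrative):
    //
    //     wavesurfer.load('media/demo.wav');
    //     // or with pre-computed peaks, so decoding waits for user interaction:
    //     wavesurfer.load('media/demo.mp3', [0.1, 0.4, 0.9, 0.4, 0.1]);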
/**
* Loads audio using Web Audio buffer backend.
*/
loadBuffer: function (url, peaks) {
var load = (function (action) {
if (action) {
this.tmpEvents.push(this.once('ready', action));
}
return this.getArrayBuffer(url, this.loadArrayBuffer.bind(this));
}).bind(this);
if (peaks) {
this.backend.setPeaks(peaks);
this.drawBuffer();
this.tmpEvents.push(this.once('interaction', load));
} else {
return load();
}
},
/**
* Either create a media element, or load
* an existing media element.
* @param {String|HTMLElement} urlOrElt Either a path to a media file,
* or an existing HTML5 Audio/Video
* Element
* @param {Array} [peaks] Array of peaks. Required to bypass
* web audio dependency
*/
loadMediaElement: function (urlOrElt, peaks, preload) {
var url = urlOrElt;
if (typeof urlOrElt === 'string') {
this.backend.load(url, this.mediaContainer, peaks, preload);
} else {
var elt = urlOrElt;
this.backend.loadElt(elt, peaks);
// If peaks are not provided,
// url = element.src so we can get peaks with web audio
url = elt.src;
}
this.tmpEvents.push(
this.backend.once('canplay', (function () {
this.drawBuffer();
this.fireEvent('ready');
}).bind(this)),
this.backend.once('error', (function (err) {
this.fireEvent('error', err);<|fim▁hole|> }).bind(this))
);
// If no pre-decoded peaks provided or pre-decoded peaks are
// provided with forceDecode flag, attempt to download the
// audio file and decode it with Web Audio.
if (peaks) { this.backend.setPeaks(peaks); }
if ((!peaks || this.params.forceDecode) && this.backend.supportsWebAudio()) {
this.getArrayBuffer(url, (function (arraybuffer) {
this.decodeArrayBuffer(arraybuffer, (function (buffer) {
this.backend.buffer = buffer;
this.backend.setPeaks(null);
this.drawBuffer();
this.fireEvent('waveform-ready');
}).bind(this));
}).bind(this));
}
},
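    // Usage sketch (the element id is hypothetical): load() calls this method for
    // the MediaElement backend, but an existing <audio> tag can also be wrapped
    // directly instead of a URL.
    //
    //     var elt = document.getElementById('song');
    //     wavesurfer.loadMediaElement(elt);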
decodeArrayBuffer: function (arraybuffer, callback) {
this.arraybuffer = arraybuffer;
this.backend.decodeArrayBuffer(
arraybuffer,
(function (data) {
// Only use the decoded data if we haven't been destroyed or another decode started in the meantime
if (!this.isDestroyed && this.arraybuffer == arraybuffer) {
callback(data);
this.arraybuffer = null;
}
}).bind(this),
this.fireEvent.bind(this, 'error', 'Error decoding audiobuffer')
);
},
getArrayBuffer: function (url, callback) {
var my = this;
var ajax = WaveSurfer.util.ajax({
url: url,
responseType: 'arraybuffer'
});
this.currentAjax = ajax;
this.tmpEvents.push(
ajax.on('progress', function (e) {
my.onProgress(e);
}),
ajax.on('success', function (data, e) {
callback(data);
my.currentAjax = null;
}),
ajax.on('error', function (e) {
my.fireEvent('error', 'XHR error: ' + e.target.statusText);
my.currentAjax = null;
})
);
return ajax;
},
onProgress: function (e) {
if (e.lengthComputable) {
var percentComplete = e.loaded / e.total;
} else {
// Approximate progress with an asymptotic
// function, and assume downloads in the 1-3 MB range.
percentComplete = e.loaded / (e.loaded + 1000000);
}
this.fireEvent('loading', Math.round(percentComplete * 100), e.target);
},
/**
* Exports PCM data into a JSON array and opens in a new window.
*/
exportPCM: function (length, accuracy, noWindow) {
length = length || 1024;
accuracy = accuracy || 10000;
noWindow = noWindow || false;
var peaks = this.backend.getPeaks(length, accuracy);
var arr = [].map.call(peaks, function (val) {
return Math.round(val * accuracy) / accuracy;
});
var json = JSON.stringify(arr);
if (!noWindow) {
window.open('data:application/json;charset=utf-8,' +
encodeURIComponent(json));
}
return json;
},
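    // Usage sketch (values are illustrative): 512 peaks, two decimal places,
    // returned as JSON without opening a new window.
    //
    //     var json = wavesurfer.exportPCM(512, 100, true);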
/**
* Save waveform image as data URI.
*
* The default format is 'image/png'. Other supported types are
* 'image/jpeg' and 'image/webp'.
*/
exportImage: function(format, quality) {
if (!format) {
format = 'image/png';
}
if (!quality) {
quality = 1;
}
return this.drawer.getImage(format, quality);
},
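    // Usage sketch (illustrative): capture the rendered waveform as a JPEG data URI.
    //
    //     var dataUri = wavesurfer.exportImage('image/jpeg', 0.85);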
cancelAjax: function () {
if (this.currentAjax) {
this.currentAjax.xhr.abort();
this.currentAjax = null;
}
},
clearTmpEvents: function () {
this.tmpEvents.forEach(function (e) { e.un(); });
},
/**
* Display empty waveform.
*/
empty: function () {
if (!this.backend.isPaused()) {
this.stop();
this.backend.disconnectSource();
}
this.cancelAjax();
this.clearTmpEvents();
this.drawer.progress(0);
this.drawer.setWidth(0);
this.drawer.drawPeaks({ length: this.drawer.getWidth() }, 0);
},
/**
* Remove events, elements and disconnect WebAudio nodes.
*/
destroy: function () {
this.fireEvent('destroy');
this.cancelAjax();
this.clearTmpEvents();
this.unAll();
this.backend.destroy();
this.drawer.destroy();
this.isDestroyed = true;
}
};
WaveSurfer.create = function (params) {
var wavesurfer = Object.create(WaveSurfer);
wavesurfer.init(params);
return wavesurfer;
};
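// Usage sketch (selector and colors are illustrative): any key from
// `defaultParams` above can be overridden in the options object.
//
//     var wavesurfer = WaveSurfer.create({
//         container: '#waveform',
//         waveColor: 'violet',
//         progressColor: 'purple'
//     });
//     wavesurfer.load('media/demo.wav');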
WaveSurfer.util = {
extend: function (dest) {
var sources = Array.prototype.slice.call(arguments, 1);
sources.forEach(function (source) {
Object.keys(source).forEach(function (key) {
dest[key] = source[key];
});
});
return dest;
},
debounce: function (func, wait, immediate) {
var args, context, timeout;
var later = function() {
timeout = null;
if (!immediate) {
func.apply(context, args);
}
};
return function() {
context = this;
args = arguments;
var callNow = immediate && !timeout;
clearTimeout(timeout);
timeout = setTimeout(later, wait);
if (!timeout) {
timeout = setTimeout(later, wait);
}
if (callNow) {
func.apply(context, args);
}
};
},
min: function (values) {
var min = +Infinity;
for (var i in values) {
if (values[i] < min) {
min = values[i];
}
}
return min;
},
max: function (values) {
var max = -Infinity;
for (var i in values) {
if (values[i] > max) {
max = values[i];
}
}
return max;
},
getId: function () {
return 'wavesurfer_' + Math.random().toString(32).substring(2);
},
ajax: function (options) {
var ajax = Object.create(WaveSurfer.Observer);
var xhr = new XMLHttpRequest();
var fired100 = false;
xhr.open(options.method || 'GET', options.url, true);
xhr.responseType = options.responseType || 'json';
xhr.addEventListener('progress', function (e) {
ajax.fireEvent('progress', e);
if (e.lengthComputable && e.loaded == e.total) {
fired100 = true;
}
});
xhr.addEventListener('load', function (e) {
if (!fired100) {
ajax.fireEvent('progress', e);
}
ajax.fireEvent('load', e);
if (200 == xhr.status || 206 == xhr.status) {
ajax.fireEvent('success', xhr.response, e);
} else {
ajax.fireEvent('error', e);
}
});
xhr.addEventListener('error', function (e) {
ajax.fireEvent('error', e);
});
xhr.send();
ajax.xhr = xhr;
return ajax;
}
};
/* Observer */
WaveSurfer.Observer = {
/**
* Attach a handler function for an event.
*/
on: function (event, fn) {
if (!this.handlers) { this.handlers = {}; }
var handlers = this.handlers[event];
if (!handlers) {
handlers = this.handlers[event] = [];
}
handlers.push(fn);
// Return an event descriptor
return {
name: event,
callback: fn,
un: this.un.bind(this, event, fn)
};
},
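    // Usage sketch (handler body is illustrative): the returned descriptor lets a
    // caller detach exactly this handler later.
    //
    //     var sub = wavesurfer.on('ready', function () { /* update UI */ });
    //     sub.un(); // removes only this handler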
/**
* Remove an event handler.
*/
un: function (event, fn) {
if (!this.handlers) { return; }
var handlers = this.handlers[event];
if (handlers) {
if (fn) {
for (var i = handlers.length - 1; i >= 0; i--) {
if (handlers[i] == fn) {
handlers.splice(i, 1);
}
}
} else {
handlers.length = 0;
}
}
},
/**
* Remove all event handlers.
*/
unAll: function () {
this.handlers = null;
},
/**
* Attach a handler to an event. The handler is executed at most once per
* event type.
*/
once: function (event, handler) {
var my = this;
var fn = function () {
handler.apply(this, arguments);
setTimeout(function () {
my.un(event, fn);
}, 0);
};
return this.on(event, fn);
},
fireEvent: function (event) {
if (!this.handlers) { return; }
var handlers = this.handlers[event];
var args = Array.prototype.slice.call(arguments, 1);
handlers && handlers.forEach(function (fn) {
fn.apply(null, args);
});
}
};
/* Make the main WaveSurfer object an observer */
WaveSurfer.util.extend(WaveSurfer, WaveSurfer.Observer);
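/*
 * Because WaveSurfer mixes in Observer (above), instances can subscribe to and
 * fire arbitrary events. A minimal sketch, assuming 'wavesurfer' is an
 * initialised instance and using a made-up event name:
 *
 *   var descriptor = wavesurfer.on('my-event', function (value) {
 *       console.log('got', value);
 *   });
 *   wavesurfer.fireEvent('my-event', 42); // runs the handler with 42
 *   descriptor.un();                      // detaches this handler again
 */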
'use strict';
WaveSurfer.WebAudio = {
scriptBufferSize: 256,
PLAYING_STATE: 0,
PAUSED_STATE: 1,
FINISHED_STATE: 2,
supportsWebAudio: function () {
return !!(window.AudioContext || window.webkitAudioContext);
},
getAudioContext: function () {
if (!WaveSurfer.WebAudio.audioContext) {
WaveSurfer.WebAudio.audioContext = new (
window.AudioContext || window.webkitAudioContext
);
}
return WaveSurfer.WebAudio.audioContext;
},
getOfflineAudioContext: function (sampleRate) {
if (!WaveSurfer.WebAudio.offlineAudioContext) {
WaveSurfer.WebAudio.offlineAudioContext = new (
window.OfflineAudioContext || window.webkitOfflineAudioContext
)(1, 2, sampleRate);
}
return WaveSurfer.WebAudio.offlineAudioContext;
},
init: function (params) {
this.params = params;
this.ac = params.audioContext || this.getAudioContext();
this.lastPlay = this.ac.currentTime;
this.startPosition = 0;
this.scheduledPause = null;
this.states = [
Object.create(WaveSurfer.WebAudio.state.playing),
Object.create(WaveSurfer.WebAudio.state.paused),
Object.create(WaveSurfer.WebAudio.state.finished)
];
this.createVolumeNode();
this.createScriptNode();
this.createAnalyserNode();
this.setState(this.PAUSED_STATE);
this.setPlaybackRate(this.params.audioRate);
this.setLength(0);
},
disconnectFilters: function () {
if (this.filters) {
this.filters.forEach(function (filter) {
filter && filter.disconnect();
});
this.filters = null;
// Reconnect direct path
this.analyser.connect(this.gainNode);
}
},
setState: function (state) {
if (this.state !== this.states[state]) {
this.state = this.states[state];
this.state.init.call(this);
}
},
// Unpacked filters
setFilter: function () {
this.setFilters([].slice.call(arguments));
},
/**
     * @param {Array} filters Packed filters array
*/
setFilters: function (filters) {
// Remove existing filters
this.disconnectFilters();
// Insert filters if filter array not empty
if (filters && filters.length) {
this.filters = filters;
// Disconnect direct path before inserting filters
this.analyser.disconnect();
// Connect each filter in turn
filters.reduce(function (prev, curr) {
prev.connect(curr);
return curr;
}, this.analyser).connect(this.gainNode);
}
},
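    /*
     * Minimal sketch of attaching a filter, assuming 'wavesurfer.backend' is
     * this WebAudio backend; the filter type and cutoff are placeholder values.
     * Filters are ordinary AudioNodes created from the same AudioContext and
     * chained between the analyser and the gain node.
     *
     *   var lowpass = wavesurfer.backend.ac.createBiquadFilter();
     *   lowpass.type = 'lowpass';
     *   lowpass.frequency.value = 1000;
     *   wavesurfer.backend.setFilter(lowpass);
     */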
createScriptNode: function () {
if (this.ac.createScriptProcessor) {
this.scriptNode = this.ac.createScriptProcessor(this.scriptBufferSize);
} else {
this.scriptNode = this.ac.createJavaScriptNode(this.scriptBufferSize);
}
this.scriptNode.connect(this.ac.destination);
},
addOnAudioProcess: function () {
var my = this;
this.scriptNode.onaudioprocess = function () {
var time = my.getCurrentTime();
if (time >= my.getDuration()) {
my.setState(my.FINISHED_STATE);
my.fireEvent('pause');
} else if (time >= my.scheduledPause) {
my.pause();
} else if (my.state === my.states[my.PLAYING_STATE]) {
my.fireEvent('audioprocess', time);
}
};
},
removeOnAudioProcess: function () {
this.scriptNode.onaudioprocess = null;
},
createAnalyserNode: function () {
this.analyser = this.ac.createAnalyser();
this.analyser.connect(this.gainNode);
},
/**
* Create the gain node needed to control the playback volume.
*/
createVolumeNode: function () {
// Create gain node using the AudioContext
if (this.ac.createGain) {
this.gainNode = this.ac.createGain();
} else {
this.gainNode = this.ac.createGainNode();
}
// Add the gain node to the graph
this.gainNode.connect(this.ac.destination);
},
/**
* Set the gain to a new value.
*
* @param {Number} newGain The new gain, a floating point value
* between 0 and 1. 0 being no gain and 1 being maximum gain.
*/
setVolume: function (newGain) {
this.gainNode.gain.value = newGain;
},
/**
* Get the current gain.
*
* @returns {Number} The current gain, a floating point value
* between 0 and 1. 0 being no gain and 1 being maximum gain.
*/
getVolume: function () {
return this.gainNode.gain.value;
},
decodeArrayBuffer: function (arraybuffer, callback, errback) {
if (!this.offlineAc) {
this.offlineAc = this.getOfflineAudioContext(this.ac ? this.ac.sampleRate : 44100);
}
this.offlineAc.decodeAudioData(arraybuffer, (function (data) {
callback(data);
}).bind(this), errback);
},
/**
* Set pre-decoded peaks.
*/
setPeaks: function (peaks) {
this.peaks = peaks;
},
/**
* Set the rendered length (different from the length of the audio).
*/
setLength: function (length) {
// No resize, we can preserve the cached peaks.
if (this.mergedPeaks && length == ((2 * this.mergedPeaks.length - 1) + 2)) {
return;
}
this.splitPeaks = [];
this.mergedPeaks = [];
// Set the last element of the sparse array so the peak arrays are
// appropriately sized for other calculations.
var channels = this.buffer ? this.buffer.numberOfChannels : 1;
for (var c = 0; c < channels; c++) {
this.splitPeaks[c] = [];
this.splitPeaks[c][2 * (length - 1)] = 0;
this.splitPeaks[c][2 * (length - 1) + 1] = 0;
}
this.mergedPeaks[2 * (length - 1)] = 0;
this.mergedPeaks[2 * (length - 1) + 1] = 0;
},
/**
* Compute the max and min value of the waveform when broken into
* <length> subranges.
* @param {Number} length How many subranges to break the waveform into.
* @param {Number} first First sample in the required range.
* @param {Number} last Last sample in the required range.
* @returns {Array} Array of 2*<length> peaks or array of arrays
* of peaks consisting of (max, min) values for each subrange.
*/
getPeaks: function (length, first, last) {
if (this.peaks) { return this.peaks; }
this.setLength(length);
var sampleSize = this.buffer.length / length;
var sampleStep = ~~(sampleSize / 10) || 1;
var channels = this.buffer.numberOfChannels;
for (var c = 0; c < channels; c++) {
var peaks = this.splitPeaks[c];
var chan = this.buffer.getChannelData(c);
for (var i = first; i <= last; i++) {
var start = ~~(i * sampleSize);
var end = ~~(start + sampleSize);
var min = 0;
var max = 0;
for (var j = start; j < end; j += sampleStep) {
var value = chan[j];
if (value > max) {
max = value;
}
if (value < min) {
min = value;
}
}
peaks[2 * i] = max;
peaks[2 * i + 1] = min;
if (c == 0 || max > this.mergedPeaks[2 * i]) {
this.mergedPeaks[2 * i] = max;
}
if (c == 0 || min < this.mergedPeaks[2 * i + 1]) {
this.mergedPeaks[2 * i + 1] = min;
}
}
}
return this.params.splitChannels ? this.splitPeaks : this.mergedPeaks;
},
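    /*
     * Illustrative note on the peak layout produced above: peaks are stored as
     * flat [max, min] pairs per subrange. With length = 3, a mono buffer might
     * yield (numbers made up):
     *
     *   mergedPeaks = [0.8, -0.7, 0.5, -0.4, 0.9, -0.6]
     *                  max0 min0  max1 min1  max2 min2
     *
     * With splitChannels enabled, splitPeaks holds one such array per channel.
     */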
getPlayedPercents: function () {
return this.state.getPlayedPercents.call(this);
},
disconnectSource: function () {
if (this.source) {
this.source.disconnect();
}
},
destroy: function () {
if (!this.isPaused()) {
this.pause();
}
this.unAll();
this.buffer = null;
this.disconnectFilters();
this.disconnectSource();
this.gainNode.disconnect();
this.scriptNode.disconnect();
this.analyser.disconnect();
// close the audioContext if closeAudioContext option is set to true
if (this.params.closeAudioContext) {
// check if browser supports AudioContext.close()
if (typeof this.ac.close === 'function' && this.ac.state != 'closed') {
this.ac.close();
}
// clear the reference to the audiocontext
this.ac = null;
// clear the actual audiocontext, either passed as param or the
// global singleton
if (!this.params.audioContext) {
WaveSurfer.WebAudio.audioContext = null;
} else {
this.params.audioContext = null;
}
// clear the offlineAudioContext
WaveSurfer.WebAudio.offlineAudioContext = null;
}
},
load: function (buffer) {
this.startPosition = 0;
this.lastPlay = this.ac.currentTime;
this.buffer = buffer;
this.createSource();
},
createSource: function () {
this.disconnectSource();
this.source = this.ac.createBufferSource();
//adjust for old browsers.
this.source.start = this.source.start || this.source.noteGrainOn;
this.source.stop = this.source.stop || this.source.noteOff;
this.source.playbackRate.value = this.playbackRate;
this.source.buffer = this.buffer;
this.source.connect(this.analyser);
},
isPaused: function () {
return this.state !== this.states[this.PLAYING_STATE];
},
getDuration: function () {
if (!this.buffer) {
return 0;
}
return this.buffer.duration;
},
seekTo: function (start, end) {
if (!this.buffer) { return; }
this.scheduledPause = null;
if (start == null) {
start = this.getCurrentTime();
if (start >= this.getDuration()) {
start = 0;
}
}
if (end == null) {
end = this.getDuration();
}
this.startPosition = start;
this.lastPlay = this.ac.currentTime;
if (this.state === this.states[this.FINISHED_STATE]) {
this.setState(this.PAUSED_STATE);
}
return { start: start, end: end };
},
getPlayedTime: function () {
return (this.ac.currentTime - this.lastPlay) * this.playbackRate;
},
/**
* Plays the loaded audio region.
*
* @param {Number} start Start offset in seconds,
* relative to the beginning of a clip.
* @param {Number} end When to stop
* relative to the beginning of a clip.
*/
play: function (start, end) {
if (!this.buffer) { return; }
// need to re-create source on each playback
this.createSource();
var adjustedTime = this.seekTo(start, end);
start = adjustedTime.start;
end = adjustedTime.end;
this.scheduledPause = end;
this.source.start(0, start, end - start);
if (this.ac.state == 'suspended') {
this.ac.resume && this.ac.resume();
}
this.setState(this.PLAYING_STATE);
this.fireEvent('play');
},
/**
* Pauses the loaded audio.
*/
pause: function () {
this.scheduledPause = null;
this.startPosition += this.getPlayedTime();
this.source && this.source.stop(0);
this.setState(this.PAUSED_STATE);
this.fireEvent('pause');
},
/**
* Returns the current time in seconds relative to the audioclip's duration.
*/
getCurrentTime: function () {
return this.state.getCurrentTime.call(this);
},
/**
* Returns the current playback rate.
*/
getPlaybackRate: function () {
return this.playbackRate;
},
/**
* Set the audio source playback rate.
*/
setPlaybackRate: function (value) {
value = value || 1;
if (this.isPaused()) {
this.playbackRate = value;
} else {
this.pause();
this.playbackRate = value;
this.play();
}
}
};
WaveSurfer.WebAudio.state = {};
WaveSurfer.WebAudio.state.playing = {
init: function () {
this.addOnAudioProcess();
},
getPlayedPercents: function () {
var duration = this.getDuration();
return (this.getCurrentTime() / duration) || 0;
},
getCurrentTime: function () {
return this.startPosition + this.getPlayedTime();
}
};
WaveSurfer.WebAudio.state.paused = {
init: function () {
this.removeOnAudioProcess();
},
getPlayedPercents: function () {
var duration = this.getDuration();
return (this.getCurrentTime() / duration) || 0;
},
getCurrentTime: function () {
return this.startPosition;
}
};
WaveSurfer.WebAudio.state.finished = {
init: function () {
this.removeOnAudioProcess();
this.fireEvent('finish');
},
getPlayedPercents: function () {
return 1;
},
getCurrentTime: function () {
return this.getDuration();
}
};
WaveSurfer.util.extend(WaveSurfer.WebAudio, WaveSurfer.Observer);
'use strict';
WaveSurfer.MediaElement = Object.create(WaveSurfer.WebAudio);
WaveSurfer.util.extend(WaveSurfer.MediaElement, {
init: function (params) {
this.params = params;
// Dummy media to catch errors
this.media = {
currentTime: 0,
duration: 0,
paused: true,
playbackRate: 1,
play: function () {},
pause: function () {}
};
this.mediaType = params.mediaType.toLowerCase();
this.elementPosition = params.elementPosition;
this.setPlaybackRate(this.params.audioRate);
this.createTimer();
},
/**
* Create a timer to provide a more precise `audioprocess' event.
*/
createTimer: function () {
var my = this;
var playing = false;
var onAudioProcess = function () {
if (my.isPaused()) { return; }
my.fireEvent('audioprocess', my.getCurrentTime());
// Call again in the next frame
var requestAnimationFrame = window.requestAnimationFrame || window.webkitRequestAnimationFrame;
requestAnimationFrame(onAudioProcess);
};
this.on('play', onAudioProcess);
},
/**
* Create media element with url as its source,
* and append to container element.
* @param {String} url path to media file
* @param {HTMLElement} container HTML element
* @param {Array} peaks array of peak data
* @param {String} preload HTML 5 preload attribute value
*/
load: function (url, container, peaks, preload) {
var my = this;
var media = document.createElement(this.mediaType);
media.controls = this.params.mediaControls;
media.autoplay = this.params.autoplay || false;
media.preload = preload == null ? 'auto' : preload;
media.src = url;
media.style.width = '100%';
var prevMedia = container.querySelector(this.mediaType);
if (prevMedia) {
container.removeChild(prevMedia);
}
container.appendChild(media);
this._load(media, peaks);
},
/**
* Load existing media element.
* @param {MediaElement} elt HTML5 Audio or Video element
* @param {Array} peaks array of peak data
*/
loadElt: function (elt, peaks) {
var my = this;
var media = elt;
media.controls = this.params.mediaControls;
media.autoplay = this.params.autoplay || false;
this._load(media, peaks);
},
/**
* Private method called by both load (from url)
* and loadElt (existing media element).
* @param {MediaElement} media HTML5 Audio or Video element
* @param {Array} peaks array of peak data
* @private
*/
_load: function (media, peaks) {
var my = this;
// load must be called manually on iOS, otherwise peaks won't draw
// until a user interaction triggers load --> 'ready' event
if (typeof media.load == 'function') {
media.load();
}
media.addEventListener('error', function () {
my.fireEvent('error', 'Error loading media element');
});
media.addEventListener('canplay', function () {
my.fireEvent('canplay');
});
media.addEventListener('ended', function () {
my.fireEvent('finish');
});
this.media = media;
this.peaks = peaks;
this.onPlayEnd = null;
this.buffer = null;
this.setPlaybackRate(this.playbackRate);
},
isPaused: function () {
return !this.media || this.media.paused;
},
getDuration: function () {
var duration = (this.buffer || this.media).duration;
if (duration >= Infinity) { // streaming audio
duration = this.media.seekable.end(0);
}
return duration;
},
getCurrentTime: function () {
return this.media && this.media.currentTime;
},
getPlayedPercents: function () {
return (this.getCurrentTime() / this.getDuration()) || 0;
},
getPlaybackRate: function () {
return this.playbackRate || this.media.playbackRate;
},
/**
* Set the audio source playback rate.
*/
setPlaybackRate: function (value) {
this.playbackRate = value || 1;
this.media.playbackRate = this.playbackRate;
},
seekTo: function (start) {
if (start != null) {
this.media.currentTime = start;
}
this.clearPlayEnd();
},
/**
* Plays the loaded audio region.
*
* @param {Number} start Start offset in seconds,
* relative to the beginning of a clip.
* @param {Number} end End offset in seconds,
* relative to the beginning of a clip.
*/
play: function (start, end) {
this.seekTo(start);
this.media.play();
end && this.setPlayEnd(end);
this.fireEvent('play');
},
/**
* Pauses the loaded audio.
*/
pause: function () {
this.media && this.media.pause();
this.clearPlayEnd();
this.fireEvent('pause');
},
setPlayEnd: function (end) {
var my = this;
this.onPlayEnd = function (time) {
if (time >= end) {
my.pause();
my.seekTo(end);
}
};
this.on('audioprocess', this.onPlayEnd);
},
clearPlayEnd: function () {
if (this.onPlayEnd) {
this.un('audioprocess', this.onPlayEnd);
this.onPlayEnd = null;
}
},
getPeaks: function (length, start, end) {
if (this.buffer) {
return WaveSurfer.WebAudio.getPeaks.call(this, length, start, end);
}
return this.peaks || [];
},
getVolume: function () {
return this.media.volume;
},
setVolume: function (val) {
this.media.volume = val;
},
destroy: function () {
this.pause();
this.unAll();
this.media && this.media.parentNode && this.media.parentNode.removeChild(this.media);
this.media = null;
}
});
//For backwards compatibility
WaveSurfer.AudioElement = WaveSurfer.MediaElement;
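/*
 * Minimal sketch of selecting this backend instead of WebAudio; the container,
 * file name and peak values are placeholders. Passing pre-computed peaks lets
 * the waveform render without decoding the audio.
 *
 *   var wavesurfer = WaveSurfer.create({
 *       container: '#waveform',
 *       backend: 'MediaElement'
 *   });
 *   wavesurfer.load('example.mp3', [0.2, 0.4, 0.1, 0.6]);
 */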
'use strict';
WaveSurfer.Drawer = {
init: function (container, params) {
this.container = container;
this.params = params;
this.width = 0;
this.height = params.height * this.params.pixelRatio;
this.lastPos = 0;
this.initDrawer(params);
this.createWrapper();
this.createElements();
},
createWrapper: function () {
this.wrapper = this.container.appendChild(
document.createElement('wave')
);
this.style(this.wrapper, {
display: 'block',
position: 'relative',
userSelect: 'none',
webkitUserSelect: 'none',
height: this.params.height + 'px'
});
if (this.params.fillParent || this.params.scrollParent) {
this.style(this.wrapper, {
width: '100%',
overflowX: this.params.hideScrollbar ? 'hidden' : 'auto',
overflowY: 'hidden'
});
}
this.setupWrapperEvents();
},
handleEvent: function (e, noPrevent) {
!noPrevent && e.preventDefault();
var clientX = e.targetTouches ? e.targetTouches[0].clientX : e.clientX;
var bbox = this.wrapper.getBoundingClientRect();
var nominalWidth = this.width;
var parentWidth = this.getWidth();
var progress;
if (!this.params.fillParent && nominalWidth < parentWidth) {
progress = ((clientX - bbox.left) * this.params.pixelRatio / nominalWidth) || 0;
if (progress > 1) {
progress = 1;
}
} else {
progress = ((clientX - bbox.left + this.wrapper.scrollLeft) / this.wrapper.scrollWidth) || 0;
}
return progress;
},
setupWrapperEvents: function () {
var my = this;
this.wrapper.addEventListener('click', function (e) {
var scrollbarHeight = my.wrapper.offsetHeight - my.wrapper.clientHeight;
if (scrollbarHeight != 0) {
// scrollbar is visible. Check if click was on it
var bbox = my.wrapper.getBoundingClientRect();
if (e.clientY >= bbox.bottom - scrollbarHeight) {
// ignore mousedown as it was on the scrollbar
return;
}
}
if (my.params.interact) {
my.fireEvent('click', e, my.handleEvent(e));
}
});
this.wrapper.addEventListener('scroll', function (e) {
my.fireEvent('scroll', e);
});
},
drawPeaks: function (peaks, length, start, end) {
this.setWidth(length);
this.params.barWidth ?
this.drawBars(peaks, 0, start, end) :
this.drawWave(peaks, 0, start, end);
},
style: function (el, styles) {
Object.keys(styles).forEach(function (prop) {
if (el.style[prop] !== styles[prop]) {
el.style[prop] = styles[prop];
}
});
return el;
},
resetScroll: function () {
if (this.wrapper !== null) {
this.wrapper.scrollLeft = 0;
}
},
recenter: function (percent) {
var position = this.wrapper.scrollWidth * percent;
this.recenterOnPosition(position, true);
},
recenterOnPosition: function (position, immediate) {
var scrollLeft = this.wrapper.scrollLeft;
var half = ~~(this.wrapper.clientWidth / 2);
var target = position - half;
var offset = target - scrollLeft;
var maxScroll = this.wrapper.scrollWidth - this.wrapper.clientWidth;
if (maxScroll == 0) {
// no need to continue if scrollbar is not there
return;
}
// if the cursor is currently visible...
if (!immediate && -half <= offset && offset < half) {
// we'll limit the "re-center" rate.
var rate = 5;
offset = Math.max(-rate, Math.min(rate, offset));
target = scrollLeft + offset;
}
// limit target to valid range (0 to maxScroll)
target = Math.max(0, Math.min(maxScroll, target));
// no use attempting to scroll if we're not moving
if (target != scrollLeft) {
this.wrapper.scrollLeft = target;
}
},
getScrollX: function() {
return Math.round(this.wrapper.scrollLeft * this.params.pixelRatio);
},
getWidth: function () {
return Math.round(this.container.clientWidth * this.params.pixelRatio);
},
setWidth: function (width) {
if (this.width == width) {
return;
}
this.width = width;
if (this.params.fillParent || this.params.scrollParent) {
this.style(this.wrapper, {
width: ''
});
} else {
this.style(this.wrapper, {
width: ~~(this.width / this.params.pixelRatio) + 'px'
});
}
this.updateSize();
},
setHeight: function (height) {
if (height == this.height) { return; }
this.height = height;
this.style(this.wrapper, {
height: ~~(this.height / this.params.pixelRatio) + 'px'
});
this.updateSize();
},
progress: function (progress) {
var minPxDelta = 1 / this.params.pixelRatio;
var pos = Math.round(progress * this.width) * minPxDelta;
if (pos < this.lastPos || pos - this.lastPos >= minPxDelta) {
this.lastPos = pos;
if (this.params.scrollParent && this.params.autoCenter) {
var newPos = ~~(this.wrapper.scrollWidth * progress);
this.recenterOnPosition(newPos);
}
this.updateProgress(pos);
}
},
destroy: function () {
this.unAll();
if (this.wrapper) {
this.container.removeChild(this.wrapper);
this.wrapper = null;
}
},
/* Renderer-specific methods */
initDrawer: function () {},
createElements: function () {},
updateSize: function () {},
drawWave: function (peaks, max) {},
clearWave: function () {},
updateProgress: function (position) {}
};
WaveSurfer.util.extend(WaveSurfer.Drawer, WaveSurfer.Observer);
'use strict';
WaveSurfer.Drawer.Canvas = Object.create(WaveSurfer.Drawer);
WaveSurfer.util.extend(WaveSurfer.Drawer.Canvas, {
createElements: function () {
var waveCanvas = this.wrapper.appendChild(
this.style(document.createElement('canvas'), {
position: 'absolute',
zIndex: 1,
left: 0,
top: 0,
bottom: 0
})
);
this.waveCc = waveCanvas.getContext('2d');
this.progressWave = this.wrapper.appendChild(
this.style(document.createElement('wave'), {
position: 'absolute',
zIndex: 2,
left: 0,
top: 0,
bottom: 0,
overflow: 'hidden',
width: '0',
display: 'none',
boxSizing: 'border-box',
borderRightStyle: 'solid',
borderRightWidth: this.params.cursorWidth + 'px',
borderRightColor: this.params.cursorColor
})
);
if (this.params.waveColor != this.params.progressColor) {
var progressCanvas = this.progressWave.appendChild(
document.createElement('canvas')
);
this.progressCc = progressCanvas.getContext('2d');
}
},
updateSize: function () {
var width = Math.round(this.width / this.params.pixelRatio);
this.waveCc.canvas.width = this.width;
this.waveCc.canvas.height = this.height;
this.style(this.waveCc.canvas, { width: width + 'px'});
this.style(this.progressWave, { display: 'block'});
if (this.progressCc) {
this.progressCc.canvas.width = this.width;
this.progressCc.canvas.height = this.height;
this.style(this.progressCc.canvas, { width: width + 'px'});
}
this.clearWave();
},
clearWave: function () {
this.waveCc.clearRect(0, 0, this.width, this.height);
if (this.progressCc) {
this.progressCc.clearRect(0, 0, this.width, this.height);
}
},
drawBars: function (peaks, channelIndex, start, end) {
var my = this;
// Split channels
if (peaks[0] instanceof Array) {
var channels = peaks;
if (this.params.splitChannels) {
this.setHeight(channels.length * this.params.height * this.params.pixelRatio);
channels.forEach(function(channelPeaks, i) {
my.drawBars(channelPeaks, i, start, end);
});
return;
} else {
peaks = channels[0];
}
}
// Bar wave draws the bottom only as a reflection of the top,
// so we don't need negative values
var hasMinVals = [].some.call(peaks, function (val) { return val < 0; });
// Skip every other value if there are negatives.
var peakIndexScale = 1;
if (hasMinVals) {
peakIndexScale = 2;
}
// A half-pixel offset makes lines crisp
var $ = 0.5 / this.params.pixelRatio;
var width = this.width;
var height = this.params.height * this.params.pixelRatio;
var offsetY = height * channelIndex || 0;
var halfH = height / 2;
var length = peaks.length / peakIndexScale;
var bar = this.params.barWidth * this.params.pixelRatio;
var gap = Math.max(this.params.pixelRatio, ~~(bar / 2));
var step = bar + gap;
var absmax = 1 / this.params.barHeight;
if (this.params.normalize) {
var max = WaveSurfer.util.max(peaks);
var min = WaveSurfer.util.min(peaks);
absmax = -min > max ? -min : max;
}
var scale = length / width;
this.waveCc.fillStyle = this.params.waveColor;
if (this.progressCc) {
this.progressCc.fillStyle = this.params.progressColor;
}
[ this.waveCc, this.progressCc ].forEach(function (cc) {
if (!cc) { return; }
for (var i = (start / scale); i < (end / scale); i += step) {
var peak = peaks[Math.floor(i * scale * peakIndexScale)] || 0;
var h = Math.round(peak / absmax * halfH);
cc.fillRect(i + $, halfH - h + offsetY, bar + $, h * 2);
}
}, this);
},
drawWave: function (peaks, channelIndex, start, end) {
var my = this;
// Split channels
if (peaks[0] instanceof Array) {
var channels = peaks;
if (this.params.splitChannels) {
this.setHeight(channels.length * this.params.height * this.params.pixelRatio);
channels.forEach(function(channelPeaks, i) {
my.drawWave(channelPeaks, i, start, end);
});
return;
} else {
peaks = channels[0];
}
}
// Support arrays without negative peaks
var hasMinValues = [].some.call(peaks, function (val) { return val < 0; });
if (!hasMinValues) {
var reflectedPeaks = [];
for (var i = 0, len = peaks.length; i < len; i++) {
reflectedPeaks[2 * i] = peaks[i];
reflectedPeaks[2 * i + 1] = -peaks[i];
}
peaks = reflectedPeaks;
}
// A half-pixel offset makes lines crisp
var $ = 0.5 / this.params.pixelRatio;
var height = this.params.height * this.params.pixelRatio;
var offsetY = height * channelIndex || 0;
var halfH = height / 2;
var length = ~~(peaks.length / 2);
var scale = 1;
if (this.params.fillParent && this.width != length) {
scale = this.width / length;
}
var absmax = 1 / this.params.barHeight;
if (this.params.normalize) {
var max = WaveSurfer.util.max(peaks);
var min = WaveSurfer.util.min(peaks);
absmax = -min > max ? -min : max;
}
this.waveCc.fillStyle = this.params.waveColor;
if (this.progressCc) {
this.progressCc.fillStyle = this.params.progressColor;
}
[ this.waveCc, this.progressCc ].forEach(function (cc) {
if (!cc) { return; }
cc.beginPath();
cc.moveTo(start * scale + $, halfH + offsetY);
for (var i = start; i < end; i++) {
var h = Math.round(peaks[2 * i] / absmax * halfH);
cc.lineTo(i * scale + $, halfH - h + offsetY);
}
// Draw the bottom edge going backwards, to make a single
// closed hull to fill.
for (var i = end - 1; i >= start; i--) {
var h = Math.round(peaks[2 * i + 1] / absmax * halfH);
cc.lineTo(i * scale + $, halfH - h + offsetY);
}
cc.closePath();
cc.fill();
// Always draw a median line
cc.fillRect(0, halfH + offsetY - $, this.width, $);
}, this);
},
updateProgress: function (pos) {
this.style(this.progressWave, { width: pos + 'px' });
},
getImage: function(type, quality) {
return this.waveCc.canvas.toDataURL(type, quality);
}
});
'use strict';
WaveSurfer.Drawer.MultiCanvas = Object.create(WaveSurfer.Drawer);
WaveSurfer.util.extend(WaveSurfer.Drawer.MultiCanvas, {
initDrawer: function (params) {
this.maxCanvasWidth = params.maxCanvasWidth != null ? params.maxCanvasWidth : 4000;
this.maxCanvasElementWidth = Math.round(this.maxCanvasWidth / this.params.pixelRatio);
if (this.maxCanvasWidth <= 1) {
throw 'maxCanvasWidth must be greater than 1.';
} else if (this.maxCanvasWidth % 2 == 1) {
throw 'maxCanvasWidth must be an even number.';
}
this.hasProgressCanvas = this.params.waveColor != this.params.progressColor;
this.halfPixel = 0.5 / this.params.pixelRatio;
this.canvases = [];
},
createElements: function () {
this.progressWave = this.wrapper.appendChild(
this.style(document.createElement('wave'), {
position: 'absolute',
zIndex: 2,
left: 0,
top: 0,
bottom: 0,
overflow: 'hidden',
width: '0',
display: 'none',
boxSizing: 'border-box',
borderRightStyle: 'solid',
borderRightWidth: this.params.cursorWidth + 'px',
borderRightColor: this.params.cursorColor
})
);
this.addCanvas();
},
updateSize: function () {
var totalWidth = Math.round(this.width / this.params.pixelRatio),
requiredCanvases = Math.ceil(totalWidth / this.maxCanvasElementWidth);
while (this.canvases.length < requiredCanvases) {
this.addCanvas();
}
while (this.canvases.length > requiredCanvases) {
this.removeCanvas();
}
for (var i in this.canvases) {
// Add some overlap to prevent vertical white stripes, keep the width even for simplicity.
var canvasWidth = this.maxCanvasWidth + 2 * Math.ceil(this.params.pixelRatio / 2);
if (i == this.canvases.length - 1) {
canvasWidth = this.width - (this.maxCanvasWidth * (this.canvases.length - 1));
}
this.updateDimensions(this.canvases[i], canvasWidth, this.height);
this.clearWaveForEntry(this.canvases[i]);
}
},
addCanvas: function () {
var entry = {},
leftOffset = this.maxCanvasElementWidth * this.canvases.length;
entry.wave = this.wrapper.appendChild(
this.style(document.createElement('canvas'), {
position: 'absolute',
zIndex: 1,
left: leftOffset + 'px',
top: 0,
bottom: 0,
height: '100%'
})
);
entry.waveCtx = entry.wave.getContext('2d');
if (this.hasProgressCanvas) {
entry.progress = this.progressWave.appendChild(
this.style(document.createElement('canvas'), {
position: 'absolute',
left: leftOffset + 'px',
top: 0,
bottom: 0,
height: '100%'
})
);
entry.progressCtx = entry.progress.getContext('2d');
}
this.canvases.push(entry);
},
removeCanvas: function () {
var lastEntry = this.canvases.pop();
lastEntry.wave.parentElement.removeChild(lastEntry.wave);
if (this.hasProgressCanvas) {
lastEntry.progress.parentElement.removeChild(lastEntry.progress);
}
},
updateDimensions: function (entry, width, height) {
var elementWidth = Math.round(width / this.params.pixelRatio),
totalWidth = Math.round(this.width / this.params.pixelRatio);
// Where the canvas starts and ends in the waveform, represented as a decimal between 0 and 1.
entry.start = (entry.waveCtx.canvas.offsetLeft / totalWidth) || 0;
entry.end = entry.start + elementWidth / totalWidth;
entry.waveCtx.canvas.width = width;
entry.waveCtx.canvas.height = height;
this.style(entry.waveCtx.canvas, { width: elementWidth + 'px'});
this.style(this.progressWave, { display: 'block'});
if (this.hasProgressCanvas) {
entry.progressCtx.canvas.width = width;
entry.progressCtx.canvas.height = height;
this.style(entry.progressCtx.canvas, { width: elementWidth + 'px'});
}
},
clearWave: function () {
for (var i in this.canvases) {
this.clearWaveForEntry(this.canvases[i]);
}
},
clearWaveForEntry: function (entry) {
entry.waveCtx.clearRect(0, 0, entry.waveCtx.canvas.width, entry.waveCtx.canvas.height);
if (this.hasProgressCanvas) {
entry.progressCtx.clearRect(0, 0, entry.progressCtx.canvas.width, entry.progressCtx.canvas.height);
}
},
drawBars: function (peaks, channelIndex, start, end) {
var my = this;
// Split channels
if (peaks[0] instanceof Array) {
var channels = peaks;
if (this.params.splitChannels) {
this.setHeight(channels.length * this.params.height * this.params.pixelRatio);
channels.forEach(function(channelPeaks, i) {
my.drawBars(channelPeaks, i, start, end);
});
return;
} else {
peaks = channels[0];
}
}
// Bar wave draws the bottom only as a reflection of the top,
// so we don't need negative values
var hasMinVals = [].some.call(peaks, function (val) { return val < 0; });
// Skip every other value if there are negatives.
var peakIndexScale = 1;
if (hasMinVals) {
peakIndexScale = 2;
}
// A half-pixel offset makes lines crisp
var width = this.width;
var height = this.params.height * this.params.pixelRatio;
var offsetY = height * channelIndex || 0;
var halfH = height / 2;
var length = peaks.length / peakIndexScale;
var bar = this.params.barWidth * this.params.pixelRatio;
var gap = Math.max(this.params.pixelRatio, ~~(bar / 2));
var step = bar + gap;
var absmax = 1 / this.params.barHeight;
if (this.params.normalize) {
var max = WaveSurfer.util.max(peaks);
var min = WaveSurfer.util.min(peaks);
absmax = -min > max ? -min : max;
}
var scale = length / width;
for (var i = (start / scale); i < (end / scale); i += step) {
var peak = peaks[Math.floor(i * scale * peakIndexScale)] || 0;
var h = Math.round(peak / absmax * halfH);
this.fillRect(i + this.halfPixel, halfH - h + offsetY, bar + this.halfPixel, h * 2);
}
},
drawWave: function (peaks, channelIndex, start, end) {
var my = this;
// Split channels
if (peaks[0] instanceof Array) {
var channels = peaks;
if (this.params.splitChannels) {
this.setHeight(channels.length * this.params.height * this.params.pixelRatio);
channels.forEach(function(channelPeaks, i) {
my.drawWave(channelPeaks, i, start, end);
});
return;
} else {
peaks = channels[0];
}
}
// Support arrays without negative peaks
var hasMinValues = [].some.call(peaks, function (val) { return val < 0; });
if (!hasMinValues) {
var reflectedPeaks = [];
for (var i = 0, len = peaks.length; i < len; i++) {
reflectedPeaks[2 * i] = peaks[i];
reflectedPeaks[2 * i + 1] = -peaks[i];
}
peaks = reflectedPeaks;
}
// A half-pixel offset makes lines crisp
var height = this.params.height * this.params.pixelRatio;
var offsetY = height * channelIndex || 0;
var halfH = height / 2;
var absmax = 1 / this.params.barHeight;
if (this.params.normalize) {
var max = WaveSurfer.util.max(peaks);
var min = WaveSurfer.util.min(peaks);
absmax = -min > max ? -min : max;
}
this.drawLine(peaks, absmax, halfH, offsetY, start, end);
// Always draw a median line
this.fillRect(0, halfH + offsetY - this.halfPixel, this.width, this.halfPixel);
},
drawLine: function (peaks, absmax, halfH, offsetY, start, end) {
for (var index in this.canvases) {
var entry = this.canvases[index];
this.setFillStyles(entry);
this.drawLineToContext(entry, entry.waveCtx, peaks, absmax, halfH, offsetY, start, end);
this.drawLineToContext(entry, entry.progressCtx, peaks, absmax, halfH, offsetY, start, end);
}
},
drawLineToContext: function (entry, ctx, peaks, absmax, halfH, offsetY, start, end) {
if (!ctx) { return; }
var length = peaks.length / 2;
var scale = 1;
if (this.params.fillParent && this.width != length) {
scale = this.width / length;
}
var first = Math.round(length * entry.start),
last = Math.round(length * entry.end);
if (first > end || last < start) { return; }
var canvasStart = Math.max(first, start);
var canvasEnd = Math.min(last, end);
ctx.beginPath();
ctx.moveTo((canvasStart - first) * scale + this.halfPixel, halfH + offsetY);
for (var i = canvasStart; i < canvasEnd; i++) {
var peak = peaks[2 * i] || 0;
var h = Math.round(peak / absmax * halfH);
ctx.lineTo((i - first) * scale + this.halfPixel, halfH - h + offsetY);
}
// Draw the bottom edge going backwards, to make a single
// closed hull to fill.
for (var i = canvasEnd - 1; i >= canvasStart; i--) {
var peak = peaks[2 * i + 1] || 0;
var h = Math.round(peak / absmax * halfH);
ctx.lineTo((i - first) * scale + this.halfPixel, halfH - h + offsetY);
}
ctx.closePath();
ctx.fill();
},
fillRect: function (x, y, width, height) {
var startCanvas = Math.floor(x / this.maxCanvasWidth);
var endCanvas = Math.min(Math.ceil((x + width) / this.maxCanvasWidth) + 1,
this.canvases.length);
for (var i = startCanvas; i < endCanvas; i++) {
var entry = this.canvases[i],
leftOffset = i * this.maxCanvasWidth;
var intersection = {
x1: Math.max(x, i * this.maxCanvasWidth),
y1: y,
x2: Math.min(x + width, i * this.maxCanvasWidth + entry.waveCtx.canvas.width),
y2: y + height
};
if (intersection.x1 < intersection.x2) {
this.setFillStyles(entry);
this.fillRectToContext(entry.waveCtx,
intersection.x1 - leftOffset,
intersection.y1,
intersection.x2 - intersection.x1,
intersection.y2 - intersection.y1);
this.fillRectToContext(entry.progressCtx,
intersection.x1 - leftOffset,
intersection.y1,
intersection.x2 - intersection.x1,
intersection.y2 - intersection.y1);
}
}
},
fillRectToContext: function (ctx, x, y, width, height) {
if (!ctx) { return; }
ctx.fillRect(x, y, width, height);
},
setFillStyles: function (entry) {
entry.waveCtx.fillStyle = this.params.waveColor;
if (this.hasProgressCanvas) {
entry.progressCtx.fillStyle = this.params.progressColor;
}
},
updateProgress: function (pos) {
this.style(this.progressWave, { width: pos + 'px' });
},
/**
* Combine all available canvasses together.
*
* @param {String} type - an optional value of a format type. Default is image/png.
* @param {Number} quality - an optional value between 0 and 1. Default is 0.92.
*
*/
getImage: function(type, quality) {
var availableCanvas = [];
this.canvases.forEach(function (entry) {
availableCanvas.push(entry.wave.toDataURL(type, quality));
});
return availableCanvas.length > 1 ? availableCanvas : availableCanvas[0];
}
});
'use strict';
WaveSurfer.Drawer.SplitWavePointPlot = Object.create(WaveSurfer.Drawer.Canvas);
WaveSurfer.util.extend(WaveSurfer.Drawer.SplitWavePointPlot, {
defaultPlotParams: {
plotNormalizeTo: 'whole',
plotTimeStart: 0,
plotMin: 0,
plotMax: 1,
plotColor : '#f63',
plotProgressColor : '#F00',
plotPointHeight: 2,
plotPointWidth: 2,
plotSeparator: true,
plotSeparatorColor: 'black',
plotRangeDisplay: false,
plotRangeUnits: '',
plotRangePrecision: 4,
plotRangeIgnoreOutliers: false,
plotRangeFontSize: 12,
        plotRangeFontType: 'Arial',
waveDrawMedianLine: true,
plotFileDelimiter: '\t'
},
//object variables that get manipulated by various object functions
plotTimeStart: 0, //the start time of our wave according to plot data
plotTimeEnd: -1, //the end of our wave according to plot data
plotArrayLoaded: false,
plotArray: [], //array of plot data objects containing time and plot value
plotPoints: [], //calculated average plot points corresponding to value of our wave
plotMin: 0,
plotMax: 1,
/**
* Initializes the plot array. If params.plotFileUrl is provided an ajax call will be
* executed and drawing of the wave is delayed until plot info is retrieved
* @param params
*/
initDrawer: function (params) {
var my = this;
//set defaults if not passed in
for(var paramName in this.defaultPlotParams) {
if(this.params[paramName] === undefined) {
this.params[paramName] = this.defaultPlotParams[paramName];
}
}
//set the plotTimeStart
this.plotTimeStart = this.params.plotTimeStart;
        //check to see if plotTimeEnd was passed in
if(this.params.plotTimeEnd !== undefined) {
this.plotTimeEnd = this.params.plotTimeEnd;
}
//set the plot array
if (Array.isArray(params.plotArray)) {
this.plotArray = params.plotArray;
this.plotArrayLoaded = true;
}
//Need to load the plot array from ajax with our callback
else {
var onPlotArrayLoaded = function (plotArray) {
my.plotArray = plotArray;
my.plotArrayLoaded = true;
my.fireEvent('plot_array_loaded');
};
this.loadPlotArrayFromFile(params.plotFileUrl, onPlotArrayLoaded, this.params.plotFileDelimiter);
}
},
/**
* Draw the peaks - this overrides the drawer.js function and does the following additional steps
* - ensures that the plotArray has already been loaded, if not it loads via ajax
* - moves the wave form to where channel 1 would normally be
* @param peaks
* @param length
* @param start
* @param end
*/
drawPeaks: function (peaks, length, start, end) {
//make sure that the plot array is already loaded
if (this.plotArrayLoaded == true) {
this.setWidth(length);
//fake that we are splitting channels
this.splitChannels = true;
this.params.height = this.params.height/2;
if (peaks[0] instanceof Array) {
peaks = peaks[0];
}
this.params.barWidth ?
this.drawBars(peaks, 1, start, end) :
this.drawWave(peaks, 1, start, end);
//set the height back to the original
this.params.height = this.params.height*2;
this.calculatePlots();
this.drawPlots();
}
//otherwise wait for the plot array to be loaded and then draw again
else {
var my = this;
            my.on('plot_array_loaded', function () {
my.drawPeaks(peaks, length, start, end);
});
}
},
/**
* Loop through the calculated plot values and actually draw them
*/
drawPlots: function() {
var height = this.params.height * this.params.pixelRatio / 2;
var $ = 0.5 / this.params.pixelRatio;
this.waveCc.fillStyle = this.params.plotColor;
if(this.progressCc) {
this.progressCc.fillStyle = this.params.plotProgressColor;
}
for(var i in this.plotPoints) {
var x = parseInt(i);
var y = height - this.params.plotPointHeight - (this.plotPoints[i] * (height - this.params.plotPointHeight));
var pointHeight = this.params.plotPointHeight;
this.waveCc.fillRect(x, y, this.params.plotPointWidth, pointHeight);
if(this.progressCc) {
this.progressCc.fillRect(x, y, this.params.plotPointWidth, pointHeight);
}
}
//draw line to separate the two waves
if(this.params.plotSeparator) {
this.waveCc.fillStyle = this.params.plotSeparatorColor;
this.waveCc.fillRect(0, height, this.width, $);
}
if(this.params.plotRangeDisplay) {
this.displayPlotRange();
}
},
/**
* Display the range for the plot graph
*/
displayPlotRange: function()
{
var fontSize = this.params.plotRangeFontSize * this.params.pixelRatio;
var maxRange = this.plotMax.toPrecision(this.params.plotRangePrecision) + ' ' + this.params.plotRangeUnits;
var minRange = this.plotMin.toPrecision(this.params.plotRangePrecision) + ' ' + this.params.plotRangeUnits;
this.waveCc.font = fontSize.toString() + 'px ' + this.params.plotRangeFontType;
this.waveCc.fillText(maxRange, 3, fontSize);
this.waveCc.fillText(minRange, 3, this.height/2);
},
/**
* This function loops through the plotArray and converts it to the plot points
* to be drawn on the canvas keyed by their position
*/
calculatePlots: function() {
//reset plots array
this.plotPoints = {};
//make sure we have our plotTimeEnd
this.calculatePlotTimeEnd();
var pointsForAverage = [];
var previousWaveIndex = -1;
var maxPlot = 0;
var minPlot = 99999999999999;
var maxSegmentPlot = 0;
var minSegmentPlot = 99999999999999;
var duration = this.plotTimeEnd - this.plotTimeStart;
//loop through our plotArray and map values to wave indexes and take the average values for each wave index
for(var i = 0; i < this.plotArray.length; i++) {
var dataPoint = this.plotArray[i];
if(dataPoint.value > maxPlot) {maxPlot = dataPoint.value;}
if(dataPoint.value < minPlot) {minPlot = dataPoint.value;}
//make sure we are in the specified range
if(dataPoint.time >= this.plotTimeStart && dataPoint.time <= this.plotTimeEnd) {
//get the wave index corresponding to the data point
var waveIndex = Math.round(this.width * (dataPoint.time - this.plotTimeStart) / duration);
pointsForAverage.push(dataPoint.value);
            //if we have moved on to a new position in our wave, record the average and reset previousWaveIndex
if(waveIndex !== previousWaveIndex) {
if(pointsForAverage.length > 0) {
//get the average plot for this point
var avgPlot = this.avg(pointsForAverage);
//check for min max
if(avgPlot > maxSegmentPlot) {maxSegmentPlot = avgPlot;}
if(avgPlot < minSegmentPlot) {minSegmentPlot = avgPlot;}
//add plot to the position
this.plotPoints[previousWaveIndex] = avgPlot;
pointsForAverage = [];
}
}
previousWaveIndex = waveIndex;
}
}
        //normalize the plot points
if(this.params.plotNormalizeTo == 'whole') {
this.plotMin = minPlot;
this.plotMax = maxPlot;
}
else if(this.params.plotNormalizeTo == 'values') {
this.plotMin = this.params.plotMin;
this.plotMax = this.params.plotMax;
}
else {
this.plotMin = minSegmentPlot;
this.plotMax = maxSegmentPlot;
}
this.normalizeValues();
},
/**
     * Function to take all of the plot points in this.plotPoints and normalize them from 0 to 1
     * depending on the this.plotMin and this.plotMax values
*/
normalizeValues: function() {
var normalizedValues = {};
//check to make sure we should be normalizing
if(this.params.plotNormalizeTo === 'none') {return;}
for(var i in this.plotPoints) {
//get the normalized value between 0 and 1
var normalizedValue = (this.plotPoints[i] - this.plotMin) / (this.plotMax - this.plotMin);
//check if the value is above our specified range max
if(normalizedValue > 1) {
if(!this.params.plotRangeIgnoreOutliers) {
normalizedValues[i] = 1;
}
}
            //check if the value is below our specified range minimum
else if(normalizedValue < 0) {
if(!this.params.plotRangeIgnoreOutliers) {
normalizedValues[i] = 0;
}
}
//in our range add the normalized value
else {
normalizedValues[i] = normalizedValue;
}
}
this.plotPoints = normalizedValues;
},
/**
     * Function to load the plot array from an external file
*
* The text file should contain a series of lines.
* Each line should contain [audio time] [delimiter character] [plot value]
* e.g. "1.2355 [tab] 124.2321"
*
* @param plotFileUrl url of the file containing time and value information
* @param onSuccess function to run on success
* @param delimiter the delimiter that separates the time and values on each line
*/
loadPlotArrayFromFile: function(plotFileUrl, onSuccess, delimiter) {
//default delimiter to tab character
if (delimiter === undefined) {delimiter = '\t';}
var plotArray = [];
var options = {
url: plotFileUrl,
responseType: 'text'
};
var fileAjax = WaveSurfer.util.ajax(options);
fileAjax.on('load', function (data) {
if (data.currentTarget.status == 200) {
//split the file by line endings
var plotLines = data.currentTarget.responseText.split('\n');
//loop through each line and find the time and plot values (delimited by tab)
for (var i = 0; i < plotLines.length; i++) {
var plotParts = plotLines[i].split(delimiter);
if(plotParts.length == 2) {
plotArray.push({time: parseFloat(plotParts[0]), value: parseFloat(plotParts[1])});
}
}
//run success function
onSuccess(plotArray);
}
});
},
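    /*
     * Illustrative sketch of a plot file consumed by the loader above, using
     * the default tab delimiter (times and values are made up):
     *
     *   0.00 [tab] 118.2
     *   0.01 [tab] 121.7
     *   0.02 [tab] 119.9
     *
     * Such a file would typically be referenced through the drawer params,
     * e.g. { renderer: 'SplitWavePointPlot', plotFileUrl: '/pitch.txt' },
     * where the renderer name and URL are illustrative placeholders.
     */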
/***
* Calculate the end time of the plot
*/
calculatePlotTimeEnd: function() {
if(this.params.plotTimeEnd !== undefined) {
this.plotTimeEnd = this.params.plotTimeEnd;
}
else {
this.plotTimeEnd = this.plotArray[this.plotArray.length -1].time;
}
},
/**
* Quick convenience function to average numbers in an array
* @param array of values
* @returns {number}
*/
avg: function(values) {
var sum = values.reduce(function(a, b) {return a+b;});
return sum/values.length;
}
});
WaveSurfer.util.extend(WaveSurfer.Drawer.SplitWavePointPlot, WaveSurfer.Observer);
'use strict';
WaveSurfer.PeakCache = {
init: function() {
this.clearPeakCache();
},
clearPeakCache: function() {
// Flat array with entries that are always in pairs to mark the
// beginning and end of each subrange. This is a convenience so we can
// iterate over the pairs for easy set difference operations.
this.peakCacheRanges = [];
// Length of the entire cachable region, used for resetting the cache
// when this changes (zoom events, for instance).
this.peakCacheLength = -1;
},
addRangeToPeakCache: function(length, start, end) {
if (length != this.peakCacheLength) {
this.clearPeakCache();
this.peakCacheLength = length;
}
// Return ranges that weren't in the cache before the call.
var uncachedRanges = [];
var i = 0;
// Skip ranges before the current start.
while (i < this.peakCacheRanges.length && this.peakCacheRanges[i] < start) {
i++;
}
// If |i| is even, |start| falls after an existing range. Otherwise,
// |start| falls between an existing range, and the uncached region
// starts when we encounter the next node in |peakCacheRanges| or
// |end|, whichever comes first.
if (i % 2 == 0) {
uncachedRanges.push(start);
}
while (i < this.peakCacheRanges.length && this.peakCacheRanges[i] <= end) {
uncachedRanges.push(this.peakCacheRanges[i]);
i++;
}
// If |i| is even, |end| is after all existing ranges.
if (i % 2 == 0) {
uncachedRanges.push(end);
}
// Filter out the 0-length ranges.
uncachedRanges = uncachedRanges.filter(function(item, pos, arr) {
if (pos == 0) {
return item != arr[pos + 1];
} else if (pos == arr.length - 1) {
return item != arr[pos - 1];
} else {
return item != arr[pos - 1] && item != arr[pos + 1];
}
});
// Merge the two ranges together, uncachedRanges will either contain
// wholly new points, or duplicates of points in peakCacheRanges. If
// duplicates are detected, remove both and extend the range.
this.peakCacheRanges = this.peakCacheRanges.concat(uncachedRanges);
this.peakCacheRanges = this.peakCacheRanges.sort(function(a, b) {
return a - b;
}).filter(function(item, pos, arr) {
if (pos == 0) {
return item != arr[pos + 1];
} else if (pos == arr.length - 1) {
return item != arr[pos - 1];
} else {
return item != arr[pos - 1] && item != arr[pos + 1];
}
});
// Push the uncached ranges into an array of arrays for ease of
// iteration in the functions that call this.
var uncachedRangePairs = [];
for (i = 0; i < uncachedRanges.length; i += 2) {
uncachedRangePairs.push([uncachedRanges[i], uncachedRanges[i+1]]);
}
return uncachedRangePairs;
},
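    /*
     * Illustrative worked example (values made up): starting from an empty
     * cache, addRangeToPeakCache(1000, 100, 200) returns [[100, 200]] and
     * stores [100, 200]. A later call addRangeToPeakCache(1000, 150, 300)
     * returns only the uncached tail [[200, 300]], and the stored ranges merge
     * into [100, 300]. Calling it with a different length resets the cache.
     */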
// For testing
getCacheRanges: function() {
var peakCacheRangePairs = [];
for (var i = 0; i < this.peakCacheRanges.length; i += 2) {
peakCacheRangePairs.push([this.peakCacheRanges[i], this.peakCacheRanges[i+1]]);
}
return peakCacheRangePairs;
}
};
'use strict';
/* Init from HTML */
(function () {
var init = function () {
var containers = document.querySelectorAll('wavesurfer');
Array.prototype.forEach.call(containers, function (el) {
var params = WaveSurfer.util.extend({
container: el,
backend: 'MediaElement',
mediaControls: true
}, el.dataset);
el.style.display = 'block';
var wavesurfer = WaveSurfer.create(params);
if (el.dataset.peaks) {
var peaks = JSON.parse(el.dataset.peaks);
}
wavesurfer.load(el.dataset.url, peaks);
});
};
if (document.readyState === 'complete') {
init();
} else {
window.addEventListener('load', init);
}
}());
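/*
 * Minimal sketch of the markup picked up by the init-from-HTML block above;
 * the file name and color are placeholders. Each data attribute becomes a
 * wavesurfer option via el.dataset.
 *
 *   <wavesurfer data-url="example.mp3" data-wave-color="violet"></wavesurfer>
 */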
return WaveSurfer;
}));<|fim▁end|> | |
<|file_name|>balancer_v1_wrapper.go<|end_file_name|><|fim▁begin|>/*
*
* Copyright 2017 gRPC authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package grpc
import (
"context"
"strings"
"sync"
"google.golang.org/grpc/balancer"
"google.golang.org/grpc/connectivity"
"google.golang.org/grpc/grpclog"
"google.golang.org/grpc/resolver"
)
type balancerWrapperBuilder struct {
b Balancer // The v1 balancer.
}
func (bwb *balancerWrapperBuilder) Build(cc balancer.ClientConn, opts balancer.BuildOptions) balancer.Balancer {
targetAddr := cc.Target()
targetSplitted := strings.Split(targetAddr, ":///")
if len(targetSplitted) >= 2 {
targetAddr = targetSplitted[1]
}
bwb.b.Start(targetAddr, BalancerConfig{
DialCreds: opts.DialCreds,
Dialer: opts.Dialer,
})
_, pickfirst := bwb.b.(*pickFirst)
bw := &balancerWrapper{
balancer: bwb.b,
pickfirst: pickfirst,
cc: cc,
targetAddr: targetAddr,
startCh: make(chan struct{}),
conns: make(map[resolver.Address]balancer.SubConn),
connSt: make(map[balancer.SubConn]*scState),
csEvltr: &balancer.ConnectivityStateEvaluator{},
state: connectivity.Idle,
}
cc.UpdateBalancerState(connectivity.Idle, bw)
go bw.lbWatcher()
return bw
}
func (bwb *balancerWrapperBuilder) Name() string {
return "wrapper"
}
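// Minimal sketch of how this builder typically comes into play, seen from the
// caller's side of the package: a v1 balancer is supplied through the
// deprecated WithBalancer dial option and wrapped by balancerWrapperBuilder.
// The resolver variable and target are assumed placeholders.
//
//	b := grpc.RoundRobin(myNamingResolver)
//	conn, err := grpc.Dial(target, grpc.WithBalancer(b))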
type scState struct {
addr Address // The v1 address type.
s connectivity.State
down func(error)
}
type balancerWrapper struct {
balancer Balancer // The v1 balancer.
pickfirst bool
cc balancer.ClientConn
targetAddr string // Target without the scheme.
mu sync.Mutex
conns map[resolver.Address]balancer.SubConn
connSt map[balancer.SubConn]*scState
// This channel is closed when handling the first resolver result.
// lbWatcher blocks until this is closed, to avoid race between
// - NewSubConn is created, cc wants to notify balancer of state changes;
// - Build hasn't return, cc doesn't have access to balancer.
startCh chan struct{}
// To aggregate the connectivity state.
csEvltr *balancer.ConnectivityStateEvaluator
state connectivity.State
}
// lbWatcher watches the Notify channel of the balancer and manages
// connections accordingly.
func (bw *balancerWrapper) lbWatcher() {
<-bw.startCh
notifyCh := bw.balancer.Notify()
if notifyCh == nil {
// There's no resolver in the balancer. Connect directly.
a := resolver.Address{
Addr: bw.targetAddr,
Type: resolver.Backend,
}
sc, err := bw.cc.NewSubConn([]resolver.Address{a}, balancer.NewSubConnOptions{})
if err != nil {
grpclog.Warningf("Error creating connection to %v. Err: %v", a, err)
} else {
bw.mu.Lock()
bw.conns[a] = sc
bw.connSt[sc] = &scState{
addr: Address{Addr: bw.targetAddr},
s: connectivity.Idle,
}
bw.mu.Unlock()
sc.Connect()
}
return
}
for addrs := range notifyCh {
grpclog.Infof("balancerWrapper: got update addr from Notify: %v\n", addrs)
if bw.pickfirst {
var (
oldA resolver.Address
oldSC balancer.SubConn
)
bw.mu.Lock()
for oldA, oldSC = range bw.conns {
break
}
bw.mu.Unlock()
if len(addrs) <= 0 {
if oldSC != nil {
// Teardown old sc.
bw.mu.Lock()
delete(bw.conns, oldA)
delete(bw.connSt, oldSC)
bw.mu.Unlock()
bw.cc.RemoveSubConn(oldSC)
}
continue
}
var newAddrs []resolver.Address
for _, a := range addrs {
newAddr := resolver.Address{
Addr: a.Addr,
Type: resolver.Backend, // All addresses from balancer are all backends.
ServerName: "",
Metadata: a.Metadata,
}
newAddrs = append(newAddrs, newAddr)
}
if oldSC == nil {
// Create new sc.
sc, err := bw.cc.NewSubConn(newAddrs, balancer.NewSubConnOptions{})
if err != nil {
grpclog.Warningf("Error creating connection to %v. Err: %v", newAddrs, err)
} else {
bw.mu.Lock()
// For pickfirst, there should be only one SubConn, so the
// address doesn't matter. All states updating (up and down)
// and picking should all happen on that only SubConn.
bw.conns[resolver.Address{}] = sc
bw.connSt[sc] = &scState{
addr: addrs[0], // Use the first address.
s: connectivity.Idle,
}
bw.mu.Unlock()
sc.Connect()
}
} else {
bw.mu.Lock()
bw.connSt[oldSC].addr = addrs[0]
bw.mu.Unlock()
oldSC.UpdateAddresses(newAddrs)
}
} else {
var (
add []resolver.Address // Addresses need to setup connections.
del []balancer.SubConn // Connections need to tear down.
)
resAddrs := make(map[resolver.Address]Address)
for _, a := range addrs {
resAddrs[resolver.Address{
Addr: a.Addr,
Type: resolver.Backend, // All addresses from balancer are all backends.
ServerName: "",
Metadata: a.Metadata,
}] = a
}
bw.mu.Lock()
for a := range resAddrs {
if _, ok := bw.conns[a]; !ok {
add = append(add, a)
}
}
for a, c := range bw.conns {
if _, ok := resAddrs[a]; !ok {
del = append(del, c)
delete(bw.conns, a)
// Keep the state of this sc in bw.connSt until its state becomes Shutdown.
}
}
bw.mu.Unlock()
for _, a := range add {
sc, err := bw.cc.NewSubConn([]resolver.Address{a}, balancer.NewSubConnOptions{})
if err != nil {
grpclog.Warningf("Error creating connection to %v. Err: %v", a, err)
} else {
bw.mu.Lock()
bw.conns[a] = sc
bw.connSt[sc] = &scState{
addr: resAddrs[a],
s: connectivity.Idle,
}
bw.mu.Unlock()<|fim▁hole|> }
for _, c := range del {
bw.cc.RemoveSubConn(c)
}
}
}
}
func (bw *balancerWrapper) HandleSubConnStateChange(sc balancer.SubConn, s connectivity.State) {
bw.mu.Lock()
defer bw.mu.Unlock()
scSt, ok := bw.connSt[sc]
if !ok {
return
}
if s == connectivity.Idle {
sc.Connect()
}
oldS := scSt.s
scSt.s = s
if oldS != connectivity.Ready && s == connectivity.Ready {
scSt.down = bw.balancer.Up(scSt.addr)
} else if oldS == connectivity.Ready && s != connectivity.Ready {
if scSt.down != nil {
scSt.down(errConnClosing)
}
}
sa := bw.csEvltr.RecordTransition(oldS, s)
if bw.state != sa {
bw.state = sa
}
bw.cc.UpdateBalancerState(bw.state, bw)
if s == connectivity.Shutdown {
// Remove state for this sc.
delete(bw.connSt, sc)
}
}
func (bw *balancerWrapper) HandleResolvedAddrs([]resolver.Address, error) {
bw.mu.Lock()
defer bw.mu.Unlock()
select {
case <-bw.startCh:
default:
close(bw.startCh)
}
// There should be a resolver inside the balancer.
// All updates here, if any, are ignored.
}
func (bw *balancerWrapper) Close() {
bw.mu.Lock()
defer bw.mu.Unlock()
select {
case <-bw.startCh:
default:
close(bw.startCh)
}
bw.balancer.Close()
}
// The picker is the balancerWrapper itself.
// Pick should never return ErrNoSubConnAvailable.
// It either blocks or returns error, consistent with v1 balancer Get().
func (bw *balancerWrapper) Pick(ctx context.Context, opts balancer.PickOptions) (balancer.SubConn, func(balancer.DoneInfo), error) {
failfast := true // Default failfast is true.
if ss, ok := rpcInfoFromContext(ctx); ok {
failfast = ss.failfast
}
a, p, err := bw.balancer.Get(ctx, BalancerGetOptions{BlockingWait: !failfast})
if err != nil {
return nil, nil, err
}
var done func(balancer.DoneInfo)
if p != nil {
done = func(i balancer.DoneInfo) { p() }
}
var sc balancer.SubConn
bw.mu.Lock()
defer bw.mu.Unlock()
if bw.pickfirst {
// Get the first sc in conns.
for _, sc = range bw.conns {
break
}
} else {
var ok bool
sc, ok = bw.conns[resolver.Address{
Addr: a.Addr,
Type: resolver.Backend,
ServerName: "",
Metadata: a.Metadata,
}]
if !ok && failfast {
return nil, nil, balancer.ErrTransientFailure
}
if s, ok := bw.connSt[sc]; failfast && (!ok || s.s != connectivity.Ready) {
// If the returned sc is not ready and RPC is failfast,
// return error, and this RPC will fail.
return nil, nil, balancer.ErrTransientFailure
}
}
return sc, done, nil
}<|fim▁end|> | sc.Connect()
} |
<|file_name|>eventlet.py<|end_file_name|><|fim▁begin|># Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import os
import socket
from oslo_concurrency import processutils<|fim▁hole|>from oslo_config import cfg
import oslo_i18n
import pbr.version
# NOTE(dstanek): i18n.enable_lazy() must be called before
# keystone.i18n._() is called to ensure it has the desired lazy lookup
# behavior. This includes cases, like keystone.exceptions, where
# keystone.i18n._() is called at import time.
oslo_i18n.enable_lazy()
from keystone.common import environment
from keystone.common import utils
from keystone import config
from keystone.i18n import _
from keystone.openstack.common import service
from keystone.openstack.common import systemd
from keystone.server import common
from keystone import service as keystone_service
CONF = cfg.CONF
class ServerWrapper(object):
"""Wraps a Server with some launching info & capabilities."""
def __init__(self, server, workers):
self.server = server
self.workers = workers
def launch_with(self, launcher):
self.server.listen()
if self.workers > 1:
# Use multi-process launcher
launcher.launch_service(self.server, self.workers)
else:
# Use single process launcher
launcher.launch_service(self.server)
def create_server(conf, name, host, port, workers):
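    """Load the paste application and wrap it in an eventlet Server (with
    optional SSL); returns (name, ServerWrapper) for later launching."""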
app = keystone_service.loadapp('config:%s' % conf, name)
server = environment.Server(app, host=host, port=port,
keepalive=CONF.eventlet_server.tcp_keepalive,
keepidle=CONF.eventlet_server.tcp_keepidle)
if CONF.eventlet_server_ssl.enable:
server.set_ssl(CONF.eventlet_server_ssl.certfile,
CONF.eventlet_server_ssl.keyfile,
CONF.eventlet_server_ssl.ca_certs,
CONF.eventlet_server_ssl.cert_required)
return name, ServerWrapper(server, workers)
def serve(*servers):
logging.warning(_('Running keystone via eventlet is deprecated as of Kilo '
'in favor of running in a WSGI server (e.g. mod_wsgi). '
'Support for keystone under eventlet will be removed in '
'the "M"-Release.'))
if max([server[1].workers for server in servers]) > 1:
launcher = service.ProcessLauncher()
else:
launcher = service.ServiceLauncher()
for name, server in servers:
try:
server.launch_with(launcher)
except socket.error:
logging.exception(_('Failed to start the %(name)s server') % {
'name': name})
raise
# notify calling process we are ready to serve
systemd.notify_once()
for name, server in servers:
launcher.wait()
def _get_workers(worker_type_config_opt):
# Get the value from config, if the config value is None (not set), return
# the number of cpus with a minimum of 2.
worker_count = CONF.eventlet_server.get(worker_type_config_opt)
if not worker_count:
worker_count = max(2, processutils.get_worker_count())
return worker_count
def configure_threading():
monkeypatch_thread = not CONF.standard_threads
pydev_debug_url = utils.setup_remote_pydev_debug()
if pydev_debug_url:
# in order to work around errors caused by monkey patching we have to
# set the thread to False. An explanation is here:
# http://lists.openstack.org/pipermail/openstack-dev/2012-August/
# 000794.html
monkeypatch_thread = False
environment.use_eventlet(monkeypatch_thread)
def run(possible_topdir):
dev_conf = os.path.join(possible_topdir,
'etc',
'keystone.conf')
config_files = None
if os.path.exists(dev_conf):
config_files = [dev_conf]
common.configure(
version=pbr.version.VersionInfo('keystone').version_string(),
config_files=config_files,
pre_setup_logging_fn=configure_threading)
paste_config = config.find_paste_config()
def create_servers():
public_worker_count = _get_workers('public_workers')
servers = []
servers.append(create_server(paste_config,
'main',
CONF.eventlet_server.public_bind_host,
CONF.eventlet_server.public_port,
public_worker_count))
return servers
_unused, servers = common.setup_backends(
startup_application_fn=create_servers)
serve(*servers)<|fim▁end|> | |
<|file_name|>jai_texture_object.cpp<|end_file_name|><|fim▁begin|>#include "jai_texture_object.h"
#include "jai_shader_define.h"
namespace jai { namespace drawable {
TextureObject::TextureObject()
{
mColorEffect.setIdentity();
}
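// Binds the active shader and uploads the vertex/texture-coordinate attributes
// plus the color, effect and transform uniforms before this object is drawn.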
void TextureObject::_prepareDraw(int sample, Mat4 const & transform)
{
using namespace jai::ogl;
auto sct = mShader ? mShader : ShaderCoordSample::GetNoraml();
sct->use();
unsigned char * buffer = (unsigned char *)mVertexts.data();
draw::SetVertexAttribData(sct->mACoord, 4, EDataType::Float, false, sizeof(Vertext), buffer + JumpPos);
draw::SetVertexAttribData(sct->mASmpCoord, 2, EDataType::Float, false, sizeof(Vertext), buffer + JumpTV);
if(sct->hasEffect()) draw::SetUniformMat(sct->mUEffect, mColorEffect);
<|fim▁hole|> draw::SetUniformFloat(sct->mUColor, mColor.red, mColor.green, mColor.blue, mColor.alpha);
draw::SetUniformMat(sct->mUTransform, transform);
draw::EnableVertexAttrib(sct->mACoord);
draw::EnableVertexAttrib(sct->mASmpCoord);
}
}}<|fim▁end|> | draw::SetUniformInt(sct->mUSmp, sample); |
<|file_name|>ground_station_base.py<|end_file_name|><|fim▁begin|>import os
from optparse import OptionParser
import io
import time
import random
import thread
import sys
from smtp_stuff import sendMail
from imap_stuff import checkMessages
import datetime
import string
import array
from time import gmtime, strftime
from socket import *
user = ''
recipient = ''
incoming_server = ''
outgoing_server = ''
password = ''
imei = 0
aprs_server = 'second.aprs.net'
aprs_port = 20157
aprs_password = ''
aprs_callsign = ''
aprs_address = '>APRS,TCPIP*:'
aprs_is_enabled = False
# comment length is supposed to be 0 to 43 char.
email_enabled = False
ip_enabled = False
http_post_enabled = False
COMMAND_GET_POS = 0
COMMAND_RELEASE = 1
COMMAND_SET_REPORT_INTERVAL = 2
def send_mo_email(msg):
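    """Write the mobile-originated payload to a msg.sbd attachment and e-mail it
    to the configured recipient, using the modem IMEI as the subject line."""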
    global user
    global recipient
global incoming_server
global outgoing_server
global password
global imei
#put together body
body = ''
#subject
subject = '%d' % imei
#message is included as an attachment
attachment = 'msg.sbd'
fd = open(attachment, 'wb')
fd.write(msg)
fd.close()
sendMail(subject, body, user, recipient, password, outgoing_server, attachment)
def log(string):
print string
#TODO logic for text logging
def parse_text_report_no_fix(report):
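    """Parse a report sent without a GPS fix ('<hdr>:int_temp,ext_temp') and log
    the two temperature readings."""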
report = report.split(":")
report = report[1]
report = report.split(",")
int_temp = float(report[0])
ext_temp = float(report[1])
if (int_temp > 100.0 or ext_temp > 100.0):
log("Probable invalid temperature readings.")
else:
log("Internal Temp:%.1f External Temp:%.1f" % ( int_temp, ext_temp))
def send_aprs_packet(position):
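    """Log in to the configured APRS-IS server and send one position packet built
    from position = [time, lat, lon, alt, kts, crs]. Altitude, speed and course
    below are currently hard-coded placeholder values."""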
global aprs_callsign
#print position
# create socket & connect to server
sSock = socket(AF_INET, SOCK_STREAM)
sSock.connect((aprs_server, aprs_port))
# logon
sSock.send('user ' + aprs_callsign + ' pass ' + aprs_password + ' vers "' + aprs_callsign + ' Python" \n')
#get position information and encode string
lat = position[1]
lon = position[2]
alt = 100
kts = 0.1
crs = 30
# deg mm.mm
    lat_str = "=%02d" % ( abs(lat) ) + "%05.2f" % ( ( abs(lat) % 1 ) * 60.0 )
if lat > 0:
lat_str += "N"
else:
lat_str += "S"
# deg mm.mm
lon_str = "%03d" % ( abs(lon) ) + "%05.2f" % ( ( abs(lon) % 1 ) * 60.0 )
    if lon < 0:
lon_str += "W"
else:
lon_str += "E"
#combine the two
position_str = lat_str + "/" + lon_str
#add course, speed, and altitude
comment = "O%03d/%03d/A=%06d" % (crs,kts,alt)
#comment = "-HELP ME"
#print aprs_callsign + aprs_address + position_str + comment
sSock.send(aprs_callsign + aprs_address + position_str + comment +'\n')
print("Packet sent to APRS: " + time.ctime() )
# close socket -- must be closed to avoidbuffer overflow
sSock.shutdown(0)
sSock.close()
def update_position(position):
if aprs_is_enabled:
send_aprs_packet(position)
def parse_text_report(report):
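    """Parse a full position report ('<hdr>:time,lat,lon,alt,kts,crs,int_temp,ext_temp'),
    log the temperatures and forward the fix to update_position()."""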
report = report.split(":")
report = report[1]
report = report.split(",")
time_str = report[0]
lat = float(report[1])
lon = float(report[2])
alt = float(report[3])
kts = float(report[4])
crs = float(report[5])
position = [time_str,lat,lon,alt,kts,crs]
int_temp = float(report[6])
ext_temp = float(report[7])
if (int_temp > 100.0 or ext_temp > 100.0):
log("Probable invalid temperature readings.")
else:
log("Internal Temp:%.1f External Temp:%.1f" % ( int_temp, ext_temp))
print "Report - Lat:",lat,"Lon:",lon,"Alt(ft):",alt,"Speed(kts):",kts,"Course(deg):",crs
update_position(position)
MSG_TEXT_REPORT = 'U'
MSG_TEXT_REPORT_NO_FIX = 'F'
def parse_incoming(msg):
#TODO: My gawd, this is ugly.. lets do something else?
if msg[0] == MSG_TEXT_REPORT_NO_FIX:
parse_text_report_no_fix(msg)
elif msg[0] == MSG_TEXT_REPORT:
parse_text_report(msg)
def email_check_task(name):
#check e-mail for messages
while(1):
#print 'Checking email'
msg,subject,received_msg,unread_msgs = checkMessages(incoming_server,user,password)
if received_msg:
print "Received Message", msg,"\r"
parse_incoming(msg)
time.sleep(1.0)
def SET_REPORT_INTERVAL(args):
print "Setting reporting interval"
if RepresentsInt(args[0]):
value = int(args[0])
byte1 = ( value >> 8 ) & 0xFF
byte0 = ( value ) & 0xFF
msg = array.array('B',[COMMAND_SET_REPORT_INTERVAL,byte1,byte0])
send_mo_email(msg)
else:
"First argument must be int seconds between 1 - 65532. 0 to disable automatic reporting."
def GET_POS(args):
print "Sending position request"
msg = array.array('B',[COMMAND_GET_POS,1,2,3]) #extra bytes for not good reason
send_mo_email(msg)
def RELEASE(args):
print "Sending ballast release command"
if RepresentsInt(args[0]):
msg = array.array('B',[COMMAND_RELEASE,int(args[0])])
print msg
send_mo_email(msg)
else:
"First argument must be int"
def RepresentsInt(s):
try:
int(s)
return True
except ValueError:
return False
def process_cmd(cmd_str):
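    """Dispatch a console command: the first token (upper-cased) must name a
    handler defined in this module (GET_POS, RELEASE, SET_REPORT_INTERVAL);
    the remaining tokens are passed to it as arguments."""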
#split up the string by space
cmd_args = cmd_str.split(' ')
#caps on CLI input
cmd_args[0] = cmd_args[0].upper()
if(len(cmd_args) > 1):
args = cmd_args[1:]
else:
args = []
possibles = globals().copy()
possibles.update(locals())
method = possibles.get(cmd_args[0])
if not method:
print("Method %s not implemented" % cmd_args[0])
else:
method(args)
def main():
global user
global recipient
global incoming_server<|fim▁hole|> global password
global email_enabled
global ip_enabled
global http_post_enabled
global aprs_server
global aprs_port
global aprs_password
global aprs_callsign
global aprs_is_enabled
parser = OptionParser()
parser.add_option("-p", "--passwd", dest="passwd", action="store", help="Password", metavar="PASSWD")
parser.add_option("-u", "--user", dest="user", action="store", help="E-mail account username", metavar="USER")
parser.add_option("-r", "--recipient", dest="recipient", action="store", help="Destination e-mail address.", metavar="USER")
parser.add_option("-i", "--in_srv", dest="in_srv", action="store", help="Incoming e-mail server url", metavar="IN_SRV")
parser.add_option("-o", "--out_srv", dest="out_srv", action="store", help="Outoging e-mail server", metavar="OUT_SRV")
parser.add_option("-m", "--mode", dest="mode", action="store", help="Mode: EMAIL,HTTP_POST,IP,NONE", default="NONE", metavar="MODE")
parser.add_option("-I", "--imei", dest="imei",action="store",help="IMEI of target modem.",metavar="IMEI")
parser.add_option("-A", "--aprs-server",dest="aprs_server",action="store",help="APRS server",metavar="APRS_SERVER")
parser.add_option("-a", "--aprs-port",dest="aprs_port",action="store",help="APRS port",metavar="APRS_PORT")
parser.add_option("-s", "--aprs-password",dest="aprs_password",action="store",help="APRS password",metavar="APRS_PASSWORD")
parser.add_option("-c", "--aprs-callsign",dest="aprs_callsign",action="store",help="APRS Callsign",metavar="APRS_CALLSIGN")
(options, args) = parser.parse_args()
if options.aprs_server:
aprs_server = options.aprs_server
if options.aprs_port:
aprs_port = options.aprs_port
if options.aprs_password:
aprs_password = options.aprs_password
aprs_is_enabled = True
if options.aprs_callsign:
aprs_callsign = options.aprs_callsign
#check for valid arguments
if options.mode == "EMAIL":
if options.passwd is None or options.user is None or options.recipient is None or options.in_srv is None or options.out_srv is None:
print 'If you want to use e-mail, you must specify in/out servers, user, password, and recipient address.'
sys.exit()
else:
email_enabled = True
elif options.mode == "HTTP_POST":
print 'Not implemented yet'
sys.exit()
elif options.mode == "IP":
print 'Not implemented yet'
sys.exit()
else:
print "No valid mode specified"
sys.exit()
user = options.user
recipient = options.recipient
incoming_server = options.in_srv
outgoing_server = options.out_srv
password = options.passwd
imei = options.imei
#spawn task to monitor email for incoming messages
thread.start_new_thread ( email_check_task, ( "Thread-1" , ) )
rx_buffer = ''
while(1):
"Enter 'x' to exit"
cmd_str = raw_input("# ")
if cmd_str == 'x':
break
if not cmd_str == '':
process_cmd(cmd_str)
print "Exiting application."
if __name__ == '__main__':
main()<|fim▁end|> | global outgoing_server |
<|file_name|>JQueryDroppable.java<|end_file_name|><|fim▁begin|>/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0, (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<|fim▁hole|>
import com.tle.common.i18n.CurrentLocale;
import com.tle.core.javascript.JavascriptModule;
import com.tle.web.sections.jquery.JQueryLibraryInclude;
import com.tle.web.sections.render.PreRenderable;
@SuppressWarnings("nls")
public class JQueryDroppable implements JavascriptModule {
private static final long serialVersionUID = 1L;
public static final PreRenderable PRERENDER =
new JQueryLibraryInclude(
"jquery.ui.droppable.js",
JQueryUICore.PRERENDER,
JQueryMouse.PRERENDER,
JQueryUIWidget.PRERENDER,
JQueryDraggable.PRERENDER)
.hasMin();
@Override
public String getDisplayName() {
return CurrentLocale.get("com.tle.web.sections.jquery.modules.droppable.name");
}
@Override
public String getId() {
return "droppable";
}
@Override
public Object getPreRenderer() {
return PRERENDER;
}
}<|fim▁end|> | package com.tle.web.sections.jquery.libraries; |
<|file_name|>compile.py<|end_file_name|><|fim▁begin|>import logging as log
import sys
import getopt
import os
import subprocess<|fim▁hole|> print "\n==================================================\n"
returncode = 0
if platform == "windows":
print "Running: vcvarsall.bat x86_amd64 && " + workspace + "\ProjectK\NDP\clr\src\pal\\tools\gen-buildsys-win.bat " + workspace + "\ProjectK\NDP\clr"
print "\n==================================================\n"
sys.stdout.flush()
returncode = subprocess.call(["vcvarsall.bat", "x86_amd64", "&&", workspace + "\ProjectK\NDP\clr\src\pal\\tools\gen-buildsys-win.bat", workspace + "\ProjectK\NDP\clr"])
elif platform == "linux":
print "Running: " + workspace + "/ProjectK/NDP/clr/src/pal/tools/gen-buildsys-clang.sh " + workspace + "/ProjectK/NDP/clr DEBUG"
print "\n==================================================\n"
sys.stdout.flush()
returncode = subprocess.call(workspace + "/ProjectK/NDP/clr/src/pal/tools/gen-buildsys-clang.sh " + workspace + "/ProjectK/NDP/clr " + target, shell=True)
if returncode != 0:
print "ERROR: cmake failed with exit code " + str(returncode)
return returncode
def RunBuild(target, platform, arch):
if platform == "windows":
return RunMsBuild(target, arch)
elif platform == "linux":
return RunMake()
def RunMsBuild(target, arch):
# run MsBuild
print "\n==================================================\n"
print "Running: vcvarsall.bat x86_amd64 && msbuild CoreCLR.sln /p:Configuration=" + target + " /p:Platform=" + arch
print "\n==================================================\n"
sys.stdout.flush()
returncode = subprocess.call(["vcvarsall.bat","x86_amd64","&&","msbuild","CoreCLR.sln","/p:Configuration=" + target,"/p:Platform=" + arch])
if returncode != 0:
print "ERROR: vcvarsall.bat failed with exit code " + str(returncode)
return returncode
def RunMake():
print "\n==================================================\n"
print "Running: make"
print "\n==================================================\n"
sys.stdout.flush()
returncode = subprocess.call(["make"])
if returncode != 0:
print "ERROR: make failed with exit code " + str(returncode)
return returncode
def Compile(workspace, target, platform, arch):
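    """Generate build files with CMake, then run the platform build; returns a
    non-zero code if either step fails."""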
returncode = RunCMake(workspace, target, platform)
if returncode != 0:
return returncode
returncode += RunBuild(target, platform, arch)
if returncode != 0:
return returncode
return returncode<|fim▁end|> | import shutil
def RunCMake(workspace, target, platform):
# run CMake |
<|file_name|>transifex.py<|end_file_name|><|fim▁begin|># Copyright (c) 2017, Matt Layman
from ConfigParser import ConfigParser, NoOptionError, NoSectionError
import os
import sys
import requests
API_URL = 'https://www.transifex.com/api/2'
LANGUAGES = [
'ar',
'de',
'el',
'es',
'fr',
'it',
'nl',
'pt_BR',
]
def fetch_po_for(language, username, password):
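    """Download the translated .po file for one language from Transifex and write
    it under handroll/locale/<language>/LC_MESSAGES/handroll.po."""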
print('Downloading po file for {0} ...'.format(language))
po_api = '/project/handroll/resource/handrollpot/translation/{0}/'.format(<|fim▁hole|> r = requests.get(po_url, auth=(username, password), params=params)
if r.status_code == 200:
r.encoding = 'utf-8'
output_file = os.path.join(
here, 'handroll', 'locale', language, 'LC_MESSAGES', 'handroll.po')
with open(output_file, 'wb') as out:
out.write(r.text.encode('utf-8'))
else:
print('Something went wrong fetching the {0} po file.'.format(
language))
def get_auth_from_conf(here):
transifex_conf = os.path.join(here, '.transifex.ini')
config = ConfigParser()
try:
with open(transifex_conf, 'r') as conf:
config.readfp(conf)
except IOError as ex:
sys.exit('Failed to load authentication configuration file.\n'
'{0}'.format(ex))
try:
username = config.get('auth', 'username')
password = config.get('auth', 'password')
except (NoOptionError, NoSectionError) as ex:
sys.exit('Oops. Incomplete configuration file: {0}'.format(ex))
return username, password
if __name__ == '__main__':
here = os.path.abspath(os.path.dirname(__file__))
username, password = get_auth_from_conf(here)
for language in LANGUAGES:
fetch_po_for(language, username, password)<|fim▁end|> | language)
po_url = API_URL + po_api
params = {'file': '1'} |
<|file_name|>ZipCompress.java<|end_file_name|><|fim▁begin|>package org.javacore.io.zip;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.util.Enumeration;
import java.util.zip.Adler32;
import java.util.zip.CheckedInputStream;
import java.util.zip.CheckedOutputStream;<|fim▁hole|>import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;
/*
* Copyright [2015] [Jeff Lee]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author admin
* @since 2015-10-17 14:58:59
 * Saving multiple files into a single Zip archive.
*/
public class ZipCompress {
private static String filePath = "src" + File.separator +
"org" + File.separator +
"javacore" + File.separator +
"io" + File.separator;
private static String[] fileNames= new String[] {
filePath + "BufferedInputFileT.java",
filePath + "ChangeSystemOut.java"
};
public static void main(String[] args) throws IOException {
zipFiles(fileNames);
}
private static void zipFiles(String[] fileNames)
throws IOException {
        // Create the output stream for the zip file
FileOutputStream f = new FileOutputStream("test.zip");
        // Wrap the file output stream in a checksum stream using Adler32
CheckedOutputStream csum = new CheckedOutputStream(f,new Adler32());
        // Wrap the checksum stream in a Zip output stream
ZipOutputStream zos = new ZipOutputStream(csum);
        // Wrap the Zip output stream in a buffered output stream
BufferedOutputStream out = new BufferedOutputStream(zos);
        // Set the zip file comment
        zos.setComment("test java zip stream");
for (String file : fileNames) {
System.out.println("写入文件: " + file);
            // Open a character input stream for the file
BufferedReader in =
new BufferedReader(new FileReader(file));
            // Begin a new zip entry and position the stream at the start of its data
zos.putNextEntry(new ZipEntry(file));
int c;
while ((c = in.read()) > 0)
out.write(c);
in.close();
            // Flush the buffered data into the Zip output stream
out.flush();
}
        // Close the stream once all files have been written
out.close();
        // Print the data checksum
System.out.println("数据校验和: " + csum.getChecksum().getValue());
System.out.println("读取zip文件");
        // Open an input stream on test.zip
FileInputStream fi = new FileInputStream("test.zip");
        // Wrap the file input stream in a checksum stream
CheckedInputStream csumi = new CheckedInputStream(fi,new Adler32());
        // Wrap the checksum stream in a Zip input stream
ZipInputStream in2 = new ZipInputStream(csumi);
        // Wrap the Zip input stream in a buffered input stream
BufferedInputStream bis = new BufferedInputStream(in2);
        // Entry holder used while iterating the archive
ZipEntry zipEntry;
while ((zipEntry = in2.getNextEntry()) != null) {
System.out.println("读取文件: " + zipEntry);
int x;
while ((x = bis.read()) > 0)
System.out.write(x);
}
if (fileNames.length == 1)
System.out.println("数据校验和: " + csumi.getChecksum().getValue());
bis.close();
        // Open the archive as a ZipFile
ZipFile zf = new ZipFile("test.zip");
        // Enumerate the entries in the archive
Enumeration e = zf.entries();
while (e.hasMoreElements()) {
            // Get the next entry from the enumeration
ZipEntry ze2 = (ZipEntry) e.nextElement();
System.out.println("文件: " + ze2);
}
}
}<|fim▁end|> | import java.util.zip.ZipEntry;
import java.util.zip.ZipFile; |
<|file_name|>upload.min.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1
oid sha256:b519d8c53881da3ce32f92a5c9f583c67d563ea9d7c5cacd3bf2503726443654<|fim▁hole|><|fim▁end|> | size 3420 |
<|file_name|>async_client.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core.client_options import ClientOptions
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore
from google.api_core import operation # type: ignore
from google.api_core import operation_async # type: ignore
from google.cloud.tpu_v2alpha1.services.tpu import pagers
from google.cloud.tpu_v2alpha1.types import cloud_tpu
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
from .transports.base import TpuTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import TpuGrpcAsyncIOTransport
from .client import TpuClient
class TpuAsyncClient:
"""Manages TPU nodes and other resources
TPU API v2alpha1
"""
_client: TpuClient
DEFAULT_ENDPOINT = TpuClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = TpuClient.DEFAULT_MTLS_ENDPOINT
accelerator_type_path = staticmethod(TpuClient.accelerator_type_path)
parse_accelerator_type_path = staticmethod(TpuClient.parse_accelerator_type_path)
node_path = staticmethod(TpuClient.node_path)
parse_node_path = staticmethod(TpuClient.parse_node_path)
runtime_version_path = staticmethod(TpuClient.runtime_version_path)
parse_runtime_version_path = staticmethod(TpuClient.parse_runtime_version_path)
common_billing_account_path = staticmethod(TpuClient.common_billing_account_path)
parse_common_billing_account_path = staticmethod(
TpuClient.parse_common_billing_account_path
)
common_folder_path = staticmethod(TpuClient.common_folder_path)
parse_common_folder_path = staticmethod(TpuClient.parse_common_folder_path)
common_organization_path = staticmethod(TpuClient.common_organization_path)
parse_common_organization_path = staticmethod(
TpuClient.parse_common_organization_path
)
common_project_path = staticmethod(TpuClient.common_project_path)
parse_common_project_path = staticmethod(TpuClient.parse_common_project_path)
common_location_path = staticmethod(TpuClient.common_location_path)
parse_common_location_path = staticmethod(TpuClient.parse_common_location_path)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
TpuAsyncClient: The constructed client.
"""
return TpuClient.from_service_account_info.__func__(TpuAsyncClient, info, *args, **kwargs) # type: ignore
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
TpuAsyncClient: The constructed client.
"""
return TpuClient.from_service_account_file.__func__(TpuAsyncClient, filename, *args, **kwargs) # type: ignore
from_service_account_json = from_service_account_file
@classmethod
def get_mtls_endpoint_and_cert_source(
cls, client_options: Optional[ClientOptions] = None
):
"""Return the API endpoint and client cert source for mutual TLS.
The client cert source is determined in the following order:
(1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
client cert source is None.
(2) if `client_options.client_cert_source` is provided, use the provided one; if the
default client cert source exists, use the default one; otherwise the client cert
source is None.
The API endpoint is determined in the following order:
        (1) if `client_options.api_endpoint` is provided, use the provided one.
(2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
default mTLS endpoint; if the environment variabel is "never", use the default API
endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
use the default API endpoint.
More details can be found at https://google.aip.dev/auth/4114.
Args:
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. Only the `api_endpoint` and `client_cert_source` properties may be used
in this method.
Returns:
Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
client cert source to use.
Raises:
google.auth.exceptions.MutualTLSChannelError: If any errors happen.
"""
return TpuClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore
@property
def transport(self) -> TpuTransport:
"""Returns the transport used by the client instance.
Returns:
TpuTransport: The transport used by the client instance.
"""
return self._client.transport
get_transport_class = functools.partial(
type(TpuClient).get_transport_class, type(TpuClient)
)
def __init__(
self,
*,
credentials: ga_credentials.Credentials = None,
transport: Union[str, TpuTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the tpu client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.TpuTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._client = TpuClient(
credentials=credentials,
transport=transport,
client_options=client_options,
client_info=client_info,
)
async def list_nodes(
self,
request: Union[cloud_tpu.ListNodesRequest, dict] = None,
*,
parent: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListNodesAsyncPager:
r"""Lists nodes.
.. code-block:: python
from google.cloud import tpu_v2alpha1
def sample_list_nodes():
# Create a client
client = tpu_v2alpha1.TpuClient()
# Initialize request argument(s)
request = tpu_v2alpha1.ListNodesRequest(
parent="parent_value",
)
# Make the request
page_result = client.list_nodes(request=request)
# Handle the response
for response in page_result:
print(response)
Args:
request (Union[google.cloud.tpu_v2alpha1.types.ListNodesRequest, dict]):
The request object. Request for
[ListNodes][google.cloud.tpu.v2alpha1.Tpu.ListNodes].
parent (:class:`str`):
Required. The parent resource name.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.tpu_v2alpha1.services.tpu.pagers.ListNodesAsyncPager:
Response for
[ListNodes][google.cloud.tpu.v2alpha1.Tpu.ListNodes].
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = cloud_tpu.ListNodesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_nodes,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListNodesAsyncPager(
method=rpc, request=request, response=response, metadata=metadata,
)
# Done; return the response.
return response
async def get_node(
self,
request: Union[cloud_tpu.GetNodeRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> cloud_tpu.Node:
r"""Gets the details of a node.
.. code-block:: python
from google.cloud import tpu_v2alpha1
def sample_get_node():
# Create a client
client = tpu_v2alpha1.TpuClient()
# Initialize request argument(s)
request = tpu_v2alpha1.GetNodeRequest(
name="name_value",
)
# Make the request
response = client.get_node(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.tpu_v2alpha1.types.GetNodeRequest, dict]):
The request object. Request for
[GetNode][google.cloud.tpu.v2alpha1.Tpu.GetNode].
name (:class:`str`):
Required. The resource name.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.tpu_v2alpha1.types.Node:
A TPU instance.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = cloud_tpu.GetNodeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_node,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def create_node(
self,
request: Union[cloud_tpu.CreateNodeRequest, dict] = None,
*,
parent: str = None,
node: cloud_tpu.Node = None,
node_id: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Creates a node.
.. code-block:: python
from google.cloud import tpu_v2alpha1
def sample_create_node():
# Create a client
client = tpu_v2alpha1.TpuClient()
# Initialize request argument(s)
node = tpu_v2alpha1.Node()
node.accelerator_type = "accelerator_type_value"
node.runtime_version = "runtime_version_value"
request = tpu_v2alpha1.CreateNodeRequest(
parent="parent_value",
node=node,
)
# Make the request
operation = client.create_node(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.tpu_v2alpha1.types.CreateNodeRequest, dict]):
The request object. Request for
[CreateNode][google.cloud.tpu.v2alpha1.Tpu.CreateNode].
parent (:class:`str`):
Required. The parent resource name.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
node (:class:`google.cloud.tpu_v2alpha1.types.Node`):
Required. The node.
This corresponds to the ``node`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
node_id (:class:`str`):
The unqualified resource name.
This corresponds to the ``node_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be
:class:`google.cloud.tpu_v2alpha1.types.Node` A TPU
instance.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, node, node_id])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."<|fim▁hole|>
request = cloud_tpu.CreateNodeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if node is not None:
request.node = node
if node_id is not None:
request.node_id = node_id
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_node,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
cloud_tpu.Node,
metadata_type=cloud_tpu.OperationMetadata,
)
# Done; return the response.
return response
async def delete_node(
self,
request: Union[cloud_tpu.DeleteNodeRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Deletes a node.
.. code-block:: python
from google.cloud import tpu_v2alpha1
def sample_delete_node():
# Create a client
client = tpu_v2alpha1.TpuClient()
# Initialize request argument(s)
request = tpu_v2alpha1.DeleteNodeRequest(
name="name_value",
)
# Make the request
operation = client.delete_node(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.tpu_v2alpha1.types.DeleteNodeRequest, dict]):
The request object. Request for
[DeleteNode][google.cloud.tpu.v2alpha1.Tpu.DeleteNode].
name (:class:`str`):
Required. The resource name.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be
:class:`google.cloud.tpu_v2alpha1.types.Node` A TPU
instance.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = cloud_tpu.DeleteNodeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_node,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
cloud_tpu.Node,
metadata_type=cloud_tpu.OperationMetadata,
)
# Done; return the response.
return response
async def stop_node(
self,
request: Union[cloud_tpu.StopNodeRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Stops a node. This operation is only available with
single TPU nodes.
.. code-block:: python
from google.cloud import tpu_v2alpha1
def sample_stop_node():
# Create a client
client = tpu_v2alpha1.TpuClient()
# Initialize request argument(s)
request = tpu_v2alpha1.StopNodeRequest(
)
# Make the request
operation = client.stop_node(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.tpu_v2alpha1.types.StopNodeRequest, dict]):
The request object. Request for
[StopNode][google.cloud.tpu.v2alpha1.Tpu.StopNode].
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be
:class:`google.cloud.tpu_v2alpha1.types.Node` A TPU
instance.
"""
# Create or coerce a protobuf request object.
request = cloud_tpu.StopNodeRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.stop_node,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
cloud_tpu.Node,
metadata_type=cloud_tpu.OperationMetadata,
)
# Done; return the response.
return response
async def start_node(
self,
request: Union[cloud_tpu.StartNodeRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Starts a node.
.. code-block:: python
from google.cloud import tpu_v2alpha1
def sample_start_node():
# Create a client
client = tpu_v2alpha1.TpuClient()
# Initialize request argument(s)
request = tpu_v2alpha1.StartNodeRequest(
)
# Make the request
operation = client.start_node(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.tpu_v2alpha1.types.StartNodeRequest, dict]):
The request object. Request for
[StartNode][google.cloud.tpu.v2alpha1.Tpu.StartNode].
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be
:class:`google.cloud.tpu_v2alpha1.types.Node` A TPU
instance.
"""
# Create or coerce a protobuf request object.
request = cloud_tpu.StartNodeRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.start_node,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
cloud_tpu.Node,
metadata_type=cloud_tpu.OperationMetadata,
)
# Done; return the response.
return response
async def update_node(
self,
request: Union[cloud_tpu.UpdateNodeRequest, dict] = None,
*,
node: cloud_tpu.Node = None,
update_mask: field_mask_pb2.FieldMask = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Updates the configurations of a node.
.. code-block:: python
from google.cloud import tpu_v2alpha1
def sample_update_node():
# Create a client
client = tpu_v2alpha1.TpuClient()
# Initialize request argument(s)
node = tpu_v2alpha1.Node()
node.accelerator_type = "accelerator_type_value"
node.runtime_version = "runtime_version_value"
request = tpu_v2alpha1.UpdateNodeRequest(
node=node,
)
# Make the request
operation = client.update_node(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.tpu_v2alpha1.types.UpdateNodeRequest, dict]):
The request object. Request for
[UpdateNode][google.cloud.tpu.v2alpha1.Tpu.UpdateNode].
node (:class:`google.cloud.tpu_v2alpha1.types.Node`):
Required. The node. Only fields specified in update_mask
are updated.
This corresponds to the ``node`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
Required. Mask of fields from [Node][Tpu.Node] to
update. Supported fields: None.
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be
:class:`google.cloud.tpu_v2alpha1.types.Node` A TPU
instance.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([node, update_mask])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = cloud_tpu.UpdateNodeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if node is not None:
request.node = node
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_node,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("node.name", request.node.name),)
),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
cloud_tpu.Node,
metadata_type=cloud_tpu.OperationMetadata,
)
# Done; return the response.
return response
async def generate_service_identity(
self,
request: Union[cloud_tpu.GenerateServiceIdentityRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> cloud_tpu.GenerateServiceIdentityResponse:
r"""Generates the Cloud TPU service identity for the
project.
.. code-block:: python
from google.cloud import tpu_v2alpha1
def sample_generate_service_identity():
# Create a client
client = tpu_v2alpha1.TpuClient()
# Initialize request argument(s)
request = tpu_v2alpha1.GenerateServiceIdentityRequest(
parent="parent_value",
)
# Make the request
response = client.generate_service_identity(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.tpu_v2alpha1.types.GenerateServiceIdentityRequest, dict]):
The request object. Request for
[GenerateServiceIdentity][google.cloud.tpu.v2alpha1.Tpu.GenerateServiceIdentity].
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.tpu_v2alpha1.types.GenerateServiceIdentityResponse:
Response for
[GenerateServiceIdentity][google.cloud.tpu.v2alpha1.Tpu.GenerateServiceIdentity].
"""
# Create or coerce a protobuf request object.
request = cloud_tpu.GenerateServiceIdentityRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.generate_service_identity,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def list_accelerator_types(
self,
request: Union[cloud_tpu.ListAcceleratorTypesRequest, dict] = None,
*,
parent: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListAcceleratorTypesAsyncPager:
r"""Lists accelerator types supported by this API.
.. code-block:: python
from google.cloud import tpu_v2alpha1
def sample_list_accelerator_types():
# Create a client
client = tpu_v2alpha1.TpuClient()
# Initialize request argument(s)
request = tpu_v2alpha1.ListAcceleratorTypesRequest(
parent="parent_value",
)
# Make the request
page_result = client.list_accelerator_types(request=request)
# Handle the response
for response in page_result:
print(response)
Args:
request (Union[google.cloud.tpu_v2alpha1.types.ListAcceleratorTypesRequest, dict]):
The request object. Request for
[ListAcceleratorTypes][google.cloud.tpu.v2alpha1.Tpu.ListAcceleratorTypes].
parent (:class:`str`):
Required. The parent resource name.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.tpu_v2alpha1.services.tpu.pagers.ListAcceleratorTypesAsyncPager:
Response for
[ListAcceleratorTypes][google.cloud.tpu.v2alpha1.Tpu.ListAcceleratorTypes].
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = cloud_tpu.ListAcceleratorTypesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_accelerator_types,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListAcceleratorTypesAsyncPager(
method=rpc, request=request, response=response, metadata=metadata,
)
# Done; return the response.
return response
async def get_accelerator_type(
self,
request: Union[cloud_tpu.GetAcceleratorTypeRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> cloud_tpu.AcceleratorType:
r"""Gets AcceleratorType.
.. code-block:: python
from google.cloud import tpu_v2alpha1
def sample_get_accelerator_type():
# Create a client
client = tpu_v2alpha1.TpuClient()
# Initialize request argument(s)
request = tpu_v2alpha1.GetAcceleratorTypeRequest(
name="name_value",
)
# Make the request
response = client.get_accelerator_type(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.tpu_v2alpha1.types.GetAcceleratorTypeRequest, dict]):
The request object. Request for
[GetAcceleratorType][google.cloud.tpu.v2alpha1.Tpu.GetAcceleratorType].
name (:class:`str`):
Required. The resource name.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.tpu_v2alpha1.types.AcceleratorType:
A accelerator type that a Node can be
configured with.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = cloud_tpu.GetAcceleratorTypeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_accelerator_type,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def list_runtime_versions(
self,
request: Union[cloud_tpu.ListRuntimeVersionsRequest, dict] = None,
*,
parent: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListRuntimeVersionsAsyncPager:
r"""Lists runtime versions supported by this API.
.. code-block:: python
from google.cloud import tpu_v2alpha1
def sample_list_runtime_versions():
# Create a client
client = tpu_v2alpha1.TpuClient()
# Initialize request argument(s)
request = tpu_v2alpha1.ListRuntimeVersionsRequest(
parent="parent_value",
)
# Make the request
page_result = client.list_runtime_versions(request=request)
# Handle the response
for response in page_result:
print(response)
Args:
request (Union[google.cloud.tpu_v2alpha1.types.ListRuntimeVersionsRequest, dict]):
The request object. Request for
[ListRuntimeVersions][google.cloud.tpu.v2alpha1.Tpu.ListRuntimeVersions].
parent (:class:`str`):
Required. The parent resource name.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.tpu_v2alpha1.services.tpu.pagers.ListRuntimeVersionsAsyncPager:
Response for
[ListRuntimeVersions][google.cloud.tpu.v2alpha1.Tpu.ListRuntimeVersions].
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = cloud_tpu.ListRuntimeVersionsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_runtime_versions,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListRuntimeVersionsAsyncPager(
method=rpc, request=request, response=response, metadata=metadata,
)
# Done; return the response.
return response
async def get_runtime_version(
self,
request: Union[cloud_tpu.GetRuntimeVersionRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> cloud_tpu.RuntimeVersion:
r"""Gets a runtime version.
.. code-block:: python
from google.cloud import tpu_v2alpha1
def sample_get_runtime_version():
# Create a client
client = tpu_v2alpha1.TpuClient()
# Initialize request argument(s)
request = tpu_v2alpha1.GetRuntimeVersionRequest(
name="name_value",
)
# Make the request
response = client.get_runtime_version(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.tpu_v2alpha1.types.GetRuntimeVersionRequest, dict]):
The request object. Request for
[GetRuntimeVersion][google.cloud.tpu.v2alpha1.Tpu.GetRuntimeVersion].
name (:class:`str`):
Required. The resource name.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.tpu_v2alpha1.types.RuntimeVersion:
A runtime version that a Node can be
configured with.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = cloud_tpu.GetRuntimeVersionRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_runtime_version,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def get_guest_attributes(
self,
request: Union[cloud_tpu.GetGuestAttributesRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> cloud_tpu.GetGuestAttributesResponse:
r"""Retrieves the guest attributes for the node.
.. code-block:: python
from google.cloud import tpu_v2alpha1
def sample_get_guest_attributes():
# Create a client
client = tpu_v2alpha1.TpuClient()
# Initialize request argument(s)
request = tpu_v2alpha1.GetGuestAttributesRequest(
name="name_value",
)
# Make the request
response = client.get_guest_attributes(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.tpu_v2alpha1.types.GetGuestAttributesRequest, dict]):
The request object. Request for
[GetGuestAttributes][google.cloud.tpu.v2alpha1.Tpu.GetGuestAttributes].
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.tpu_v2alpha1.types.GetGuestAttributesResponse:
Response for
[GetGuestAttributes][google.cloud.tpu.v2alpha1.Tpu.GetGuestAttributes].
"""
# Create or coerce a protobuf request object.
request = cloud_tpu.GetGuestAttributesRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_guest_attributes,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc, tb):
await self.transport.close()
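# Illustrative usage of the async context manager above (a sketch; the resource
# name is made up):
#
#   async with TpuAsyncClient() as client:
#       node = await client.get_node(name="projects/p/locations/us-central1-a/nodes/my-tpu")
#
# Leaving the block calls __aexit__, which closes the underlying transport.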
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution("google-cloud-tpu",).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("TpuAsyncClient",)<|fim▁end|> | ) |
<|file_name|>account_group.py<|end_file_name|><|fim▁begin|># Copyright 2021 Alfredo de la Fuente - AvanzOSC
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from odoo import models, fields, api
class AccountGroup(models.Model):
_inherit = 'account.group'
length_account = fields.Integer(
string='Length account', compute='_compute_length_account',<|fim▁hole|>
@api.depends('code_prefix_start')
def _compute_length_account(self):
for group in self:
group.length_account = len(group.code_prefix_start)
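# _find_account_group_headquarter() below walks up the parent_id chain until the
# root account group is reached and returns that root's without_headquarter flag.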
def _find_account_group_headquarter(self):
found = False
group = self
while not found:
if not group.parent_id:
found = True
without_headquarter_control = group.without_headquarter
else:
cond = [('id', '=', group.parent_id.id)]
group = self.env['account.group'].search(cond, limit=1)
return without_headquarter_control<|fim▁end|> | store=True)
without_headquarter = fields.Boolean(
string='Without headquarter in invoices and accounting entries',
default=True) |
<|file_name|>rows.go<|end_file_name|><|fim▁begin|>package core
import (
"database/sql"
"errors"
"reflect"
"sync"
)
type Rows struct {
*sql.Rows
Mapper IMapper
}
func (rs *Rows) ToMapString() ([]map[string]string, error) {
cols, err := rs.Columns()
if err != nil {
return nil, err
}
var results = make([]map[string]string, 0, 10)
for rs.Next() {
var record = make(map[string]string, len(cols))
err = rs.ScanMap(&record)
if err != nil {
return nil, err
}
results = append(results, record)
}
return results, nil
}
// scan data to a struct's pointer according field index
func (rs *Rows) ScanStructByIndex(dest ...interface{}) error {
if len(dest) == 0 {
return errors.New("at least one struct")
}
vvvs := make([]reflect.Value, len(dest))
for i, s := range dest {
vv := reflect.ValueOf(s)
if vv.Kind() != reflect.Ptr || vv.Elem().Kind() != reflect.Struct {
return errors.New("dest should be a struct's pointer")
}
vvvs[i] = vv.Elem()
}
cols, err := rs.Columns()
if err != nil {
return err
}
newDest := make([]interface{}, len(cols))
var i = 0
for _, vvv := range vvvs {
for j := 0; j < vvv.NumField(); j++ {
newDest[i] = vvv.Field(j).Addr().Interface()
i = i + 1
}
}
return rs.Rows.Scan(newDest...)
}
var (
fieldCache = make(map[reflect.Type]map[string]int)
fieldCacheMutex sync.RWMutex
)
func fieldByName(v reflect.Value, name string) reflect.Value {
t := v.Type()
fieldCacheMutex.RLock()
cache, ok := fieldCache[t]
fieldCacheMutex.RUnlock()
if !ok {
cache = make(map[string]int)
for i := 0; i < v.NumField(); i++ {
cache[t.Field(i).Name] = i
}
fieldCacheMutex.Lock()
fieldCache[t] = cache
fieldCacheMutex.Unlock()
}
if i, ok := cache[name]; ok {
return v.Field(i)
}
return reflect.Zero(t)
}
// scan data to a struct's pointer according field name
func (rs *Rows) ScanStructByName(dest interface{}) error {
vv := reflect.ValueOf(dest)
if vv.Kind() != reflect.Ptr || vv.Elem().Kind() != reflect.Struct {
return errors.New("dest should be a struct's pointer")
}
cols, err := rs.Columns()
if err != nil {
return err
}
newDest := make([]interface{}, len(cols))
var v EmptyScanner
for j, name := range cols {
f := fieldByName(vv.Elem(), rs.Mapper.Table2Obj(name))
if f.IsValid() {
newDest[j] = f.Addr().Interface()
} else {
newDest[j] = &v
}
}
return rs.Rows.Scan(newDest...)
}
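// Usage sketch for the scan helpers above (illustrative; the struct, mapper and
// query are assumptions, not part of this file):
//
//	rows := &Rows{Rows: sqlRows, Mapper: SnakeMapper{}}
//	var u struct {
//		Id   int64
//		Name string
//	}
//	for rows.Next() {
//		_ = rows.ScanStructByName(&u) // columns matched to fields via Mapper.Table2Obj
//	}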
type cacheStruct struct {
value reflect.Value
idx int
}
var (
reflectCache = make(map[reflect.Type]*cacheStruct)
reflectCacheMutex sync.RWMutex
)
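// ReflectNew returns the address of a fresh, addressable value of typ. To avoid
// one allocation per call it hands out slots from a cached slice of 200 values
// per type (reflectCache above), only growing a new batch once the current one
// is used up.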
func ReflectNew(typ reflect.Type) reflect.Value {
reflectCacheMutex.RLock()
cs, ok := reflectCache[typ]
reflectCacheMutex.RUnlock()
const newSize = 200
if !ok || cs.idx+1 > newSize-1 {
cs = &cacheStruct{reflect.MakeSlice(reflect.SliceOf(typ), newSize, newSize), 0}
reflectCacheMutex.Lock()
reflectCache[typ] = cs
reflectCacheMutex.Unlock()
} else {
reflectCacheMutex.Lock()
cs.idx = cs.idx + 1
reflectCacheMutex.Unlock()
}
return cs.value.Index(cs.idx).Addr()
}
// scan data to a slice's pointer, slice's length should equal to columns' number
func (rs *Rows) ScanSlice(dest interface{}) error {
vv := reflect.ValueOf(dest)
if vv.Kind() != reflect.Ptr || vv.Elem().Kind() != reflect.Slice {
return errors.New("dest should be a slice's pointer")
}
vvv := vv.Elem()
cols, err := rs.Columns()
if err != nil {
return err
}
newDest := make([]interface{}, len(cols))
for j := 0; j < len(cols); j++ {
if j >= vvv.Len() {
newDest[j] = reflect.New(vvv.Type().Elem()).Interface()
} else {
newDest[j] = vvv.Index(j).Addr().Interface()
}
}
err = rs.Rows.Scan(newDest...)
if err != nil {
return err
}
srcLen := vvv.Len()
for i := srcLen; i < len(cols); i++ {
vvv = reflect.Append(vvv, reflect.ValueOf(newDest[i]).Elem())
}
return nil
}
// scan data to a map's pointer
func (rs *Rows) ScanMap(dest interface{}) error {
vv := reflect.ValueOf(dest)
if vv.Kind() != reflect.Ptr || vv.Elem().Kind() != reflect.Map {
return errors.New("dest should be a map's pointer")
}
cols, err := rs.Columns()
if err != nil {
return err
}
newDest := make([]interface{}, len(cols))
vvv := vv.Elem()
for i := range cols {
newDest[i] = ReflectNew(vvv.Type().Elem()).Interface()
//v := reflect.New(vvv.Type().Elem())
//newDest[i] = v.Interface()
}
err = rs.Rows.Scan(newDest...)
if err != nil {
return err
}
for i, name := range cols {
vname := reflect.ValueOf(name)
vvv.SetMapIndex(vname, reflect.ValueOf(newDest[i]).Elem())
}
return nil<|fim▁hole|>
/*func (rs *Rows) ScanMap(dest interface{}) error {
vv := reflect.ValueOf(dest)
if vv.Kind() != reflect.Ptr || vv.Elem().Kind() != reflect.Map {
return errors.New("dest should be a map's pointer")
}
cols, err := rs.Columns()
if err != nil {
return err
}
newDest := make([]interface{}, len(cols))
err = rs.ScanSlice(newDest)
if err != nil {
return err
}
vvv := vv.Elem()
for i, name := range cols {
vname := reflect.ValueOf(name)
vvv.SetMapIndex(vname, reflect.ValueOf(newDest[i]).Elem())
}
return nil
}*/
type Row struct {
rows *Rows
// One of these two will be non-nil:
err error // deferred error for easy chaining
}
// ErrorRow return an error row
func ErrorRow(err error) *Row {
return &Row{
err: err,
}
}
// NewRow from rows
func NewRow(rows *Rows, err error) *Row {
return &Row{rows, err}
}
func (row *Row) Columns() ([]string, error) {
if row.err != nil {
return nil, row.err
}
return row.rows.Columns()
}
func (row *Row) Scan(dest ...interface{}) error {
if row.err != nil {
return row.err
}
defer row.rows.Close()
for _, dp := range dest {
if _, ok := dp.(*sql.RawBytes); ok {
return errors.New("sql: RawBytes isn't allowed on Row.Scan")
}
}
if !row.rows.Next() {
if err := row.rows.Err(); err != nil {
return err
}
return sql.ErrNoRows
}
err := row.rows.Scan(dest...)
if err != nil {
return err
}
// Make sure the query can be processed to completion with no errors.
return row.rows.Close()
}
func (row *Row) ScanStructByName(dest interface{}) error {
if row.err != nil {
return row.err
}
defer row.rows.Close()
if !row.rows.Next() {
if err := row.rows.Err(); err != nil {
return err
}
return sql.ErrNoRows
}
err := row.rows.ScanStructByName(dest)
if err != nil {
return err
}
// Make sure the query can be processed to completion with no errors.
return row.rows.Close()
}
func (row *Row) ScanStructByIndex(dest interface{}) error {
if row.err != nil {
return row.err
}
defer row.rows.Close()
if !row.rows.Next() {
if err := row.rows.Err(); err != nil {
return err
}
return sql.ErrNoRows
}
err := row.rows.ScanStructByIndex(dest)
if err != nil {
return err
}
// Make sure the query can be processed to completion with no errors.
return row.rows.Close()
}
// scan data to a slice's pointer, slice's length should equal to columns' number
func (row *Row) ScanSlice(dest interface{}) error {
if row.err != nil {
return row.err
}
defer row.rows.Close()
if !row.rows.Next() {
if err := row.rows.Err(); err != nil {
return err
}
return sql.ErrNoRows
}
err := row.rows.ScanSlice(dest)
if err != nil {
return err
}
// Make sure the query can be processed to completion with no errors.
return row.rows.Close()
}
// scan data to a map's pointer
func (row *Row) ScanMap(dest interface{}) error {
if row.err != nil {
return row.err
}
defer row.rows.Close()
if !row.rows.Next() {
if err := row.rows.Err(); err != nil {
return err
}
return sql.ErrNoRows
}
err := row.rows.ScanMap(dest)
if err != nil {
return err
}
// Make sure the query can be processed to completion with no errors.
return row.rows.Close()
}
func (row *Row) ToMapString() (map[string]string, error) {
cols, err := row.Columns()
if err != nil {
return nil, err
}
var record = make(map[string]string, len(cols))
err = row.ScanMap(&record)
if err != nil {
return nil, err
}
return record, nil
}<|fim▁end|> | } |
<|file_name|>message.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python<|fim▁hole|> Define some attributes and methods to form your message.
I suggest you don't alter this class. You are free to do so, of course. It's your own decision.
Though, I suggest you create your own message type and let it inherit from this class.
"""
pass<|fim▁end|> |
class Message(object):
"""
Base type of a message sent through the pipeline. |
<|file_name|>str_to_string.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use rustc::lint::{LateContext, LintPass, LintArray, LateLintPass, LintContext};
use rustc::middle::ty;
use rustc_front::hir;
declare_lint!(STR_TO_STRING, Deny,
"Warn when a String could use to_owned() instead of to_string()");
/// Prefer str.to_owned() over str.to_string()
///
/// The latter creates a `Formatter` and is 5x slower than the former
pub struct StrToStringPass;
impl LintPass for StrToStringPass {
fn get_lints(&self) -> LintArray {
lint_array!(STR_TO_STRING)
}
}
impl LateLintPass for StrToStringPass {
fn check_expr(&mut self, cx: &LateContext, expr: &hir::Expr) {<|fim▁hole|> if method.node.as_str() == "to_string"
&& is_str(cx, &*args[0]) => {
cx.span_lint(STR_TO_STRING, expr.span,
"str.to_owned() is more efficient than str.to_string(), please use it instead");
},
_ => ()
}
fn is_str(cx: &LateContext, expr: &hir::Expr) -> bool {
fn walk_ty<'t>(ty: ty::Ty<'t>) -> ty::Ty<'t> {
match ty.sty {
ty::TyRef(_, ref tm) | ty::TyRawPtr(ref tm) => walk_ty(tm.ty),
_ => ty
}
}
match walk_ty(cx.tcx.expr_ty(expr)).sty {
ty::TyStr => true,
_ => false
}
}
}
}<|fim▁end|> | match expr.node {
hir::ExprMethodCall(ref method, _, ref args) |
<|file_name|>l476xx--can_txmailbox.go<|end_file_name|><|fim▁begin|>// +build l476xx
// Peripheral: CAN_TxMailBox_Periph Controller Area Network TxMailBox.
// Instances:
// Registers:<|fim▁hole|>// 0x04 32 TDTR CAN mailbox data length control and time stamp register.
// 0x08 32 TDLR CAN mailbox data low register.
// 0x0C 32 TDHR CAN mailbox data high register.
// Import:
// stm32/o/l476xx/mmap
package can
// DO NOT EDIT THIS FILE. GENERATED BY stm32xgen.<|fim▁end|> | // 0x00 32 TIR CAN TX mailbox identifier register. |
<|file_name|>encoding_tests.py<|end_file_name|><|fim▁begin|># coding=utf-8
import sys, os.path
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib')))
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
import locale
import unittest
import test_lib as test
import sickbeard
from sickbeard.helpers import sanitizeFileName
from sickrage.helper.encoding import ek, ss, uu
class EncodingTests(test.SiCKRAGETestCase):
def test_encoding(self):
rootDir = 'C:\\Temp\\TV'
strings = [u'Les Enfants De La T\xe9l\xe9', u'RT\xc9 One']
sickbeard.SYS_ENCODING = None
try:
locale.setlocale(locale.LC_ALL, "")
sickbeard.SYS_ENCODING = locale.getpreferredencoding()
except (locale.Error, IOError):
pass
# For OSes that are poorly configured I'll just randomly force UTF-8
if not sickbeard.SYS_ENCODING or sickbeard.SYS_ENCODING in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
sickbeard.SYS_ENCODING = 'UTF-8'
for s in strings:
show_dir = ek(os.path.join, rootDir, sanitizeFileName(s))
self.assertIsInstance(show_dir, unicode)
if __name__ == "__main__":
print "=================="<|fim▁hole|> unittest.TextTestRunner(verbosity=2).run(suite)<|fim▁end|> | print "STARTING - ENCODING TESTS"
print "=================="
print "######################################################################"
suite = unittest.TestLoader().loadTestsFromTestCase(EncodingTests) |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/**
* Knook-mailer
* https://github.com/knook/knook.git
* Authors: Alexandre Lagrange-Cetto, Olivier Graziano, Olivier Marin
* Created on 15/04/2016.<|fim▁hole|> */
'use strict';
module.exports = {
Accounts: require('./src/Accounts'),
Email: require('./src/Email'),
Init: require('./src/Init'),
Prefs: require('./src/Prefs'),
Security: require('./src/Security')
};<|fim▁end|> | * version 0.1.0 |
<|file_name|>0753-Cracking the Safe.py<|end_file_name|><|fim▁begin|>class Solution:<|fim▁hole|> for i in range(k ** n):
prev = result[len(result) - n + 1:]
for j in range(k - 1, -1, -1):
curr = ''.join(prev) + str(j)
if curr not in visited:
visited.add(curr)
result.append(str(j))
break
return ''.join(result)<|fim▁end|> | def crackSafe(self, n: int, k: int) -> str:
result = ['0'] * n
visited = set([''.join(result)]) |
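# Note on the approach above: this is the greedy de Bruijn construction -- start
# from "0"*n and repeatedly append the largest digit whose trailing n-length
# window has not been seen yet, until all k**n combinations have been visited.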
<|file_name|>interface.rs<|end_file_name|><|fim▁begin|>use ast::*;
use span::Span;
#[derive(Debug, Eq, PartialEq, Clone, Serialize, Deserialize)]
pub struct Interface
{<|fim▁hole|> pub functions: Vec<FunctionSignature>,
pub typ: Type,
pub span: Span,
}
pub fn interface(name: String, functions: Vec<FunctionSignature>, span: Span) -> Interface
{
Interface{
name: name,
functions: functions,
typ: Type::Unknown,
span: span,
}
}
impl TreePrinter for Interface
{
fn print(&self, level: usize)
{
let p = prefix(level);
println!("{}interface {} ({})", p, self.name, self.span);
for func in &self.functions {
func.print(level + 1);
}
}
}<|fim▁end|> | pub name: String, |
<|file_name|>EVLA_pipe_fluxgains.py<|end_file_name|><|fim▁begin|>######################################################################
#
# Copyright (C) 2013
# Associated Universities, Inc. Washington DC, USA,
#
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Library General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Library General Public
# License for more details.
#
# You should have received a copy of the GNU Library General Public License
# along with this library; if not, write to the Free Software Foundation,
# Inc., 675 Massachusetts Ave, Cambridge, MA 02139, USA.
#
# Correspondence concerning VLA Pipelines should be addressed as follows:
# Please register and submit helpdesk tickets via: https://help.nrao.edu
# Postal address:
# National Radio Astronomy Observatory
# VLA Pipeline Support Office
# PO Box O
# Socorro, NM, USA
#
######################################################################
# MAKE GAIN TABLE FOR FLUX DENSITY BOOTSTRAPPING
# Make a gain table that includes gain and opacity corrections for final
# amp cal, for flux density bootstrapping
logprint ("Starting EVLA_pipe_fluxgains.py", logfileout='logs/fluxgains.log')
time_list=runtiming('fluxgains', 'start')
QA2_fluxgains='Pass'
#logprint ("Making fresh calibrators.ms", logfileout='logs/fluxgains.log')
#
#syscommand='rm -rf calibrators.ms'
#os.system(syscommand)
#
#default('split')
#vis=ms_active
#outputvis='calibrators.ms'
#datacolumn='corrected'
#field=''
#spw=''
#width=int(max(channels))
#antenna=''
#timebin='0s'
#timerange=''
#scan=calibrator_scan_select_string
#intent=''
#array=''
#uvrange=''
#correlation=''
#observation=''
#keepflags=False
#split()
logprint ("Setting models for standard primary calibrators", logfileout='logs/fluxgains.log')
tb.open('calibrators.ms')
positions = []
for ii in range(0,len(field_positions[0][0])):
positions.append([field_positions[0][0][ii], field_positions[1][0][ii]])
standard_source_names = [ '3C48', '3C138', '3C147', '3C286' ]
standard_source_fields = find_standards(positions)
ii=0
for fields in standard_source_fields:
for myfield in fields:
spws = field_spws[myfield]
for myspw in spws:
reference_frequency = center_frequencies[myspw]
EVLA_band = find_EVLA_band(reference_frequency)
logprint ("Center freq for spw "+str(myspw)+" = "+str(reference_frequency)+", observing band = "+EVLA_band, logfileout='logs/fluxgains.log')
model_image = standard_source_names[ii]+'_'+EVLA_band+'.im'
logprint ("Setting model for field "+str(myfield)+" spw "+str(myspw)+" using "+model_image, logfileout='logs/fluxgains.log')
try:
default('setjy')
vis='calibrators.ms'
field=str(myfield)
spw=str(myspw)
selectdata=False
scalebychan=True
standard='Perley-Butler 2013'
model=model_image
listmodels=False
usescratch=scratch
setjy()
except:
logprint('no data found for field ' + str(myfield)+" spw "+str(myspw), logfileout='logs/fluxgains.log')
ii=ii+1
tb.close()
logprint ("Making gain tables for flux density bootstrapping", logfileout='logs/fluxgains.log')
logprint ("Short solint = "+new_gain_solint1, logfileout='logs/fluxgains.log')
logprint ("Long solint = "+gain_solint2, logfileout='logs/fluxgains.log')
print ""
print "Finding a reference antenna"
print ""
refantspw=''
refantfield=calibrator_field_select_string
findrefant=RefAntHeuristics(vis='calibrators.ms',field=refantfield,geometry=True,flagging=True)
RefAntOutput=findrefant.calculate()
refAnt=str(RefAntOutput[0])+','+str(RefAntOutput[1])+','+str(RefAntOutput[2])+','+str(RefAntOutput[3])
logprint ("The pipeline will use antenna(s) "+refAnt+" as the reference", logfileout='logs/fluxgains.log')
# Derive amp gain table. Note that gaincurves and opacity
# corrections have already been applied during applycal and split in
# semiFinalBPdcals/solint.py.
# Need to add check for 3C84 in here, when heuristics have been sorted out
default('gaincal')
vis='calibrators.ms'
caltable='fluxphaseshortgaincal.g'
field=''
spw=''
intent=''
selectdata=False
solint=new_gain_solint1
combine='scan'
preavg=-1.0
refant=refAnt
minblperant=minBL_for_cal
minsnr=3.0
solnorm=False
gaintype='G'
smodel=[]
calmode='p'
append=False
docallib=False
#gaintable=filter(None, [priorcals,'delay.k','BPcal.b'])
gaintable=['']
gainfield=['']
interp=['']
spwmap=[]
parang=False
gaincal()
default('gaincal')
vis='calibrators.ms'
caltable='fluxgaincal.g'
field=''
spw=''
intent=''
selectdata=False
solint=gain_solint2
combine='scan'
preavg=-1.0
refant=refAnt<|fim▁hole|>minblperant=minBL_for_cal
minsnr=5.0
solnorm=False
gaintype='G'
smodel=[]
calmode='ap'
append=False
docallib=False
#gaintable=filter(None, [priorcals,'delay.k','BPcal.b','fluxphaseshortgaincal.g'])
gaintable=['fluxphaseshortgaincal.g']
gainfield=['']
interp=['']
spwmap=[]
parang=False
gaincal()
logprint ("Gain table fluxgaincal.g is ready for flagging", logfileout='logs/fluxgains.log')
# Calculate fractions of flagged solutions for final QA2; note, can
# tolerate higher fraction of flagged solutions for this step than in
# other gain tables
flaggedGainSolns=getCalFlaggedSoln('fluxgaincal.g')
if (flaggedGainSolns['all']['total'] == 0):
QA2_fluxgains='Fail'
elif (flaggedGainSolns['antmedian']['fraction'] > 0.2):
QA2_fluxgains='Partial'
logprint ("QA2 score: "+QA2_fluxgains, logfileout='logs/fluxgains.log')
logprint ("Finished EVLA_pipe_fluxgains.py", logfileout='logs/fluxgains.log')
time_list=runtiming('fluxgains', 'end')
pipeline_save()<|fim▁end|> | |
<|file_name|>MutableIncrement.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2010 Henry Coles
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,<|fim▁hole|> * See the License for the specific language governing permissions and limitations under the License.
*/
package org.pitest.mutationtest;
public class MutableIncrement {
public static int increment() {
int i = 42;
i++;
return i;
}
}<|fim▁end|> | * software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
<|file_name|>params_windows.py<|end_file_name|><|fim▁begin|>"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
#Used in subsequent imports from params
from resource_management.libraries.script.script import Script
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions.format import format
from install_params import exclude_packages
from status_params import *
config = Script.get_config()
hadoop_conf_dir = None
hbase_conf_dir = None
hadoop_home = None
try:
hadoop_conf_dir = os.environ["HADOOP_CONF_DIR"]
hbase_conf_dir = os.environ["HBASE_CONF_DIR"]
hadoop_home = os.environ["HADOOP_HOME"]
except:
pass
#directories & files
dfs_name_dir = config['configurations']['hdfs-site']['dfs.namenode.name.dir']
fs_checkpoint_dir = config['configurations']['hdfs-site']['dfs.namenode.checkpoint.dir']
dfs_data_dir = config['configurations']['hdfs-site']['dfs.datanode.data.dir']
#decomission
hdfs_exclude_file = default("/clusterHostInfo/decom_dn_hosts", [])
exclude_file_path = config['configurations']['hdfs-site']['dfs.hosts.exclude']
include_file_path = default("/configurations/hdfs-site/dfs.hosts", None)
hdfs_include_file = None
manage_include_files = default("/configurations/hdfs-site/manage.include.files", False)
if include_file_path and manage_include_files:
slave_hosts = default("/clusterHostInfo/slave_hosts", [])
hdfs_include_file = slave_hosts
update_files_only = default("/commandParams/update_files_only",False)
# HDFS High Availability properties
dfs_ha_enabled = False
dfs_ha_nameservices = default("/configurations/hdfs-site/dfs.internal.nameservices", None)
dfs_ha_namenode_ids = default(format("/configurations/hdfs-site/dfs.ha.namenodes.{dfs_ha_nameservices}"), None)
namenode_id = None
namenode_rpc = None
hostname = config["hostname"]
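# The HA block below reads properties of roughly this shape (illustrative values,
# not taken from any real cluster):
#   dfs.internal.nameservices              = mycluster
#   dfs.ha.namenodes.mycluster             = nn1,nn2
#   dfs.namenode.rpc-address.mycluster.nn1 = host1.example.com:8020
# If this host appears in one of the rpc-address values, namenode_id and
# namenode_rpc are filled in accordingly.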
if dfs_ha_namenode_ids:<|fim▁hole|>if dfs_ha_enabled:
for nn_id in dfs_ha_namemodes_ids_list:
nn_host = config['configurations']['hdfs-site'][format('dfs.namenode.rpc-address.{dfs_ha_nameservices}.{nn_id}')]
if hostname.lower() in nn_host.lower():
namenode_id = nn_id
namenode_rpc = nn_host
hadoop_user = config["configurations"]["cluster-env"]["hadoop.user.name"]
hdfs_user = hadoop_user
grep_exe = "findstr"
name_node_params = default("/commandParams/namenode", None)
service_map = {
"datanode" : datanode_win_service_name,
"journalnode" : journalnode_win_service_name,
"namenode" : namenode_win_service_name,
"secondarynamenode" : snamenode_win_service_name,
"zkfc_slave": zkfc_win_service_name
}<|fim▁end|> | dfs_ha_namemodes_ids_list = dfs_ha_namenode_ids.split(",")
dfs_ha_namenode_ids_array_len = len(dfs_ha_namemodes_ids_list)
if dfs_ha_namenode_ids_array_len > 1:
dfs_ha_enabled = True |
<|file_name|>detailSelector.js<|end_file_name|><|fim▁begin|>var detailSelector = function() {
return {
restrict: 'E',
scope : {
items: '=',
selectedItem: '='
},
replace: true,
templateUrl: 'js/directives/detailSelector.html',
link: function (scope, element, attrs) {<|fim▁hole|> scope.selectedItem = value;
};
}
};
}
module.exports = detailSelector;<|fim▁end|> | scope.updateCurrentItem = function(value) { |
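// Hypothetical template usage (attribute names follow the isolate scope above):
//   <detail-selector items="vm.items" selected-item="vm.current"></detail-selector>
// updateCurrentItem(value) copies the chosen entry into selectedItem.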
<|file_name|>mobi6.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2011, Kovid Goyal <[email protected]>'
__docformat__ = 'restructuredtext en'
import struct, sys, os
from collections import OrderedDict, defaultdict
from lxml import html
from calibre.ebooks.mobi.reader.headers import NULL_INDEX
from calibre.ebooks.mobi.reader.index import (parse_index_record,
parse_tagx_section)
from calibre.ebooks.mobi.utils import (decode_hex_number, decint,
decode_tbs, read_font_record)
from calibre.utils.imghdr import what
from calibre.ebooks.mobi.debug import format_bytes
from calibre.ebooks.mobi.debug.headers import TextRecord
class TagX(object): # {{{
def __init__(self, tag, num_values, bitmask, eof):
self.tag, self.num_values, self.bitmask, self.eof = (tag, num_values,
bitmask, eof)
self.num_of_values = num_values
self.is_eof = (self.eof == 1 and self.tag == 0 and self.num_values == 0
and self.bitmask == 0)
def __repr__(self):
return 'TAGX(tag=%02d, num_values=%d, bitmask=%r, eof=%d)' % (self.tag,
self.num_values, bin(self.bitmask), self.eof)
# }}}
class SecondaryIndexHeader(object): # {{{
def __init__(self, record):
self.record = record
raw = self.record.raw
# open('/t/index_header.bin', 'wb').write(raw)
if raw[:4] != b'INDX':
raise ValueError('Invalid Secondary Index Record')
self.header_length, = struct.unpack('>I', raw[4:8])
self.unknown1 = raw[8:16]
self.index_type, = struct.unpack('>I', raw[16:20])
self.index_type_desc = {0: 'normal', 2:
'inflection', 6: 'calibre'}.get(self.index_type, 'unknown')
self.idxt_start, = struct.unpack('>I', raw[20:24])
self.index_count, = struct.unpack('>I', raw[24:28])
self.index_encoding_num, = struct.unpack('>I', raw[28:32])
self.index_encoding = {65001: 'utf-8', 1252:
'cp1252'}.get(self.index_encoding_num, 'unknown')
if self.index_encoding == 'unknown':
raise ValueError(
'Unknown index encoding: %d'%self.index_encoding_num)
self.unknown2 = raw[32:36]
self.num_index_entries, = struct.unpack('>I', raw[36:40])
self.ordt_start, = struct.unpack('>I', raw[40:44])
self.ligt_start, = struct.unpack('>I', raw[44:48])
self.num_of_ligt_entries, = struct.unpack('>I', raw[48:52])
self.num_of_cncx_blocks, = struct.unpack('>I', raw[52:56])
self.unknown3 = raw[56:180]
self.tagx_offset, = struct.unpack(b'>I', raw[180:184])
if self.tagx_offset != self.header_length:
raise ValueError('TAGX offset and header length disagree')
self.unknown4 = raw[184:self.header_length]
tagx = raw[self.header_length:]
if not tagx.startswith(b'TAGX'):
raise ValueError('Invalid TAGX section')
self.tagx_header_length, = struct.unpack('>I', tagx[4:8])
self.tagx_control_byte_count, = struct.unpack('>I', tagx[8:12])
self.tagx_entries = [TagX(*x) for x in parse_tagx_section(tagx)[1]]
if self.tagx_entries and not self.tagx_entries[-1].is_eof:
raise ValueError('TAGX last entry is not EOF')
idxt0_pos = self.header_length+self.tagx_header_length
num = ord(raw[idxt0_pos])
count_pos = idxt0_pos+1+num
self.last_entry = raw[idxt0_pos+1:count_pos]
self.ncx_count, = struct.unpack(b'>H', raw[count_pos:count_pos+2])
# There may be some alignment zero bytes between the end of the idxt0
# and self.idxt_start
idxt = raw[self.idxt_start:]
if idxt[:4] != b'IDXT':
raise ValueError('Invalid IDXT header')
length_check, = struct.unpack(b'>H', idxt[4:6])
if length_check != self.header_length + self.tagx_header_length:
raise ValueError('Length check failed')
if idxt[6:].replace(b'\0', b''):
raise ValueError('Non null trailing bytes after IDXT')
def __str__(self):
ans = ['*'*20 + ' Secondary Index Header '+ '*'*20]
a = ans.append
def u(w):
a('Unknown: %r (%d bytes) (All zeros: %r)'%(w,
len(w), not bool(w.replace(b'\0', b''))))
a('Header length: %d'%self.header_length)
u(self.unknown1)
a('Index Type: %s (%d)'%(self.index_type_desc, self.index_type))
a('Offset to IDXT start: %d'%self.idxt_start)
a('Number of index records: %d'%self.index_count)
a('Index encoding: %s (%d)'%(self.index_encoding,
self.index_encoding_num))
u(self.unknown2)
a('Number of index entries: %d'% self.num_index_entries)
a('ORDT start: %d'%self.ordt_start)
a('LIGT start: %d'%self.ligt_start)
a('Number of LIGT entries: %d'%self.num_of_ligt_entries)
a('Number of cncx blocks: %d'%self.num_of_cncx_blocks)
u(self.unknown3)
a('TAGX offset: %d'%self.tagx_offset)
u(self.unknown4)
a('\n\n')
a('*'*20 + ' TAGX Header (%d bytes)'%self.tagx_header_length+ '*'*20)
a('Header length: %d'%self.tagx_header_length)
a('Control byte count: %d'%self.tagx_control_byte_count)<|fim▁hole|> for i in self.tagx_entries:
a('\t' + repr(i))
a('Index of last IndexEntry in secondary index record: %s'% self.last_entry)
a('Number of entries in the NCX: %d'% self.ncx_count)
return '\n'.join(ans)
# }}}
class IndexHeader(object): # {{{
def __init__(self, record):
self.record = record
raw = self.record.raw
# open('/t/index_header.bin', 'wb').write(raw)
if raw[:4] != b'INDX':
raise ValueError('Invalid Primary Index Record')
self.header_length, = struct.unpack('>I', raw[4:8])
self.unknown1 = raw[8:12]
self.header_type, = struct.unpack('>I', raw[12:16])
self.index_type, = struct.unpack('>I', raw[16:20])
self.index_type_desc = {0: 'normal', 2:
'inflection', 6: 'calibre'}.get(self.index_type, 'unknown')
self.idxt_start, = struct.unpack('>I', raw[20:24])
self.index_count, = struct.unpack('>I', raw[24:28])
self.index_encoding_num, = struct.unpack('>I', raw[28:32])
self.index_encoding = {65001: 'utf-8', 1252:
'cp1252'}.get(self.index_encoding_num, 'unknown')
if self.index_encoding == 'unknown':
raise ValueError(
'Unknown index encoding: %d'%self.index_encoding_num)
self.possibly_language = raw[32:36]
self.num_index_entries, = struct.unpack('>I', raw[36:40])
self.ordt_start, = struct.unpack('>I', raw[40:44])
self.ligt_start, = struct.unpack('>I', raw[44:48])
self.num_of_ligt_entries, = struct.unpack('>I', raw[48:52])
self.num_of_cncx_blocks, = struct.unpack('>I', raw[52:56])
self.unknown2 = raw[56:180]
self.tagx_offset, = struct.unpack(b'>I', raw[180:184])
if self.tagx_offset != self.header_length:
raise ValueError('TAGX offset and header length disagree')
self.unknown3 = raw[184:self.header_length]
tagx = raw[self.header_length:]
if not tagx.startswith(b'TAGX'):
raise ValueError('Invalid TAGX section')
self.tagx_header_length, = struct.unpack('>I', tagx[4:8])
self.tagx_control_byte_count, = struct.unpack('>I', tagx[8:12])
self.tagx_entries = [TagX(*x) for x in parse_tagx_section(tagx)[1]]
if self.tagx_entries and not self.tagx_entries[-1].is_eof:
raise ValueError('TAGX last entry is not EOF')
idxt0_pos = self.header_length+self.tagx_header_length
last_num, consumed = decode_hex_number(raw[idxt0_pos:])
count_pos = idxt0_pos + consumed
self.ncx_count, = struct.unpack(b'>H', raw[count_pos:count_pos+2])
self.last_entry = last_num
if last_num != self.ncx_count - 1:
raise ValueError('Last id number in the NCX != NCX count - 1')
# There may be some alignment zero bytes between the end of the idxt0
# and self.idxt_start
idxt = raw[self.idxt_start:]
if idxt[:4] != b'IDXT':
raise ValueError('Invalid IDXT header')
length_check, = struct.unpack(b'>H', idxt[4:6])
if length_check != self.header_length + self.tagx_header_length:
raise ValueError('Length check failed')
# if idxt[6:].replace(b'\0', b''):
# raise ValueError('Non null trailing bytes after IDXT')
def __str__(self):
ans = ['*'*20 + ' Index Header (%d bytes)'%len(self.record.raw)+ '*'*20]
a = ans.append
def u(w):
a('Unknown: %r (%d bytes) (All zeros: %r)'%(w,
len(w), not bool(w.replace(b'\0', b''))))
a('Header length: %d'%self.header_length)
u(self.unknown1)
a('Header type: %d'%self.header_type)
a('Index Type: %s (%d)'%(self.index_type_desc, self.index_type))
a('Offset to IDXT start: %d'%self.idxt_start)
a('Number of index records: %d'%self.index_count)
a('Index encoding: %s (%d)'%(self.index_encoding,
self.index_encoding_num))
a('Unknown (possibly language?): %r'%(self.possibly_language))
a('Number of index entries: %d'% self.num_index_entries)
a('ORDT start: %d'%self.ordt_start)
a('LIGT start: %d'%self.ligt_start)
a('Number of LIGT entries: %d'%self.num_of_ligt_entries)
a('Number of cncx blocks: %d'%self.num_of_cncx_blocks)
u(self.unknown2)
a('TAGX offset: %d'%self.tagx_offset)
u(self.unknown3)
a('\n\n')
a('*'*20 + ' TAGX Header (%d bytes)'%self.tagx_header_length+ '*'*20)
a('Header length: %d'%self.tagx_header_length)
a('Control byte count: %d'%self.tagx_control_byte_count)
for i in self.tagx_entries:
a('\t' + repr(i))
a('Index of last IndexEntry in primary index record: %s'% self.last_entry)
a('Number of entries in the NCX: %d'% self.ncx_count)
return '\n'.join(ans)
# }}}
class Tag(object): # {{{
'''
Index entries are a collection of tags. Each tag is represented by this
class.
'''
TAG_MAP = {
1: ('offset', 'Offset in HTML'),
2: ('size', 'Size in HTML'),
3: ('label_offset', 'Label offset in CNCX'),
4: ('depth', 'Depth of this entry in TOC'),
5: ('class_offset', 'Class offset in CNCX'),
6: ('pos_fid', 'File Index'),
11: ('secondary', '[unknown, unknown, '
'tag type from TAGX in primary index header]'),
21: ('parent_index', 'Parent'),
22: ('first_child_index', 'First child'),
23: ('last_child_index', 'Last child'),
69 : ('image_index', 'Offset from first image record to the'
' image record associated with this entry'
' (masthead for periodical or thumbnail for'
' article entry).'),
70 : ('desc_offset', 'Description offset in cncx'),
71 : ('author_offset', 'Author offset in cncx'),
72 : ('image_caption_offset', 'Image caption offset in cncx'),
73 : ('image_attr_offset', 'Image attribution offset in cncx'),
}
def __init__(self, tag_type, vals, cncx):
self.value = vals if len(vals) > 1 else vals[0] if vals else None
self.cncx_value = None
if tag_type in self.TAG_MAP:
self.attr, self.desc = self.TAG_MAP[tag_type]
else:
print ('Unknown tag value: %s'%tag_type)
self.desc = '??Unknown (tag value: %d)'%tag_type
self.attr = 'unknown'
if '_offset' in self.attr:
self.cncx_value = cncx[self.value]
def __str__(self):
if self.cncx_value is not None:
return '%s : %r [%r]'%(self.desc, self.value, self.cncx_value)
return '%s : %r'%(self.desc, self.value)
# }}}
class IndexEntry(object): # {{{
'''
The index is made up of entries, each of which is represented by an
instance of this class. Index entries typically point to offsets in the
HTML, specify HTML sizes and point to text strings in the CNCX that are
used in the navigation UI.
'''
def __init__(self, ident, entry, cncx):
try:
self.index = int(ident, 16)
except ValueError:
self.index = ident
self.tags = [Tag(tag_type, vals, cncx) for tag_type, vals in
entry.iteritems()]
@property
def label(self):
for tag in self.tags:
if tag.attr == 'label_offset':
return tag.cncx_value
return ''
@property
def offset(self):
for tag in self.tags:
if tag.attr == 'offset':
return tag.value
return 0
@property
def size(self):
for tag in self.tags:
if tag.attr == 'size':
return tag.value
return 0
@property
def depth(self):
for tag in self.tags:
if tag.attr == 'depth':
return tag.value
return 0
@property
def parent_index(self):
for tag in self.tags:
if tag.attr == 'parent_index':
return tag.value
return -1
@property
def first_child_index(self):
for tag in self.tags:
if tag.attr == 'first_child_index':
return tag.value
return -1
@property
def last_child_index(self):
for tag in self.tags:
if tag.attr == 'last_child_index':
return tag.value
return -1
@property
def pos_fid(self):
for tag in self.tags:
if tag.attr == 'pos_fid':
return tag.value
return [0, 0]
def __str__(self):
ans = ['Index Entry(index=%s, length=%d)'%(
self.index, len(self.tags))]
for tag in self.tags:
if tag.value is not None:
ans.append('\t'+str(tag))
if self.first_child_index != -1:
ans.append('\tNumber of children: %d'%(self.last_child_index -
self.first_child_index + 1))
return '\n'.join(ans)
# }}}
class IndexRecord(object): # {{{
'''
Represents all indexing information in the MOBI, apart from indexing info
in the trailing data of the text records.
'''
def __init__(self, records, index_header, cncx):
self.alltext = None
table = OrderedDict()
tags = [TagX(x.tag, x.num_values, x.bitmask, x.eof) for x in
index_header.tagx_entries]
for record in records:
raw = record.raw
if raw[:4] != b'INDX':
raise ValueError('Invalid Primary Index Record')
parse_index_record(table, record.raw,
index_header.tagx_control_byte_count, tags,
index_header.index_encoding, {}, strict=True)
self.indices = []
for ident, entry in table.iteritems():
self.indices.append(IndexEntry(ident, entry, cncx))
def get_parent(self, index):
if index.depth < 1:
return None
parent_depth = index.depth - 1
for p in self.indices:
if p.depth != parent_depth:
continue
def __str__(self):
ans = ['*'*20 + ' Index Entries (%d entries) '%len(self.indices)+ '*'*20]
a = ans.append
def u(w):
a('Unknown: %r (%d bytes) (All zeros: %r)'%(w,
len(w), not bool(w.replace(b'\0', b''))))
for entry in self.indices:
offset = entry.offset
a(str(entry))
t = self.alltext
if offset is not None and self.alltext is not None:
a('\tHTML before offset: %r'%t[offset-50:offset])
a('\tHTML after offset: %r'%t[offset:offset+50])
p = offset+entry.size
a('\tHTML before end: %r'%t[p-50:p])
a('\tHTML after end: %r'%t[p:p+50])
a('')
return '\n'.join(ans)
# }}}
class CNCX(object): # {{{
'''
Parses the records that contain the compiled NCX (all strings from the
NCX). Presents a simple offset : string mapping interface to access the
data.
'''
def __init__(self, records, codec):
self.records = OrderedDict()
record_offset = 0
for record in records:
raw = record.raw
pos = 0
while pos < len(raw):
length, consumed = decint(raw[pos:])
if length > 0:
try:
self.records[pos+record_offset] = raw[
pos+consumed:pos+consumed+length].decode(codec)
except:
byts = raw[pos:]
r = format_bytes(byts)
print ('CNCX entry at offset %d has unknown format %s'%(
pos+record_offset, r))
self.records[pos+record_offset] = r
pos = len(raw)
pos += consumed+length
record_offset += 0x10000
def __getitem__(self, offset):
return self.records.get(offset)
def __str__(self):
ans = ['*'*20 + ' cncx (%d strings) '%len(self.records)+ '*'*20]
for k, v in self.records.iteritems():
ans.append('%10d : %s'%(k, v))
return '\n'.join(ans)
# }}}
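# Note: index tags whose attribute name ends in "_offset" store byte offsets into
# these CNCX records, so the human-readable string is recovered with cncx[value]
# (see Tag.__init__ above).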
class ImageRecord(object): # {{{
def __init__(self, idx, record, fmt):
self.raw = record.raw
self.fmt = fmt
self.idx = idx
def dump(self, folder):
name = '%06d'%self.idx
with open(os.path.join(folder, name+'.'+self.fmt), 'wb') as f:
f.write(self.raw)
# }}}
class BinaryRecord(object): # {{{
def __init__(self, idx, record):
self.raw = record.raw
sig = self.raw[:4]
name = '%06d'%idx
if sig in {b'FCIS', b'FLIS', b'SRCS', b'DATP', b'RESC', b'BOUN',
b'FDST', b'AUDI', b'VIDE', b'CRES', b'CONT', b'CMET'}:
name += '-' + sig.decode('ascii')
elif sig == b'\xe9\x8e\r\n':
name += '-' + 'EOF'
self.name = name
def dump(self, folder):
with open(os.path.join(folder, self.name+'.bin'), 'wb') as f:
f.write(self.raw)
# }}}
class FontRecord(object): # {{{
def __init__(self, idx, record):
self.raw = record.raw
name = '%06d'%idx
self.font = read_font_record(self.raw)
if self.font['err']:
raise ValueError('Failed to read font record: %s Headers: %s'%(
self.font['err'], self.font['headers']))
self.payload = (self.font['font_data'] if self.font['font_data'] else
self.font['raw_data'])
self.name = '%s.%s'%(name, self.font['ext'])
def dump(self, folder):
with open(os.path.join(folder, self.name), 'wb') as f:
f.write(self.payload)
# }}}
class TBSIndexing(object): # {{{
def __init__(self, text_records, indices, doc_type):
self.record_indices = OrderedDict()
self.doc_type = doc_type
self.indices = indices
pos = 0
for r in text_records:
start = pos
pos += len(r.raw)
end = pos - 1
self.record_indices[r] = x = {'starts':[], 'ends':[],
'complete':[], 'geom': (start, end)}
for entry in indices:
istart, sz = entry.offset, entry.size
iend = istart + sz - 1
has_start = istart >= start and istart <= end
has_end = iend >= start and iend <= end
rec = None
if has_start and has_end:
rec = 'complete'
elif has_start and not has_end:
rec = 'starts'
elif not has_start and has_end:
rec = 'ends'
if rec:
x[rec].append(entry)
def get_index(self, idx):
for i in self.indices:
if i.index in {idx, unicode(idx)}:
return i
raise IndexError('Index %d not found'%idx)
def __str__(self):
ans = ['*'*20 + ' TBS Indexing (%d records) '%len(self.record_indices)+ '*'*20]
for r, dat in self.record_indices.iteritems():
ans += self.dump_record(r, dat)[-1]
return '\n'.join(ans)
def dump(self, bdir):
types = defaultdict(list)
for r, dat in self.record_indices.iteritems():
tbs_type, strings = self.dump_record(r, dat)
if tbs_type == 0:
continue
types[tbs_type] += strings
for typ, strings in types.iteritems():
with open(os.path.join(bdir, 'tbs_type_%d.txt'%typ), 'wb') as f:
f.write('\n'.join(strings))
def dump_record(self, r, dat):
ans = []
ans.append('\nRecord #%d: Starts at: %d Ends at: %d'%(r.idx,
dat['geom'][0], dat['geom'][1]))
s, e, c = dat['starts'], dat['ends'], dat['complete']
ans.append(('\tContains: %d index entries '
'(%d ends, %d complete, %d starts)')%tuple(map(len, (s+e+c, e,
c, s))))
byts = bytearray(r.trailing_data.get('indexing', b''))
ans.append('TBS bytes: %s'%format_bytes(byts))
for typ, entries in (('Ends', e), ('Complete', c), ('Starts', s)):
if entries:
ans.append('\t%s:'%typ)
for x in entries:
ans.append(('\t\tIndex Entry: %s (Parent index: %s, '
'Depth: %d, Offset: %d, Size: %d) [%s]')%(
x.index, x.parent_index, x.depth, x.offset, x.size, x.label))
def bin4(num):
ans = bin(num)[2:]
return bytes('0'*(4-len(ans)) + ans)
def repr_extra(x):
return str({bin4(k):v for k, v in extra.iteritems()})
tbs_type = 0
is_periodical = self.doc_type in (257, 258, 259)
if len(byts):
outermost_index, extra, consumed = decode_tbs(byts, flag_size=3)
byts = byts[consumed:]
for k in extra:
tbs_type |= k
ans.append('\nTBS: %d (%s)'%(tbs_type, bin4(tbs_type)))
ans.append('Outermost index: %d'%outermost_index)
ans.append('Unknown extra start bytes: %s'%repr_extra(extra))
if is_periodical: # Hierarchical periodical
try:
byts, a = self.interpret_periodical(tbs_type, byts,
dat['geom'][0])
except:
import traceback
traceback.print_exc()
a = []
print ('Failed to decode TBS bytes for record: %d'%r.idx)
ans += a
if byts:
sbyts = tuple(hex(b)[2:] for b in byts)
ans.append('Remaining bytes: %s'%' '.join(sbyts))
ans.append('')
return tbs_type, ans
def interpret_periodical(self, tbs_type, byts, record_offset):
ans = []
def read_section_transitions(byts, psi=None): # {{{
if psi is None:
# Assume previous section is 1
psi = self.get_index(1)
while byts:
ai, extra, consumed = decode_tbs(byts)
byts = byts[consumed:]
if extra.get(0b0010, None) is not None:
raise ValueError('Dont know how to interpret flag 0b0010'
' while reading section transitions')
if extra.get(0b1000, None) is not None:
if len(extra) > 1:
raise ValueError('Dont know how to interpret flags'
' %r while reading section transitions'%extra)
nsi = self.get_index(psi.index+1)
ans.append('Last article in this record of section %d'
' (relative to next section index [%d]): '
'%d [%d absolute index]'%(psi.index, nsi.index, ai,
ai+nsi.index))
psi = nsi
continue
ans.append('First article in this record of section %d'
' (relative to its parent section): '
'%d [%d absolute index]'%(psi.index, ai, ai+psi.index))
num = extra.get(0b0100, None)
if num is None:
msg = ('The section %d has at most one article'
' in this record')%psi.index
else:
msg = ('Number of articles in this record of '
'section %d: %d')%(psi.index, num)
ans.append(msg)
offset = extra.get(0b0001, None)
if offset is not None:
if offset == 0:
ans.append('This record is spanned by the article:'
'%d'%(ai+psi.index))
else:
ans.append('->Offset to start of next section (%d) from start'
' of record: %d [%d absolute offset]'%(psi.index+1,
offset, offset+record_offset))
return byts
# }}}
def read_starting_section(byts): # {{{
orig = byts
si, extra, consumed = decode_tbs(byts)
byts = byts[consumed:]
if len(extra) > 1 or 0b0010 in extra or 0b1000 in extra:
raise ValueError('Dont know how to interpret flags %r'
' when reading starting section'%extra)
si = self.get_index(si)
ans.append('The section at the start of this record is:'
' %s'%si.index)
if 0b0100 in extra:
num = extra[0b0100]
ans.append('The number of articles from the section %d'
' in this record: %s'%(si.index, num))
elif 0b0001 in extra:
eof = extra[0b0001]
if eof != 0:
raise ValueError('Unknown eof value %s when reading'
' starting section. All bytes: %r'%(eof, orig))
ans.append('??This record has more than one article from '
' the section: %s'%si.index)
return si, byts
# }}}
if tbs_type & 0b0100:
# Starting section is the first section
ssi = self.get_index(1)
else:
ssi, byts = read_starting_section(byts)
byts = read_section_transitions(byts, ssi)
return byts, ans
# }}}
class MOBIFile(object): # {{{
def __init__(self, mf):
for x in ('raw', 'palmdb', 'record_headers', 'records', 'mobi_header',
'huffman_record_nums',):
setattr(self, x, getattr(mf, x))
self.index_header = self.index_record = None
self.indexing_record_nums = set()
pir = getattr(self.mobi_header, 'primary_index_record', NULL_INDEX)
if pir != NULL_INDEX:
self.index_header = IndexHeader(self.records[pir])
numi = self.index_header.index_count
self.cncx = CNCX(self.records[
pir+1+numi:pir+1+numi+self.index_header.num_of_cncx_blocks],
self.index_header.index_encoding)
self.index_record = IndexRecord(self.records[pir+1:pir+1+numi],
self.index_header, self.cncx)
self.indexing_record_nums = set(xrange(pir,
pir+1+numi+self.index_header.num_of_cncx_blocks))
self.secondary_index_record = self.secondary_index_header = None
sir = self.mobi_header.secondary_index_record
if sir != NULL_INDEX:
self.secondary_index_header = SecondaryIndexHeader(self.records[sir])
numi = self.secondary_index_header.index_count
self.indexing_record_nums.add(sir)
self.secondary_index_record = IndexRecord(
self.records[sir+1:sir+1+numi], self.secondary_index_header, self.cncx)
self.indexing_record_nums |= set(xrange(sir+1, sir+1+numi))
ntr = self.mobi_header.number_of_text_records
fii = self.mobi_header.first_image_index
self.text_records = [TextRecord(r, self.records[r],
self.mobi_header.extra_data_flags, mf.decompress6) for r in xrange(1,
min(len(self.records), ntr+1))]
self.image_records, self.binary_records = [], []
self.font_records = []
image_index = 0
for i in xrange(self.mobi_header.first_resource_record, min(self.mobi_header.last_resource_record, len(self.records))):
if i in self.indexing_record_nums or i in self.huffman_record_nums:
continue
image_index += 1
r = self.records[i]
fmt = None
if i >= fii and r.raw[:4] not in {b'FLIS', b'FCIS', b'SRCS',
b'\xe9\x8e\r\n', b'RESC', b'BOUN', b'FDST', b'DATP',
b'AUDI', b'VIDE', b'FONT', b'CRES', b'CONT', b'CMET'}:
try:
fmt = what(None, r.raw)
except:
pass
if fmt is not None:
self.image_records.append(ImageRecord(image_index, r, fmt))
elif r.raw[:4] == b'FONT':
self.font_records.append(FontRecord(i, r))
else:
self.binary_records.append(BinaryRecord(i, r))
if self.index_record is not None:
self.tbs_indexing = TBSIndexing(self.text_records,
self.index_record.indices, self.mobi_header.type_raw)
def print_header(self, f=sys.stdout):
print (str(self.palmdb).encode('utf-8'), file=f)
print (file=f)
print ('Record headers:', file=f)
for i, r in enumerate(self.records):
print ('%6d. %s'%(i, r.header), file=f)
print (file=f)
print (str(self.mobi_header).encode('utf-8'), file=f)
# }}}
def inspect_mobi(mobi_file, ddir):
f = MOBIFile(mobi_file)
with open(os.path.join(ddir, 'header.txt'), 'wb') as out:
f.print_header(f=out)
alltext = os.path.join(ddir, 'text.html')
with open(alltext, 'wb') as of:
alltext = b''
for rec in f.text_records:
of.write(rec.raw)
alltext += rec.raw
of.seek(0)
root = html.fromstring(alltext.decode(f.mobi_header.encoding))
with open(os.path.join(ddir, 'pretty.html'), 'wb') as of:
of.write(html.tostring(root, pretty_print=True, encoding='utf-8',
include_meta_content_type=True))
if f.index_header is not None:
f.index_record.alltext = alltext
with open(os.path.join(ddir, 'index.txt'), 'wb') as out:
print(str(f.index_header), file=out)
print('\n\n', file=out)
if f.secondary_index_header is not None:
print(str(f.secondary_index_header).encode('utf-8'), file=out)
print('\n\n', file=out)
if f.secondary_index_record is not None:
print(str(f.secondary_index_record).encode('utf-8'), file=out)
print('\n\n', file=out)
print(str(f.cncx).encode('utf-8'), file=out)
print('\n\n', file=out)
print(str(f.index_record), file=out)
with open(os.path.join(ddir, 'tbs_indexing.txt'), 'wb') as out:
print(str(f.tbs_indexing), file=out)
f.tbs_indexing.dump(ddir)
for tdir, attr in [('text', 'text_records'), ('images', 'image_records'),
('binary', 'binary_records'), ('font', 'font_records')]:
tdir = os.path.join(ddir, tdir)
os.mkdir(tdir)
for rec in getattr(f, attr):
rec.dump(tdir)
# }}}<|fim▁end|> | |
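# Rough usage sketch (assumed caller, not part of this file): inspect_mobi()
# expects an already-parsed MOBI container exposing raw/palmdb/records/mobi_header
# plus an output directory, e.g.
#
#   inspect_mobi(parsed_mobi, '/tmp/mobi_dump')  # writes header.txt, text.html, ...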
<|file_name|>make_erpnext_demo.py<|end_file_name|><|fim▁begin|>if __name__=="__main__":
import sys
sys.path.extend([".", "lib", "app"])
import webnotes, os
import utilities.demo.make_demo
def make_demo_app():
webnotes.mute_emails = 1
webnotes.connect()
utilities.demo.make_demo.make(reset=True, simulate=False)
	# setup demo user etc. so that the site is up faster, while the data loads
make_demo_user()
make_demo_login_page()
make_demo_on_login_script()
utilities.demo.make_demo.make(reset=False, simulate=True)
def make_demo_user():
roles = ["Accounts Manager", "Analytics", "Expense Approver", "Accounts User",
"Leave Approver", "Blogger", "Customer", "Sales Manager", "Employee", "Support Manager",
"HR Manager", "HR User", "Maintenance Manager", "Maintenance User", "Material Manager",
"Material Master Manager", "Material User", "Manufacturing Manager",
"Manufacturing User", "Projects User", "Purchase Manager", "Purchase Master Manager",
"Purchase User", "Quality Manager", "Report Manager", "Sales Master Manager",
"Sales User", "Supplier", "Support Team"]
def add_roles(bean):
for role in roles:
			bean.doclist.append({
"doctype": "UserRole",
"parentfield": "user_roles",
"role": role
})
# make demo user
if webnotes.conn.exists("Profile", "[email protected]"):
webnotes.delete_doc("Profile", "[email protected]")
p = webnotes.new_bean("Profile")
p.doc.email = "[email protected]"
p.doc.first_name = "Demo"
p.doc.last_name = "User"
p.doc.enabled = 1
p.doc.user_type = "Owrang Demo"
p.doc.send_invite_email = 0
p.doc.new_password = "demo"
p.insert()
add_roles(p)
p.save()
# make system manager user
if webnotes.conn.exists("Profile", "[email protected]"):
webnotes.delete_doc("Profile", "[email protected]")
p = webnotes.new_bean("Profile")
p.doc.email = "[email protected]"
p.doc.first_name = "Admin"
p.doc.last_name = "User"
p.doc.enabled = 1
p.doc.user_type = "System User"
p.doc.send_invite_email = 0
p.doc.new_password = "admin010123"
p.insert()
roles.append("System Manager")
add_roles(p)
p.save()
# only read for newsletter
webnotes.conn.sql("""update `tabDocPerm` set `write`=0, `create`=0, `cancel`=0
where parent='Newsletter'""")
webnotes.conn.sql("""update `tabDocPerm` set `write`=0, `create`=0, `cancel`=0
where parent='Profile' and role='All'""")
webnotes.conn.commit()
def make_demo_login_page():
webnotes.conn.set_value("Website Settings", None, "home_page", "")
webnotes.conn.sql("""delete from `tabWeb Page` where name='demo-login'""")
p = webnotes.new_bean("Web Page")
p.doc.title = "Demo Login"
p.doc.published = 1
p.doc.description = "Owrang Demo Login"
with open(os.path.join(os.path.dirname(__file__), "demo-login.html"), "r") as dfile:
p.doc.main_section = dfile.read()
p.doc.insert_code = 1
with open(os.path.join(os.path.dirname(__file__), "demo-login.js"), "r") as dfile:
p.doc.javascript = dfile.read()
p.doc.insert_style = 1
with open(os.path.join(os.path.dirname(__file__), "demo-login.css"), "r") as dfile:
p.doc.css = dfile.read()
p.insert()
<|fim▁hole|> website_settings.save()
webnotes.conn.commit()
def make_demo_on_login_script():
webnotes.conn.sql("""delete from `tabCustom Script` where dt='Control Panel'""")
s = webnotes.new_bean("Custom Script")
s.doc.dt = "Control Panel"
s.doc.script_type = "Server"
with open(os.path.join(os.path.dirname(__file__), "demo_control_panel.py"), "r") as dfile:
s.doc.script = dfile.read()
s.insert()
cp = webnotes.bean("Control Panel")
cp.doc.custom_startup_code = """wn.ui.toolbar.show_banner('You are using Owrang Demo. To start your own Owrang Trial, <a href="https://owrang.yellowen.com/pricing-and-signup" target="_blank">click here</a>')"""
cp.save()
webnotes.conn.commit()
if __name__=="__main__":
make_demo_app()<|fim▁end|> | website_settings = webnotes.bean("Website Settings", "Website Settings")
website_settings.doc.home_page = "demo-login"
website_settings.doc.disable_signup = 1 |
<|file_name|>foundation.d.ts<|end_file_name|><|fim▁begin|>/**
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { MDCSlidableDrawerFoundation, MDCSlidableDrawerAdapter } from '../slidable';
import { cssClasses, strings } from './constants';
export default class MDCTemporaryDrawerFoundation extends MDCSlidableDrawerFoundation {
static readonly cssClasses: cssClasses;
static readonly strings: strings;
static readonly defaultAdapter: MDCSlidableDrawerAdapter;
open(): void;
close(): void;<|fim▁hole|><|fim▁end|> | } |
<|file_name|>bfl.rs<|end_file_name|><|fim▁begin|>#[doc = "Register `BFL` reader"]
pub struct R(crate::R<BFL_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<BFL_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<BFL_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<BFL_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `BFL` writer"]
pub struct W(crate::W<BFL_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<BFL_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<BFL_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<BFL_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Boundary Flag 0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum BFL0_A {
#[doc = "0: Passive state: result has not yet crossed the activation boundary (see bitfield BFAy), or selected gate signal is inactive, or this boundary flag is disabled"]
VALUE1 = 0,
#[doc = "1: Active state: result has crossed the activation boundary"]
VALUE2 = 1,
}
impl From<BFL0_A> for bool {
#[inline(always)]
fn from(variant: BFL0_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `BFL0` reader - Boundary Flag 0"]
pub struct BFL0_R(crate::FieldReader<bool, BFL0_A>);
impl BFL0_R {
pub(crate) fn new(bits: bool) -> Self {
BFL0_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> BFL0_A {
match self.bits {
false => BFL0_A::VALUE1,
true => BFL0_A::VALUE2,
}
}
#[doc = "Checks if the value of the field is `VALUE1`"]
#[inline(always)]
pub fn is_value1(&self) -> bool {
**self == BFL0_A::VALUE1
}
#[doc = "Checks if the value of the field is `VALUE2`"]
#[inline(always)]
pub fn is_value2(&self) -> bool {
**self == BFL0_A::VALUE2
}
}
impl core::ops::Deref for BFL0_R {
type Target = crate::FieldReader<bool, BFL0_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Boundary Flag 1\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum BFL1_A {
#[doc = "0: Passive state: result has not yet crossed the activation boundary (see bitfield BFAy), or selected gate signal is inactive, or this boundary flag is disabled"]
VALUE1 = 0,
#[doc = "1: Active state: result has crossed the activation boundary"]
VALUE2 = 1,
}
impl From<BFL1_A> for bool {
#[inline(always)]
fn from(variant: BFL1_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `BFL1` reader - Boundary Flag 1"]
pub struct BFL1_R(crate::FieldReader<bool, BFL1_A>);
impl BFL1_R {
pub(crate) fn new(bits: bool) -> Self {
BFL1_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> BFL1_A {
match self.bits {
false => BFL1_A::VALUE1,
true => BFL1_A::VALUE2,
}
}
#[doc = "Checks if the value of the field is `VALUE1`"]
#[inline(always)]
pub fn is_value1(&self) -> bool {
**self == BFL1_A::VALUE1
}
#[doc = "Checks if the value of the field is `VALUE2`"]
#[inline(always)]
pub fn is_value2(&self) -> bool {
**self == BFL1_A::VALUE2
}
}
impl core::ops::Deref for BFL1_R {
type Target = crate::FieldReader<bool, BFL1_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Boundary Flag 2\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum BFL2_A {
#[doc = "0: Passive state: result has not yet crossed the activation boundary (see bitfield BFAy), or selected gate signal is inactive, or this boundary flag is disabled"]
VALUE1 = 0,
#[doc = "1: Active state: result has crossed the activation boundary"]
VALUE2 = 1,
}
impl From<BFL2_A> for bool {
#[inline(always)]
fn from(variant: BFL2_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `BFL2` reader - Boundary Flag 2"]
pub struct BFL2_R(crate::FieldReader<bool, BFL2_A>);
impl BFL2_R {
pub(crate) fn new(bits: bool) -> Self {
BFL2_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> BFL2_A {
match self.bits {
false => BFL2_A::VALUE1,
true => BFL2_A::VALUE2,
}
}
#[doc = "Checks if the value of the field is `VALUE1`"]
#[inline(always)]
pub fn is_value1(&self) -> bool {
**self == BFL2_A::VALUE1
}
#[doc = "Checks if the value of the field is `VALUE2`"]
#[inline(always)]
pub fn is_value2(&self) -> bool {
**self == BFL2_A::VALUE2
}
}
impl core::ops::Deref for BFL2_R {
type Target = crate::FieldReader<bool, BFL2_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Boundary Flag 3\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum BFL3_A {
#[doc = "0: Passive state: result has not yet crossed the activation boundary (see bitfield BFAy), or selected gate signal is inactive, or this boundary flag is disabled"]
VALUE1 = 0,
#[doc = "1: Active state: result has crossed the activation boundary"]
VALUE2 = 1,
}
impl From<BFL3_A> for bool {
#[inline(always)]
fn from(variant: BFL3_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `BFL3` reader - Boundary Flag 3"]
pub struct BFL3_R(crate::FieldReader<bool, BFL3_A>);
impl BFL3_R {
pub(crate) fn new(bits: bool) -> Self {
BFL3_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> BFL3_A {
match self.bits {
false => BFL3_A::VALUE1,
true => BFL3_A::VALUE2,
}
}
#[doc = "Checks if the value of the field is `VALUE1`"]
#[inline(always)]
pub fn is_value1(&self) -> bool {
**self == BFL3_A::VALUE1
}
#[doc = "Checks if the value of the field is `VALUE2`"]
#[inline(always)]
pub fn is_value2(&self) -> bool {
**self == BFL3_A::VALUE2
}
}
impl core::ops::Deref for BFL3_R {
type Target = crate::FieldReader<bool, BFL3_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Boundary Flag 0 Activation Select\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum BFA0_A {
#[doc = "0: Set boundary flag BFLy if result is above the defined band or compare value, clear if below"]
VALUE1 = 0,
#[doc = "1: Set boundary flag BFLy if result is below the defined band or compare value, clear if above"]
VALUE2 = 1,
}
impl From<BFA0_A> for bool {
#[inline(always)]
fn from(variant: BFA0_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `BFA0` reader - Boundary Flag 0 Activation Select"]
pub struct BFA0_R(crate::FieldReader<bool, BFA0_A>);
impl BFA0_R {
pub(crate) fn new(bits: bool) -> Self {
BFA0_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> BFA0_A {
match self.bits {
false => BFA0_A::VALUE1,
true => BFA0_A::VALUE2,
}
}
#[doc = "Checks if the value of the field is `VALUE1`"]
#[inline(always)]
pub fn is_value1(&self) -> bool {
**self == BFA0_A::VALUE1
}
#[doc = "Checks if the value of the field is `VALUE2`"]
#[inline(always)]
pub fn is_value2(&self) -> bool {
**self == BFA0_A::VALUE2
}
}
impl core::ops::Deref for BFA0_R {
type Target = crate::FieldReader<bool, BFA0_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `BFA0` writer - Boundary Flag 0 Activation Select"]
pub struct BFA0_W<'a> {
w: &'a mut W,
}
impl<'a> BFA0_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: BFA0_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Set boundary flag BFLy if result is above the defined band or compare value, clear if below"]
#[inline(always)]
pub fn value1(self) -> &'a mut W {
self.variant(BFA0_A::VALUE1)
}
#[doc = "Set boundary flag BFLy if result is below the defined band or compare value, clear if above"]
#[inline(always)]
pub fn value2(self) -> &'a mut W {
self.variant(BFA0_A::VALUE2)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 8)) | ((value as u32 & 0x01) << 8);
self.w
}
}
#[doc = "Boundary Flag 1 Activation Select\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum BFA1_A {
#[doc = "0: Set boundary flag BFLy if result is above the defined band or compare value, clear if below"]
VALUE1 = 0,
#[doc = "1: Set boundary flag BFLy if result is below the defined band or compare value, clear if above"]
VALUE2 = 1,
}
impl From<BFA1_A> for bool {
#[inline(always)]
fn from(variant: BFA1_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `BFA1` reader - Boundary Flag 1 Activation Select"]
pub struct BFA1_R(crate::FieldReader<bool, BFA1_A>);
impl BFA1_R {
pub(crate) fn new(bits: bool) -> Self {
BFA1_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> BFA1_A {
match self.bits {
false => BFA1_A::VALUE1,
true => BFA1_A::VALUE2,
}
}
#[doc = "Checks if the value of the field is `VALUE1`"]
#[inline(always)]
pub fn is_value1(&self) -> bool {
**self == BFA1_A::VALUE1
}
#[doc = "Checks if the value of the field is `VALUE2`"]
#[inline(always)]
pub fn is_value2(&self) -> bool {
**self == BFA1_A::VALUE2
}
}
impl core::ops::Deref for BFA1_R {
type Target = crate::FieldReader<bool, BFA1_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}<|fim▁hole|>pub struct BFA1_W<'a> {
w: &'a mut W,
}
impl<'a> BFA1_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: BFA1_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Set boundary flag BFLy if result is above the defined band or compare value, clear if below"]
#[inline(always)]
pub fn value1(self) -> &'a mut W {
self.variant(BFA1_A::VALUE1)
}
#[doc = "Set boundary flag BFLy if result is below the defined band or compare value, clear if above"]
#[inline(always)]
pub fn value2(self) -> &'a mut W {
self.variant(BFA1_A::VALUE2)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 9)) | ((value as u32 & 0x01) << 9);
self.w
}
}
#[doc = "Boundary Flag 2 Activation Select\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum BFA2_A {
#[doc = "0: Set boundary flag BFLy if result is above the defined band or compare value, clear if below"]
VALUE1 = 0,
#[doc = "1: Set boundary flag BFLy if result is below the defined band or compare value, clear if above"]
VALUE2 = 1,
}
impl From<BFA2_A> for bool {
#[inline(always)]
fn from(variant: BFA2_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `BFA2` reader - Boundary Flag 2 Activation Select"]
pub struct BFA2_R(crate::FieldReader<bool, BFA2_A>);
impl BFA2_R {
pub(crate) fn new(bits: bool) -> Self {
BFA2_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> BFA2_A {
match self.bits {
false => BFA2_A::VALUE1,
true => BFA2_A::VALUE2,
}
}
#[doc = "Checks if the value of the field is `VALUE1`"]
#[inline(always)]
pub fn is_value1(&self) -> bool {
**self == BFA2_A::VALUE1
}
#[doc = "Checks if the value of the field is `VALUE2`"]
#[inline(always)]
pub fn is_value2(&self) -> bool {
**self == BFA2_A::VALUE2
}
}
impl core::ops::Deref for BFA2_R {
type Target = crate::FieldReader<bool, BFA2_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `BFA2` writer - Boundary Flag 2 Activation Select"]
pub struct BFA2_W<'a> {
w: &'a mut W,
}
impl<'a> BFA2_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: BFA2_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Set boundary flag BFLy if result is above the defined band or compare value, clear if below"]
#[inline(always)]
pub fn value1(self) -> &'a mut W {
self.variant(BFA2_A::VALUE1)
}
#[doc = "Set boundary flag BFLy if result is below the defined band or compare value, clear if above"]
#[inline(always)]
pub fn value2(self) -> &'a mut W {
self.variant(BFA2_A::VALUE2)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 10)) | ((value as u32 & 0x01) << 10);
self.w
}
}
#[doc = "Boundary Flag 3 Activation Select\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum BFA3_A {
#[doc = "0: Set boundary flag BFLy if result is above the defined band or compare value, clear if below"]
VALUE1 = 0,
#[doc = "1: Set boundary flag BFLy if result is below the defined band or compare value, clear if above"]
VALUE2 = 1,
}
impl From<BFA3_A> for bool {
#[inline(always)]
fn from(variant: BFA3_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `BFA3` reader - Boundary Flag 3 Activation Select"]
pub struct BFA3_R(crate::FieldReader<bool, BFA3_A>);
impl BFA3_R {
pub(crate) fn new(bits: bool) -> Self {
BFA3_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> BFA3_A {
match self.bits {
false => BFA3_A::VALUE1,
true => BFA3_A::VALUE2,
}
}
#[doc = "Checks if the value of the field is `VALUE1`"]
#[inline(always)]
pub fn is_value1(&self) -> bool {
**self == BFA3_A::VALUE1
}
#[doc = "Checks if the value of the field is `VALUE2`"]
#[inline(always)]
pub fn is_value2(&self) -> bool {
**self == BFA3_A::VALUE2
}
}
impl core::ops::Deref for BFA3_R {
type Target = crate::FieldReader<bool, BFA3_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `BFA3` writer - Boundary Flag 3 Activation Select"]
pub struct BFA3_W<'a> {
w: &'a mut W,
}
impl<'a> BFA3_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: BFA3_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Set boundary flag BFLy if result is above the defined band or compare value, clear if below"]
#[inline(always)]
pub fn value1(self) -> &'a mut W {
self.variant(BFA3_A::VALUE1)
}
#[doc = "Set boundary flag BFLy if result is below the defined band or compare value, clear if above"]
#[inline(always)]
pub fn value2(self) -> &'a mut W {
self.variant(BFA3_A::VALUE2)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 11)) | ((value as u32 & 0x01) << 11);
self.w
}
}
#[doc = "Boundary Flag 0 Inversion Control\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum BFI0_A {
#[doc = "0: Use BFLy directly"]
VALUE1 = 0,
#[doc = "1: Invert value and use BFLy"]
VALUE2 = 1,
}
impl From<BFI0_A> for bool {
#[inline(always)]
fn from(variant: BFI0_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `BFI0` reader - Boundary Flag 0 Inversion Control"]
pub struct BFI0_R(crate::FieldReader<bool, BFI0_A>);
impl BFI0_R {
pub(crate) fn new(bits: bool) -> Self {
BFI0_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> BFI0_A {
match self.bits {
false => BFI0_A::VALUE1,
true => BFI0_A::VALUE2,
}
}
#[doc = "Checks if the value of the field is `VALUE1`"]
#[inline(always)]
pub fn is_value1(&self) -> bool {
**self == BFI0_A::VALUE1
}
#[doc = "Checks if the value of the field is `VALUE2`"]
#[inline(always)]
pub fn is_value2(&self) -> bool {
**self == BFI0_A::VALUE2
}
}
impl core::ops::Deref for BFI0_R {
type Target = crate::FieldReader<bool, BFI0_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `BFI0` writer - Boundary Flag 0 Inversion Control"]
pub struct BFI0_W<'a> {
w: &'a mut W,
}
impl<'a> BFI0_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: BFI0_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Use BFLy directly"]
#[inline(always)]
pub fn value1(self) -> &'a mut W {
self.variant(BFI0_A::VALUE1)
}
#[doc = "Invert value and use BFLy"]
#[inline(always)]
pub fn value2(self) -> &'a mut W {
self.variant(BFI0_A::VALUE2)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 16)) | ((value as u32 & 0x01) << 16);
self.w
}
}
#[doc = "Boundary Flag 1 Inversion Control\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum BFI1_A {
#[doc = "0: Use BFLy directly"]
VALUE1 = 0,
#[doc = "1: Invert value and use BFLy"]
VALUE2 = 1,
}
impl From<BFI1_A> for bool {
#[inline(always)]
fn from(variant: BFI1_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `BFI1` reader - Boundary Flag 1 Inversion Control"]
pub struct BFI1_R(crate::FieldReader<bool, BFI1_A>);
impl BFI1_R {
pub(crate) fn new(bits: bool) -> Self {
BFI1_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> BFI1_A {
match self.bits {
false => BFI1_A::VALUE1,
true => BFI1_A::VALUE2,
}
}
#[doc = "Checks if the value of the field is `VALUE1`"]
#[inline(always)]
pub fn is_value1(&self) -> bool {
**self == BFI1_A::VALUE1
}
#[doc = "Checks if the value of the field is `VALUE2`"]
#[inline(always)]
pub fn is_value2(&self) -> bool {
**self == BFI1_A::VALUE2
}
}
impl core::ops::Deref for BFI1_R {
type Target = crate::FieldReader<bool, BFI1_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `BFI1` writer - Boundary Flag 1 Inversion Control"]
pub struct BFI1_W<'a> {
w: &'a mut W,
}
impl<'a> BFI1_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: BFI1_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Use BFLy directly"]
#[inline(always)]
pub fn value1(self) -> &'a mut W {
self.variant(BFI1_A::VALUE1)
}
#[doc = "Invert value and use BFLy"]
#[inline(always)]
pub fn value2(self) -> &'a mut W {
self.variant(BFI1_A::VALUE2)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 17)) | ((value as u32 & 0x01) << 17);
self.w
}
}
#[doc = "Boundary Flag 2 Inversion Control\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum BFI2_A {
#[doc = "0: Use BFLy directly"]
VALUE1 = 0,
#[doc = "1: Invert value and use BFLy"]
VALUE2 = 1,
}
impl From<BFI2_A> for bool {
#[inline(always)]
fn from(variant: BFI2_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `BFI2` reader - Boundary Flag 2 Inversion Control"]
pub struct BFI2_R(crate::FieldReader<bool, BFI2_A>);
impl BFI2_R {
pub(crate) fn new(bits: bool) -> Self {
BFI2_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> BFI2_A {
match self.bits {
false => BFI2_A::VALUE1,
true => BFI2_A::VALUE2,
}
}
#[doc = "Checks if the value of the field is `VALUE1`"]
#[inline(always)]
pub fn is_value1(&self) -> bool {
**self == BFI2_A::VALUE1
}
#[doc = "Checks if the value of the field is `VALUE2`"]
#[inline(always)]
pub fn is_value2(&self) -> bool {
**self == BFI2_A::VALUE2
}
}
impl core::ops::Deref for BFI2_R {
type Target = crate::FieldReader<bool, BFI2_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `BFI2` writer - Boundary Flag 2 Inversion Control"]
pub struct BFI2_W<'a> {
w: &'a mut W,
}
impl<'a> BFI2_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: BFI2_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Use BFLy directly"]
#[inline(always)]
pub fn value1(self) -> &'a mut W {
self.variant(BFI2_A::VALUE1)
}
#[doc = "Invert value and use BFLy"]
#[inline(always)]
pub fn value2(self) -> &'a mut W {
self.variant(BFI2_A::VALUE2)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 18)) | ((value as u32 & 0x01) << 18);
self.w
}
}
#[doc = "Boundary Flag 3 Inversion Control\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum BFI3_A {
#[doc = "0: Use BFLy directly"]
VALUE1 = 0,
#[doc = "1: Invert value and use BFLy"]
VALUE2 = 1,
}
impl From<BFI3_A> for bool {
#[inline(always)]
fn from(variant: BFI3_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `BFI3` reader - Boundary Flag 3 Inversion Control"]
pub struct BFI3_R(crate::FieldReader<bool, BFI3_A>);
impl BFI3_R {
pub(crate) fn new(bits: bool) -> Self {
BFI3_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> BFI3_A {
match self.bits {
false => BFI3_A::VALUE1,
true => BFI3_A::VALUE2,
}
}
#[doc = "Checks if the value of the field is `VALUE1`"]
#[inline(always)]
pub fn is_value1(&self) -> bool {
**self == BFI3_A::VALUE1
}
#[doc = "Checks if the value of the field is `VALUE2`"]
#[inline(always)]
pub fn is_value2(&self) -> bool {
**self == BFI3_A::VALUE2
}
}
impl core::ops::Deref for BFI3_R {
type Target = crate::FieldReader<bool, BFI3_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `BFI3` writer - Boundary Flag 3 Inversion Control"]
pub struct BFI3_W<'a> {
w: &'a mut W,
}
impl<'a> BFI3_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: BFI3_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Use BFLy directly"]
#[inline(always)]
pub fn value1(self) -> &'a mut W {
self.variant(BFI3_A::VALUE1)
}
#[doc = "Invert value and use BFLy"]
#[inline(always)]
pub fn value2(self) -> &'a mut W {
self.variant(BFI3_A::VALUE2)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 19)) | ((value as u32 & 0x01) << 19);
self.w
}
}
impl R {
#[doc = "Bit 0 - Boundary Flag 0"]
#[inline(always)]
pub fn bfl0(&self) -> BFL0_R {
BFL0_R::new((self.bits & 0x01) != 0)
}
#[doc = "Bit 1 - Boundary Flag 1"]
#[inline(always)]
pub fn bfl1(&self) -> BFL1_R {
BFL1_R::new(((self.bits >> 1) & 0x01) != 0)
}
#[doc = "Bit 2 - Boundary Flag 2"]
#[inline(always)]
pub fn bfl2(&self) -> BFL2_R {
BFL2_R::new(((self.bits >> 2) & 0x01) != 0)
}
#[doc = "Bit 3 - Boundary Flag 3"]
#[inline(always)]
pub fn bfl3(&self) -> BFL3_R {
BFL3_R::new(((self.bits >> 3) & 0x01) != 0)
}
#[doc = "Bit 8 - Boundary Flag 0 Activation Select"]
#[inline(always)]
pub fn bfa0(&self) -> BFA0_R {
BFA0_R::new(((self.bits >> 8) & 0x01) != 0)
}
#[doc = "Bit 9 - Boundary Flag 1 Activation Select"]
#[inline(always)]
pub fn bfa1(&self) -> BFA1_R {
BFA1_R::new(((self.bits >> 9) & 0x01) != 0)
}
#[doc = "Bit 10 - Boundary Flag 2 Activation Select"]
#[inline(always)]
pub fn bfa2(&self) -> BFA2_R {
BFA2_R::new(((self.bits >> 10) & 0x01) != 0)
}
#[doc = "Bit 11 - Boundary Flag 3 Activation Select"]
#[inline(always)]
pub fn bfa3(&self) -> BFA3_R {
BFA3_R::new(((self.bits >> 11) & 0x01) != 0)
}
#[doc = "Bit 16 - Boundary Flag 0 Inversion Control"]
#[inline(always)]
pub fn bfi0(&self) -> BFI0_R {
BFI0_R::new(((self.bits >> 16) & 0x01) != 0)
}
#[doc = "Bit 17 - Boundary Flag 1 Inversion Control"]
#[inline(always)]
pub fn bfi1(&self) -> BFI1_R {
BFI1_R::new(((self.bits >> 17) & 0x01) != 0)
}
#[doc = "Bit 18 - Boundary Flag 2 Inversion Control"]
#[inline(always)]
pub fn bfi2(&self) -> BFI2_R {
BFI2_R::new(((self.bits >> 18) & 0x01) != 0)
}
#[doc = "Bit 19 - Boundary Flag 3 Inversion Control"]
#[inline(always)]
pub fn bfi3(&self) -> BFI3_R {
BFI3_R::new(((self.bits >> 19) & 0x01) != 0)
}
}
impl W {
#[doc = "Bit 8 - Boundary Flag 0 Activation Select"]
#[inline(always)]
pub fn bfa0(&mut self) -> BFA0_W {
BFA0_W { w: self }
}
#[doc = "Bit 9 - Boundary Flag 1 Activation Select"]
#[inline(always)]
pub fn bfa1(&mut self) -> BFA1_W {
BFA1_W { w: self }
}
#[doc = "Bit 10 - Boundary Flag 2 Activation Select"]
#[inline(always)]
pub fn bfa2(&mut self) -> BFA2_W {
BFA2_W { w: self }
}
#[doc = "Bit 11 - Boundary Flag 3 Activation Select"]
#[inline(always)]
pub fn bfa3(&mut self) -> BFA3_W {
BFA3_W { w: self }
}
#[doc = "Bit 16 - Boundary Flag 0 Inversion Control"]
#[inline(always)]
pub fn bfi0(&mut self) -> BFI0_W {
BFI0_W { w: self }
}
#[doc = "Bit 17 - Boundary Flag 1 Inversion Control"]
#[inline(always)]
pub fn bfi1(&mut self) -> BFI1_W {
BFI1_W { w: self }
}
#[doc = "Bit 18 - Boundary Flag 2 Inversion Control"]
#[inline(always)]
pub fn bfi2(&mut self) -> BFI2_W {
BFI2_W { w: self }
}
#[doc = "Bit 19 - Boundary Flag 3 Inversion Control"]
#[inline(always)]
pub fn bfi3(&mut self) -> BFI3_W {
BFI3_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
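// Illustrative read-modify-write sketch (not part of the generated file; the
// peripheral handle `group` and register path `group.bfl` are assumptions of
// the example):
//
//     let above_band = group.bfl.read().bfl0().is_value2();
//     group.bfl.modify(|_, w| w.bfa0().value2().bfi0().value1());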
#[doc = "Boundary Flag Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [bfl](index.html) module"]
pub struct BFL_SPEC;
impl crate::RegisterSpec for BFL_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [bfl::R](R) reader structure"]
impl crate::Readable for BFL_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [bfl::W](W) writer structure"]
impl crate::Writable for BFL_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets BFL to value 0"]
impl crate::Resettable for BFL_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}<|fim▁end|> | #[doc = "Field `BFA1` writer - Boundary Flag 1 Activation Select"] |
<|file_name|>data.py<|end_file_name|><|fim▁begin|>"""
Utility functions for atmospheric data wrangling / preparation.
- ndarrays
- netCDF files
- Lat-lon geophysical data
- Pressure level data and topography
"""
from __future__ import division
import numpy as np
import pandas as pd
import collections
import scipy.interpolate as interp
from mpl_toolkits import basemap
import xarray as xray
from xarray import Dataset
import time
from atmos.utils import print_if, disptime
import atmos.utils as utils
import atmos.xrhelper as xr
from atmos.constants import const as constants
# ======================================================================
# NDARRAYS AND XRAY.DATAARRAYS
# ======================================================================
# ----------------------------------------------------------------------
def biggify(small, big, tile=False):
"""Add dimensions or tile an array for broadcasting.
Parameters
----------
small : ndarray
        Array to which singleton dimensions will be added. Its
dimensions must be a subset of big's dimensions.
big : ndarray
Array whose shape will be used to determine the shape of
the output.
tile : bool, optional
If True, tile the array along the additional dimensions.
If False, add singleton dimensions.
Returns
-------
biggified : ndarray
Array of data from small, with dimensions added
for any dimension that is in big but not in small.
"""
debug = False
dbig, dsmall = big.shape, small.shape
# Check that all of the dimensions of small are contained within big
check = [d in dbig or d == 1 for d in dsmall]
if not np.all(check):
msg = ('Dimensions of small ' + str(dsmall) +
' are not a subset of big ' + str(dbig))
raise ValueError(msg)
# Check that the dimensions appear in a compatible order
inds = list()
for d in dsmall:
try:
inds.append(dbig.index(d))
except ValueError:
inds.append(-1)
if not utils.non_decreasing(inds):
msg = ('Dimensions of small ' + str(dsmall) +
' are not in an order compatible with big ' + str(dbig))
raise ValueError(msg)
# Biggify the small array
biggified = small
ibig = big.ndim - 1
ismall = small.ndim - 1
n = -1
# First add singleton dimensions
while ismall >= 0 and ibig >= 0:
print_if('ibig %d, ismall %d, n %d' % (ibig, ismall, n), debug)
if dbig[ibig] == dsmall[ismall] or dsmall[ismall] == 1:
print_if(' Same %d' % dbig[ibig], debug)
ismall -= 1
else:
print_if(' Different. Big %d, small %d' %
(dbig[ibig], dsmall[ismall]), debug)
biggified = np.expand_dims(biggified, n)
n -= 1
ibig -= 1
# Expand with tiles if selected
if tile:
dims = list(biggified.shape)
# First add any additional singleton dimensions needed to make
        # biggified the same dimension as big
for i in range(len(dims), len(dbig)):
dims.insert(0, 1)
# Tile the array
for i in range(-1, -1 - len(dims), -1):
if dims[i] == dbig[i]:
dims[i] = 1
else:
dims[i] = dbig[i]
biggified = np.tile(biggified, dims)
return biggified
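# Illustrative sketch (not in the original source; the array shapes are
# assumptions of the example): broadcast a 1-D pressure axis against a 4-D
# (time, plev, lat, lon) field.
#
#   plev = np.arange(1000.0, 0.0, -100.0)        # shape (10,)
#   field = np.zeros((5, 10, 73, 144))           # (time, plev, lat, lon)
#   biggify(plev, field).shape                   # -> (10, 1, 1)
#   biggify(plev, field, tile=True).shape        # -> (5, 10, 73, 144)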
# ----------------------------------------------------------------------
def collapse(arr, axis=-1):
"""Collapse singleton dimension (first or last) in an array.
Parameters
----------
arr : ndarray
Array to collapse.
axis : {0, -1}
Axis to collapse.
Returns
-------
output : ndarray
Array with singleton dimension at beginning or end removed.
"""
if axis not in [0, -1]:
raise ValueError('Invalid axis %d. Must be 0 or -1.' % axis)
dims = arr.shape
if dims[axis] > 1:
raise ValueError('Dimension %d of input array is not singleton.' % axis)
if axis == 0:
output = arr[0]
else:
output = arr[...,0]
return output
# ----------------------------------------------------------------------
def nantrapz(y, x=None, axis=-1):
"""
Integrate using the composite trapezoidal rule, ignoring NaNs
Integrate `ym` (`x`) along given axis, where `ym` is a masked
array of `y` with NaNs masked.
Parameters
----------
y : array_like
Input array to integrate.
x : array_like, optional
        If `x` is None, the sample points are assumed to be evenly
        spaced with unit spacing (the numpy.trapz default).
axis : int, optional
Specify the axis.
Returns
-------
trapz : float
Definite integral as approximated by trapezoidal rule.
"""
ym = np.ma.masked_array(y, np.isnan(y))
trapz = np.trapz(ym, x, axis=axis)
# Convert from masked array back to regular ndarray
if isinstance(trapz, np.ma.masked_array):
trapz = trapz.filled(np.nan)
return trapz
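# Illustrative sketch (not in the original source): np.trapz returns nan when
# the input contains a NaN, while nantrapz masks the NaN point and integrates
# the remaining data.
#
#   y = np.array([0.0, 1.0, np.nan, 3.0])
#   x = np.array([0.0, 1.0, 2.0, 3.0])
#   np.trapz(y, x)      # -> nan
#   nantrapz(y, x)      # -> finite value computed from the unmasked points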
# ----------------------------------------------------------------------
def rolling_mean(data, nroll, axis=-1, center=True, **kwargs):
"""Return the rolling mean along an axis.
Parameters
----------
data : ndarray or xray.DataArray
Input data.
nroll : int
Size of window for rolling mean.
axis : int, optional
Axis to compute along.
center : bool, optional
Align to center of window.
**kwargs : other keyword arguments
See pandas.rolling_mean.
Returns
-------
rolling : ndarray or DataArray
Rolling mean data.
"""
# Maximum number of dimensions handled by this code
nmax = 5
ndim = data.ndim
if ndim > 5:
raise ValueError('Input data has too many dimensions. Max 5-D.')
if isinstance(data, xray.DataArray):
name, attrs, coords, dimnames = xr.meta(data)
vals = data.values.copy()
else:
vals = data
# Roll axis to end
vals = np.rollaxis(vals, axis, ndim)
# Add singleton dimensions for looping, if necessary
for i in range(ndim, nmax):
vals = np.expand_dims(vals, axis=0)
# Initialize output
rolling = np.ones(vals.shape, dtype=vals.dtype)
# Compute rolling mean, iterating over additional dimensions
dims = vals.shape[:-1]
for i in range(dims[0]):
for j in range(dims[1]):
for k in range(dims[2]):
for m in range(dims[3]):
rolling[i,j,k,m] = pd.rolling_mean(vals[i,j,k,m], nroll,
center=center, **kwargs)
# Collapse any additional dimensions that were added
for i in range(ndim, rolling.ndim):
rolling = rolling[0]
# Roll axis back to its original position
rolling = np.rollaxis(rolling, -1, axis)
if isinstance(data, xray.DataArray):
rolling = xray.DataArray(rolling, name=name, coords=coords,
dims=dimnames, attrs=attrs)
return rolling
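# Illustrative sketch (not in the original source; the array layout is an
# assumption of the example): 5-point centered running mean along the time
# axis (axis 0) of a (time, lat, lon) array.
#
#   smoothed = rolling_mean(data, 5, axis=0, center=True)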
# ----------------------------------------------------------------------
def gradient(data, vec, axis=-1):
"""Compute gradient along an axis.
Parameters
----------
data : np.ndarray or xray.DataArray
Input data.
vec : 1-dimensional np.ndarray
Array of coordinates corresponding to axis of differentiation.
axis : int, optional
Axis to differentiate along.
Returns
-------
grad : np.ndarray or xray.DataArray
"""
# Maximum number of dimensions handled by this code
nmax = 5
ndim = data.ndim
if ndim > 5:
raise ValueError('Input data has too many dimensions. Max 5-D.')
if isinstance(data, xray.DataArray):
name, attrs, coords, dimnames = xr.meta(data)
vals = data.values.copy()
else:
vals = data
# Roll axis to end
vals = np.rollaxis(vals, axis, ndim)
# Add singleton dimensions for looping, if necessary
for i in range(ndim, nmax):
vals = np.expand_dims(vals, axis=0)
# Initialize output
grad = np.ones(vals.shape, dtype=vals.dtype)
# Compute gradient, iterating over additional dimensions
dvec = np.gradient(vec)
dims = vals.shape[:-1]
for i in range(dims[0]):
for j in range(dims[1]):
for k in range(dims[2]):
for m in range(dims[3]):
grad[i,j,k,m] = np.gradient(vals[i,j,k,m], dvec)
# Collapse any additional dimensions that were added
for i in range(ndim, grad.ndim):
grad = grad[0]
# Roll axis back to its original position
grad = np.rollaxis(grad, -1, axis)
if isinstance(data, xray.DataArray):
grad = xray.DataArray(grad, coords=coords, dims=dimnames)
return grad
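# Illustrative sketch (not in the original source; `T` and `lat` are
# assumptions of the example): derivative of a (lat, lon) field with respect
# to latitude (axis 0).
#
#   dT_dlat = gradient(T, lat, axis=0)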
# ======================================================================
# UNIT CONVERSIONS
# ======================================================================
# ----------------------------------------------------------------------
def pres_units(units):
"""
Return a standardized name (hPa or Pa) for the input pressure units.
"""
hpa = ['mb', 'millibar', 'millibars', 'hpa', 'hectopascal', 'hectopascals']
pa = ['pascal', 'pascals', 'pa']
if units.lower() in hpa:
return 'hPa'
elif units.lower() in pa:
return 'Pa'
else:
raise ValueError('Unknown units ' + units)
# ----------------------------------------------------------------------
def pres_convert(pres, units_in, units_out):
"""Convert pressure array from units_in to units_out."""
if pres_units(units_in) == pres_units(units_out):
pres_out = pres
elif pres_units(units_in) == 'hPa' and pres_units(units_out) == 'Pa':
pres_out = pres * 100
elif pres_units(units_in) == 'Pa' and pres_units(units_out) == 'hPa':
pres_out = pres / 100
else:
raise ValueError('Problem with input/output units.')
return pres_out
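# Illustrative sketch (not in the original source):
#
#   pres_convert(1013.25, 'hPa', 'Pa')   # -> 101325.0
#   pres_convert(50000, 'Pa', 'mb')      # -> 500.0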
# ----------------------------------------------------------------------
def precip_units(units):
"""
Return a standardized name for precip units.
"""
kgm2s = ['kg/m2/s', '(kg/m^2)/s', 'kg/m^2/s', 'kg m^-2 s^-1',
'kg/(m^2 s)', 'kg m-2 s-1']
mmday = ['mm/day', 'mm day^-1']
if units.lower() in kgm2s:
return 'kg m^-2 s^-1'
elif units.lower() in mmday:
return 'mm day^-1'
else:
raise ValueError('Unknown units ' + units)
# ----------------------------------------------------------------------
def precip_convert(precip, units_in, units_out):
"""Convert precipitation from units_in to units_out."""
if isinstance(precip, xray.DataArray):
name, attrs, coords, dims = xr.meta(precip)
attrs['units'] = units_out
i_DataArray = True
else:
i_DataArray = False
kgm2s = 'kg m^-2 s^-1'
mmday = 'mm day^-1'
# Convert between (kg/m^2)/s to mm/day
SCALE = 60 * 60 * 24
if precip_units(units_in) == precip_units(units_out):
precip_out = precip
elif precip_units(units_in) == kgm2s and precip_units(units_out) == mmday:
precip_out = precip * SCALE
elif precip_units(units_in) == mmday and precip_units(units_out) == kgm2s:
precip_out = precip / SCALE
else:
msg = "Don't know how to convert between %s and %s"
raise ValueError(msg % (units_in, units_out))
if i_DataArray:
precip_out = xray.DataArray(precip_out, name=name, dims=dims,
coords=coords, attrs=attrs)
return precip_out
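# Illustrative sketch (not in the original source): (kg/m^2)/s to mm/day is a
# factor of 86400 (seconds per day).
#
#   precip_convert(1e-4, 'kg m-2 s-1', 'mm/day')   # -> 8.64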
# ======================================================================
# COORDINATES AND SUBSETS
# ======================================================================
# ----------------------------------------------------------------------
def get_coord(data, coord_name, return_type='values'):
"""Return values, name or dimension of coordinate in DataArray.
Parameters
----------
data : xray.DataArray
Data array to search for latitude coords.
coord_name : str
Coordinate to extract. Can be the exact ID of the variable or
a generic ID ('lat', 'lon', 'plev', 'time', 'day', 'year').
If a generic ID is provided then lists of common names for that ID
will be searched for a match.
return_type : {'values', 'name', 'dim'}, optional
'values' : Return an array of coordinate values.
'name' : Return the name of the coordinate.
'dim' : Return the dimension of the coordinate.
Returns
-------
output : ndarray, string or int
The generic coordinate names searched through are:
'lat' : ['lats', 'latitude', 'YDim','Y', 'y']
'lon' : ['long', 'lons', 'longitude', 'XDim', 'X', 'x']
'plev' : ['plevel', 'plevels', 'lev', 'level',
'levels', 'Height']
as well as capitalization options for coord_name (.upper(),
.lower(), .capitalize())
"""
def name_options(nm):
opts = {'lat' : ['lats', 'latitude', 'YDim','Y', 'y'],
'lon' : ['long', 'lons', 'longitude', 'XDim', 'X', 'x'],
'plev' : ['plevel', 'plevels', 'lev', 'level', 'levels',
'Height']}
nms = list(set([nm, nm.lower(), nm.upper(), nm.capitalize()]))
if opts.get(nm) is not None:
nms = list(nms) + opts[nm]
return nms
names = name_options(coord_name)
# Look in list of common coordinate names
if coord_name not in data.coords:
found = [i for i, s in enumerate(names) if s in data.coords]
if len(found) == 0:
raise ValueError("Can't find coordinate name in data coords %s" %
data.coords.keys())
if len(found) > 1:
raise ValueError('Conflicting possible coord names in coords %s'
% data.coords.keys())
else:
coord_name = names[found[0]]
if return_type == 'values':
output = data[coord_name].values.copy()
elif return_type == 'name':
output = coord_name
elif return_type == 'dim':
output = data.dims.index(coord_name)
else:
raise ValueError('Invalid return_type ' + return_type)
return output
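# Illustrative sketch (not in the original source; `data` is an assumed
# DataArray whose latitude coordinate is stored under the name 'YDim'):
#
#   lat = get_coord(data, 'lat')              # coordinate values
#   latname = get_coord(data, 'lat', 'name')  # -> 'YDim'
#   latdim = get_coord(data, 'lat', 'dim')    # axis index of 'YDim'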
# ----------------------------------------------------------------------
def subset(data, subset_dict, incl_lower=True, incl_upper=True, search=True,
copy=True, squeeze=False):
"""Extract a subset of a DataArray or Dataset along named dimensions.
Returns a DataArray or Dataset sub extracted from input data,
such that:
sub[dim_name] >= lower_or_list & sub[dim_name] <= upper,
OR sub[dim_name] == lower_or_list (if lower_or_list is a list)
for each dim_name in subset_dict.
This function calls atmos.xrhelper.subset with the additional
feature of calling the get_coord function to find common
dimension names (e.g. 'XDim' for latitude)
Parameters
----------
data : xray.DataArray or xray.Dataset
Data source for extraction.
subset_dict : dict of 2-tuples
Dimensions and subsets to extract. Each entry in subset_dict
is in the form {dim_name : (lower_or_list, upper)}, where:
- dim_name : string
Name of dimension to extract from. If dim_name is not in
data.dims, then the get_coord() function is used
to search for a similar dimension name (if search is True).
- lower_or_list : scalar or list of int or float
If scalar, then used as the lower bound for the subset range.
If list, then the subset matching the list will be extracted.
- upper : int, float, or None
Upper bound for subset range. If lower_or_list is a list,
then upper is ignored and should be set to None.
incl_lower, incl_upper : bool, optional
If True lower / upper bound is inclusive, with >= or <=.
If False, lower / upper bound is exclusive with > or <.
If lower_or_list is a list, then the whole list is included
and these parameters are ignored.
search : bool, optional
If True, call the get_coord function if dim_name is not found
in the dimension names of data.
copy : bool, optional
If True, return a copy of the data, otherwise a pointer.
squeeze : bool, optional
If True, squeeze any singleton dimensions out.
Returns
-------
sub : xray.DataArray or xray.Dataset
"""
if search:
nms = ['lat', 'lon', 'plev']
for dim_name in subset_dict:
if dim_name in nms and dim_name not in data.dims:
dim_name_new = get_coord(data, dim_name, 'name')
subset_dict[dim_name_new] = subset_dict.pop(dim_name)
return xr.subset(data, subset_dict, incl_lower, incl_upper, copy, squeeze)
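# Illustrative sketch (not in the original source; `data` is an assumed
# DataArray): extract 10S-35N, 55-105E using generic coordinate names.
#
#   sub = subset(data, {'lat' : (-10, 35), 'lon' : (55, 105)})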
# ----------------------------------------------------------------------
def dim_mean(data, dimname, lower=None, upper=None, minfrac=0.5):
"""Return the mean of a DataArray along dimension, preserving attributes.
Parameters
----------
data : xray.DataArray or xray.Dataset
Data to average.
dimname : str
Dimension to average along. Can be a generic name (e.g. 'lon')
or exact ID (e.g. 'XDim').
lower, upper : float, optional
Lower and upper bounds (inclusive) of subset to extract along
the dimension before averaging.
minfrac : float, optional
        Maximum fraction of missing values allowed for non-NaN output;
        grid points where more than this fraction is missing are set to NaN.
Returns
-------
databar : xray.DataArray or xray.Dataset
"""
def one_variable(var, dimname, dimvals, minfrac):
try:
axis = get_coord(var, dimname, 'dim')
except ValueError:
# Dimension isn't in the data variable
return var
attrs = var.attrs
attrs['avg_over_' + dimname] = dimvals
attrs['minfrac'] = minfrac
# Create mask for any point where more than minfrac fraction is missing
missings = np.isnan(var)
missings = missings.sum(dim=dimname)
min_num = var.shape[axis] * minfrac
mask = missings > min_num
# Compute mean and apply mask
var = var.mean(dim=dimname)
name, _, coords, dims = xr.meta(var)
vals = np.ma.masked_array(var.values, mask).filled(np.nan)
var_out = xray.DataArray(vals, name=name, attrs=attrs, dims=dims,
coords=coords)
return var_out
if dimname not in data.dims:
try:
dimname = get_coord(data, dimname, 'name')
except ValueError:
# Dimension isn't in the data variable
return data
if lower is not None:
data = subset(data, {dimname : (lower, upper)}, copy=False)
dimvals = get_coord(data, coord_name=dimname)
if isinstance(data, xray.DataArray):
databar = one_variable(data, dimname, dimvals, minfrac)
elif isinstance(data, xray.Dataset):
databar = xray.Dataset()
databar.attrs = data.attrs
for nm in data.data_vars:
databar[nm] = one_variable(data[nm], dimname, dimvals, minfrac)
else:
raise ValueError('Input data must be xray.DataArray or xray.Dataset')
return databar
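# Illustrative sketch (not in the original source; `data` is an assumed
# DataArray or Dataset): zonal mean over 60-100E, allowing at most half of the
# longitudes to be missing at each point.
#
#   databar = dim_mean(data, 'lon', lower=60, upper=100, minfrac=0.5)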
# ======================================================================
# NETCDF FILE I/O
# ======================================================================
# ----------------------------------------------------------------------
def ncdisp(filename, verbose=True, decode_cf=False, indent=2, width=None):
"""Display the attributes of data in a netcdf file."""
with xray.open_dataset(filename, decode_cf=decode_cf) as ds:
if verbose:
xr.ds_print(ds, indent, width)
else:
print(ds)
# ----------------------------------------------------------------------
def ncload(filename, verbose=True, unpack=True, missing_name=u'missing_value',
offset_name=u'add_offset', scale_name=u'scale_factor',
decode_cf=False):
"""
Read data from netcdf file into xray dataset.
If options are selected, unpacks from compressed form and/or replaces
missing values with NaN. Returns data as an xray.Dataset object.
"""
with xray.open_dataset(filename, decode_cf=decode_cf) as ds:
print_if('****** Reading file: ' + filename + '********', verbose)
print_if(ds, verbose, printfunc=xr.ds_print)
if unpack:
print_if('****** Unpacking data *********', verbose)
ds = xr.ds_unpack(ds, verbose=verbose, missing_name=missing_name,
offset_name=offset_name, scale_name=scale_name)
# Use the load() function so that the dataset is available after
# the file is closed
ds.load()
return ds
# ----------------------------------------------------------------------
def load_concat(paths, var_ids=None, concat_dim='TIME', subset_dict=None,
func=None, func_args=None, func_kw=None, squeeze=True, verbose=True):
"""Load a variable from multiple files and concatenate into one.
Especially useful for extracting variables split among multiple
OpenDAP files.
Parameters
----------
paths : list of strings
List of file paths or OpenDAP urls to process.
var_ids : str or list of str, optional
Name(s) of variable(s) to extract. If None then all variables
are extracted and a Dataset is returned.
concat_dim : str
Name of dimension to concatenate along. If this dimension
doesn't exist in the input data, a new one is created.
subset_dict : dict of 2-tuples, optional
Dimensions and subsets to extract. Each entry in subset_dict
is in the form {dim_name : (lower_or_list, upper)}, where:
- dim_name : string
Name of dimension to extract from.
The dimension name can be the actual dimension name
(e.g. 'XDim') or a generic name (e.g. 'lon') and get_coord()
is called to find the specific name.
- lower_or_list : scalar or list of int or float
If scalar, then used as the lower bound for the subset range.
If list, then the subset matching the list will be extracted.
- upper : int, float, or None
Upper bound for subset range. If lower_or_list is a list,
then upper is ignored and should be set to None.
func : function, optional
Function to apply to each variable in each file before concatenating.
e.g. compute zonal mean. Takes one DataArray as first input parameter.
func_args : list, optional
List of numbered arguments to pass to func.
func_kw : dict or list of dict, optional
Dict of keyword arguments to pass to func. To use different values for
different files, make func_kw a list of the same length as the list of
file paths, with func_kw[i] containing a dict of keyword args for
path[i]. Otherwise, make func_kw a single dict to use for all paths.
squeeze : bool, optional
If True, squeeze out extra dimensions and add info to attributes.
verbose : bool, optional
If True, print updates while processing files.
    Returns
    -------
data : xray.DataArray or xray.Dataset
Data extracted from input files.
"""
# Number of times to attempt opening file (in case of server problems)
NMAX = 3
# Wait time (seconds) between attempts
WAIT = 5
if var_ids is not None:
var_ids = utils.makelist(var_ids)
def get_data(path, var_ids, subset_dict, func, func_args, func_kw):
with xray.open_dataset(path) as ds:
if var_ids is None:
# All variables
data = ds
else:
# Extract specific variables
data = ds[var_ids]
if subset_dict is not None:
data = subset(data, subset_dict, copy=False)
if func is not None:
data_out = xray.Dataset()
if func_args is None:
func_args = []
if func_kw is None:
func_kw = {}
for nm in data.data_vars:
vars_out = func(data[nm], *func_args, **func_kw)
if not isinstance(vars_out, xray.Dataset):
vars_out = vars_out.to_dataset()
for nm2 in vars_out.data_vars:
data_out[nm2] = vars_out[nm2]
data = data_out
data.load()
return data
pieces = []
func_kw = utils.makelist(func_kw)
paths = utils.makelist(paths)
if len(func_kw) == 1:
func_kw *= len(paths)
for p, kw in zip(paths, func_kw):
print_if(None, verbose, printfunc=disptime)
print_if('Loading ' + p, verbose)
attempt = 0
while attempt < NMAX:
try:
piece = get_data(p, var_ids, subset_dict, func, func_args, kw)
print_if('Appending data', verbose)
pieces.append(piece)
attempt = NMAX
except RuntimeError as err:
attempt += 1
if attempt < NMAX:
print('Error reading file. Attempting again in %d s' %
WAIT)
time.sleep(WAIT)
else:
raise err
print_if('Concatenating data', verbose)
data = xray.concat(pieces, dim=concat_dim)
print_if(None, verbose, printfunc=disptime)
if squeeze:
data = xr.squeeze(data)
if len(data.data_vars) == 1:
# Convert from Dataset to DataArray for output
data = data[data.data_vars.keys()[0]]
return data
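# Illustrative sketch (not in the original source; the file paths and variable
# name are assumptions of the example): concatenate a variable from two files
# along 'TIME', keeping only 40S-40N.
#
#   paths = ['data_1979.nc', 'data_1980.nc']
#   precip = load_concat(paths, var_ids='PRECTOT', concat_dim='TIME',
#                        subset_dict={'lat' : (-40, 40)})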
# ----------------------------------------------------------------------
def save_nc(filename, *args):
"""Save xray.DataArray variables to a netcdf file.
Call Signatures
---------------
save_nc(filename, var1)
save_nc(filename, var1, var2)
save_nc(filename, var1, var2, var3)
etc...
Parameters
----------
filename : string
File path for saving.
var1, var2, ... : xray.DataArrays
List of xray.DataArrays with compatible coordinates.
"""
ds = xr.vars_to_dataset(*args)
ds.to_netcdf(filename)
return None
# ----------------------------------------------------------------------
def mean_over_files(files, nms=None):
"""Return data averaged over all input files.
Parameters
----------
files : list of str
Names of files to average over, e.g. yearly files.
nms : list of str, optional
Subset of data variables to include. If None, then all data
variables are included.
Returns
-------
ds_out : xray.Dataset
Dataset of variables averaged over all the input files.
"""
# Initialize with first file
print('Reading ' + files[0])
with xray.open_dataset(files[0]) as ds:
if nms is None:
nms = ds.data_vars.keys()
ds_out = ds[nms].load()
# Sum the variables from each subsequent file
for i, filenm in enumerate(files[1:]):
print('Reading ' + filenm)
with xray.open_dataset(filenm) as ds:
ds_out = ds_out + ds[nms]
ds_out.load()
# Divide by number of files for mean
ds_out = ds_out / float(len(files))
return ds_out
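# Illustrative sketch (not in the original source; the file names and variable
# names are assumptions of the example): average yearly files into one dataset.
#
#   files = ['merra_1979.nc', 'merra_1980.nc', 'merra_1981.nc']
#   ds_clim = mean_over_files(files, nms=['U', 'V'])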
# ======================================================================
# LAT-LON GEOPHYSICAL DATA
# ======================================================================
# ----------------------------------------------------------------------
def latlon_equal(data1, data2, latname1=None, lonname1=None,
latname2=None, lonname2=None):
"""Return True if input DataArrays have the same lat-lon coordinates."""
    lat1 = get_coord(data1, latname1 if latname1 is not None else 'lat')
    lon1 = get_coord(data1, lonname1 if lonname1 is not None else 'lon')
    lat2 = get_coord(data2, latname2 if latname2 is not None else 'lat')
    lon2 = get_coord(data2, lonname2 if lonname2 is not None else 'lon')
is_equal = np.array_equal(lat1, lat2) and np.array_equal(lon1, lon2)
return is_equal
# ----------------------------------------------------------------------
def lon_convention(lon):
"""Return 360 if longitudes are 0-360E, 180 if 180W-180E.
The output of this function can be used in the set_lon() function
to make two data arrays use a consistent longitude convention.
"""
if lon.min() < 0:
return 180
else:
return 360
# ----------------------------------------------------------------------
def set_lon(data, lonmax=360, lon=None, lonname=None):
"""Set data longitudes to 0-360E or 180W-180E convention.
Parameters
----------
data : ndarray or xray.DataArray
Input data array with longitude as the last dimension
lonmax : int, optional
Maximum longitude for output data. Set to 360 for 0-360E,
or set to 180 for 180W-180E.
lon : 1-D ndarray or list, optional
Longitudes of input data. Only used if data is an ndarray.
If data is an xray.DataArray, then lon = data['lon']
lonname : string, optional
Name of longitude coordinate in data, if data is a DataArray
Returns
-------
If argument data is an ndarray:
data_out, lon_out : ndarray
The data and longitude arrays shifted to the selected
convention.
If argument data is an xray.DataArray:
data_out : xray.DataArray
DataArray object with data and longitude values shifted to
the selected convention.
"""
if isinstance(data, xray.DataArray):
lon = get_coord(data, 'lon')
if lonname is None:
lonname = get_coord(data, 'lon', 'name')
name, attrs, coords, dims_list = xr.meta(data)
vals = data.values
else:
vals = data
lonmin = lonmax - 360
if lonmin >= lon.min() and lonmin <= lon.max():
lon0 = lonmin
start = True
else:
lon0 = lonmax
start = False
vals_out, lon_out = basemap.shiftgrid(lon0, vals, lon, start=start)
if isinstance(data, xray.DataArray):
coords[lonname].values = lon_out
data_out = xray.DataArray(vals_out, name=name, dims=dims_list,
coords=coords, attrs=attrs)
return data_out
else:
return vals_out, lon_out
# ----------------------------------------------------------------------
def interp_latlon(data, lat_out, lon_out, lat_in=None, lon_in=None,
checkbounds=False, masked=False, order=1):
"""Interpolate data onto a new lat-lon grid.
Parameters
----------
data : ndarray or xray.DataArray
Data to interpolate, with latitude as second-last dimension,
longitude as last dimension. Maximum array dimensions: 5-D.
lat_out, lon_out : 1-D float or int array
Latitude and longitudes to interpolate onto.
lat_in, lon_in : 1-D float or int array, optional
Latitude and longitude arrays of input data. Only used if data
is an ndarray. If data is an xray.DataArray then
lat_in = data['lat'] and lon_in = data['lon']
checkbounds : bool, optional
If True, values of lat_out and lon_out are checked to see
that they lie within the range specified by lat_in, lon_in.
If False, and lat_out, lon_out are outside lat_in, lon_in,
interpolated values will be clipped to values on boundary
of input grid lat_in, lon_in.
masked : bool or float, optional
If True, points outside the range of lat_in, lon_in are masked
(in a masked array).
If masked is set to a number, then points outside the range of
lat_in, lon_in will be set to that number.
order : int, optional
0 for nearest-neighbor interpolation,
1 for bilinear interpolation
3 for cubic spline (requires scipy.ndimage).
Returns
-------
data_out : ndarray or xray.DataArray
Data interpolated onto lat_out, lon_out grid
"""
# Maximum number of dimensions handled by this code
nmax = 5
ndim = data.ndim
if ndim > nmax:
raise ValueError('Input data has too many dimensions. Max 5-D.')
if isinstance(data, xray.DataArray):
lat_in = get_coord(data, 'lat')
latname = get_coord(data, 'lat', 'name')
lon_in = get_coord(data, 'lon')
lonname = get_coord(data, 'lon', 'name')
name, attrs, coords, dims_list = xr.meta(data)
coords[latname] = xray.DataArray(lat_out, coords={latname : lat_out},
dims=[latname], attrs=data[latname].attrs)
coords[lonname] = xray.DataArray(lon_out, coords={lonname : lon_out},
dims=[lonname], attrs=data[lonname].attrs)
vals = data.values.copy()
else:
vals = data
# Check for the common case that lat_in and/or lat_out are decreasing
# and flip if necessary to work with basemap.interp()
flip = False
if utils.strictly_decreasing(lat_in):
lat_in = lat_in[::-1]
vals = vals[...,::-1, :]
if utils.strictly_decreasing(lat_out):<|fim▁hole|> x_out, y_out = np.meshgrid(lon_out, lat_out)
# Initialize output array
dims = vals.shape
dims = dims[:-2]
vals_out = np.empty(dims + x_out.shape)
# Add singleton dimensions for looping, if necessary
for i in range(ndim, nmax):
vals = np.expand_dims(vals, axis=0)
vals_out = np.expand_dims(vals_out, axis=0)
# Interp onto new lat-lon grid, iterating over all other dimensions
dims = vals_out.shape[:-2]
for i in range(dims[0]):
for j in range(dims[1]):
for k in range(dims[2]):
vals_out[i, j, k] = basemap.interp(
vals[i, j, k], lon_in, lat_in, x_out, y_out,
order=order, checkbounds=checkbounds, masked=masked)
# Collapse any additional dimensions that were added
for i in range(ndim, vals_out.ndim):
vals_out = vals_out[0]
if flip:
# Flip everything back to previous order
vals_out = vals_out[...,::-1, :]
lat_out = lat_out[::-1]
if isinstance(data, xray.DataArray):
data_out = xray.DataArray(vals_out, name=name, coords=coords,
dims=dims_list, attrs=attrs)
else:
data_out = vals_out
return data_out
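# Example (sketch with an arbitrary target grid): regrid onto a regular
# 1-degree global grid with bilinear interpolation:
#     lat_new = np.arange(-90, 90.1, 1.0)
#     lon_new = np.arange(0, 360, 1.0)
#     data_1deg = interp_latlon(data, lat_new, lon_new, order=1)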
# ----------------------------------------------------------------------
def mask_oceans(data, lat=None, lon=None, inlands=True, resolution='l',
grid=5):
"""Return the data with ocean grid points set to NaN.
Parameters
----------
data : ndarray or xray.DataArray
Data to mask, with latitude as second-last dimension,
longitude as last dimension. Maximum array dimensions: 5-D.
lat, lon : ndarray, optional
Latitude and longitude arrays. Only used if data is an
ndarray and not an xray.DataArray.
inlands : bool, optional
If False, mask only ocean points and not inland lakes.
resolution : {'c','l','i','h', 'f'}, optional
gshhs coastline resolution used to define land/sea mask.
grid : {1.25, 2.5, 5, 10}, optional
Land/sea mask grid spacing in minutes.
Returns
-------
data_out : ndarray or xray.DataArray
Data with ocean grid points set to NaN.
"""
# Maximum number of dimensions handled by this code
nmax = 5
ndim = data.ndim
if ndim > nmax:
raise ValueError('Input data has too many dimensions. Max 5-D.')
if isinstance(data, xray.DataArray):
lat = get_coord(data, 'lat')
lon = get_coord(data, 'lon')
name, attrs, coords, dims_list = xr.meta(data)
vals = data.values.copy()
else:
vals = data
# Convert to 180W-180E convention that basemap.maskoceans requires
lonmax = lon_convention(lon)
if lonmax == 360:
vals, lon = set_lon(vals, lonmax=180, lon=lon)
# Add singleton dimensions for looping, if necessary
for i in range(ndim, nmax):
vals = np.expand_dims(vals, axis=0)
# Initialize output
vals_out = np.ones(vals.shape, dtype=float)
vals_out = np.ma.masked_array(vals_out, np.isnan(vals_out))
# Mask oceans, iterating over additional dimensions
x, y = np.meshgrid(lon, lat)
dims = vals_out.shape[:-2]
for i in range(dims[0]):
for j in range(dims[1]):
for k in range(dims[2]):
vals_out[i, j, k] = basemap.maskoceans(
x, y, vals[i, j, k], inlands=inlands,
resolution=resolution, grid=grid)
# Convert from masked array to regular array with NaNs
vals_out = vals_out.filled(np.nan)
# Collapse any additional dimensions that were added
for i in range(ndim, vals_out.ndim):
vals_out = vals_out[0]
# Convert back to original longitude convention
if lonmax == 360:
vals_out, lon = set_lon(vals_out, lonmax=lonmax, lon=lon)
if isinstance(data, xray.DataArray):
data_out = xray.DataArray(vals_out, name=name, coords=coords,
dims=dims_list, attrs=attrs)
else:
data_out = vals_out
return data_out
# ----------------------------------------------------------------------
def mean_over_geobox(data, lat1, lat2, lon1, lon2, lat=None, lon=None,
area_wtd=True, land_only=False):
"""Return the mean of an array over a lat-lon region.
Parameters
----------
data : ndarray or xray.DataArray
Data to average, with latitude as second-last dimension and
longitude as last dimension.
lat1, lat2, lon1, lon2 : float
Latitude and longitude limits for averaging region, with
lon1 <= lon2 and lat1 <= lat2.
lat, lon : ndarray, optional
Latitude and longitude arrays. Only used if data is an
ndarray and not an xray.DataArray.
area_wtd : bool, optional
Return the area-weighted average (weighted by cos(lat))
land_only : bool, optional
Mask out ocean grid points so that only data over land is
included in the mean.
Returns
-------
avg : ndarray or xray.DataArray
The data averaged over the lat-lon region.
"""
if not isinstance(data, xray.DataArray):
if lat is None or lon is None:
raise ValueError('Latitude and longitude arrays must be provided '
'if data is not an xray.DataArray.')
latname, lonname = 'lat', 'lon'
coords = xr.coords_init(data)
coords = xr.coords_assign(coords, -1, lonname, lon)
coords = xr.coords_assign(coords, -2, latname, lat)
data_out = xray.DataArray(data, coords=coords)
attrs = {}
else:
data_out = data
name, attrs, coords, _ = xr.meta(data)
latname = get_coord(data, 'lat', 'name')
lonname = get_coord(data, 'lon', 'name')
lon = get_coord(data, 'lon')
lat = get_coord(data, 'lat')
coords = utils.odict_delete(coords, latname)
coords = utils.odict_delete(coords, lonname)
attrs['description'] = 'Mean over lat-lon subset'
attrs['lon1'], attrs['lon2'] = lon1, lon2
attrs['lat1'], attrs['lat2'] = lat1, lat2
attrs['area_weighted'] = area_wtd
attrs['land_only'] = land_only
if land_only:
data_out = mask_oceans(data_out)
if lat1 == lat2:
if lat1 not in lat:
raise ValueError('lat1=lat2=%f not in latitude grid' % lat1)
if lon1 == lon2:
if lon1 not in lon:
raise ValueError('lon1=lon2=%f not in longitude grid' % lon1)
subset_dict = {latname : (lat1, lat2), lonname : (lon1, lon2)}
data_out = subset(data_out, subset_dict)
attrs['subset_lons'] = get_coord(data_out, 'lon')
attrs['subset_lats'] = get_coord(data_out, 'lat')
# Mean over longitudes
data_out = data_out.mean(axis=-1)
# Mean over latitudes
if lat1 == lat2:
# Eliminate singleton dimension
avg = data_out.mean(axis=-1)
avg.attrs = attrs
else:
# Array of latitudes with same NaN mask as the data so that the
# area calculation is correct
lat_rad = np.radians(get_coord(data_out, 'lat'))
lat_rad = biggify(lat_rad, data_out, tile=True)
mdat = np.ma.masked_array(data_out, np.isnan(data_out))
lat_rad = np.ma.masked_array(lat_rad, mdat.mask)
lat_rad = lat_rad.filled(np.nan)
if area_wtd:
# Weight by area with cos(lat)
coslat = np.cos(lat_rad)
data_out = data_out * coslat
area = nantrapz(coslat, lat_rad, axis=-1)
else:
area = nantrapz(np.ones(lat_rad.shape, dtype=float), lat_rad, axis=-1)
# Integrate with trapezoidal method
avg = nantrapz(data_out, lat_rad, axis=-1) / area
# Pack output into DataArray with the metadata that was lost in np.trapz
if isinstance(data, xray.DataArray) and not isinstance(avg, xray.DataArray):
avg = xray.DataArray(avg, name=name, coords=coords, attrs=attrs)
return avg
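# Example (illustrative box only): area-weighted, land-only average over
# 10-30N, 70-90E:
#     ts_box = mean_over_geobox(data, 10, 30, 70, 90, area_wtd=True,
#                               land_only=True)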
# ======================================================================
# PRESSURE LEVEL DATA AND TOPOGRAPHY
# ======================================================================
# ----------------------------------------------------------------------
def get_ps_clim(lat, lon, datafile='data/topo/ncep2_ps.nc'):
"""Return surface pressure climatology on selected lat-lon grid.
Parameters
----------
lat, lon : 1-D float array
Latitude and longitude grid to interpolate surface pressure
climatology onto.
datafile : string, optional
Name of file to read for surface pressure climatology.
Returns
-------
ps : xray.DataArray
DataArray of surface pressure climatology interpolated onto
lat-lon grid.
"""
ds = ncload(datafile)
ps = ds['ps']
ps.attrs = utils.odict_insert(ps.attrs, 'title', ds.attrs['title'], pos=0)
# Check what longitude convention is used in the surface pressure
# climatology and switch if necessary
lonmax = lon_convention(lon)
lon_ps = get_coord(ps, 'lon')
if lon_convention(lon_ps) != lonmax:
ps = set_lon(ps, lonmax)
# Interpolate ps onto lat-lon grid
ps = interp_latlon(ps, lat, lon)
return ps
# ----------------------------------------------------------------------
def correct_for_topography(data, topo_ps, plev=None, lat=None, lon=None):
"""Set pressure level data below topography to NaN.
Parameters
----------
data : ndarray or xray.DataArray
Data to correct, with pressure, latitude, longitude as the
last three dimensions.
topo_ps : ndarray or xray.DataArray
Climatological surface pressure to use for topography, on same
lat-lon grid as data.
plev, lat, lon : 1-D float array, optional
Pressure levels, latitudes and longitudes of input data.
Only used if data is an ndarray. If data is an xray.DataArray
then plev, lat and lon are extracted from data.coords.
Returns
-------
data_out : ndarray or xray.DataArray
Data with grid points below topography set to NaN.
"""
if isinstance(data, xray.DataArray):
lat = get_coord(data, 'lat')
lon = get_coord(data, 'lon')
name, attrs, coords, _ = xr.meta(data)
vals = data.values.copy()
# -- Pressure levels in Pascals
plev = get_coord(data, 'plev')
pname = get_coord(data, 'plev', 'name')
plev = pres_convert(plev, data[pname].units, 'Pa')
else:
vals = data
if isinstance(topo_ps, xray.DataArray):
if not latlon_equal(data, topo_ps):
msg = 'Inputs data and topo_ps are not on the same lat-lon grid.'
raise ValueError(msg)
# Surface pressure values in Pascals:
ps_vals = topo_ps.values
ps_vals = pres_convert(ps_vals, topo_ps.units, 'Pa')
else:
ps_vals = topo_ps
# For each vertical level, set any point below topography to NaN
for k, p in enumerate(plev):
ibelow = ps_vals < p
vals[...,k,ibelow] = np.nan
if isinstance(data, xray.DataArray):
data_out = xray.DataArray(vals, name=name, coords=coords, attrs=attrs)
else:
data_out = vals
return data_out
# ----------------------------------------------------------------------
def near_surface(data, pdim=-3, return_inds=False):
"""Return the pressure-level data closest to surface.
At each grid point, the first non-NaN level is taken as the
near-surface level.
Parameters
----------
data : ndarray or xray.DataArray
Input data, maximum of 5 dimensions. Pressure levels must
be the last, second-last or third-last dimension.
pdim : {-3, -2, -1}, optional
Dimension of vertical levels in data.
return_inds : bool, optional
If True, return the pressure-level indices of the extracted
data in a tuple along with the near-surface data.
If False, return only the near-surface data.
Returns
-------
data_s[, ind_s] : ndarray or xray.DataArray[, ndarray]
Near-surface data [and indices of extracted data, if
return_inds is True]. If input data is an xray.DataArray,
data_s is returned as an xray.DataArray, otherwise as
an ndarray.
"""
# Maximum number of dimensions handled by this code
nmax = 5
ndim = data.ndim
if ndim > nmax:
raise ValueError('Input data has too many dimensions. Max 5-D.')
# Save metadata for output DataArray, if applicable
if isinstance(data, xray.DataArray):
i_DataArray = True
data = data.copy()
name, attrs, coords, _ = xr.meta(data)
title = 'Near-surface data extracted from pressure level data'
attrs = utils.odict_insert(attrs, 'title', title, pos=0)
pname = get_coord(data, 'plev', 'name')
del(coords[pname])
else:
i_DataArray = False
# Add singleton dimensions for looping, if necessary
for i in range(ndim, nmax):
data = np.expand_dims(data, axis=0)
# Make sure pdim is indexing from end
pdim_in = pdim
if pdim > 0:
pdim = pdim - nmax
# Iterate over all other dimensions
dims = list(data.shape)
dims.pop(pdim)
data_s = np.nan*np.ones(dims, dtype=float)
ind_s = np.ones(dims, dtype=int)
for i in range(dims[0]):
for j in range(dims[1]):
for k in range(dims[2]):
for m in range(dims[3]):
if pdim == -3:
sub = data[i,j,:,k,m]
elif pdim == -2:
sub = data[i,j,k,:,m]
elif pdim == -1:
sub = data[i,j,k,m,:]
else:
raise ValueError('Invalid p dimension ' + str(pdim_in))
ind = np.where(~np.isnan(sub))[0][0]
data_s[i,j,k,m] = sub[ind]
ind_s[i,j,k,m] = ind
# Collapse any additional dimensions that were added
for i in range(ndim - 1, data_s.ndim):
data_s = data_s[0]
ind_s = ind_s[0]
# Pack data_s into an xray.DataArray if input was in that form
if i_DataArray:
data_s = xray.DataArray(data_s, name=name, coords=coords, attrs=attrs)
# Return data only, or tuple of data plus array of indices extracted
if return_inds:
return data_s, ind_s
else:
return data_s
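# Example (sketch): extract the lowest valid level from pressure-level data
# previously screened with correct_for_topography(), keeping the level
# indices so they can be reused for other variables on the same grid:
#     T_s, inds = near_surface(T, pdim=-3, return_inds=True)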
# ----------------------------------------------------------------------
def interp_plevels(data, plev_new, plev_in=None, pdim=-3, kind='linear'):
"""Return the data interpolated onto new pressure level grid.
Parameters
----------
data : ndarray or xray.DataArray
Input data, maximum of 5 dimensions. Pressure levels must
be the last, second-last or third-last dimension.
plev_new : ndarray
New pressure levels to interpolate onto.
plev_in : ndarray
Original pressure levels of data. If data is an xray.DataArray,
then the values from data.coords are used.
pdim : {-3, -2, -1}, optional
Dimension of vertical levels in data.
kind : string, optional
Type of interpolation, e.g. 'linear', 'cubic', 'nearest', etc.
See scipy.interpolate.interp1d for all options.
Returns
-------
data_i : ndarray or xray.DataArray
Interpolated data. If input data is an xray.DataArray,
data_i is returned as an xray.DataArray, otherwise as
an ndarray.
"""
# Maximum number of dimensions handled by this code
nmax = 5
ndim = data.ndim
if ndim > nmax:
raise ValueError('Input data has too many dimensions. Max 5-D.')
if isinstance(data, xray.DataArray):
i_DataArray = True
data = data.copy()
name, attrs, coords, _ = xr.meta(data)
title = 'Pressure-level data interpolated onto new pressure grid'
attrs = utils.odict_insert(attrs, 'title', title, pos=0)
pname = get_coord(data, 'plev', 'name')
plev_in = get_coord(data, 'plev')
coords[pname] = xray.DataArray(plev_new, coords={pname : plev_new},
attrs=data.coords[pname].attrs)
else:
i_DataArray = False
# Make sure pressure units are consistent
if plev_new.min() < plev_in.min() or plev_new.max() > plev_in.max():
raise ValueError('Output pressure levels are not contained '
'within input pressure levels. Check units on each.')
# Add singleton dimensions for looping, if necessary
for i in range(ndim, nmax):
data = np.expand_dims(data, axis=0)
# Make sure pdim is indexing from end
pdim_in = pdim
if pdim > 0:
pdim = pdim - nmax
# Iterate over all other dimensions
dims = list(data.shape)
dims[pdim] = len(plev_new)
data_i = np.nan*np.ones(dims, dtype=float)
dims.pop(pdim)
for i in range(dims[0]):
for j in range(dims[1]):
for k in range(dims[2]):
for m in range(dims[3]):
if pdim == -3:
sub = data[i,j,:,k,m]
view = data_i[i,j,:,k,m]
elif pdim == -2:
sub = data[i,j,k,:,m]
view = data_i[i,j,k,:,m]
elif pdim == -1:
sub = data[i,j,k,m,:]
view = data_i[i,j,k,m,:]
else:
raise ValueError('Invalid p dimension ' + str(pdim_in))
vals_i = interp.interp1d(plev_in, sub, kind=kind)(plev_new)
view[:] = vals_i
# Collapse any additional dimensions that were added
for i in range(ndim, data_i.ndim):
data_i = data_i[0]
# Pack data_s into an xray.DataArray if input was in that form
if i_DataArray:
data_i = xray.DataArray(data_i, name=name, coords=coords,
attrs=attrs)
return data_i
# ----------------------------------------------------------------------
def int_pres(data, plev=None, pdim=-3, pmin=0, pmax=1e6):
"""Return the mass-weighted vertical integral of the data.
Parameters
----------
data : xray.DataArray or ndarray
Data to be integrated, on pressure levels.
plev : ndarray, optional
Vertical pressure levels in Pascals. Only used if data
is an ndarray. If data is a DataArray, plev is extracted
from data and converted to Pa if necessary.
pdim : int, optional
Dimension of vertical pressure levels in data.
pmin, pmax : float, optional
Lower and upper bounds (inclusive) of pressure levels (Pa)
to include in integration.
Returns
-------
data_int : xray.DataArray or ndarray
Mass-weighted vertical integral of data from pmin to pmax.
"""
if isinstance(data, xray.DataArray):
i_DataArray = True
data = data.copy()
name, _, coords, _ = xr.meta(data)
attrs = collections.OrderedDict()
title = 'Vertically integrated by dp/g'
attrs['title'] = title
if 'long_name' in data.attrs.keys():
attrs['long_name'] = data.attrs['long_name']
if 'units' in data.attrs.keys():
attrs['units'] = '(' + data.attrs['units'] + ') * kg'
pname = get_coord(data, 'plev', 'name')
del(coords[pname])
if plev is None:
# -- Make sure pressure levels are in Pa
plev = get_coord(data, 'plev')
plev = pres_convert(plev, data[pname].units, 'Pa')
data[pname].values = plev
else:
i_DataArray = False
# Pack into DataArray to easily extract pressure level subset
pname = 'plev'
coords = xr.coords_init(data)
coords = xr.coords_assign(coords, pdim, pname, plev)
data = xray.DataArray(data, coords=coords)
# Extract subset and integrate
data = subset(data, {pname : (pmin, pmax)})
vals_int = nantrapz(data.values, data[pname].values, axis=pdim)
vals_int /= constants.g.values
if utils.strictly_decreasing(plev):
vals_int = -vals_int
if i_DataArray:
data_int = xray.DataArray(vals_int, name=name, coords=coords,
attrs=attrs)
else:
data_int = vals_int
return data_int
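# Example (sketch; assumes q is specific humidity in kg/kg on pressure
# levels): the mass-weighted column integral of q is precipitable water
# in kg/m^2:
#     pw = int_pres(q, pdim=-3)
# Restricting pmin/pmax (in Pa) integrates a partial column instead:
#     pw_lower = int_pres(q, pdim=-3, pmin=70000, pmax=100000)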
# ======================================================================
# TIME
# ======================================================================
# ----------------------------------------------------------------------
def split_timedim(data, n, slowfast=True, timename=None, time0_name='time0',
time0_vals=None, time1_name='time1', time1_vals=None):
"""Split time dimension into two dimensions.
Parameters
----------
data : ndarray or xray.DataArray
Data array with time as the first dimension.
n : int
Number of periods per split (e.g. 12 for months).
slowfast : bool, optional
If True, then the slowest changing time index is first, e.g.
year, month. If False, then the fastest changing time index is
first, e.g. month, year.
timename : str, optional
Name of time dimension. Only used if data is a DataArray.
If omitted, the name is extracted from data with get_coord().
time0_name, time1_name : str, optional
Names for new time dimensions. Only used if data is a
DataArray.
time0_vals, time1_vals : ndarray, optional
Values for new time dimensions. Defaults to array of
integers. Only used if data is a DataArray.
Returns
-------
data_out : ndarray or xray.DataArray
Data array with the first dimension split into two. If dims
is the shape of the input data, and nt = dims[0], then:
- If slowfast=True: data_out.shape is [nt/n, n] + dims[1:]
- If slowfast=False: data_out.shape is [n, nt/n] + dims[1:]
"""
if isinstance(data, xray.DataArray):
i_DataArray = True
if timename is None:
timename = get_coord(data, 'time', 'name')
name, attrs, coords, dim_names = xr.meta(data)
dim_names = list(dim_names)
dim_names.remove(timename)
coords = utils.odict_delete(coords, timename)
data = data.values.copy()
else:
i_DataArray = False
dims = list(data.shape)
nt = dims[0]
nn = nt // n
data_out = np.reshape(data, [nn, n] + dims[1:])
if not slowfast:
data_out = np.swapaxes(data_out, 0, 1)
def time_coord(name, size, vals, coords):
if vals is None:
vals = np.arange(size)
time_arr = xray.DataArray(vals, coords={name : vals}, name=name)
return utils.odict_insert(coords, name, time_arr)
if i_DataArray:
coords = time_coord(time0_name, data_out.shape[0], time0_vals, coords)
coords = time_coord(time1_name, data_out.shape[1], time1_vals, coords)
dim_names = [time0_name, time1_name] + dim_names
data_out = xray.DataArray(data_out, name=name, dims=dim_names,
coords=coords, attrs=attrs)
return data_out
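# Example (sketch): 30 years of monthly means with shape (360, nlat, nlon)
# become (30, 12, nlat, nlon) with slowfast=True, or (12, 30, nlat, nlon)
# with slowfast=False:
#     data_ym = split_timedim(data, 12, time0_name='year', time1_name='month')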
# ----------------------------------------------------------------------
def splitdays(days):
"""Return a list of each set of consecutive days within an array."""
daysets = []
consec = np.diff(days) == 1
while not consec.all():
isplit = consec.argmin() + 1
daysets.append(days[:isplit])
days = days[isplit:]
consec = np.diff(days) == 1
else:
daysets.append(days)
return daysets
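# Example: splitdays(np.array([1, 2, 3, 10, 11, 12, 30])) returns
# [array([1, 2, 3]), array([10, 11, 12]), array([30])].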
# ----------------------------------------------------------------------
def daily_from_subdaily(data, n, method='mean', timename=None, dayname='day',
dayvals=None):
"""Return daily data from sub-daily data.
Parameters
----------
data : ndarray, xray.DataArray, or xray.Dataset
Data array (or set of data arrays) with time as the first dimension.
n : int
Number of values per day (e.g. n=8 for 3-hourly data).
method : {'mean'} or int, optional
Method for computing daily values from sub-daily values.
Default is the daily mean. If method is an integer in
range(n), then the daily value is the sub-sample at that
index (e.g. method=0 returns the first sub-daily value from
each day).
timename : str, optional
Name of time dimension in input. Only used if data is a DataArray.
If omitted, the name is extracted from data with get_coord().
dayname : str, optional
Name of time dimension in output. Only used if data is a DataArray.
dayvals : ndarray, optional
Values for time dimension in output, e.g. np.arange(1, 366).
Only used if data is a DataArray.
Returns
-------
data_out : ndarray or xray.DataArray
Daily values of data (mean or subsample).
"""
def process_one(data, n, method, timename, dayname, dayvals):
"""Process one data array."""
# Split the time dimension
data_out = split_timedim(data, n, slowfast=False, timename=timename,
time1_name=dayname, time1_vals=dayvals)
if isinstance(method, int):
if method in range(n):
data_out = data_out[method]
else:
msg = 'Subsample index %d is outside the valid range 0-%d.'
raise ValueError(msg % (method, n - 1))
elif isinstance(method, str) and method.lower() == 'mean':
if isinstance(data, xray.DataArray):
_, attrs, _, _ = xr.meta(data)
data_out = data_out.mean(axis=0)
data_out.attrs = attrs
else:
data_out = np.nanmean(data_out, axis=0)
else:
raise ValueError('Invalid method ' + str(method))
return data_out
if isinstance(data, xray.Dataset):
data_out = xray.Dataset()
for nm in data.data_vars:
data_out[nm] = process_one(data[nm], n, method, timename, dayname,
dayvals)
else:
data_out = process_one(data, n, method, timename, dayname, dayvals)
return data_out
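# Example (sketch): for 3-hourly input (n=8 values per day), daily means are
#     data_daily = daily_from_subdaily(data, 8, method='mean')
# and keeping only the first sub-daily sample of each day (e.g. 00Z if the
# record starts at 00Z) would be
#     data_first = daily_from_subdaily(data, 8, method=0)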
# ----------------------------------------------------------------------
def combine_daily_years(varnames, files, years, yearname='Year',
subset_dict=None):
"""Combine daily mean data from multiple files.
Parameters
----------
varnames : list of str
List of variables to extract. If None, then all variables
in the first file are used as varnames.
files : list of str
List of filenames to read. Each file should contain one year's
worth of daily data, with day of year as the first dimension
of each variable.
years : list of ints
List of years corresponding to each file.
yearname : str, optional
Name for year dimension in DataArrays.
subset_dict : dict of 2-tuples, optional
Dimensions and subsets to extract. Each entry in subset_dict
is in the form {dim_name : (lower_or_list, upper)}, where:
- dim_name : string
Name of dimension to extract from.
The dimension name can be the actual dimension name
(e.g. 'XDim') or a generic name (e.g. 'lon') and get_coord()
is called to find the specific name.
- lower_or_list : scalar or list of int or float
If scalar, then used as the lower bound for the subset range.
If list, then the subset matching the list will be extracted.
- upper : int, float, or None
Upper bound for subset range. If lower_or_list is a list,
then upper is ignored and should be set to None.
Returns
-------
data : xray.Dataset or xray.DataArray
Dataset with each variable as an array with year as the first
dimension, day of year as the second dimension. If a single
variable is selected, then the output is a DataArray rather
than a Dataset.
"""
# Read daily data from each year and concatenate
if varnames is None:
with xray.open_dataset(files[0]) as ds0:
varlist = ds0.data_vars.keys()
else:
varlist = utils.makelist(varnames)
ds = xray.Dataset()
for y, filn in enumerate(files):
print('Loading ' + filn)
ds1 = xray.Dataset()
with xray.open_dataset(filn) as ds_in:
if subset_dict is not None:
ds_in = subset(ds_in, subset_dict)
for nm in varlist:
var = ds_in[nm].load()
var.coords[yearname] = years[y]
ds1[nm] = var
if y == 0:
ds = ds1
dayname = ds1[varlist[0]].dims[0]
days = ds1[dayname].values
else:
days = np.union1d(days, ds1[dayname].values)
ds = ds.reindex(**{dayname : days})
ds1 = ds1.reindex(**{dayname : days})
ds = xray.concat([ds, ds1], dim=yearname)
# Collapse to single DataArray if only one variable, otherwise
# return Dataset
if len(varlist) == 1:
data = ds[varlist[0]]
else:
data = ds
return data<|fim▁end|> | flip = True
lat_out = lat_out[::-1]
|
<|file_name|>WiimoteEmu.cpp<|end_file_name|><|fim▁begin|>// Copyright 2010 Dolphin Emulator Project
// Licensed under GPLv2+
// Refer to the license.txt file included.
#include <cassert>
#include <cmath>
#include <cstring>
#include "Common/ChunkFile.h"
#include "Common/CommonTypes.h"
#include "Common/MathUtil.h"
#include "Common/MsgHandler.h"
#include "Core/ConfigManager.h"
#include "Core/Core.h"
#include "Core/HW/WiimoteEmu/Attachment/Classic.h"
#include "Core/HW/WiimoteEmu/Attachment/Drums.h"
#include "Core/HW/WiimoteEmu/Attachment/Guitar.h"
#include "Core/HW/WiimoteEmu/Attachment/Nunchuk.h"
#include "Core/HW/WiimoteEmu/Attachment/Turntable.h"
#include "Core/HW/WiimoteEmu/MatrixMath.h"
#include "Core/HW/WiimoteEmu/WiimoteEmu.h"
#include "Core/HW/WiimoteEmu/WiimoteHid.h"
#include "Core/HW/WiimoteReal/WiimoteReal.h"
#include "Core/Host.h"
#include "Core/Movie.h"
#include "Core/NetPlayClient.h"
namespace
{
// :)
auto const TAU = 6.28318530717958647692;
auto const PI = TAU / 2.0;
}
namespace WiimoteEmu
{
static const u8 eeprom_data_0[] = {
// IR, maybe more
// assuming last 2 bytes are checksum
0xA1, 0xAA, 0x8B, 0x99, 0xAE, 0x9E, 0x78, 0x30, 0xA7, /*0x74, 0xD3,*/ 0x00,
0x00, // messing up the checksum on purpose
0xA1, 0xAA, 0x8B, 0x99, 0xAE, 0x9E, 0x78, 0x30, 0xA7, /*0x74, 0xD3,*/ 0x00, 0x00,
// Accelerometer
// Important: checksum is required for tilt games
ACCEL_ZERO_G, ACCEL_ZERO_G, ACCEL_ZERO_G, 0, ACCEL_ONE_G, ACCEL_ONE_G, ACCEL_ONE_G, 0, 0, 0xA3,
ACCEL_ZERO_G, ACCEL_ZERO_G, ACCEL_ZERO_G, 0, ACCEL_ONE_G, ACCEL_ONE_G, ACCEL_ONE_G, 0, 0, 0xA3,
};
static const u8 motion_plus_id[] = {0x00, 0x00, 0xA6, 0x20, 0x00, 0x05};
static const u8 eeprom_data_16D0[] = {0x00, 0x00, 0x00, 0xFF, 0x11, 0xEE, 0x00, 0x00,
0x33, 0xCC, 0x44, 0xBB, 0x00, 0x00, 0x66, 0x99,
0x77, 0x88, 0x00, 0x00, 0x2B, 0x01, 0xE8, 0x13};
static const ReportFeatures reporting_mode_features[] = {
// 0x30: Core Buttons
{2, 0, 0, 0, 4},
// 0x31: Core Buttons and Accelerometer
{2, 4, 0, 0, 7},
// 0x32: Core Buttons with 8 Extension bytes
{2, 0, 0, 4, 12},
// 0x33: Core Buttons and Accelerometer with 12 IR bytes
{2, 4, 7, 0, 19},
// 0x34: Core Buttons with 19 Extension bytes
{2, 0, 0, 4, 23},
// 0x35: Core Buttons and Accelerometer with 16 Extension Bytes
{2, 4, 0, 7, 23},
// 0x36: Core Buttons with 10 IR bytes and 9 Extension Bytes
{2, 0, 4, 14, 23},
// 0x37: Core Buttons and Accelerometer with 10 IR bytes and 6 Extension Bytes
{2, 4, 7, 17, 23},
// UNSUPPORTED:
// 0x3d: 21 Extension Bytes
{0, 0, 0, 2, 23},
// 0x3e / 0x3f: Interleaved Core Buttons and Accelerometer with 36 IR bytes
{0, 0, 0, 0, 23},
};
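// Worked example (informational comment only): for reporting mode 0x33 the
// entry {2, 4, 7, 0, 19} means core buttons start at byte offset 2 of the
// report, accelerometer data at offset 4, the 12 IR bytes at offset 7, no
// extension block is present, and the total payload is 19 bytes. Update()
// below indexes this table with (m_reporting_mode - WM_REPORT_CORE).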
void EmulateShake(AccelData* const accel, ControllerEmu::Buttons* const buttons_group,
u8* const shake_step)
{
// frame count of one up/down shake
// < 9 no shake detection in "Wario Land: Shake It"
auto const shake_step_max = 15;
// peak G-force
auto const shake_intensity = 3.0;
// shake is a bitfield of X,Y,Z shake button states
static const unsigned int btns[] = {0x01, 0x02, 0x04};
unsigned int shake = 0;
buttons_group->GetState(&shake, btns);
for (int i = 0; i != 3; ++i)
{
if (shake & (1 << i))
{
(&(accel->x))[i] = std::sin(TAU * shake_step[i] / shake_step_max) * shake_intensity;
shake_step[i] = (shake_step[i] + 1) % shake_step_max;
}
else
shake_step[i] = 0;
}
}
void EmulateTilt(AccelData* const accel, ControllerEmu::Tilt* const tilt_group, const bool sideways,
const bool upright)
{
ControlState roll, pitch;
// 180 degrees
tilt_group->GetState(&roll, &pitch);
roll *= PI;
pitch *= PI;
unsigned int ud = 0, lr = 0, fb = 0;
// some notes that no one will understand but me :p
// left, forward, up
// lr/ left == negative for all orientations
// ud/ up == negative for upright longways
// fb/ forward == positive for (sideways flat)
// determine which axis is which direction
ud = upright ? (sideways ? 0 : 1) : 2;
lr = sideways;
fb = upright ? 2 : (sideways ? 0 : 1);
int sgn[3] = {-1, 1, 1}; // sign fix
if (sideways && !upright)
sgn[fb] *= -1;
if (!sideways && upright)
sgn[ud] *= -1;
(&accel->x)[ud] = (sin((PI / 2) - std::max(fabs(roll), fabs(pitch)))) * sgn[ud];
(&accel->x)[lr] = -sin(roll) * sgn[lr];
(&accel->x)[fb] = sin(pitch) * sgn[fb];
}
#define SWING_INTENSITY 2.5 //-uncalibrated(aprox) 0x40-calibrated
void EmulateSwing(AccelData* const accel, ControllerEmu::Force* const swing_group,
const bool sideways, const bool upright)
{
ControlState swing[3];
swing_group->GetState(swing);
s8 g_dir[3] = {-1, -1, -1};
u8 axis_map[3];
// determine which axis is which direction
axis_map[0] = upright ? (sideways ? 0 : 1) : 2; // up/down
axis_map[1] = sideways; // left|right
axis_map[2] = upright ? 2 : (sideways ? 0 : 1); // forward/backward
// some orientations have up as positive, some as negative
// same with forward
if (sideways && !upright)
g_dir[axis_map[2]] *= -1;
if (!sideways && upright)
g_dir[axis_map[0]] *= -1;
for (unsigned int i = 0; i < 3; ++i)
(&accel->x)[axis_map[i]] += swing[i] * g_dir[i] * SWING_INTENSITY;
}
static const u16 button_bitmasks[] = {
Wiimote::BUTTON_A, Wiimote::BUTTON_B, Wiimote::BUTTON_ONE, Wiimote::BUTTON_TWO,
Wiimote::BUTTON_MINUS, Wiimote::BUTTON_PLUS, Wiimote::BUTTON_HOME};
static const u16 dpad_bitmasks[] = {Wiimote::PAD_UP, Wiimote::PAD_DOWN, Wiimote::PAD_LEFT,
Wiimote::PAD_RIGHT};
static const u16 dpad_sideways_bitmasks[] = {Wiimote::PAD_RIGHT, Wiimote::PAD_LEFT, Wiimote::PAD_UP,
Wiimote::PAD_DOWN};
static const char* const named_buttons[] = {
"A", "B", "1", "2", "-", "+", "Home",
};
void Wiimote::Reset()
{
m_reporting_mode = WM_REPORT_CORE;
// i think these two are good
m_reporting_channel = 0;
m_reporting_auto = false;
m_rumble_on = false;
m_speaker_mute = false;
m_motion_plus_present = false;
m_motion_plus_active = false;
// will make the first Update() call send a status request
// the first call to RequestStatus() will then set up the status struct extension bit
m_extension->active_extension = -1;
// eeprom
memset(m_eeprom, 0, sizeof(m_eeprom));
// calibration data
memcpy(m_eeprom, eeprom_data_0, sizeof(eeprom_data_0));
// dunno what this is for, copied from old plugin
memcpy(m_eeprom + 0x16D0, eeprom_data_16D0, sizeof(eeprom_data_16D0));
// set up the register
memset(&m_reg_speaker, 0, sizeof(m_reg_speaker));
memset(&m_reg_ir, 0, sizeof(m_reg_ir));
memset(&m_reg_ext, 0, sizeof(m_reg_ext));
memset(&m_reg_motion_plus, 0, sizeof(m_reg_motion_plus));
memcpy(&m_reg_motion_plus.ext_identifier, motion_plus_id, sizeof(motion_plus_id));
// status
memset(&m_status, 0, sizeof(m_status));
// Battery levels in voltage
// 0x00 - 0x32: level 1
// 0x33 - 0x43: level 2
// 0x44 - 0x54: level 3
// 0x55 - 0xff: level 4
m_status.battery = (u8)(m_options->numeric_settings[1]->GetValue() * 100);
memset(m_shake_step, 0, sizeof(m_shake_step));
// clear read request queue
while (!m_read_requests.empty())
{
delete[] m_read_requests.front().data;
m_read_requests.pop();
}
// Yamaha ADPCM state initialize
m_adpcm_state.predictor = 0;
m_adpcm_state.step = 127;
}
Wiimote::Wiimote(const unsigned int index)
: m_index(index), ir_sin(0), ir_cos(1), m_last_connect_request_counter(0)
{
// ---- set up all the controls ----
// buttons
groups.emplace_back(m_buttons = new Buttons("Buttons"));
for (auto& named_button : named_buttons)
m_buttons->controls.emplace_back(new ControlGroup::Input(named_button));
// ir
groups.emplace_back(m_ir = new Cursor(_trans("IR")));
// swing
groups.emplace_back(m_swing = new Force(_trans("Swing")));
// tilt
groups.emplace_back(m_tilt = new Tilt(_trans("Tilt")));
// shake
groups.emplace_back(m_shake = new Buttons(_trans("Shake")));
m_shake->controls.emplace_back(new ControlGroup::Input("X"));
m_shake->controls.emplace_back(new ControlGroup::Input("Y"));
m_shake->controls.emplace_back(new ControlGroup::Input("Z"));
// extension
groups.emplace_back(m_extension = new Extension(_trans("Extension")));
m_extension->attachments.emplace_back(new WiimoteEmu::None(m_reg_ext));
m_extension->attachments.emplace_back(new WiimoteEmu::Nunchuk(m_reg_ext));
m_extension->attachments.emplace_back(new WiimoteEmu::Classic(m_reg_ext));
m_extension->attachments.emplace_back(new WiimoteEmu::Guitar(m_reg_ext));
m_extension->attachments.emplace_back(new WiimoteEmu::Drums(m_reg_ext));
m_extension->attachments.emplace_back(new WiimoteEmu::Turntable(m_reg_ext));
m_extension->boolean_settings.emplace_back(
std::make_unique<ControlGroup::BooleanSetting>(_trans("Motion Plus"), false));
// rumble
groups.emplace_back(m_rumble = new ControlGroup(_trans("Rumble")));
m_rumble->controls.emplace_back(new ControlGroup::Output(_trans("Motor")));
// dpad
groups.emplace_back(m_dpad = new Buttons("D-Pad"));
for (auto& named_direction : named_directions)
m_dpad->controls.emplace_back(new ControlGroup::Input(named_direction));
// options
groups.emplace_back(m_options = new ControlGroup(_trans("Options")));
m_options->boolean_settings.emplace_back(
std::make_unique<ControlGroup::BackgroundInputSetting>(_trans("Background Input")));
m_options->boolean_settings.emplace_back(
std::make_unique<ControlGroup::BooleanSetting>(_trans("Sideways Wii Remote"), false));
m_options->boolean_settings.emplace_back(
std::make_unique<ControlGroup::BooleanSetting>(_trans("Upright Wii Remote"), false));
m_options->boolean_settings.emplace_back(std::make_unique<ControlGroup::BooleanSetting>(
_trans("Iterative Input"), false, ControlGroup::SettingType::VIRTUAL));
m_options->numeric_settings.emplace_back(
std::make_unique<ControlGroup::NumericSetting>(_trans("Speaker Pan"), 0, -127, 127));
m_options->numeric_settings.emplace_back(
std::make_unique<ControlGroup::NumericSetting>(_trans("Battery"), 95.0 / 100, 0, 255));
// hotkeys
groups.emplace_back(m_hotkeys = new ModifySettingsButton(_trans("Hotkeys")));
// hotkeys to temporarily modify the Wii Remote orientation (sideways, upright)
// this setting modifier is toggled
m_hotkeys->AddInput(_trans("Sideways Toggle"), true);
m_hotkeys->AddInput(_trans("Upright Toggle"), true);
// this setting modifier is not toggled
m_hotkeys->AddInput(_trans("Sideways Hold"), false);
m_hotkeys->AddInput(_trans("Upright Hold"), false);
// TODO: This value should probably be re-read if SYSCONF gets changed
m_sensor_bar_on_top = SConfig::GetInstance().m_sensor_bar_position != 0;
// --- reset eeprom/register/values to default ---
Reset();
}
std::string Wiimote::GetName() const
{
return std::string("Wiimote") + char('1' + m_index);
}
ControllerEmu::ControlGroup* Wiimote::GetWiimoteGroup(WiimoteGroup group)
{
switch (group)
{
case WiimoteGroup::Buttons:
return m_buttons;
case WiimoteGroup::DPad:
return m_dpad;
case WiimoteGroup::Shake:
return m_shake;
case WiimoteGroup::IR:
return m_ir;
case WiimoteGroup::Tilt:
return m_tilt;
case WiimoteGroup::Swing:
return m_swing;
case WiimoteGroup::Rumble:
return m_rumble;
case WiimoteGroup::Extension:
return m_extension;
case WiimoteGroup::Options:
return m_options;
case WiimoteGroup::Hotkeys:
return m_hotkeys;
default:
assert(false);
return nullptr;
}
}
ControllerEmu::ControlGroup* Wiimote::GetNunchukGroup(NunchukGroup group)
{
return static_cast<Nunchuk*>(m_extension->attachments[EXT_NUNCHUK].get())->GetGroup(group);
}
ControllerEmu::ControlGroup* Wiimote::GetClassicGroup(ClassicGroup group)
{
return static_cast<Classic*>(m_extension->attachments[EXT_CLASSIC].get())->GetGroup(group);
}
ControllerEmu::ControlGroup* Wiimote::GetGuitarGroup(GuitarGroup group)
{
return static_cast<Guitar*>(m_extension->attachments[EXT_GUITAR].get())->GetGroup(group);
}
ControllerEmu::ControlGroup* Wiimote::GetDrumsGroup(DrumsGroup group)
{
return static_cast<Drums*>(m_extension->attachments[EXT_DRUMS].get())->GetGroup(group);
}
ControllerEmu::ControlGroup* Wiimote::GetTurntableGroup(TurntableGroup group)
{
return static_cast<Turntable*>(m_extension->attachments[EXT_TURNTABLE].get())->GetGroup(group);
}
bool Wiimote::Step()
{
// TODO: change this a bit
m_motion_plus_present = m_extension->boolean_settings[0]->GetValue();
m_rumble->controls[0]->control_ref->State(m_rumble_on);
// when a movie is active, this button status update is disabled (moved), because movies only
// record data reports.
if (!Core::g_want_determinism)
{
UpdateButtonsStatus();
}
// check if there is a read data request
if (!m_read_requests.empty())
{
ReadRequest& rr = m_read_requests.front();
// send up to 16 bytes to the Wii
SendReadDataReply(rr);
// SendReadDataReply(rr.channel, rr);
// if there is no more data, remove from queue
if (0 == rr.size)
{
delete[] rr.data;
m_read_requests.pop();
}
// don't send any other reports
return true;
}
// check if a status report needs to be sent
// this happens on Wii Remote sync and when extensions are switched
if (m_extension->active_extension != m_extension->switch_extension)
{
RequestStatus();
// WiiBrew: Following a connection or disconnection event on the Extension Port,
// data reporting is disabled and the Data Reporting Mode must be reset before new data can
// arrive.
// after a game receives an unrequested status report,
// it expects data reports to stop until it sets the reporting mode again
m_reporting_auto = false;
return true;
}
return false;
}
void Wiimote::UpdateButtonsStatus()
{
// update buttons in status struct
m_status.buttons.hex = 0;
const bool sideways_modifier_toggle = m_hotkeys->getSettingsModifier()[0];
const bool sideways_modifier_switch = m_hotkeys->getSettingsModifier()[2];
const bool is_sideways = m_options->boolean_settings[1]->GetValue() ^ sideways_modifier_toggle ^
sideways_modifier_switch;
m_buttons->GetState(&m_status.buttons.hex, button_bitmasks);
m_dpad->GetState(&m_status.buttons.hex, is_sideways ? dpad_sideways_bitmasks : dpad_bitmasks);
}
void Wiimote::GetButtonData(u8* const data)
{
// when a movie is active, the button update happens here instead of Wiimote::Step, to avoid
// potential desync issues.
if (Core::g_want_determinism)
{
UpdateButtonsStatus();
}
((wm_buttons*)data)->hex |= m_status.buttons.hex;
}
void Wiimote::GetAccelData(u8* const data, const ReportFeatures& rptf)
{
const bool sideways_modifier_toggle = m_hotkeys->getSettingsModifier()[0];
const bool upright_modifier_toggle = m_hotkeys->getSettingsModifier()[1];
const bool sideways_modifier_switch = m_hotkeys->getSettingsModifier()[2];
const bool upright_modifier_switch = m_hotkeys->getSettingsModifier()[3];
const bool is_sideways = m_options->boolean_settings[1]->GetValue() ^ sideways_modifier_toggle ^
sideways_modifier_switch;
const bool is_upright = m_options->boolean_settings[2]->GetValue() ^ upright_modifier_toggle ^
upright_modifier_switch;
EmulateTilt(&m_accel, m_tilt, is_sideways, is_upright);
EmulateSwing(&m_accel, m_swing, is_sideways, is_upright);
EmulateShake(&m_accel, m_shake, m_shake_step);
wm_accel& accel = *(wm_accel*)(data + rptf.accel);
wm_buttons& core = *(wm_buttons*)(data + rptf.core);
// We now use 2 bits more precision, so multiply by 4 before converting to int
s16 x = (s16)(4 * (m_accel.x * ACCEL_RANGE + ACCEL_ZERO_G));
s16 y = (s16)(4 * (m_accel.y * ACCEL_RANGE + ACCEL_ZERO_G));
s16 z = (s16)(4 * (m_accel.z * ACCEL_RANGE + ACCEL_ZERO_G));
x = MathUtil::Clamp<s16>(x, 0, 1024);
y = MathUtil::Clamp<s16>(y, 0, 1024);
z = MathUtil::Clamp<s16>(z, 0, 1024);
accel.x = (x >> 2) & 0xFF;
accel.y = (y >> 2) & 0xFF;
accel.z = (z >> 2) & 0xFF;
core.acc_x_lsb = x & 0x3;
core.acc_y_lsb = (y >> 1) & 0x1;
core.acc_z_lsb = (z >> 1) & 0x1;
}
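// Numeric sketch of the packing above (assuming, for illustration, the usual
// calibration constants ACCEL_ZERO_G = 0x80 and ACCEL_ONE_G = 0x9A, so
// ACCEL_RANGE = 26): an input of +1 g gives x = 4 * (26 + 128) = 616; the
// report stores 616 >> 2 = 154 (0x9A) in accel.x and the two low bits
// (616 & 0x3 = 0) in core.acc_x_lsb, giving 10-bit precision overall.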
inline void LowPassFilter(double& var, double newval, double period)
{
static const double CUTOFF_FREQUENCY = 5.0;
double RC = 1.0 / CUTOFF_FREQUENCY;
double alpha = period / (period + RC);
var = newval * alpha + var * (1.0 - alpha);
}
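// Example values (informational): with CUTOFF_FREQUENCY = 5 Hz, RC = 0.2 s;
// at the 60 Hz rate used by GetIRData (period = 1/60 s) this gives
// alpha = (1/60) / (1/60 + 0.2) ~= 0.077, i.e. roughly 8% of each new sample
// is blended into the filtered value per call.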
void Wiimote::GetIRData(u8* const data, bool use_accel)
{
u16 x[4], y[4];
memset(x, 0xFF, sizeof(x));
ControlState xx = 10000, yy = 0, zz = 0;
double nsin, ncos;
if (use_accel)
{
double ax, az, len;
ax = m_accel.x;
az = m_accel.z;
len = sqrt(ax * ax + az * az);
if (len)
{
ax /= len;
az /= len; // normalizing the vector
nsin = ax;
ncos = az;
}
else
{
nsin = 0;
ncos = 1;
}
// PanicAlert("%d %d %d\nx:%f\nz:%f\nsin:%f\ncos:%f",accel->x,accel->y,accel->z,ax,az,sin,cos);
// PanicAlert("%d %d %d\n%d %d %d\n%d %d
// %d",accel->x,accel->y,accel->z,calib->zero_g.x,calib->zero_g.y,calib->zero_g.z,
// calib->one_g.x,calib->one_g.y,calib->one_g.z);
}
else
{
nsin = 0; // m_tilt stuff here (can't figure it out yet....)
ncos = 1;
}
LowPassFilter(ir_sin, nsin, 1.0 / 60);
LowPassFilter(ir_cos, ncos, 1.0 / 60);
m_ir->GetState(&xx, &yy, &zz, true);
Vertex v[4];
static const int camWidth = 1024;
static const int camHeight = 768;
static const double bndup = -0.315447;
static const double bnddown = 0.85;
static const double bndleft = 0.443364;
static const double bndright = -0.443364;
static const double dist1 = 100.0 / camWidth; // this seems the optimal distance for zelda
static const double dist2 = 1.2 * dist1;
for (auto& vtx : v)
{
vtx.x = xx * (bndright - bndleft) / 2 + (bndleft + bndright) / 2;
if (m_sensor_bar_on_top)
vtx.y = yy * (bndup - bnddown) / 2 + (bndup + bnddown) / 2;
else
vtx.y = yy * (bndup - bnddown) / 2 - (bndup + bnddown) / 2;
vtx.z = 0;
}
v[0].x -= (zz * 0.5 + 1) * dist1;
v[1].x += (zz * 0.5 + 1) * dist1;
v[2].x -= (zz * 0.5 + 1) * dist2;
v[3].x += (zz * 0.5 + 1) * dist2;
#define printmatrix(m) \
PanicAlert("%f %f %f %f\n%f %f %f %f\n%f %f %f %f\n%f %f %f %f\n", m[0][0], m[0][1], m[0][2], \
m[0][3], m[1][0], m[1][1], m[1][2], m[1][3], m[2][0], m[2][1], m[2][2], m[2][3], \
m[3][0], m[3][1], m[3][2], m[3][3])
Matrix rot, tot;
static Matrix scale;
MatrixScale(scale, 1, camWidth / camHeight, 1);
// MatrixIdentity(scale);
MatrixRotationByZ(rot, ir_sin, ir_cos);
// MatrixIdentity(rot);
MatrixMultiply(tot, scale, rot);
for (int i = 0; i < 4; i++)
{
MatrixTransformVertex(tot, v[i]);
if ((v[i].x < -1) || (v[i].x > 1) || (v[i].y < -1) || (v[i].y > 1))
continue;
x[i] = (u16)lround((v[i].x + 1) / 2 * (camWidth - 1));
y[i] = (u16)lround((v[i].y + 1) / 2 * (camHeight - 1));
}
// PanicAlert("%f %f\n%f %f\n%f %f\n%f %f\n%d %d\n%d %d\n%d %d\n%d %d",
// v[0].x,v[0].y,v[1].x,v[1].y,v[2].x,v[2].y,v[3].x,v[3].y,
// x[0],y[0],x[1],y[1],x[2],y[2],x[3],y[3]);
// Fill report with valid data when full handshake was done
if (m_reg_ir.data[0x30])
// ir mode
switch (m_reg_ir.mode)
{
// basic
case 1:
{
memset(data, 0xFF, 10);
wm_ir_basic* const irdata = (wm_ir_basic*)data;
for (unsigned int i = 0; i < 2; ++i)
{
if (x[i * 2] < 1024 && y[i * 2] < 768)
{
irdata[i].x1 = static_cast<u8>(x[i * 2]);
irdata[i].x1hi = x[i * 2] >> 8;
irdata[i].y1 = static_cast<u8>(y[i * 2]);
irdata[i].y1hi = y[i * 2] >> 8;
}
if (x[i * 2 + 1] < 1024 && y[i * 2 + 1] < 768)
{<|fim▁hole|> irdata[i].x2 = static_cast<u8>(x[i * 2 + 1]);
irdata[i].x2hi = x[i * 2 + 1] >> 8;
irdata[i].y2 = static_cast<u8>(y[i * 2 + 1]);
irdata[i].y2hi = y[i * 2 + 1] >> 8;
}
}
}
break;
// extended
case 3:
{
memset(data, 0xFF, 12);
wm_ir_extended* const irdata = (wm_ir_extended*)data;
for (unsigned int i = 0; i < 4; ++i)
if (x[i] < 1024 && y[i] < 768)
{
irdata[i].x = static_cast<u8>(x[i]);
irdata[i].xhi = x[i] >> 8;
irdata[i].y = static_cast<u8>(y[i]);
irdata[i].yhi = y[i] >> 8;
irdata[i].size = 10;
}
}
break;
// full
case 5:
PanicAlert("Full IR report");
// UNSUPPORTED
break;
}
}
void Wiimote::GetExtData(u8* const data)
{
m_extension->GetState(data);
// i dont think anything accesses the extension data like this, but ill support it. Indeed,
// commercial games don't do this.
// i think it should be unencrypted in the register, encrypted when read.
memcpy(m_reg_ext.controller_data, data, sizeof(wm_nc)); // TODO: Should it be nc specific?
// motionplus pass-through modes
if (m_motion_plus_active)
{
switch (m_reg_motion_plus.ext_identifier[0x4])
{
// nunchuk pass-through mode
// Bit 7 of byte 5 is moved to bit 6 of byte 5, overwriting it
// Bit 0 of byte 4 is moved to bit 7 of byte 5
// Bit 3 of byte 5 is moved to bit 4 of byte 5, overwriting it
// Bit 1 of byte 5 is moved to bit 3 of byte 5
// Bit 0 of byte 5 is moved to bit 2 of byte 5, overwriting it
case 0x5:
// data[5] & (1 << 7)
// data[4] & (1 << 0)
// data[5] & (1 << 3)
// data[5] & (1 << 1)
// data[5] & (1 << 0)
break;
// classic controller/musical instrument pass-through mode
// Bit 0 of Byte 4 is overwritten
// Bits 0 and 1 of Byte 5 are moved to bit 0 of Bytes 0 and 1, overwriting
case 0x7:
// data[4] & (1 << 0)
// data[5] & (1 << 0)
// data[5] & (1 << 1)
break;
// unknown pass-through mode
default:
break;
}
((wm_motionplus_data*)data)->is_mp_data = 0;
((wm_motionplus_data*)data)->extension_connected = m_extension->active_extension;
}
if (0xAA == m_reg_ext.encryption)
WiimoteEncrypt(&m_ext_key, data, 0x00, sizeof(wm_nc));
}
void Wiimote::Update()
{
// no channel == not connected i guess
if (0 == m_reporting_channel)
return;
// returns true if a report was sent
{
auto lock = ControllerEmu::GetStateLock();
if (Step())
return;
}
u8 data[MAX_PAYLOAD];
memset(data, 0, sizeof(data));
Movie::SetPolledDevice();
m_status.battery = (u8)(m_options->numeric_settings[1]->GetValue() * 100);
const ReportFeatures& rptf = reporting_mode_features[m_reporting_mode - WM_REPORT_CORE];
s8 rptf_size = rptf.size;
if (Movie::IsPlayingInput() &&
Movie::PlayWiimote(m_index, data, rptf, m_extension->active_extension, m_ext_key))
{
if (rptf.core)
m_status.buttons = *(wm_buttons*)(data + rptf.core);
}
else
{
data[0] = 0xA1;
data[1] = m_reporting_mode;
auto lock = ControllerEmu::GetStateLock();
// hotkey/settings modifier
m_hotkeys->GetState(); // data is later accessed in UpdateButtonsStatus and GetAccelData
// core buttons
if (rptf.core)
GetButtonData(data + rptf.core);
// acceleration
if (rptf.accel)
GetAccelData(data, rptf);
// IR
if (rptf.ir)
GetIRData(data + rptf.ir, (rptf.accel != 0));
// extension
if (rptf.ext)
GetExtData(data + rptf.ext);
// hybrid Wii Remote stuff (for now, it's not supported while recording)
if (WIIMOTE_SRC_HYBRID == g_wiimote_sources[m_index] && !Movie::IsRecordingInput())
{
using namespace WiimoteReal;
std::lock_guard<std::mutex> lk(g_wiimotes_mutex);
if (g_wiimotes[m_index])
{
const Report& rpt = g_wiimotes[m_index]->ProcessReadQueue();
if (!rpt.empty())
{
const u8* real_data = rpt.data();
switch (real_data[1])
{
// use data reports
default:
if (real_data[1] >= WM_REPORT_CORE)
{
const ReportFeatures& real_rptf =
reporting_mode_features[real_data[1] - WM_REPORT_CORE];
// force same report type from real-Wiimote
if (&real_rptf != &rptf)
rptf_size = 0;
// core
// mix real-buttons with emu-buttons in the status struct, and in the report
if (real_rptf.core && rptf.core)
{
m_status.buttons.hex |= ((wm_buttons*)(real_data + real_rptf.core))->hex;
*(wm_buttons*)(data + rptf.core) = m_status.buttons;
}
// accel
// use real-accel data always i guess
if (real_rptf.accel && rptf.accel)
memcpy(data + rptf.accel, real_data + real_rptf.accel, sizeof(wm_accel));
// ir
// TODO
// ext
// use real-ext data if an emu-extension isn't chosen
if (real_rptf.ext && rptf.ext && (0 == m_extension->switch_extension))
memcpy(data + rptf.ext, real_data + real_rptf.ext,
sizeof(wm_nc)); // TODO: Why NC specific?
}
else if (WM_ACK_DATA != real_data[1] || m_extension->active_extension > 0)
rptf_size = 0;
else
// use real-acks if an emu-extension isn't chosen
rptf_size = -1;
break;
// use all status reports, after modification of the extension bit
case WM_STATUS_REPORT:
// if (m_extension->switch_extension)
//((wm_status_report*)(real_data + 2))->extension = (m_extension->active_extension > 0);
if (m_extension->active_extension)
((wm_status_report*)(real_data + 2))->extension = 1;
rptf_size = -1;
break;
// use all read-data replies
case WM_READ_DATA_REPLY:
rptf_size = -1;
break;
}
// copy over report from real-Wiimote
if (-1 == rptf_size)
{
std::copy(rpt.begin(), rpt.end(), data);
rptf_size = (s8)(rpt.size());
}
}
}
}
Movie::CallWiiInputManip(data, rptf, m_index, m_extension->active_extension, m_ext_key);
}
if (NetPlay::IsNetPlayRunning())
{
NetPlay_GetWiimoteData(m_index, data, rptf.size, m_reporting_mode);
if (rptf.core)
m_status.buttons = *(wm_buttons*)(data + rptf.core);
}
Movie::CheckWiimoteStatus(m_index, data, rptf, m_extension->active_extension, m_ext_key);
// don't send a data report if auto reporting is off
if (false == m_reporting_auto && data[1] >= WM_REPORT_CORE)
return;
// send data report
if (rptf_size)
{
Core::Callback_WiimoteInterruptChannel(m_index, m_reporting_channel, data, rptf_size);
}
}
void Wiimote::ControlChannel(const u16 _channelID, const void* _pData, u32 _Size)
{
// Check for custom communication
if (99 == _channelID)
{
// Wii Remote disconnected
// reset eeprom/register/reporting mode
Reset();
if (WIIMOTE_SRC_REAL & g_wiimote_sources[m_index])
WiimoteReal::ControlChannel(m_index, _channelID, _pData, _Size);
return;
}
// this all good?
m_reporting_channel = _channelID;
const hid_packet* const hidp = (hid_packet*)_pData;
DEBUG_LOG(WIIMOTE, "Emu ControlChannel (page: %i, type: 0x%02x, param: 0x%02x)", m_index,
hidp->type, hidp->param);
switch (hidp->type)
{
case HID_TYPE_HANDSHAKE:
PanicAlert("HID_TYPE_HANDSHAKE - %s", (hidp->param == HID_PARAM_INPUT) ? "INPUT" : "OUPUT");
break;
case HID_TYPE_SET_REPORT:
if (HID_PARAM_INPUT == hidp->param)
{
PanicAlert("HID_TYPE_SET_REPORT - INPUT");
}
else
{
// AyuanX: My experiment shows Control Channel is never used
// shuffle2: but lwbt uses this, so we'll do what we must :)
HidOutputReport((wm_report*)hidp->data);
u8 handshake = HID_HANDSHAKE_SUCCESS;
Core::Callback_WiimoteInterruptChannel(m_index, _channelID, &handshake, 1);
}
break;
case HID_TYPE_DATA:
PanicAlert("HID_TYPE_DATA - %s", (hidp->param == HID_PARAM_INPUT) ? "INPUT" : "OUTPUT");
break;
default:
PanicAlert("HidControlChannel: Unknown type %x and param %x", hidp->type, hidp->param);
break;
}
}
void Wiimote::InterruptChannel(const u16 _channelID, const void* _pData, u32 _Size)
{
// this all good?
m_reporting_channel = _channelID;
const hid_packet* const hidp = (hid_packet*)_pData;
switch (hidp->type)
{
case HID_TYPE_DATA:
switch (hidp->param)
{
case HID_PARAM_OUTPUT:
{
const wm_report* const sr = (wm_report*)hidp->data;
if (WIIMOTE_SRC_REAL & g_wiimote_sources[m_index])
{
switch (sr->wm)
{
// these two types are handled in RequestStatus() & ReadData()
case WM_REQUEST_STATUS:
case WM_READ_DATA:
if (WIIMOTE_SRC_REAL == g_wiimote_sources[m_index])
WiimoteReal::InterruptChannel(m_index, _channelID, _pData, _Size);
break;
default:
WiimoteReal::InterruptChannel(m_index, _channelID, _pData, _Size);
break;
}
HidOutputReport(sr, m_extension->switch_extension > 0);
}
else
HidOutputReport(sr);
}
break;
default:
PanicAlert("HidInput: HID_TYPE_DATA - param 0x%02x", hidp->param);
break;
}
break;
default:
PanicAlert("HidInput: Unknown type 0x%02x and param 0x%02x", hidp->type, hidp->param);
break;
}
}
void Wiimote::ConnectOnInput()
{
if (m_last_connect_request_counter > 0)
{
--m_last_connect_request_counter;
return;
}
u16 buttons = 0;
auto lock = ControllerEmu::GetStateLock();
m_buttons->GetState(&buttons, button_bitmasks);
m_dpad->GetState(&buttons, dpad_bitmasks);
if (buttons != 0 || m_extension->IsButtonPressed())
{
Host_ConnectWiimote(m_index, true);
// arbitrary value so it doesn't try to send multiple requests before Dolphin can react
// if Wii Remotes are polled at 200Hz then this results in one request being sent per 500ms
m_last_connect_request_counter = 100;
}
}
void Wiimote::LoadDefaults(const ControllerInterface& ciface)
{
ControllerEmu::LoadDefaults(ciface);
// Buttons
#if defined HAVE_X11 && HAVE_X11
m_buttons->SetControlExpression(0, "Click 1"); // A
m_buttons->SetControlExpression(1, "Click 3"); // B
#else
m_buttons->SetControlExpression(0, "Click 0"); // A
m_buttons->SetControlExpression(1, "Click 1"); // B
#endif
m_buttons->SetControlExpression(2, "1"); // 1
m_buttons->SetControlExpression(3, "2"); // 2
m_buttons->SetControlExpression(4, "Q"); // -
m_buttons->SetControlExpression(5, "E"); // +
#ifdef _WIN32
m_buttons->SetControlExpression(6, "!LMENU & RETURN"); // Home
#else
m_buttons->SetControlExpression(6, "!`Alt_L` & Return"); // Home
#endif
// Shake
for (int i = 0; i < 3; ++i)
m_shake->SetControlExpression(i, "Click 2");
// IR
m_ir->SetControlExpression(0, "Cursor Y-");
m_ir->SetControlExpression(1, "Cursor Y+");
m_ir->SetControlExpression(2, "Cursor X-");
m_ir->SetControlExpression(3, "Cursor X+");
// DPad
#ifdef _WIN32
m_dpad->SetControlExpression(0, "UP"); // Up
m_dpad->SetControlExpression(1, "DOWN"); // Down
m_dpad->SetControlExpression(2, "LEFT"); // Left
m_dpad->SetControlExpression(3, "RIGHT"); // Right
#elif __APPLE__
m_dpad->SetControlExpression(0, "Up Arrow"); // Up
m_dpad->SetControlExpression(1, "Down Arrow"); // Down
m_dpad->SetControlExpression(2, "Left Arrow"); // Left
m_dpad->SetControlExpression(3, "Right Arrow"); // Right
#else
m_dpad->SetControlExpression(0, "Up"); // Up
m_dpad->SetControlExpression(1, "Down"); // Down
m_dpad->SetControlExpression(2, "Left"); // Left
m_dpad->SetControlExpression(3, "Right"); // Right
#endif
// ugly stuff
// enable nunchuk
m_extension->switch_extension = 1;
// set nunchuk defaults
m_extension->attachments[1]->LoadDefaults(ciface);
}
}<|fim▁end|> | |
<|file_name|>gulpfile.js<|end_file_name|><|fim▁begin|>//
// Adapted from:
// http://stackoverflow.com/questions/22330103/how-to-include-node-modules-in-a-separate-browserify-vendor-bundle
//
var gulp = require('gulp');
var browserify = require('browserify');
var source = require('vinyl-source-stream');
var bust = require('gulp-buster');
var streamify = require('gulp-streamify');
var htmlreplace = require('gulp-html-replace');
var fs = require('fs');
var packageJson = require('./package.json');
var dependencies = Object.keys(packageJson && packageJson.dependencies || {});
function handleErrors(error) {
console.error(error.stack);
// Emit 'end' as the stream wouldn't do it itself.
// Without this, the gulp task won't end and the watch stops working.
this.emit('end');
}
gulp.task('libs', function () {
return browserify({debug: true})
.require(dependencies)
.bundle()
.on('error', handleErrors)
.pipe(source('libs.js'))
.pipe(gulp.dest('./dist/'))
.pipe(streamify(bust()))
.pipe(gulp.dest('.'));
});
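// App bundle: only src/index.js and its local modules; the vendor dependencies
// are marked external so they resolve against libs.js instead of being re-bundled.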
gulp.task('scripts', function () {
return browserify('./src/index.js', {debug: true})
.external(dependencies)
.bundle()
.on('error', handleErrors)
.on('end', ()=>{console.log("ended")})
.pipe(source('scripts.js'))
.pipe(gulp.dest('./dist/'))
.pipe(streamify(bust()))
.pipe(gulp.dest('.'));
});
gulp.task('css', function () {
return gulp.src('./styles/styles.css')
.pipe(gulp.dest('./dist/'))
.pipe(streamify(bust()))
.pipe(gulp.dest('.'));
});
gulp.task('icons', function () {
return gulp.src('./icons/**/*')
.pipe(gulp.dest('./dist/icons'));
});
gulp.task('favicons', function () {
return gulp.src('./favicons/**/*')
.pipe(gulp.dest('./dist/'));
});<|fim▁hole|> return gulp.src('index.html')
.pipe(htmlreplace({
'css': 'styles.css?v=' + busters['dist/styles.css'],
'js': [
'libs.js?v=' + busters['dist/libs.js'],
'scripts.js?v=' + busters['dist/scripts.js']
]
}))
.pipe(gulp.dest('./dist/'));
});
gulp.task('watch', function(){
gulp.watch('package.json', ['libs']);
gulp.watch('src/**', ['scripts']);
gulp.watch('styles/styles.css', ['css']);
gulp.watch('icons/**', ['icons']);
gulp.watch('favicons/**', ['favicons']);
gulp.watch(['busters.json', 'index.html'], ['html']);
});
gulp.task('default', ['libs', 'scripts', 'css', 'icons', 'favicons', 'html', 'watch']);<|fim▁end|> |
gulp.task('html', function () {
var busters = JSON.parse(fs.readFileSync('busters.json'));
|
<|file_name|>physical_function.py<|end_file_name|><|fim▁begin|># Copyright 2018 Huawei Technologies Co.,LTD.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from oslo_log import log as logging
from oslo_versionedobjects import base as object_base
from cyborg.common import exception
from cyborg.db import api as dbapi
from cyborg.objects import base
from cyborg.objects import fields as object_fields
from cyborg.objects.deployable import Deployable
from cyborg.objects.virtual_function import VirtualFunction
LOG = logging.getLogger(__name__)
@base.CyborgObjectRegistry.register
class PhysicalFunction(Deployable):
# Version 1.0: Initial version
VERSION = '1.0'
virtual_function_list = []
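    # NOTE: class-level default; _from_db_object() below gives each loaded
    # PhysicalFunction object its own list.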
def create(self, context):
# To ensure the creating type is PF
if self.type != 'pf':
raise exception.InvalidDeployType()
super(PhysicalFunction, self).create(context)
def save(self, context):
"""In addition to save the pf, it should also save the
vfs associated with this pf
"""
# To ensure the saving type is PF<|fim▁hole|> raise exception.InvalidDeployType()
for exist_vf in self.virtual_function_list:
exist_vf.save(context)
super(PhysicalFunction, self).save(context)
def add_vf(self, vf):
"""add a vf object to the virtual_function_list.
        If the vf already exists, it will be ignored;
        otherwise, the vf is appended to the list.
"""
if not isinstance(vf, VirtualFunction) or vf.type != 'vf':
raise exception.InvalidDeployType()
for exist_vf in self.virtual_function_list:
if base.obj_equal_prims(vf, exist_vf):
LOG.warning("The vf already exists")
return None
vf.parent_uuid = self.uuid
vf.root_uuid = self.root_uuid
vf_copy = copy.deepcopy(vf)
self.virtual_function_list.append(vf_copy)
def delete_vf(self, context, vf):
"""remove a vf from the virtual_function_list
if the vf does not exist, ignore it
"""
        for idx, exist_vf in enumerate(self.virtual_function_list):
if base.obj_equal_prims(vf, exist_vf):
removed_vf = self.virtual_function_list.pop(idx)
removed_vf.destroy(context)
return
LOG.warning("The removing vf does not exist!")
def destroy(self, context):
"""Delete a the pf from the DB."""
del self.virtual_function_list[:]
super(PhysicalFunction, self).destroy(context)
@classmethod
def get(cls, context, uuid):
"""Find a DB Physical Function and return an Obj Physical Function.
        In addition, it also finds all the Virtual Functions associated
with this Physical Function and place them in virtual_function_list
"""
db_pf = cls.dbapi.deployable_get(context, uuid)
obj_pf = cls._from_db_object(cls(context), db_pf)
pf_uuid = obj_pf.uuid
query = {"parent_uuid": pf_uuid, "type": "vf"}
db_vf_list = cls.dbapi.deployable_get_by_filters(context, query)
for db_vf in db_vf_list:
obj_vf = VirtualFunction.get(context, db_vf.uuid)
obj_pf.virtual_function_list.append(obj_vf)
return obj_pf
@classmethod
def get_by_filter(cls, context,
filters, sort_key='created_at',
sort_dir='desc', limit=None,
marker=None, join=None):
obj_dpl_list = []
filters['type'] = 'pf'
db_dpl_list = cls.dbapi.deployable_get_by_filters(context, filters,
sort_key=sort_key,
sort_dir=sort_dir,
limit=limit,
marker=marker,
join_columns=join)
for db_dpl in db_dpl_list:
obj_dpl = cls._from_db_object(cls(context), db_dpl)
query = {"parent_uuid": obj_dpl.uuid}
vf_get_list = VirtualFunction.get_by_filter(context,
query)
obj_dpl.virtual_function_list = vf_get_list
obj_dpl_list.append(obj_dpl)
return obj_dpl_list
@classmethod
def _from_db_object(cls, obj, db_obj):
"""Converts a physical function to a formal object.
:param obj: An object of the class.
:param db_obj: A DB model of the object
:return: The object of the class with the database entity added
"""
obj = Deployable._from_db_object(obj, db_obj)
if cls is PhysicalFunction:
obj.virtual_function_list = []
return obj<|fim▁end|> | if self.type != 'pf': |
<|file_name|>draw.js<|end_file_name|><|fim▁begin|>'use strict';
import {should as should_} from 'chai';
const should = should_();
import {spy, stub} from 'sinon';
import {setCanvas, canvas, context} from '../src/canvas';
import draw from '../src/draw';
// import {Sprite} from '../../../script/src/sprite';
describe('draw.js', () => {
let ctx;
before(() => {
setCanvas('game');
// Reset settings
draw.setFont({reset: true});
draw.setLine({reset: true});
draw.setShadow({reset: true});
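        // Stub/spy every canvas 2D context method so the tests can assert on
        // the calls that were made without actually rasterizing anything.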
ctx = {
fillRect: stub(context, 'fillRect'),
strokeRect: stub(context, 'strokeRect'),
clearRect: stub(context, 'clearRect'),
fillText: stub(context, 'fillText'),
strokeText: stub(context, 'strokeText'),
measureText: spy(context, 'measureText'),
beginPath: spy(context, 'beginPath'),
moveTo: spy(context, 'moveTo'),
lineTo: spy(context, 'lineTo'),
arc: spy(context, 'arc'),
arcTo: spy(context, 'arcTo'),
rect: spy(context, 'rect'),
quadraticCurveTo: spy(context, 'quadraticCurveTo'),
bezierCurveTo: spy(context, 'bezierCurveTo'),
closePath: spy(context, 'closePath'),
fill: stub(context, 'fill'),
stroke: stub(context, 'stroke'),
clip: stub(context, 'clip'),
drawImage: stub(context, 'drawImage'),
getImageData: spy(context, 'getImageData'),
putImageData: stub(context, 'putImageData'),
createImageData: spy(context, 'createImageData'),
save: stub(context, 'save'),
scale: stub(context, 'scale'),
rotate: stub(context, 'rotate'),
translate: stub(context, 'translate'),
transform: stub(context, 'transform'),
restore: stub(context, 'restore'),
};
});
afterEach(() => {
for(let i in ctx) {
ctx[i].reset();
}
});
after(() => {
for(let i in ctx) {
ctx[i].restore();
}
});
describe('rect(x, y, w, h, stroke = false)', () => {
it('should fill a rectangle with a falsey 5th parameter', () => {
draw.rect(0, 0, 100, 100);
ctx.fillRect.should.have.been.calledWith(0, 0, 100, 100);
ctx.strokeRect.should.have.not.been.called;
});
it('should stroke a rectangle with a truthy 5th parameter', () => {
draw.rect(0, 0, 100, 100, true);
ctx.fillRect.should.have.not.been.called;
ctx.strokeRect.should.have.been.calledWith(0, 0, 100, 100);
});
});
describe('point(x, y)', () => {
it('should fill a 1x1 px rectangle', () => {
draw.point(0, 0);
ctx.fillRect.should.have.been.calledWith(0, 0, 1, 1);
});
});
describe('circle(x, y, r, stroke = false)', () => {
it('should fill a circle with a falsey 5th parameter', () => {
draw.circle(0, 0, 5);
ctx.beginPath.should.have.been.calledOnce;
ctx.arc.should.have.been.calledWith(0, 0, 5);
ctx.fill.should.have.been.calledOnce;
ctx.stroke.should.have.not.been.called;
});
it('should stroke a circle with a truthy 5th parameter', () => {
draw.circle(0, 0, 5, true);
ctx.beginPath.should.have.been.calledOnce;
ctx.arc.should.have.been.calledWith(0, 0, 5);
ctx.fill.should.have.not.been.called;
ctx.stroke.should.have.been.calledOnce;
});
});
describe('text(str, x, y, stroke = false)', () => {
it('should fill the text with a falsey 5th parameter', () => {
draw.text('Hello World', 0, 0);
ctx.fillText.should.have.been.calledWith('Hello World', 0, 0);
ctx.strokeText.should.have.not.been.called;
});
it('should stroke the text with a truthy 5th parameter', () => {
draw.text('Hello World', 0, 0, true);
ctx.fillText.should.have.not.been.called;
ctx.strokeText.should.have.been.calledWith('Hello World', 0, 0);
});
});
describe('textWidth(text)', () => {
it('should not draw any text', () => {
draw.textWidth('Hello World');
ctx.measureText.should.have.been.calledWith('Hello World');
ctx.fillText.should.have.not.been.called;
ctx.strokeText.should.have.not.been.called;
});
        it('should return a number (the width of the text)', () => {
draw.text('Hello World');
draw.textWidth('Hello World').should.be.a('number');
});
});
describe('image(img[, sx, sy, swidth, sheight], x, y[, w, h])', () => {
it('should draw the image', () => {
const img = new Image(40, 50);
draw.image({img: img, swidth: 32, sheight: 32, x: 0, y: 0, width: 32, height: 32});
draw.image({img: img, sx: 10, sy: 10, x: 0, y: 0, width: 32, height: 32});
draw.image(img, 20, 20);
ctx.drawImage.should.have.been.calledThrice;
ctx.drawImage.should.have.been.calledWith(img, 0, 0, 32, 32, 0, 0, 32, 32);
ctx.drawImage.should.have.been.calledWith(img, 10, 10, 32, 32, 0, 0, 32, 32);
ctx.drawImage.should.have.been.calledWith(img, 20, 20);
});
});
describe('pixelData(pd, x, y)', () => {
it('should be an alias for pd.draw(x, y)', () => {
const pd = new draw.PixelData(32, 32);
const fn = stub(pd, 'draw');
draw.pixelData(pd, 32, 32);
fn.should.have.been.calledWith(32, 32);
fn.restore();
});
});
describe('clear()', () => {
it('should clear the entire canvas', () => {
draw.clear();
ctx.clearRect.should.have.been.calledWith(0, 0, canvas.width(), canvas.height());
});
});
describe('setColor(color)', () => {
it('should set both stroke and fill colors', () => {
draw.setColor('#ff0000');
context.fillStyle.should.equal('#ff0000');
context.strokeStyle.should.equal('#ff0000');
draw.setColor('#0000ff');
context.fillStyle.should.equal('#0000ff');
context.strokeStyle.should.equal('#0000ff');
});
it('should accept numbers and convert them to strings', () => {
draw.setColor(0xff0000);
context.fillStyle.should.equal('#ff0000');
context.strokeStyle.should.equal('#ff0000');
draw.setColor(0x0000ff);
context.fillStyle.should.equal('#0000ff');
context.strokeStyle.should.equal('#0000ff');
});
});
describe('setAlpha(alpha)', () => {
it('should set the global alpha', () => {
draw.setAlpha(0.5);
context.globalAlpha.should.equal(0.5);
draw.setAlpha(1);
context.globalAlpha.should.equal(1);
});
it('should constrain alpha to range [0, 1]', () => {
draw.setAlpha(-2);
context.globalAlpha.should.equal(0);
draw.setAlpha(3);
context.globalAlpha.should.equal(1);
});
});
describe('setComposite(composite)', () => {
it('should set the global composite operation', () => {
draw.setComposite('source-atop');
context.globalCompositeOperation.should.equal('source-atop');
draw.setComposite('source-over');
context.globalCompositeOperation.should.equal('source-over');
});
});
describe('setLine({cap, join, width, miter, reset = false})', () => {
it('should set the appropriate line properties', () => {
draw.setLine({cap: 'round'});
context.lineCap.should.equal('round');
draw.setLine({width: 15});
context.lineCap.should.equal('round');
context.lineWidth.should.equal(15);
draw.setLine({join: 'bevel'});
context.lineCap.should.equal('round');
context.lineWidth.should.equal(15);
context.lineJoin.should.equal('bevel');
draw.setLine({miter: 15});
context.lineCap.should.equal('round');
context.lineWidth.should.equal(15);
context.lineJoin.should.equal('bevel');
context.miterLimit.should.equal(15);
draw.setLine({cap: 'butt', width: 1, join: 'miter', miter: 10});
context.lineCap.should.equal('butt');
context.lineWidth.should.equal(1);
context.lineJoin.should.equal('miter');
context.miterLimit.should.equal(10);
});
it('should ignore other values if reset is true', () => {
draw.setLine({reset: true, cap: 'round', width: 10});
context.lineCap.should.equal('butt');
context.lineWidth.should.equal(1);
context.lineJoin.should.equal('miter');
context.miterLimit.should.equal(10);
});
});
describe('setShadow({x, y, blur, color, reset = false})', () => {
it('should set the appropriate shadow properties', () => {
draw.setShadow({x: 5});
context.shadowOffsetX.should.equal(5);
draw.setShadow({y: 10});
context.shadowOffsetX.should.equal(5);
context.shadowOffsetY.should.equal(10);
draw.setShadow({blur: 15});
context.shadowOffsetX.should.equal(5);
context.shadowOffsetY.should.equal(10);
context.shadowBlur.should.equal(15);
draw.setShadow({color: 0xff0000});
context.shadowOffsetX.should.equal(5);
context.shadowOffsetY.should.equal(10);
context.shadowBlur.should.equal(15);
context.shadowColor.should.equal('#ff0000');
draw.setShadow({color: '#0000ff'});
context.shadowOffsetX.should.equal(5);
context.shadowOffsetY.should.equal(10);
context.shadowBlur.should.equal(15);
context.shadowColor.should.equal('#0000ff');
draw.setShadow({x: 0, y: 0, blur: 0, color: '#000000'});
context.shadowOffsetX.should.equal(0);
context.shadowOffsetY.should.equal(0);
context.shadowBlur.should.equal(0);
context.shadowColor.should.equal('#000000');
});
it('should ignore other values if reset is passed', () => {
draw.setShadow({x: 5, y: 5, blur: 3, color: '#00FF00', reset: true});
context.shadowOffsetX.should.equal(0);
context.shadowOffsetY.should.equal(0);
context.shadowBlur.should.equal(0);
context.shadowColor.should.equal('#000000');
});
});
describe('setFont({family, size, align, baseline, reset = false})', () => {
it('should set the appropriate font properties', () => {
draw.setFont({size: 15});
context.font.should.equal('15px sans-serif');
draw.setFont({family: 'serif'});
context.font.should.equal('15px serif');
draw.setFont({align: 'center'});
context.font.should.equal('15px serif');
context.textAlign.should.equal('center');
draw.setFont({baseline: 'top'});
context.font.should.equal('15px serif');
context.textAlign.should.equal('center');
context.textBaseline.should.equal('top');
draw.setFont({family: 'sans-serif', size: 10, align: 'start', baseline: 'alphabetic'});
context.font.should.equal('10px sans-serif');
context.textAlign.should.equal('start');
context.textBaseline.should.equal('alphabetic');
});
it('should ignore other values if reset is passed', () => {
draw.setFont({family: 'serif', size: 15, align: 'top', baseline: 'middle', reset: true});
context.font.should.equal('10px sans-serif');
context.textAlign.should.equal('start');
context.textBaseline.should.equal('alphabetic');
});
});
describe('transformed({scale: {x: 1, y: 1}, rotate, translate: {x: 0, y: 0}, transform: [1, 0, 0, 1, 0, 0]}, ...todo)', () => {
it('should context.save() at the beginning, context.restore() at the end, and other functions inbetween', () => {
const cb = spy();
draw.transformed({scale: {x: 2, y: 2}, rotate: 50, translate: {x: 15, y: 30}, transform: [1, 0, 0, 1, 0, 0]}, cb);
ctx.save.should.have.been.calledBefore(ctx.scale);
ctx.scale.should.have.been.calledBefore(ctx.rotate);
ctx.rotate.should.have.been.calledBefore(ctx.translate);
ctx.translate.should.have.been.calledBefore(ctx.transform);
ctx.transform.should.have.been.calledBefore(cb);
cb.should.have.been.calledOnce;
ctx.restore.should.have.been.calledAfter(cb);
});
it('should call all functions passed in order', () => {
const cbs = [spy(), spy(), spy()];
draw.transformed({}, ...cbs);
cbs[0].should.have.been.calledOnce;
cbs[0].should.have.been.calledBefore(cbs[1]);
cbs[1].should.have.been.calledOnce;
cbs[1].should.have.been.calledBefore(cbs[2]);
cbs[2].should.have.been.calledOnce;
});
});
describe('Path', () => {
it('should be constructed with new Path()', () => {
new draw.Path().should.be.an.instanceof(draw.Path);
(() => draw.Path()).should.throw(TypeError);
ctx.beginPath.should.not.have.been.called;
});
describe('#length', () => {
it('should return the number of actions in the stack', () => {<|fim▁hole|> new draw.Path().move().line().length.should.equal(2);
});
it('should not include the initial beginPath call', () => {
new draw.Path().length.should.equal(0);
});
});
describe('#move(x, y)', () => {
it('should add context.moveTo(x, y) to the stack', () => {
const p = new draw.Path().move(32, 32);
p.length.should.equal(1);
p.stroke();
ctx.moveTo.should.have.been.calledWith(32, 32);
});
it('should not call context.moveTo(x, y)', () => {
new draw.Path().move(32, 32);
ctx.moveTo.should.not.have.been.called;
});
it('should be chainable', () => {
(() => new draw.Path().move(32, 32).move(16, 16)).should.not.throw();
});
});
describe('#line(x, y)', () => {
it('should add context.lineTo(x, y) to the stack', () => {
const p = new draw.Path().line(32, 32);
p.length.should.equal(1);
p.stroke();
ctx.lineTo.should.have.been.calledWith(32, 32);
});
it('should not call context.lineTo(x, y)', () => {
new draw.Path().line(32, 32);
ctx.lineTo.should.not.have.been.called;
});
it('should be chainable', () => {
(() => new draw.Path().line(32, 32).line(16, 16)).should.not.throw();
});
});
describe('#rect(x, y, w, h)', () => {
it('should add context.rect(x, y, w, h) to the stack', () => {
const p = new draw.Path().rect(16, 16, 32, 32);
p.length.should.equal(1);
p.stroke();
ctx.rect.should.have.been.calledWith(16, 16, 32, 32);
});
it('should not call context.rect(x, y)', () => {
new draw.Path().rect(32, 32, 16, 16);
ctx.rect.should.not.have.been.called;
});
it('should be chainable', () => {
(() => new draw.Path().rect(16, 16, 32, 32).rect(32, 32, 16, 16)).should.not.throw();
});
});
describe('#arc(x, y, r, start, end[, ccw])', () => {
it('should add context.arc(x, y, r, start, end[, ccw]) to the stack', () => {
const p = new draw.Path().arc(32, 32, 32, 0, Math.PI, false);
p.length.should.equal(1);
p.stroke();
ctx.arc.should.have.been.calledWith(32, 32, 32, 0, Math.PI, false);
});
it('should not call context.arc(x, y, r, start, end[, ccw])', () => {
new draw.Path().arc(32, 32, 32, 0, Math.PI, false);
ctx.arc.should.not.have.been.called;
});
it('should be chainable', () => {
(() => new draw.Path().arc(32, 32, 32, 0, Math.PI, false).arc(32, 32, 32, 0, Math.PI, false)).should.not.throw();
});
});
describe('#curve(x1, y1, x2, y2, r)', () => {
it('should add context.arcTo(x1, y1, x2, y2, r) to the stack', () => {
const p = new draw.Path().curve(32, 32, 32, 64, 32);
p.length.should.equal(1);
p.stroke();
ctx.arcTo.should.have.been.calledWith(32, 32, 32, 64, 32);
});
it('should not call context.arcTo(x1, y1, x2, y2, r)', () => {
new draw.Path().curve(32, 32, 32, 64, 32);
ctx.arcTo.should.not.have.been.called;
});
it('should be chainable', () => {
(() => new draw.Path().curve(32, 32, 32, 64, 32).curve(32, 96, 64, 96, 32)).should.not.throw();
});
});
describe('#bezier(x1, y1, x2, y2[, x3, y3])', () => {
it('should add context.quadraticCurveTo(x1, y1, x2, y2) to the stack when called with 4 arguments', () => {
const p = new draw.Path().bezier(32, 32, 32, 64);
p.length.should.equal(1);
p.stroke();
ctx.quadraticCurveTo.should.have.been.calledWith(32, 32, 32, 64);
});
it('should add context.bezierCurveTo(x1, y1, x2, y2, x3, y3) to the stack when called with 6 arguments', () => {
const p = new draw.Path().bezier(32, 32, 32, 64, 64, 64);
p.length.should.equal(1);
p.stroke();
ctx.bezierCurveTo.should.have.been.calledWith(32, 32, 32, 64, 64, 64);
});
it('should not call context.quadraticCurveTo(x1, y1, x2, y2) or context.bezierCurveTo(x1, y1, x2, y2, x3, y3)', () => {
new draw.Path().bezier(32, 32, 32, 64).bezier(32, 32, 32, 64, 64, 64);
ctx.quadraticCurveTo.should.not.have.been.called;
ctx.bezierCurveTo.should.not.have.been.called;
});
it('should be chainable', () => {
(() => new draw.Path().bezier(32, 32, 32, 64).bezier(32, 32, 32, 64, 64, 64).move(32, 32)).should.not.throw();
});
});
describe('#close()', () => {
it('should add context.closePath() to the stack', () => {
const p = new draw.Path().close();
p.length.should.equal(1);
p.stroke();
ctx.closePath.should.have.been.calledOnce;
});
it('should not call context.close', () => {
new draw.Path().close();
ctx.closePath.should.not.have.been.called;
});
it('should be chainable', () => {
(() => new draw.Path().close().close()).should.not.throw();
});
});
describe('#do(fn)', () => {
it('should add a given fn to the stack', () => {
const cb = spy();
const p = new draw.Path().do(cb);
p.length.should.equal(1);
p.stroke();
cb.should.have.been.calledOnce;
});
it('should not call fn', () => {
const cb = spy();
const p = new draw.Path().do(cb);
cb.should.not.have.been.called;
});
it('should be chainable', () => {
(() => new draw.Path().do(() => {}).do(() => {})).should.not.throw();
});
});
describe('#fill({color, shadow, transform})', () => {
it('should call context.save() at the beginning, context.restore() at the end, and context.fill() in the middle', () => {
new draw.Path().move(32, 32).line(64, 64).fill();
ctx.save.should.have.been.calledBefore(ctx.fill);
ctx.fill.should.have.been.calledBefore(ctx.restore);
ctx.restore.should.have.been.called;
});
it('should set the color, shadow, and transform if they are specified', () => {
new draw.Path().move(32, 32).do(() => {
context.fillStyle.should.equal('#ff0000');
context.shadowBlur.should.equal(5);
ctx.translate.should.have.been.calledWith(5, 0);
}).line(64, 64).fill({
color: 0xff0000,
shadow: {
blur: 5
},
transform: {
translate: {
x: 5
}
}
});
});
it('should call the entire stack in order', () => {
new draw.Path().move(32, 32).line(64, 64).arc(32, 32, 0, 0, 3).rect(32, 32, 32, 32).fill();
ctx.beginPath.should.have.been.calledBefore(ctx.moveTo);
ctx.moveTo.should.have.been.calledBefore(ctx.lineTo);
ctx.lineTo.should.have.been.calledBefore(ctx.arc);
ctx.arc.should.have.been.calledBefore(ctx.rect);
ctx.rect.should.have.been.calledBefore(ctx.fill);
});
it('should be chainable', () => {
(() => new draw.Path().fill().fill({transform: {}, shadow: {}, color: 0x000000})).should.not.throw();
});
});
describe('#stroke({color, line, transform})', () => {
it('should call context.save() at the beginning, context.restore() at the end, and context.stroke() in the middle', () => {
new draw.Path().move(32, 32).line(64, 64).stroke();
ctx.save.should.have.been.calledBefore(ctx.stroke);
ctx.stroke.should.have.been.calledBefore(ctx.restore);
ctx.restore.should.have.been.called;
});
it('should set the color, line, and transform if they are specified', () => {
new draw.Path().move(32, 32).do(() => {
context.strokeStyle.should.equal('#ff0000');
context.lineWidth.should.equal(5);
ctx.translate.should.have.been.calledWith(5, 0);
}).line(64, 64).stroke({
color: 0xff0000,
line: {
width: 5
},
transform: {
translate: {
x: 5
}
}
});
});
it('should call the entire stack in order', () => {
new draw.Path().move(32, 32).line(64, 64).arc(32, 32, 0, 0, 3).rect(32, 32, 32, 32).stroke();
ctx.beginPath.should.have.been.calledBefore(ctx.moveTo);
ctx.moveTo.should.have.been.calledBefore(ctx.lineTo);
ctx.lineTo.should.have.been.calledBefore(ctx.arc);
ctx.arc.should.have.been.calledBefore(ctx.rect);
ctx.rect.should.have.been.calledBefore(ctx.stroke);
});
it('should be chainable', () => {
(() => new draw.Path().stroke().stroke({transform: {}, line: '', color: 0x000000}).stroke()).should.not.throw();
});
});
describe('#doInside([{transform},] ...todo)', () => {
it('should call context.save() at the beginning, context.restore() at the end, and context.clip() in the middle', () => {
new draw.Path().rect(32, 32, 64, 64).doInside(() => {});
ctx.save.should.have.been.calledBefore(ctx.clip);
ctx.clip.should.have.been.calledBefore(ctx.restore);
ctx.restore.should.have.been.called;
});
it('should set the transform if given', () => {
new draw.Path().rect(32, 32, 64, 64).doInside({
translate: {
x: 5
}
}, () => {
ctx.translate.should.have.been.called;
});
});
it('should call the entire stack in order', () => {
new draw.Path().move(32, 32).line(64, 64).arc(32, 32, 0, 0, 3).rect(32, 32, 32, 32).doInside();
ctx.beginPath.should.have.been.calledBefore(ctx.moveTo);
ctx.moveTo.should.have.been.calledBefore(ctx.lineTo);
ctx.lineTo.should.have.been.calledBefore(ctx.arc);
ctx.arc.should.have.been.calledBefore(ctx.rect);
ctx.rect.should.have.been.calledBefore(ctx.clip);
});
it('should call all items in todo in order, after clipping', () => {
const cbs = [spy(), spy(), spy()];
new draw.Path().rect(32, 32, 64, 64).doInside(...cbs);
ctx.clip.should.have.been.calledBefore(cbs[0]);
cbs[0].should.have.been.calledOnce;
cbs[0].should.have.been.calledBefore(cbs[1]);
cbs[1].should.have.been.calledOnce;
cbs[1].should.have.been.calledBefore(cbs[2]);
cbs[2].should.have.been.calledOnce;
});
it('should be chainable', () => {
(() => new draw.Path().doInside({}, () => {}).doInside(() => {}).doInside()).should.not.throw();
});
});
describe('#copy()', () => {
it('should make an identical Path', () => {
const p = new draw.Path().move(32, 32).line(64, 64);
const c = p.copy();
c.should.be.an.instanceof(draw.Path);
c.length.should.deep.equal(p.length);
});
            it('should not modify the original when the copy is changed', () => {
const p = new draw.Path().move(32, 32).line(64, 64);
const c = p.copy().arc(64, 64, 32, 0, Math.PI * 2);
p.length.should.not.equal(c.length);
});
});
describe('#contains([offx, offy,] x, y)', () => {
it('should be true if the point is within the path', () => {
new draw.Path().move(0, 32).line(32, 32).contains(16, 32).should.be.true;
new draw.Path().move(0, 32).line(32, 32).line(32, 64).contains(30, 34).should.be.true;
});
            it('should be false if the point is not within the path', () => {
new draw.Path().move(0, 32).line(32, 32).contains(48, 32).should.be.false;
new draw.Path().move(0, 32).line(32, 32).contains(16, 16).should.be.false;
});
it('should allow offsets to be specified', () => {
new draw.Path().move(0, 32).line(32, 32).line(32, 64).contains(130, 134).should.be.false;
new draw.Path().move(0, 32).line(32, 32).line(32, 64).contains(100, 100, 130, 134).should.be.true;
});
});
});
describe('PixelData', () => {
const [width, height] = [32, 32];
let pd;
before(() => pd = new draw.PixelData(width, height));
it('should be constructed with new PixelData([x, y,] w, h)', () => {
new draw.PixelData(32, 32).should.be.an.instanceof(draw.PixelData);
new draw.PixelData(16, 16, 16, 16).should.be.an.instanceof(draw.PixelData);
ctx.createImageData.should.have.been.calledWith(32, 32);
ctx.getImageData.should.have.been.calledWith(16, 16, 16, 16);
(() => draw.PixelData(32, 32)).should.throw(TypeError);
});
describe('#width', () => {
it('should return the width of the PixelData', () => {
pd.width.should.equal(32);
});
it('should be read only', () => {
(() => pd.width = 16).should.throw(TypeError);
});
});
describe('#height', () => {
it('should return the height of the PixelData', () => {
pd.height.should.equal(32);
});
it('should be read only', () => {
(() => pd.height = 16).should.throw(TypeError);
});
});
describe('#data[x][y]', () => {
it('should return a pixel from the ImageData', () => {
pd.data[16][16].should.deep.equal([0, 0, 0, 0]);
});
});
describe('#data[x][y]=', () => {
it('should be settable to change the ImageData', () => {
pd.data[16][16] = [255, 0, 0, 255];
pd.data[16][16].should.deep.equal([255, 0, 0, 255]);
pd.data[16][16] = [0, 0, 0, 0];
pd.data[16][16].should.deep.equal([0, 0, 0, 0]);
});
it('should not work with only one index', () => {
(() => pd.data[16] = [255, 0, 0, 255]).should.throw(TypeError);
});
});
describe('#draw(x, y)', () => {
it('should draw the PixelData', () => {
pd.draw(32, 32);
ctx.putImageData.should.have.been.calledOnce;
});
});
});
});<|fim▁end|> | |
<|file_name|>ast_to_hir.rs<|end_file_name|><|fim▁begin|>use std::ops::Range;
use std::sync::Arc;
use super::ast::{self, Assignment, Ast, AstKind, Identifier, Int, Struct, Symbol, Text};
use super::cst::{self, CstDb};
use super::cst_to_ast::CstToAst;
use super::error::CompilerError;
use super::hir::{self, Body, Expression, Lambda};
use crate::builtin_functions;
use crate::input::Input;
use im::HashMap;
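// Salsa query group that lowers an input's AST to HIR and maps ids between the
// HIR, AST, and CST representations.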
#[salsa::query_group(AstToHirStorage)]
pub trait AstToHir: CstDb + CstToAst {
fn hir_to_ast_id(&self, input: Input, id: hir::Id) -> Option<ast::Id>;
fn hir_to_cst_id(&self, input: Input, id: hir::Id) -> Option<cst::Id>;
fn hir_id_to_span(&self, input: Input, id: hir::Id) -> Option<Range<usize>>;
fn hir_id_to_display_span(&self, input: Input, id: hir::Id) -> Option<Range<usize>>;
fn ast_to_hir_id(&self, input: Input, id: ast::Id) -> Option<hir::Id>;
fn cst_to_hir_id(&self, input: Input, id: cst::Id) -> Option<hir::Id>;
fn hir(&self, input: Input) -> Option<(Arc<Body>, HashMap<hir::Id, ast::Id>)>;
fn hir_raw(
&self,
input: Input,
) -> Option<(Arc<Body>, HashMap<hir::Id, ast::Id>, Vec<CompilerError>)>;
}
fn hir_to_ast_id(db: &dyn AstToHir, input: Input, id: hir::Id) -> Option<ast::Id> {
let (_, hir_to_ast_id_mapping) = db.hir(input).unwrap();
hir_to_ast_id_mapping.get(&id).cloned()
}
fn hir_to_cst_id(db: &dyn AstToHir, input: Input, id: hir::Id) -> Option<cst::Id> {
let id = db.hir_to_ast_id(input.clone(), id)?;
db.ast_to_cst_id(input, id)
}
fn hir_id_to_span(db: &dyn AstToHir, input: Input, id: hir::Id) -> Option<Range<usize>> {
let id = db.hir_to_ast_id(input.clone(), id)?;
db.ast_id_to_span(input, id)
}
fn hir_id_to_display_span(db: &dyn AstToHir, input: Input, id: hir::Id) -> Option<Range<usize>> {
let id = db.hir_to_cst_id(input.clone(), id)?;
Some(db.find_cst(input, id).display_span())
}
fn ast_to_hir_id(db: &dyn AstToHir, input: Input, id: ast::Id) -> Option<hir::Id> {
let (_, hir_to_ast_id_mapping) = db.hir(input).unwrap();
hir_to_ast_id_mapping
.iter()
.find_map(|(key, &value)| if value == id { Some(key) } else { None })
.cloned()
}
fn cst_to_hir_id(db: &dyn AstToHir, input: Input, id: cst::Id) -> Option<hir::Id> {
let id = db.cst_to_ast_id(input.clone(), id)?;
db.ast_to_hir_id(input, id)
}
fn hir(db: &dyn AstToHir, input: Input) -> Option<(Arc<Body>, HashMap<hir::Id, ast::Id>)> {
db.hir_raw(input)
.map(|(hir, id_mapping, _)| (hir, id_mapping))
}
fn hir_raw(
db: &dyn AstToHir,
input: Input,
) -> Option<(Arc<Body>, HashMap<hir::Id, ast::Id>, Vec<CompilerError>)> {
let (ast, _) = db.ast(input.clone())?;
let mut context = Context {
db,
input: input.clone(),
};
let mut compiler = Compiler::new(&mut context);
compiler.compile(&ast);
Some((
Arc::new(compiler.body),
compiler.output.id_mapping,
compiler.output.errors,
))
}
struct Context<'c> {
db: &'c dyn AstToHir,
input: Input,
}
#[derive(Clone)]
struct Output {
id_mapping: HashMap<hir::Id, ast::Id>,
errors: Vec<CompilerError>,
}
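/// Lowers ASTs into a HIR `Body`. Expression ids are hierarchical: `parent_ids`
/// plus the running `next_id`, so expressions inside a lambda are numbered
/// underneath the lambda's own id. `identifiers` tracks the names currently in
/// scope (builtins are pre-registered in `Compiler::new`).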
struct Compiler<'c> {
context: &'c Context<'c>,
output: Output,
body: Body,
parent_ids: Vec<usize>,
next_id: usize,
identifiers: HashMap<String, hir::Id>,
}
impl<'c> Compiler<'c> {
fn new(context: &'c Context<'c>) -> Self {
let builtin_identifiers = builtin_functions::VALUES
.iter()
.enumerate()
.map(|(index, builtin_function)| {
let string = format!("builtin{:?}", builtin_function);
(string, hir::Id(vec![index]))
})
.collect::<HashMap<_, _>>();
Compiler {
context,
output: Output {
id_mapping: HashMap::new(),
errors: vec![],
},
parent_ids: vec![],
next_id: builtin_identifiers.len(),
body: Body::new(),
identifiers: builtin_identifiers,
}
}
fn compile(&mut self, asts: &[Ast]) {
if asts.is_empty() {
self.body.out = Some(self.push_without_ast_mapping(Expression::nothing()));
} else {
            for ast in asts.iter() {
self.body.out = Some(self.compile_single(ast));
}
}
}
fn compile_single(&mut self, ast: &Ast) -> hir::Id {
match &ast.kind {
AstKind::Int(Int(int)) => self.push(ast.id, Expression::Int(int.to_owned()), None),
AstKind::Text(Text(string)) => {
self.push(ast.id, Expression::Text(string.value.to_owned()), None)
}
AstKind::Identifier(Identifier(symbol)) => {
let reference = match self.identifiers.get(&symbol.value) {
Some(reference) => reference.to_owned(),
None => {
self.output.errors.push(CompilerError {
message: format!("Unknown reference: {}", symbol.value),
span: self
.context
.db
.ast_id_to_span(self.context.input.clone(), symbol.id)
.unwrap(),
});
return self.push(symbol.id, Expression::Error, None);
}
};
self.push(ast.id, Expression::Reference(reference.to_owned()), None)
}
AstKind::Symbol(Symbol(symbol)) => {
self.push(ast.id, Expression::Symbol(symbol.value.to_owned()), None)
}
AstKind::Struct(Struct { entries }) => {
let entries = entries
.iter()
.map(|(key, value)| (self.compile_single(key), self.compile_single(value)))
.collect();
self.push(ast.id, Expression::Struct(entries), None)
}
AstKind::Lambda(ast::Lambda {
parameters,
body: body_asts,
}) => {
let mut body = Body::new();
let lambda_id = add_ids(&self.parent_ids, self.next_id);
let mut identifiers = self.identifiers.clone();
for (parameter_index, parameter) in parameters.iter().enumerate() {
let id = hir::Id(add_ids(&lambda_id, parameter_index));
self.output.id_mapping.insert(id.clone(), parameter.id);
body.identifiers
.insert(id.to_owned(), parameter.value.to_owned());
identifiers.insert(parameter.value.to_owned(), id);
}
let mut inner = Compiler::<'c> {
context: &mut self.context,
output: self.output.clone(),
body,
parent_ids: lambda_id.to_owned(),
next_id: parameters.len(),
identifiers,
};
inner.compile(&body_asts);
self.output = inner.output;
self.push(
ast.id,
Expression::Lambda(Lambda {
first_id: hir::Id(add_ids(&lambda_id[..], 0)),
parameters: parameters.iter().map(|it| it.value.to_owned()).collect(),
body: inner.body,
}),
None,<|fim▁hole|> )
}
AstKind::Call(ast::Call { name, arguments }) => {
let arguments = arguments
.iter()
.map(|argument| self.compile_single(argument))
.collect();
let function = match self.identifiers.get(&name.value) {
Some(function) => function.to_owned(),
None => {
self.output.errors.push(CompilerError {
message: format!("Unknown function: {}", name.value),
span: self
.context
.db
.ast_id_to_span(self.context.input.clone(), name.id)
.unwrap(),
});
return self.push(name.id, Expression::Error, None);
}
};
self.push(
ast.id,
Expression::Call {
function,
arguments,
},
None,
)
}
AstKind::Assignment(Assignment { name, body }) => {
let mut inner = Compiler::<'c> {
context: &mut self.context,
output: self.output.clone(),
body: Body::new(),
parent_ids: add_ids(&self.parent_ids, self.next_id),
next_id: 0,
identifiers: self.identifiers.clone(),
};
inner.compile(&body);
self.output = inner.output;
self.push(
ast.id,
Expression::Body(inner.body),
Some(name.value.to_owned()),
)
}
AstKind::Error => self.push(ast.id, Expression::Error, None),
}
}
fn push(
&mut self,
ast_id: ast::Id,
expression: Expression,
identifier: Option<String>,
) -> hir::Id {
let id = self.create_next_id(ast_id);
self.body.push(id.clone(), expression, identifier.clone());
self.output.id_mapping.insert(id.clone(), ast_id);
if let Some(identifier) = identifier {
self.identifiers.insert(identifier, id.clone());
}
id
}
fn push_without_ast_mapping(&mut self, expression: Expression) -> hir::Id {
let id = self.create_next_id_without_ast_mapping();
self.body.push(id.to_owned(), expression, None);
id
}
fn create_next_id(&mut self, ast_id: ast::Id) -> hir::Id {
let id = self.create_next_id_without_ast_mapping();
assert!(matches!(
self.output.id_mapping.insert(id.to_owned(), ast_id),
None
));
id
}
fn create_next_id_without_ast_mapping(&mut self) -> hir::Id {
let id = hir::Id(add_ids(&self.parent_ids, self.next_id));
self.next_id += 1;
id
}
}
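/// Builds a child id by appending `id` to the parent id path.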
fn add_ids(parents: &[usize], id: usize) -> Vec<usize> {
    parents.iter().copied().chain(std::iter::once(id)).collect()
}<|fim▁end|> | |
<|file_name|>connection.py<|end_file_name|><|fim▁begin|>#
# ovirt-engine-setup -- ovirt engine setup
# Copyright (C) 2013-2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#<|fim▁hole|># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Connection plugin."""
import gettext
from otopi import constants as otopicons
from otopi import plugin, util
from ovirt_engine import configfile
from ovirt_engine_setup import constants as osetupcons
from ovirt_engine_setup.engine import constants as oenginecons
from ovirt_engine_setup.engine_common import constants as oengcommcons
from ovirt_engine_setup.engine_common import database
def _(m):
return gettext.dgettext(message=m, domain='ovirt-engine-setup')
@util.export
class Plugin(plugin.PluginBase):
"""Connection plugin."""
def __init__(self, context):
super(Plugin, self).__init__(context=context)
@plugin.event(
stage=plugin.Stages.STAGE_BOOT,
)
def _boot(self):
self.environment[
otopicons.CoreEnv.LOG_FILTER_KEYS
].append(
oenginecons.EngineDBEnv.PASSWORD
)
@plugin.event(
stage=plugin.Stages.STAGE_INIT,
)
def _init(self):
self.environment.setdefault(
oenginecons.EngineDBEnv.HOST,
None
)
self.environment.setdefault(
oenginecons.EngineDBEnv.PORT,
None
)
self.environment.setdefault(
oenginecons.EngineDBEnv.SECURED,
None
)
self.environment.setdefault(
oenginecons.EngineDBEnv.SECURED_HOST_VALIDATION,
None
)
self.environment.setdefault(
oenginecons.EngineDBEnv.USER,
None
)
self.environment.setdefault(
oenginecons.EngineDBEnv.PASSWORD,
None
)
self.environment.setdefault(
oenginecons.EngineDBEnv.DATABASE,
None
)
self.environment.setdefault(
oenginecons.EngineDBEnv.DUMPER,
oenginecons.Defaults.DEFAULT_DB_DUMPER
)
self.environment.setdefault(
oenginecons.EngineDBEnv.FILTER,
oenginecons.Defaults.DEFAULT_DB_FILTER
)
self.environment.setdefault(
oenginecons.EngineDBEnv.RESTORE_JOBS,
oenginecons.Defaults.DEFAULT_DB_RESTORE_JOBS
)
self.environment[oenginecons.EngineDBEnv.CONNECTION] = None
self.environment[oenginecons.EngineDBEnv.STATEMENT] = None
self.environment[oenginecons.EngineDBEnv.NEW_DATABASE] = True
@plugin.event(
stage=plugin.Stages.STAGE_SETUP,
name=oengcommcons.Stages.DB_CONNECTION_SETUP,
)
def _setup(self):
dbovirtutils = database.OvirtUtils(
plugin=self,
dbenvkeys=oenginecons.Const.ENGINE_DB_ENV_KEYS,
)
dbovirtutils.detectCommands()
config = configfile.ConfigFile([
oenginecons.FileLocations.OVIRT_ENGINE_SERVICE_CONFIG_DEFAULTS,
oenginecons.FileLocations.OVIRT_ENGINE_SERVICE_CONFIG
])
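        # If the engine service config already contains database credentials,
        # verify them and reuse them; also record whether the database is
        # still empty (a "new" database).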
if config.get('ENGINE_DB_PASSWORD'):
try:
dbenv = {}
for e, k in (
(oenginecons.EngineDBEnv.HOST, 'ENGINE_DB_HOST'),
(oenginecons.EngineDBEnv.PORT, 'ENGINE_DB_PORT'),
(oenginecons.EngineDBEnv.USER, 'ENGINE_DB_USER'),
(oenginecons.EngineDBEnv.PASSWORD, 'ENGINE_DB_PASSWORD'),
(oenginecons.EngineDBEnv.DATABASE, 'ENGINE_DB_DATABASE'),
):
dbenv[e] = config.get(k)
for e, k in (
(oenginecons.EngineDBEnv.SECURED, 'ENGINE_DB_SECURED'),
(
oenginecons.EngineDBEnv.SECURED_HOST_VALIDATION,
'ENGINE_DB_SECURED_VALIDATION'
)
):
dbenv[e] = config.getboolean(k)
dbovirtutils.tryDatabaseConnect(dbenv)
self.environment.update(dbenv)
self.environment[
oenginecons.EngineDBEnv.NEW_DATABASE
] = dbovirtutils.isNewDatabase()
except RuntimeError as e:
self.logger.debug(
'Existing credential use failed',
exc_info=True,
)
msg = _(
'Cannot connect to Engine database using existing '
'credentials: {user}@{host}:{port}'
).format(
host=dbenv[oenginecons.EngineDBEnv.HOST],
port=dbenv[oenginecons.EngineDBEnv.PORT],
database=dbenv[oenginecons.EngineDBEnv.DATABASE],
user=dbenv[oenginecons.EngineDBEnv.USER],
)
if self.environment[
osetupcons.CoreEnv.ACTION
] == osetupcons.Const.ACTION_REMOVE:
self.logger.warning(msg)
else:
raise RuntimeError(msg)
# vim: expandtab tabstop=4 shiftwidth=4<|fim▁end|> | # http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, |
<|file_name|>modules.rs<|end_file_name|><|fim▁begin|>use parking_lot::RwLock;
use std::sync::Arc;
use crate::language::ty::SourceType;
use crate::size::InstanceSize;
use crate::utils::GrowableVec;
use crate::vm::{
namespace_path, replace_type_param, Candidate, FctDefinitionId, Field, FieldDef, FileId,
NamespaceId, TraitDefinitionId, VM,
};
use crate::vtable::VTableBox;
use dora_parser::ast;
use dora_parser::interner::Name;
use dora_parser::lexer::position::Position;
use std::collections::HashSet;
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct ModuleId(usize);
impl ModuleId {
pub fn max() -> ModuleId {
ModuleId(usize::max_value())
}
}
impl From<ModuleId> for usize {
fn from(data: ModuleId) -> usize {
data.0
}
}
impl From<usize> for ModuleId {
fn from(data: usize) -> ModuleId {
ModuleId(data)
}
}
impl GrowableVec<RwLock<Module>> {
pub fn idx(&self, index: ModuleId) -> Arc<RwLock<Module>> {
self.idx_usize(index.0)
}
}
pub static DISPLAY_SIZE: usize = 6;
#[derive(Debug)]
pub struct Module {
pub id: ModuleId,
pub file_id: FileId,
pub ast: Arc<ast::Module>,
pub namespace_id: NamespaceId,
pub pos: Position,
pub name: Name,
pub ty: SourceType,
pub parent_class: Option<SourceType>,
pub internal: bool,
pub internal_resolved: bool,
pub has_constructor: bool,
pub is_pub: bool,
pub constructor: Option<FctDefinitionId>,
pub fields: Vec<Field>,
pub methods: Vec<FctDefinitionId>,
pub virtual_fcts: Vec<FctDefinitionId>,
pub traits: Vec<TraitDefinitionId>,
}
impl Module {
pub fn name(&self, vm: &VM) -> String {
namespace_path(vm, self.namespace_id, self.name)
}
}
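/// Looks up methods with the given name on the module, walking up the
/// parent-class chain and skipping methods that are overridden by one that was
/// already found further down the chain.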
pub fn find_methods_in_module(vm: &VM, object_type: SourceType, name: Name) -> Vec<Candidate> {
let mut ignores = HashSet::new();
let mut module_type = object_type;
loop {
let module_id = module_type.module_id().expect("no module");
let module = vm.modules.idx(module_id);
let module = module.read();
for &method in &module.methods {
let method = vm.fcts.idx(method);
let method = method.read();
if method.name == name {
if let Some(overrides) = method.overrides {
ignores.insert(overrides);
}
if !ignores.contains(&method.id) {
return vec![Candidate {
object_type: module_type.clone(),
container_type_params: module_type.type_params(),
fct_id: method.id,
}];
}
}
}
if let Some(parent_class) = module.parent_class.clone() {
let type_list = module_type.type_params();
module_type = replace_type_param(vm, parent_class, &type_list, None);
} else {
break;
}
}
Vec::new()
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct ModuleDefId(usize);
impl ModuleDefId {
pub fn to_usize(self) -> usize {
self.0
}
}
impl From<usize> for ModuleDefId {
fn from(data: usize) -> ModuleDefId {
ModuleDefId(data)
}
}
impl GrowableVec<RwLock<ModuleInstance>> {
pub fn idx(&self, index: ModuleDefId) -> Arc<RwLock<ModuleInstance>> {
self.idx_usize(index.0)
}
}
#[derive(Debug)]<|fim▁hole|> pub fields: Vec<FieldDef>,
pub size: InstanceSize,
pub ref_fields: Vec<i32>,
pub vtable: Option<VTableBox>,
}
impl ModuleInstance {
pub fn name(&self, vm: &VM) -> String {
if let Some(module_id) = self.mod_id {
let module = vm.modules.idx(module_id);
let module = module.read();
let name = vm.interner.str(module.name);
format!("{}", name)
} else {
"<Unknown>".into()
}
}
}<|fim▁end|> | pub struct ModuleInstance {
pub id: ModuleDefId,
pub mod_id: Option<ModuleId>,
pub parent_id: Option<ModuleDefId>, |
<|file_name|>issue-9951.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
// pretty-expanded FIXME #23616
#![allow(unused_variables)]
trait Bar {
fn noop(&self);
}
impl Bar for u8 {
fn noop(&self) {}
}
fn main() {
let (a, b) = (&5u8 as &Bar, &9u8 as &Bar);
let (c, d): (&Bar, &Bar) = (a, b);
let (a, b) = (Box::new(5u8) as Box<Bar>, Box::new(9u8) as Box<Bar>);
let (c, d): (&Bar, &Bar) = (&*a, &*b);<|fim▁hole|><|fim▁end|> |
let (c, d): (&Bar, &Bar) = (&5, &9);
} |
<|file_name|>scons.py<|end_file_name|><|fim▁begin|># Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import gyp
import gyp.common
import gyp.SCons as SCons
import os.path
import pprint
import re
# TODO: remove when we delete the last WriteList() call in this module
WriteList = SCons.WriteList
generator_default_variables = {
'EXECUTABLE_PREFIX': '',
'EXECUTABLE_SUFFIX': '',
'STATIC_LIB_PREFIX': '${LIBPREFIX}',
'SHARED_LIB_PREFIX': '${SHLIBPREFIX}',
'STATIC_LIB_SUFFIX': '${LIBSUFFIX}',
'SHARED_LIB_SUFFIX': '${SHLIBSUFFIX}',
'INTERMEDIATE_DIR': '${INTERMEDIATE_DIR}',
'SHARED_INTERMEDIATE_DIR': '${SHARED_INTERMEDIATE_DIR}',
'OS': 'linux',
'PRODUCT_DIR': '$TOP_BUILDDIR',
'SHARED_LIB_DIR': '$LIB_DIR',
'LIB_DIR': '$LIB_DIR',
'RULE_INPUT_ROOT': '${SOURCE.filebase}',
'RULE_INPUT_DIRNAME': '${SOURCE.dir}',
'RULE_INPUT_EXT': '${SOURCE.suffix}',
'RULE_INPUT_NAME': '${SOURCE.file}',
'RULE_INPUT_PATH': '${SOURCE.abspath}',
'CONFIGURATION_NAME': '${CONFIG_NAME}',
}
# Tell GYP how to process the input for us.
generator_handles_variants = True
generator_wants_absolute_build_file_paths = True
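# Prefix relative paths with the SConscript's source directory; absolute paths
# and SCons $-variable references are left untouched.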
def FixPath(path, prefix):
if not os.path.isabs(path) and not path[0] == '$':
path = prefix + path
return path
header = """\
# This file is generated; do not edit.
"""
_alias_template = """
if GetOption('verbose'):
_action = Action([%(action)s])
else:
_action = Action([%(action)s], %(message)s)
_outputs = env.Alias(
['_%(target_name)s_action'],
%(inputs)s,
_action
)
env.AlwaysBuild(_outputs)
"""
_run_as_template = """
if GetOption('verbose'):
_action = Action([%(action)s])
else:
_action = Action([%(action)s], %(message)s)
"""
_run_as_template_suffix = """
_run_as_target = env.Alias('run_%(target_name)s', target_files, _action)
env.Requires(_run_as_target, [
Alias('%(target_name)s'),
])
env.AlwaysBuild(_run_as_target)
"""
_command_template = """
if GetOption('verbose'):
_action = Action([%(action)s])
else:
_action = Action([%(action)s], %(message)s)
_outputs = env.Command(
%(outputs)s,
%(inputs)s,
_action
)
"""
# This is copied from the default SCons action, updated to handle symlinks.
_copy_action_template = """
import shutil
import SCons.Action
def _copy_files_or_dirs_or_symlinks(dest, src):
SCons.Node.FS.invalidate_node_memos(dest)
if SCons.Util.is_List(src) and os.path.isdir(dest):
for file in src:
shutil.copy2(file, dest)
return 0
elif os.path.islink(src):
linkto = os.readlink(src)
os.symlink(linkto, dest)
return 0
elif os.path.isfile(src):
return shutil.copy2(src, dest)
else:
return shutil.copytree(src, dest, 1)
def _copy_files_or_dirs_or_symlinks_str(dest, src):
return 'Copying %s to %s ...' % (src, dest)
GYPCopy = SCons.Action.ActionFactory(_copy_files_or_dirs_or_symlinks,
_copy_files_or_dirs_or_symlinks_str,
convert=str)
"""
_rule_template = """
%(name)s_additional_inputs = %(inputs)s
%(name)s_outputs = %(outputs)s
def %(name)s_emitter(target, source, env):
return (%(name)s_outputs, source + %(name)s_additional_inputs)
if GetOption('verbose'):
%(name)s_action = Action([%(action)s])
else:
%(name)s_action = Action([%(action)s], %(message)s)
env['BUILDERS']['%(name)s'] = Builder(action=%(name)s_action,
emitter=%(name)s_emitter)
_outputs = []
_processed_input_files = []
for infile in input_files:
if (type(infile) == type('')
and not os.path.isabs(infile)
and not infile[0] == '$'):
infile = %(src_dir)r + infile
if str(infile).endswith('.%(extension)s'):
_generated = env.%(name)s(infile)
env.Precious(_generated)
_outputs.append(_generated)
%(process_outputs_as_sources_line)s
else:
_processed_input_files.append(infile)
prerequisites.extend(_outputs)
input_files = _processed_input_files
"""
_spawn_hack = """
import re
import SCons.Platform.posix
needs_shell = re.compile('["\\'><!^&]')
def gyp_spawn(sh, escape, cmd, args, env):
def strip_scons_quotes(arg):
if arg[0] == '"' and arg[-1] == '"':
return arg[1:-1]
return arg
stripped_args = [strip_scons_quotes(a) for a in args]
if needs_shell.search(' '.join(stripped_args)):
return SCons.Platform.posix.exec_spawnvpe([sh, '-c', ' '.join(args)], env)
else:
return SCons.Platform.posix.exec_spawnvpe(stripped_args, env)
"""
def EscapeShellArgument(s):
"""Quotes an argument so that it will be interpreted literally by a POSIX
shell. Taken from
http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python
"""
return "'" + s.replace("'", "'\\''") + "'"
def InvertNaiveSConsQuoting(s):
"""SCons tries to "help" with quoting by naively putting double-quotes around
command-line arguments containing space or tab, which is broken for all
but trivial cases, so we undo it. (See quote_spaces() in Subst.py)"""
if ' ' in s or '\t' in s:
# Then SCons will put double-quotes around this, so add our own quotes
# to close its quotes at the beginning and end.
s = '"' + s + '"'
return s
def EscapeSConsVariableExpansion(s):
"""SCons has its own variable expansion syntax using $. We must escape it for
strings to be interpreted literally. For some reason this requires four
dollar signs, not two, even without the shell involved."""
return s.replace('$', '$$$$')
def EscapeCppDefine(s):
"""Escapes a CPP define so that it will reach the compiler unaltered."""
s = EscapeShellArgument(s)
s = InvertNaiveSConsQuoting(s)
s = EscapeSConsVariableExpansion(s)
return s
def GenerateConfig(fp, config, indent='', src_dir=''):
"""
Generates SCons dictionary items for a gyp configuration.
This provides the main translation between the (lower-case) gyp settings
keywords and the (upper-case) SCons construction variables.
"""
var_mapping = {
'ASFLAGS' : 'asflags',
'CCFLAGS' : 'cflags',
'CFLAGS' : 'cflags_c',
'CXXFLAGS' : 'cflags_cc',
'CPPDEFINES' : 'defines',
'CPPPATH' : 'include_dirs',
# Add the ldflags value to $LINKFLAGS, but not $SHLINKFLAGS.
# SCons defines $SHLINKFLAGS to incorporate $LINKFLAGS, so
      # listing both here would cause 'ldflags' to get appended to
# both, and then have it show up twice on the command line.
'LINKFLAGS' : 'ldflags',
}
postamble='\n%s],\n' % indent
for scons_var in sorted(var_mapping.keys()):
gyp_var = var_mapping[scons_var]
value = config.get(gyp_var)
if value:
if gyp_var in ('defines',):
value = [EscapeCppDefine(v) for v in value]
if gyp_var in ('include_dirs',):
if src_dir and not src_dir.endswith('/'):
src_dir += '/'
result = []
for v in value:
v = FixPath(v, src_dir)
# Force SCons to evaluate the CPPPATH directories at
# SConscript-read time, so delayed evaluation of $SRC_DIR
# doesn't point it to the --generator-output= directory.
result.append('env.Dir(%r)' % v)
value = result
else:
value = map(repr, value)
WriteList(fp,
value,
prefix=indent,
preamble='%s%s = [\n ' % (indent, scons_var),
postamble=postamble)
def GenerateSConscript(output_filename, spec, build_file, build_file_data):
"""
Generates a SConscript file for a specific target.
This generates a SConscript file suitable for building any or all of
the target's configurations.
A SConscript file may be called multiple times to generate targets for
multiple configurations. Consequently, it needs to be ready to build
the target for any requested configuration, and therefore contains
information about the settings for all configurations (generated into
the SConscript file at gyp configuration time) as well as logic for
selecting (at SCons build time) the specific configuration being built.
The general outline of a generated SConscript file is:
-- Header
-- Import 'env'. This contains a $CONFIG_NAME construction
variable that specifies what configuration to build
(e.g. Debug, Release).
-- Configurations. This is a dictionary with settings for
the different configurations (Debug, Release) under which this
target can be built. The values in the dictionary are themselves
dictionaries specifying what construction variables should added
to the local copy of the imported construction environment
(Append), should be removed (FilterOut), and should outright
replace the imported values (Replace).
-- Clone the imported construction environment and update
with the proper configuration settings.
-- Initialize the lists of the targets' input files and prerequisites.
-- Target-specific actions and rules. These come after the
input file and prerequisite initializations because the
outputs of the actions and rules may affect the input file
list (process_outputs_as_sources) and get added to the list of
prerequisites (so that they're guaranteed to be executed before
building the target).
-- Call the Builder for the target itself.
-- Arrange for any copies to be made into installation directories.
-- Set up the {name} Alias (phony Node) for the target as the
primary handle for building all of the target's pieces.
-- Use env.Require() to make sure the prerequisites (explicitly
specified, but also including the actions and rules) are built
before the target itself.
-- Return the {name} Alias to the calling SConstruct file
so it can be added to the list of default targets.
"""
scons_target = SCons.Target(spec)
gyp_dir = os.path.dirname(output_filename)
if not gyp_dir:
gyp_dir = '.'
gyp_dir = os.path.abspath(gyp_dir)
output_dir = os.path.dirname(output_filename)
src_dir = build_file_data['_DEPTH']
src_dir_rel = gyp.common.RelativePath(src_dir, output_dir)
subdir = gyp.common.RelativePath(os.path.dirname(build_file), src_dir)
src_subdir = '$SRC_DIR/' + subdir
src_subdir_ = src_subdir + '/'
component_name = os.path.splitext(os.path.basename(build_file))[0]
target_name = spec['target_name']
if not os.path.exists(gyp_dir):
os.makedirs(gyp_dir)
fp = open(output_filename, 'w')
fp.write(header)
<|fim▁hole|>
#
fp.write('\n')
fp.write('env = env.Clone(COMPONENT_NAME=%s,\n' % repr(component_name))
fp.write(' TARGET_NAME=%s)\n' % repr(target_name))
#
for config in spec['configurations'].itervalues():
if config.get('scons_line_length'):
fp.write(_spawn_hack)
break
#
indent = ' ' * 12
fp.write('\n')
fp.write('configurations = {\n')
for config_name, config in spec['configurations'].iteritems():
fp.write(' \'%s\' : {\n' % config_name)
fp.write(' \'Append\' : dict(\n')
GenerateConfig(fp, config, indent, src_subdir)
libraries = spec.get('libraries')
if libraries:
WriteList(fp,
map(repr, libraries),
prefix=indent,
preamble='%sLIBS = [\n ' % indent,
postamble='\n%s],\n' % indent)
fp.write(' ),\n')
fp.write(' \'FilterOut\' : dict(\n' )
for key, var in config.get('scons_remove', {}).iteritems():
fp.write(' %s = %s,\n' % (key, repr(var)))
fp.write(' ),\n')
fp.write(' \'Replace\' : dict(\n' )
scons_settings = config.get('scons_variable_settings', {})
for key in sorted(scons_settings.keys()):
val = pprint.pformat(scons_settings[key])
fp.write(' %s = %s,\n' % (key, val))
if 'c++' in spec.get('link_languages', []):
fp.write(' %s = %s,\n' % ('LINK', repr('$CXX')))
if config.get('scons_line_length'):
fp.write(' SPAWN = gyp_spawn,\n')
fp.write(' ),\n')
fp.write(' \'ImportExternal\' : [\n' )
for var in config.get('scons_import_variables', []):
fp.write(' %s,\n' % repr(var))
fp.write(' ],\n')
fp.write(' \'PropagateExternal\' : [\n' )
for var in config.get('scons_propagate_variables', []):
fp.write(' %s,\n' % repr(var))
fp.write(' ],\n')
fp.write(' },\n')
fp.write('}\n')
fp.write('\n'
'config = configurations[env[\'CONFIG_NAME\']]\n'
'env.Append(**config[\'Append\'])\n'
'env.FilterOut(**config[\'FilterOut\'])\n'
'env.Replace(**config[\'Replace\'])\n')
fp.write('\n'
'# Scons forces -fPIC for SHCCFLAGS on some platforms.\n'
'# Disable that so we can control it from cflags in gyp.\n'
'# Note that Scons itself is inconsistent with its -fPIC\n'
'# setting. SHCCFLAGS forces -fPIC, and SHCFLAGS does not.\n'
'# This will make SHCCFLAGS consistent with SHCFLAGS.\n'
'env[\'SHCCFLAGS\'] = [\'$CCFLAGS\']\n')
fp.write('\n'
'for _var in config[\'ImportExternal\']:\n'
' if _var in ARGUMENTS:\n'
' env[_var] = ARGUMENTS[_var]\n'
' elif _var in os.environ:\n'
' env[_var] = os.environ[_var]\n'
'for _var in config[\'PropagateExternal\']:\n'
' if _var in ARGUMENTS:\n'
' env[_var] = ARGUMENTS[_var]\n'
' elif _var in os.environ:\n'
' env[\'ENV\'][_var] = os.environ[_var]\n')
fp.write('\n'
"env['ENV']['LD_LIBRARY_PATH'] = env.subst('$LIB_DIR')\n")
#
#fp.write("\nif env.has_key('CPPPATH'):\n")
#fp.write(" env['CPPPATH'] = map(env.Dir, env['CPPPATH'])\n")
variants = spec.get('variants', {})
for setting in sorted(variants.keys()):
if_fmt = 'if ARGUMENTS.get(%s) not in (None, \'0\'):\n'
fp.write('\n')
fp.write(if_fmt % repr(setting.upper()))
fp.write(' env.AppendUnique(\n')
GenerateConfig(fp, variants[setting], indent, src_subdir)
fp.write(' )\n')
#
scons_target.write_input_files(fp)
fp.write('\n')
fp.write('target_files = []\n')
prerequisites = spec.get('scons_prerequisites', [])
fp.write('prerequisites = %s\n' % pprint.pformat(prerequisites))
actions = spec.get('actions', [])
for action in actions:
a = ['cd', src_subdir, '&&'] + action['action']
message = action.get('message')
if message:
message = repr(message)
inputs = [FixPath(f, src_subdir_) for f in action.get('inputs', [])]
outputs = [FixPath(f, src_subdir_) for f in action.get('outputs', [])]
if outputs:
template = _command_template
else:
template = _alias_template
fp.write(template % {
'inputs' : pprint.pformat(inputs),
'outputs' : pprint.pformat(outputs),
'action' : pprint.pformat(a),
'message' : message,
'target_name': target_name,
})
if int(action.get('process_outputs_as_sources', 0)):
fp.write('input_files.extend(_outputs)\n')
fp.write('prerequisites.extend(_outputs)\n')
fp.write('target_files.extend(_outputs)\n')
rules = spec.get('rules', [])
for rule in rules:
name = re.sub('[^a-zA-Z0-9_]', '_', rule['rule_name'])
message = rule.get('message')
if message:
message = repr(message)
if int(rule.get('process_outputs_as_sources', 0)):
poas_line = '_processed_input_files.extend(_generated)'
else:
poas_line = '_processed_input_files.append(infile)'
inputs = [FixPath(f, src_subdir_) for f in rule.get('inputs', [])]
outputs = [FixPath(f, src_subdir_) for f in rule.get('outputs', [])]
# Skip a rule with no action and no inputs.
if 'action' not in rule and not rule.get('rule_sources', []):
continue
a = ['cd', src_subdir, '&&'] + rule['action']
fp.write(_rule_template % {
'inputs' : pprint.pformat(inputs),
'outputs' : pprint.pformat(outputs),
'action' : pprint.pformat(a),
'extension' : rule['extension'],
'name' : name,
'message' : message,
'process_outputs_as_sources_line' : poas_line,
'src_dir' : src_subdir_,
})
scons_target.write_target(fp, src_subdir)
copies = spec.get('copies', [])
if copies:
fp.write(_copy_action_template)
for copy in copies:
destdir = None
files = None
try:
destdir = copy['destination']
except KeyError, e:
gyp.common.ExceptionAppend(
e,
"Required 'destination' key missing for 'copies' in %s." % build_file)
raise
try:
files = copy['files']
except KeyError, e:
gyp.common.ExceptionAppend(
e, "Required 'files' key missing for 'copies' in %s." % build_file)
raise
if not files:
# TODO: should probably add a (suppressible) warning;
# a null file list may be unintentional.
continue
if not destdir:
raise Exception(
"Required 'destination' key is empty for 'copies' in %s." % build_file)
fmt = ('\n'
'_outputs = env.Command(%s,\n'
' %s,\n'
' GYPCopy(\'$TARGET\', \'$SOURCE\'))\n')
for f in copy['files']:
# Remove trailing separators so basename() acts like Unix basename and
# always returns the last element, whether a file or dir. Without this,
# only the contents, not the directory itself, are copied (and nothing
# might be copied if dest already exists, since scons thinks nothing needs
# to be done).
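# Illustrative example: copying 'images/' into destdir should produce
# 'destdir/images' (basename 'images'), not just the directory contents
# dumped into destdir, which is what a trailing separator would cause.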
dest = os.path.join(destdir, os.path.basename(f.rstrip(os.sep)))
f = FixPath(f, src_subdir_)
dest = FixPath(dest, src_subdir_)
fp.write(fmt % (repr(dest), repr(f)))
fp.write('target_files.extend(_outputs)\n')
run_as = spec.get('run_as')
if run_as:
action = run_as.get('action', [])
working_directory = run_as.get('working_directory')
if not working_directory:
working_directory = gyp_dir
else:
if not os.path.isabs(working_directory):
working_directory = os.path.normpath(os.path.join(gyp_dir,
working_directory))
if run_as.get('environment'):
for (key, val) in run_as.get('environment').iteritems():
action = ['%s="%s"' % (key, val)] + action
action = ['cd', '"%s"' % working_directory, '&&'] + action
fp.write(_run_as_template % {
'action' : pprint.pformat(action),
'message' : run_as.get('message', ''),
})
fmt = "\ngyp_target = env.Alias('%s', target_files)\n"
fp.write(fmt % target_name)
dependencies = spec.get('scons_dependencies', [])
if dependencies:
WriteList(fp, dependencies, preamble='dependencies = [\n ',
postamble='\n]\n')
fp.write('env.Requires(target_files, dependencies)\n')
fp.write('env.Requires(gyp_target, dependencies)\n')
fp.write('for prerequisite in prerequisites:\n')
fp.write(' env.Requires(prerequisite, dependencies)\n')
fp.write('env.Requires(gyp_target, prerequisites)\n')
if run_as:
fp.write(_run_as_template_suffix % {
'target_name': target_name,
})
fp.write('Return("gyp_target")\n')
fp.close()
#############################################################################
# TEMPLATE BEGIN
_wrapper_template = """\
__doc__ = '''
Wrapper configuration for building this entire "solution,"
including all the specific targets in various *.scons files.
'''
import os
import sys
import SCons.Environment
import SCons.Util
def GetProcessorCount():
'''
Detects the number of CPUs on the system. Adapted from:
http://codeliberates.blogspot.com/2008/05/detecting-cpuscores-in-python.html
'''
# Linux, Unix and Mac OS X:
if hasattr(os, 'sysconf'):
if os.sysconf_names.has_key('SC_NPROCESSORS_ONLN'):
# Linux and Unix or Mac OS X with python >= 2.5:
return os.sysconf('SC_NPROCESSORS_ONLN')
else: # Mac OS X with Python < 2.5:
return int(os.popen2("sysctl -n hw.ncpu")[1].read())
# Windows:
if os.environ.has_key('NUMBER_OF_PROCESSORS'):
return max(int(os.environ.get('NUMBER_OF_PROCESSORS', '1')), 1)
return 1 # Default
# Support PROGRESS= to show progress in different ways.
p = ARGUMENTS.get('PROGRESS')
if p == 'spinner':
Progress(['/\\r', '|\\r', '\\\\\\r', '-\\r'],
interval=5,
file=open('/dev/tty', 'w'))
elif p == 'name':
Progress('$TARGET\\r', overwrite=True, file=open('/dev/tty', 'w'))
# Set the default -j value based on the number of processors.
SetOption('num_jobs', GetProcessorCount() + 1)
# Have SCons use its cached dependency information.
SetOption('implicit_cache', 1)
# Only re-calculate MD5 checksums if a timestamp has changed.
Decider('MD5-timestamp')
# Since we set the -j value by default, suppress SCons warnings about being
# unable to support parallel build on versions of Python with no threading.
default_warnings = ['no-no-parallel-support']
SetOption('warn', default_warnings + GetOption('warn'))
AddOption('--mode', nargs=1, dest='conf_list', default=[],
action='append', help='Configuration to build.')
AddOption('--verbose', dest='verbose', default=False,
action='store_true', help='Verbose command-line output.')
#
sconscript_file_map = %(sconscript_files)s
class LoadTarget:
'''
Class for deciding if a given target sconscript is to be included
based on a list of included target names, optionally prefixed with '-'
to exclude a target name.
'''
def __init__(self, load):
'''
Initialize a class with a list of names for possible loading.
Arguments:
load: list of elements in the LOAD= specification
'''
self.included = set([c for c in load if not c.startswith('-')])
self.excluded = set([c[1:] for c in load if c.startswith('-')])
if not self.included:
self.included = set(['all'])
def __call__(self, target):
'''
Returns True if the specified target's sconscript file should be
loaded, based on the initialized included and excluded lists.
'''
return (target in self.included or
('all' in self.included and not target in self.excluded))
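# Illustrative examples of the resulting behavior:
#   LoadTarget([])('anything')      -> True   (empty LOAD= defaults to 'all')
#   LoadTarget(['base'])('base')    -> True
#   LoadTarget(['base'])('tests')   -> False
#   LoadTarget(['-tests'])('tests') -> False  ('all' implied, minus 'tests')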
if 'LOAD' in ARGUMENTS:
load = ARGUMENTS['LOAD'].split(',')
else:
load = []
load_target = LoadTarget(load)
sconscript_files = []
for target, sconscript in sconscript_file_map.iteritems():
if load_target(target):
sconscript_files.append(sconscript)
target_alias_list= []
conf_list = GetOption('conf_list')
if conf_list:
# In case the same --mode= value was specified multiple times.
conf_list = list(set(conf_list))
else:
conf_list = [%(default_configuration)r]
sconsbuild_dir = Dir(%(sconsbuild_dir)s)
def FilterOut(self, **kw):
kw = SCons.Environment.copy_non_reserved_keywords(kw)
for key, val in kw.items():
envval = self.get(key, None)
if envval is None:
# No existing variable in the environment, so nothing to delete.
continue
for vremove in val:
# Use while not if, so we can handle duplicates.
while vremove in envval:
envval.remove(vremove)
self[key] = envval
# TODO(sgk): SCons.Environment.Append() has much more logic to deal
# with various types of values. We should handle all those cases in here
# too. (If variable is a dict, etc.)
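# Illustrative example: if env['CPPDEFINES'] is ['FOO', 'BAR', 'FOO'], then
# env.FilterOut(CPPDEFINES=['FOO']) leaves ['BAR']; the while loop above also
# strips duplicate occurrences, not just the first match.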
non_compilable_suffixes = {
'LINUX' : set([
'.bdic',
'.css',
'.dat',
'.fragment',
'.gperf',
'.h',
'.hh',
'.hpp',
'.html',
'.hxx',
'.idl',
'.in',
'.in0',
'.in1',
'.js',
'.mk',
'.rc',
'.sigs',
'',
]),
'WINDOWS' : set([
'.h',
'.hh',
'.hpp',
'.dat',
'.idl',
'.in',
'.in0',
'.in1',
]),
}
def compilable(env, file):
base, ext = os.path.splitext(str(file))
if ext in non_compilable_suffixes[env['TARGET_PLATFORM']]:
return False
return True
def compilable_files(env, sources):
return [x for x in sources if compilable(env, x)]
def GypProgram(env, target, source, *args, **kw):
source = compilable_files(env, source)
result = env.Program(target, source, *args, **kw)
if env.get('INCREMENTAL'):
env.Precious(result)
return result
def GypTestProgram(env, target, source, *args, **kw):
source = compilable_files(env, source)
result = env.Program(target, source, *args, **kw)
if env.get('INCREMENTAL'):
env.Precious(*result)
return result
def GypLibrary(env, target, source, *args, **kw):
source = compilable_files(env, source)
result = env.Library(target, source, *args, **kw)
return result
def GypLoadableModule(env, target, source, *args, **kw):
source = compilable_files(env, source)
result = env.LoadableModule(target, source, *args, **kw)
return result
def GypStaticLibrary(env, target, source, *args, **kw):
source = compilable_files(env, source)
result = env.StaticLibrary(target, source, *args, **kw)
return result
def GypSharedLibrary(env, target, source, *args, **kw):
source = compilable_files(env, source)
result = env.SharedLibrary(target, source, *args, **kw)
if env.get('INCREMENTAL'):
env.Precious(result)
return result
def add_gyp_methods(env):
env.AddMethod(GypProgram)
env.AddMethod(GypTestProgram)
env.AddMethod(GypLibrary)
env.AddMethod(GypLoadableModule)
env.AddMethod(GypStaticLibrary)
env.AddMethod(GypSharedLibrary)
env.AddMethod(FilterOut)
env.AddMethod(compilable)
base_env = Environment(
tools = %(scons_tools)s,
INTERMEDIATE_DIR='$OBJ_DIR/${COMPONENT_NAME}/_${TARGET_NAME}_intermediate',
LIB_DIR='$TOP_BUILDDIR/lib',
OBJ_DIR='$TOP_BUILDDIR/obj',
SCONSBUILD_DIR=sconsbuild_dir.abspath,
SHARED_INTERMEDIATE_DIR='$OBJ_DIR/_global_intermediate',
SRC_DIR=Dir(%(src_dir)r),
TARGET_PLATFORM='LINUX',
TOP_BUILDDIR='$SCONSBUILD_DIR/$CONFIG_NAME',
LIBPATH=['$LIB_DIR'],
)
if not GetOption('verbose'):
base_env.SetDefault(
ARCOMSTR='Creating library $TARGET',
ASCOMSTR='Assembling $TARGET',
CCCOMSTR='Compiling $TARGET',
CONCATSOURCECOMSTR='ConcatSource $TARGET',
CXXCOMSTR='Compiling $TARGET',
LDMODULECOMSTR='Building loadable module $TARGET',
LINKCOMSTR='Linking $TARGET',
MANIFESTCOMSTR='Updating manifest for $TARGET',
MIDLCOMSTR='Compiling IDL $TARGET',
PCHCOMSTR='Precompiling $TARGET',
RANLIBCOMSTR='Indexing $TARGET',
RCCOMSTR='Compiling resource $TARGET',
SHCCCOMSTR='Compiling $TARGET',
SHCXXCOMSTR='Compiling $TARGET',
SHLINKCOMSTR='Linking $TARGET',
SHMANIFESTCOMSTR='Updating manifest for $TARGET',
)
add_gyp_methods(base_env)
for conf in conf_list:
env = base_env.Clone(CONFIG_NAME=conf)
SConsignFile(env.File('$TOP_BUILDDIR/.sconsign').abspath)
for sconscript in sconscript_files:
target_alias = env.SConscript(sconscript, exports=['env'])
if target_alias:
target_alias_list.extend(target_alias)
Default(Alias('all', target_alias_list))
help_fmt = '''
Usage: hammer [SCONS_OPTIONS] [VARIABLES] [TARGET] ...
Local command-line build options:
--mode=CONFIG Configuration to build:
--mode=Debug [default]
--mode=Release
--verbose Print actual executed command lines.
Supported command-line build variables:
LOAD=[module,...] Comma-separated list of components to load in the
dependency graph ('-' prefix excludes)
PROGRESS=type Display a progress indicator:
name: print each evaluated target name
spinner: print a spinner every 5 targets
The following TARGET names can also be used as LOAD= module names:
%%s
'''
if GetOption('help'):
def columnar_text(items, width=78, indent=2, sep=2):
result = []
colwidth = max(map(len, items)) + sep
cols = (width - indent) / colwidth
if cols < 1:
cols = 1
rows = (len(items) + cols - 1) / cols
indent = '%%*s' %% (indent, '')
sep = indent
for row in xrange(0, rows):
result.append(sep)
for i in xrange(row, len(items), rows):
result.append('%%-*s' %% (colwidth, items[i]))
sep = '\\n' + indent
result.append('\\n')
return ''.join(result)
load_list = set(sconscript_file_map.keys())
target_aliases = set(map(str, target_alias_list))
common = load_list & target_aliases
load_only = load_list - common
target_only = target_aliases - common
help_text = [help_fmt %% columnar_text(sorted(list(common)))]
if target_only:
fmt = "The following are additional TARGET names:\\n\\n%%s\\n"
help_text.append(fmt %% columnar_text(sorted(list(target_only))))
if load_only:
fmt = "The following are additional LOAD= module names:\\n\\n%%s\\n"
help_text.append(fmt %% columnar_text(sorted(list(load_only))))
Help(''.join(help_text))
"""
# TEMPLATE END
#############################################################################
def GenerateSConscriptWrapper(build_file, build_file_data, name,
output_filename, sconscript_files,
default_configuration):
"""
Generates the "wrapper" SConscript file (analogous to the Visual Studio
solution) that calls all the individual target SConscript files.
"""
output_dir = os.path.dirname(output_filename)
src_dir = build_file_data['_DEPTH']
src_dir_rel = gyp.common.RelativePath(src_dir, output_dir)
if not src_dir_rel:
src_dir_rel = '.'
scons_settings = build_file_data.get('scons_settings', {})
sconsbuild_dir = scons_settings.get('sconsbuild_dir', '#')
scons_tools = scons_settings.get('tools', ['default'])
sconscript_file_lines = ['dict(']
for target in sorted(sconscript_files.keys()):
sconscript = sconscript_files[target]
sconscript_file_lines.append(' %s = %r,' % (target, sconscript))
sconscript_file_lines.append(')')
fp = open(output_filename, 'w')
fp.write(header)
fp.write(_wrapper_template % {
'default_configuration' : default_configuration,
'name' : name,
'scons_tools' : repr(scons_tools),
'sconsbuild_dir' : repr(sconsbuild_dir),
'sconscript_files' : '\n'.join(sconscript_file_lines),
'src_dir' : src_dir_rel,
})
fp.close()
# Generate the SConstruct file that invokes the wrapper SConscript.
dir, fname = os.path.split(output_filename)
SConstruct = os.path.join(dir, 'SConstruct')
fp = open(SConstruct, 'w')
fp.write(header)
fp.write('SConscript(%s)\n' % repr(fname))
fp.close()
def TargetFilename(target, build_file=None, output_suffix=''):
"""Returns the .scons file name for the specified target.
"""
if build_file is None:
build_file, target = gyp.common.ParseQualifiedTarget(target)[:2]
output_file = os.path.join(os.path.dirname(build_file),
target + output_suffix + '.scons')
return output_file
def GenerateOutput(target_list, target_dicts, data, params):
"""
Generates all the output files for the specified targets.
"""
options = params['options']
if options.generator_output:
def output_path(filename):
return filename.replace(params['cwd'], options.generator_output)
else:
def output_path(filename):
return filename
default_configuration = None
for qualified_target in target_list:
spec = target_dicts[qualified_target]
if spec['toolset'] != 'target':
raise Exception(
'Multiple toolsets not supported in scons build (target %s)' %
qualified_target)
scons_target = SCons.Target(spec)
if scons_target.is_ignored:
continue
# TODO: assumes the default_configuration of the first
# non-Default target is the correct default for all targets.
# Need a better model for handling variation between targets.
if (not default_configuration and
spec['default_configuration'] != 'Default'):
default_configuration = spec['default_configuration']
build_file, target = gyp.common.ParseQualifiedTarget(qualified_target)[:2]
output_file = TargetFilename(target, build_file, options.suffix)
if options.generator_output:
output_file = output_path(output_file)
if not spec.has_key('libraries'):
spec['libraries'] = []
# Add dependent static library targets to the 'libraries' value.
deps = spec.get('dependencies', [])
spec['scons_dependencies'] = []
for d in deps:
td = target_dicts[d]
target_name = td['target_name']
spec['scons_dependencies'].append("Alias('%s')" % target_name)
if td['type'] in ('static_library', 'shared_library'):
libname = td.get('product_name', target_name)
spec['libraries'].append('lib' + libname)
if td['type'] == 'loadable_module':
prereqs = spec.get('scons_prerequisites', [])
# TODO: parameterize with <(SHARED_LIBRARY_*) variables?
td_target = SCons.Target(td)
td_target.target_prefix = '${SHLIBPREFIX}'
td_target.target_suffix = '${SHLIBSUFFIX}'
GenerateSConscript(output_file, spec, build_file, data[build_file])
if not default_configuration:
default_configuration = 'Default'
for build_file in sorted(data.keys()):
path, ext = os.path.splitext(build_file)
if ext != '.gyp':
continue
output_dir, basename = os.path.split(path)
output_filename = path + '_main' + options.suffix + '.scons'
all_targets = gyp.common.AllTargets(target_list, target_dicts, build_file)
sconscript_files = {}
for t in all_targets:
scons_target = SCons.Target(target_dicts[t])
if scons_target.is_ignored:
continue
bf, target = gyp.common.ParseQualifiedTarget(t)[:2]
target_filename = TargetFilename(target, bf, options.suffix)
tpath = gyp.common.RelativePath(target_filename, output_dir)
sconscript_files[target] = tpath
output_filename = output_path(output_filename)
if sconscript_files:
GenerateSConscriptWrapper(build_file, data[build_file], basename,
output_filename, sconscript_files,
default_configuration)<|fim▁end|> | fp.write('\nimport os\n')
fp.write('\nImport("env")\n') |
<|file_name|>A24alt.py<|end_file_name|><|fim▁begin|>#
# @BEGIN LICENSE
#
# Psi4: an open-source quantum chemistry software package
#
# Copyright (c) 2007-2021 The Psi4 Developers.
#
# The copyrights for code used from other parties are included in
# the corresponding files.
#
# This file is part of Psi4.
#
# Psi4 is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, version 3.
#
# Psi4 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with Psi4; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @END LICENSE
#
"""
| Database (Hobza) of interaction energies for bimolecular complexes.
| Geometries from <Reference>.
| Reference interaction energies from Rezac and Hobza, JCTC (in press).
- **cp**  ``'off'`` || ``'on'``
- **rlxd** ``'off'`` || ``'on'``
- **benchmark**
- ``'<benchmark_name>'`` <Reference>.
- |dl| ``'<default_benchmark_name>'`` |dr| <Reference>.
- **subset**
- ``'small'`` <members_description>
- ``'large'`` <members_description>
- ``'<subset>'`` <members_description>
"""
import re
import qcdb
# <<< A24 Database Module >>>
dbse = 'A24'
# <<< Database Members >>>
HRXN = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24]
HRXN_SM = []
HRXN_LG = []
# <<< Chemical Systems Involved >>>
RXNM = {} # reaction matrix of reagent contributions per reaction
ACTV = {} # order of active reagents per reaction
ACTV_CP = {} # order of active reagents per counterpoise-corrected reaction
ACTV_SA = {} # order of active reagents for non-supermolecular calculations
for rxn in HRXN:
RXNM[ '%s-%s' % (dbse, rxn)] = {'%s-%s-dimer' % (dbse, rxn) : +1,
'%s-%s-monoA-CP' % (dbse, rxn) : -1,
'%s-%s-monoB-CP' % (dbse, rxn) : -1,
'%s-%s-monoA-unCP' % (dbse, rxn) : -1,
'%s-%s-monoB-unCP' % (dbse, rxn) : -1 }
ACTV_SA['%s-%s' % (dbse, rxn)] = ['%s-%s-dimer' % (dbse, rxn) ]
ACTV_CP['%s-%s' % (dbse, rxn)] = ['%s-%s-dimer' % (dbse, rxn),
'%s-%s-monoA-CP' % (dbse, rxn),
'%s-%s-monoB-CP' % (dbse, rxn) ]
ACTV[ '%s-%s' % (dbse, rxn)] = ['%s-%s-dimer' % (dbse, rxn),
'%s-%s-monoA-unCP' % (dbse, rxn),
'%s-%s-monoB-unCP' % (dbse, rxn) ]
ACTV['%s-%s' % (dbse, rxn)] = ['%s-%s-dimer' % (dbse, rxn) ]
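# For illustration, reaction 1 comes out of the loop above as:
#   RXNM['A24-1']    = {'A24-1-dimer': +1, 'A24-1-monoA-CP': -1, 'A24-1-monoB-CP': -1,
#                       'A24-1-monoA-unCP': -1, 'A24-1-monoB-unCP': -1}
#   ACTV_CP['A24-1'] = ['A24-1-dimer', 'A24-1-monoA-CP', 'A24-1-monoB-CP']
# Note that the second ACTV assignment overwrites the three-reagent list just
# above it, leaving only the dimer as the active reagent for unCP reactions.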
# <<< Reference Values [kcal/mol] from Rezac and Hobza dx.doi.org/10.1021/ct400057w >>>
BIND = {}
BIND['%s-%s' % (dbse, 1 )] = -6.524
BIND['%s-%s' % (dbse, 2 )] = -5.014
BIND['%s-%s' % (dbse, 3 )] = -4.749
BIND['%s-%s' % (dbse, 4 )] = -4.572
BIND['%s-%s' % (dbse, 5 )] = -3.157
BIND['%s-%s' % (dbse, 6 )] = -1.679
BIND['%s-%s' % (dbse, 7 )] = -0.779
BIND['%s-%s' % (dbse, 8 )] = -0.672
BIND['%s-%s' % (dbse, 9 )] = -4.474
BIND['%s-%s' % (dbse, 10 )] = -2.578
BIND['%s-%s' % (dbse, 11 )] = -1.629
BIND['%s-%s' % (dbse, 12 )] = -1.537
BIND['%s-%s' % (dbse, 13 )] = -1.389
BIND['%s-%s' % (dbse, 14 )] = -1.110
BIND['%s-%s' % (dbse, 15 )] = -0.514
BIND['%s-%s' % (dbse, 16 )] = -1.518
BIND['%s-%s' % (dbse, 17 )] = -0.837
BIND['%s-%s' % (dbse, 18 )] = -0.615
BIND['%s-%s' % (dbse, 19 )] = -0.538
BIND['%s-%s' % (dbse, 20 )] = -0.408
BIND['%s-%s' % (dbse, 21 )] = -0.370
BIND['%s-%s' % (dbse, 22 )] = 0.784
BIND['%s-%s' % (dbse, 23 )] = 0.897
BIND['%s-%s' % (dbse, 24 )] = 1.075
# <<< Comment Lines >>>
TAGL = {}
TAGL['%s-%s' % (dbse, 1)] = """ water_ammonia_Cs """
TAGL['%s-%s-dimer' % (dbse, 1)] = """Dimer from water_ammonia_Cs """
TAGL['%s-%s-monoA-CP' % (dbse, 1)] = """Monomer A from water_ammonia_Cs """
TAGL['%s-%s-monoB-CP' % (dbse, 1)] = """Monomer B from water_ammonia_Cs """
TAGL['%s-%s-monoA-unCP' % (dbse, 1)] = """Monomer A from water_ammonia_Cs """
TAGL['%s-%s-monoB-unCP' % (dbse, 1)] = """Monomer B from water_ammonia_Cs """
TAGL['%s-%s' % (dbse, 2)] = """ water_water_Cs """
TAGL['%s-%s-dimer' % (dbse, 2)] = """Dimer from water_water_Cs """
TAGL['%s-%s-monoA-CP' % (dbse, 2)] = """Monomer A from water_water_Cs """
TAGL['%s-%s-monoB-CP' % (dbse, 2)] = """Monomer B from water_water_Cs """
TAGL['%s-%s-monoA-unCP' % (dbse, 2)] = """Monomer A from water_water_Cs """
TAGL['%s-%s-monoB-unCP' % (dbse, 2)] = """Monomer B from water_water_Cs """
TAGL['%s-%s' % (dbse, 3)] = """ HCN_HCN_Cxv """
TAGL['%s-%s-dimer' % (dbse, 3)] = """Dimer from HCN_HCN_Cxv """
TAGL['%s-%s-monoA-CP' % (dbse, 3)] = """Monomer A from HCN_HCN_Cxv """
TAGL['%s-%s-monoB-CP' % (dbse, 3)] = """Monomer B from HCN_HCN_Cxv """
TAGL['%s-%s-monoA-unCP' % (dbse, 3)] = """Monomer A from HCN_HCN_Cxv """
TAGL['%s-%s-monoB-unCP' % (dbse, 3)] = """Monomer B from HCN_HCN_Cxv """
TAGL['%s-%s' % (dbse, 4)] = """ HF_HF_Cs """
TAGL['%s-%s-dimer' % (dbse, 4)] = """Dimer from HF_HF_Cs """
TAGL['%s-%s-monoA-CP' % (dbse, 4)] = """Monomer A from HF_HF_Cs """
TAGL['%s-%s-monoB-CP' % (dbse, 4)] = """Monomer B from HF_HF_Cs """
TAGL['%s-%s-monoA-unCP' % (dbse, 4)] = """Monomer A from HF_HF_Cs """
TAGL['%s-%s-monoB-unCP' % (dbse, 4)] = """Monomer B from HF_HF_Cs """
TAGL['%s-%s' % (dbse, 5)] = """ ammonia_ammonia_C2h """
TAGL['%s-%s-dimer' % (dbse, 5)] = """Dimer from ammonia_ammonia_C2h """
TAGL['%s-%s-monoA-CP' % (dbse, 5)] = """Monomer A from ammonia_ammonia_C2h """
TAGL['%s-%s-monoB-CP' % (dbse, 5)] = """Monomer B from ammonia_ammonia_C2h """
TAGL['%s-%s-monoA-unCP' % (dbse, 5)] = """Monomer A from ammonia_ammonia_C2h """
TAGL['%s-%s-monoB-unCP' % (dbse, 5)] = """Monomer B from ammonia_ammonia_C2h """
TAGL['%s-%s' % (dbse, 6)] = """ methane_HF_C3v """
TAGL['%s-%s-dimer' % (dbse, 6)] = """Dimer from methane_HF_C3v """
TAGL['%s-%s-monoA-CP' % (dbse, 6)] = """Monomer A from methane_HF_C3v """
TAGL['%s-%s-monoB-CP' % (dbse, 6)] = """Monomer B from methane_HF_C3v """
TAGL['%s-%s-monoA-unCP' % (dbse, 6)] = """Monomer A from methane_HF_C3v """
TAGL['%s-%s-monoB-unCP' % (dbse, 6)] = """Monomer B from methane_HF_C3v """
TAGL['%s-%s' % (dbse, 7)] = """ ammmonia_methane_C3v """
TAGL['%s-%s-dimer' % (dbse, 7)] = """Dimer from ammmonia_methane_C3v """
TAGL['%s-%s-monoA-CP' % (dbse, 7)] = """Monomer A from ammmonia_methane_C3v """
TAGL['%s-%s-monoB-CP' % (dbse, 7)] = """Monomer B from ammmonia_methane_C3v """
TAGL['%s-%s-monoA-unCP' % (dbse, 7)] = """Monomer A from ammmonia_methane_C3v """
TAGL['%s-%s-monoB-unCP' % (dbse, 7)] = """Monomer B from ammmonia_methane_C3v """
TAGL['%s-%s' % (dbse, 8)] = """ methane_water_Cs """
TAGL['%s-%s-dimer' % (dbse, 8)] = """Dimer from methane_water_Cs """
TAGL['%s-%s-monoA-CP' % (dbse, 8)] = """Monomer A from methane_water_Cs """
TAGL['%s-%s-monoB-CP' % (dbse, 8)] = """Monomer B from methane_water_Cs """
TAGL['%s-%s-monoA-unCP' % (dbse, 8)] = """Monomer A from methane_water_Cs """
TAGL['%s-%s-monoB-unCP' % (dbse, 8)] = """Monomer B from methane_water_Cs """
TAGL['%s-%s' % (dbse, 9)] = """ formaldehyde_formaldehyde_Cs """
TAGL['%s-%s-dimer' % (dbse, 9)] = """Dimer from formaldehyde_formaldehyde_Cs """
TAGL['%s-%s-monoA-CP' % (dbse, 9)] = """Monomer A from formaldehyde_formaldehyde_Cs """
TAGL['%s-%s-monoB-CP' % (dbse, 9)] = """Monomer B from formaldehyde_formaldehyde_Cs """
TAGL['%s-%s-monoA-unCP' % (dbse, 9)] = """Monomer A from formaldehyde_formaldehyde_Cs """
TAGL['%s-%s-monoB-unCP' % (dbse, 9)] = """Monomer B from formaldehyde_formaldehyde_Cs """
TAGL['%s-%s' % (dbse, 10)] = """ ethene_wat_Cs """
TAGL['%s-%s-dimer' % (dbse, 10)] = """Dimer from ethene_wat_Cs """
TAGL['%s-%s-monoA-CP' % (dbse, 10)] = """Monomer A from ethene_wat_Cs """
TAGL['%s-%s-monoB-CP' % (dbse, 10)] = """Monomer B from ethene_wat_Cs """
TAGL['%s-%s-monoA-unCP' % (dbse, 10)] = """Monomer A from ethene_wat_Cs """
TAGL['%s-%s-monoB-unCP' % (dbse, 10)] = """Monomer B from ethene_wat_Cs """
TAGL['%s-%s' % (dbse, 11)] = """ ethene_formaldehyde_Cs """
TAGL['%s-%s-dimer' % (dbse, 11)] = """Dimer from ethene_formaldehyde_Cs """
TAGL['%s-%s-monoA-CP' % (dbse, 11)] = """Monomer A from ethene_formaldehyde_Cs """
TAGL['%s-%s-monoB-CP' % (dbse, 11)] = """Monomer B from ethene_formaldehyde_Cs """
TAGL['%s-%s-monoA-unCP' % (dbse, 11)] = """Monomer A from ethene_formaldehyde_Cs """
TAGL['%s-%s-monoB-unCP' % (dbse, 11)] = """Monomer B from ethene_formaldehyde_Cs """
TAGL['%s-%s' % (dbse, 12)] = """ ethyne_ethyne_C2v """
TAGL['%s-%s-dimer' % (dbse, 12)] = """Dimer from ethyne_ethyne_C2v """
TAGL['%s-%s-monoA-CP' % (dbse, 12)] = """Monomer A from ethyne_ethyne_C2v """
TAGL['%s-%s-monoB-CP' % (dbse, 12)] = """Monomer B from ethyne_ethyne_C2v """
TAGL['%s-%s-monoA-unCP' % (dbse, 12)] = """Monomer A from ethyne_ethyne_C2v """
TAGL['%s-%s-monoB-unCP' % (dbse, 12)] = """Monomer B from ethyne_ethyne_C2v """
TAGL['%s-%s' % (dbse, 13)] = """ ethene_ammonia_Cs """
TAGL['%s-%s-dimer' % (dbse, 13)] = """Dimer from ethene_ammonia_Cs """
TAGL['%s-%s-monoA-CP' % (dbse, 13)] = """Monomer A from ethene_ammonia_Cs """
TAGL['%s-%s-monoB-CP' % (dbse, 13)] = """Monomer B from ethene_ammonia_Cs """
TAGL['%s-%s-monoA-unCP' % (dbse, 13)] = """Monomer A from ethene_ammonia_Cs """
TAGL['%s-%s-monoB-unCP' % (dbse, 13)] = """Monomer B from ethene_ammonia_Cs """
TAGL['%s-%s' % (dbse, 14)] = """ ethene_ethene_C2v """
TAGL['%s-%s-dimer' % (dbse, 14)] = """Dimer from ethene_ethene_C2v """
TAGL['%s-%s-monoA-CP' % (dbse, 14)] = """Monomer A from ethene_ethene_C2v """
TAGL['%s-%s-monoB-CP' % (dbse, 14)] = """Monomer B from ethene_ethene_C2v """
TAGL['%s-%s-monoA-unCP' % (dbse, 14)] = """Monomer A from ethene_ethene_C2v """
TAGL['%s-%s-monoB-unCP' % (dbse, 14)] = """Monomer B from ethene_ethene_C2v """
TAGL['%s-%s' % (dbse, 15)] = """ methane_ethene_Cs """
TAGL['%s-%s-dimer' % (dbse, 15)] = """Dimer from methane_ethene_Cs """
TAGL['%s-%s-monoA-CP' % (dbse, 15)] = """Monomer A from methane_ethene_Cs """
TAGL['%s-%s-monoB-CP' % (dbse, 15)] = """Monomer B from methane_ethene_Cs """
TAGL['%s-%s-monoA-unCP' % (dbse, 15)] = """Monomer A from methane_ethene_Cs """
TAGL['%s-%s-monoB-unCP' % (dbse, 15)] = """Monomer B from methane_ethene_Cs """
TAGL['%s-%s' % (dbse, 16)] = """ borane_methane_Cs """
TAGL['%s-%s-dimer' % (dbse, 16)] = """Dimer from borane_methane_Cs """
TAGL['%s-%s-monoA-CP' % (dbse, 16)] = """Monomer A from borane_methane_Cs """
TAGL['%s-%s-monoB-CP' % (dbse, 16)] = """Monomer B from borane_methane_Cs """
TAGL['%s-%s-monoA-unCP' % (dbse, 16)] = """Monomer A from borane_methane_Cs """
TAGL['%s-%s-monoB-unCP' % (dbse, 16)] = """Monomer B from borane_methane_Cs """
TAGL['%s-%s' % (dbse, 17)] = """ methane_ethane_Cs """
TAGL['%s-%s-dimer' % (dbse, 17)] = """Dimer from methane_ethane_Cs """
TAGL['%s-%s-monoA-CP' % (dbse, 17)] = """Monomer A from methane_ethane_Cs """
TAGL['%s-%s-monoB-CP' % (dbse, 17)] = """Monomer B from methane_ethane_Cs """
TAGL['%s-%s-monoA-unCP' % (dbse, 17)] = """Monomer A from methane_ethane_Cs """
TAGL['%s-%s-monoB-unCP' % (dbse, 17)] = """Monomer B from methane_ethane_Cs """
TAGL['%s-%s' % (dbse, 18)] = """ methane_ethane_C3 """
TAGL['%s-%s-dimer' % (dbse, 18)] = """Dimer from methane_ethane_C3 """
TAGL['%s-%s-monoA-CP' % (dbse, 18)] = """Monomer A from methane_ethane_C3 """
TAGL['%s-%s-monoB-CP' % (dbse, 18)] = """Monomer B from methane_ethane_C3 """
TAGL['%s-%s-monoA-unCP' % (dbse, 18)] = """Monomer A from methane_ethane_C3 """
TAGL['%s-%s-monoB-unCP' % (dbse, 18)] = """Monomer B from methane_ethane_C3 """
TAGL['%s-%s' % (dbse, 19)] = """ methane_methane_D3d """
TAGL['%s-%s-dimer' % (dbse, 19)] = """Dimer from methane_methane_D3d """
TAGL['%s-%s-monoA-CP' % (dbse, 19)] = """Monomer A from methane_methane_D3d """
TAGL['%s-%s-monoB-CP' % (dbse, 19)] = """Monomer B from methane_methane_D3d """
TAGL['%s-%s-monoA-unCP' % (dbse, 19)] = """Monomer A from methane_methane_D3d """
TAGL['%s-%s-monoB-unCP' % (dbse, 19)] = """Monomer B from methane_methane_D3d """
TAGL['%s-%s' % (dbse, 20)] = """ methane_Ar_C3v """
TAGL['%s-%s-dimer' % (dbse, 20)] = """Dimer from methane_Ar_C3v """
TAGL['%s-%s-monoA-CP' % (dbse, 20)] = """Monomer A from methane_Ar_C3v """
TAGL['%s-%s-monoB-CP' % (dbse, 20)] = """Monomer B from methane_Ar_C3v """
TAGL['%s-%s-monoA-unCP' % (dbse, 20)] = """Monomer A from methane_Ar_C3v """
TAGL['%s-%s-monoB-unCP' % (dbse, 20)] = """Monomer B from methane_Ar_C3v """
TAGL['%s-%s' % (dbse, 21)] = """ ethene_Ar_C2v """
TAGL['%s-%s-dimer' % (dbse, 21)] = """Dimer from ethene_Ar_C2v """
TAGL['%s-%s-monoA-CP' % (dbse, 21)] = """Monomer A from ethene_Ar_C2v """
TAGL['%s-%s-monoB-CP' % (dbse, 21)] = """Monomer B from ethene_Ar_C2v """
TAGL['%s-%s-monoA-unCP' % (dbse, 21)] = """Monomer A from ethene_Ar_C2v """
TAGL['%s-%s-monoB-unCP' % (dbse, 21)] = """Monomer B from ethene_Ar_C2v """
TAGL['%s-%s' % (dbse, 22)] = """ ethene_ethyne_C2v """
TAGL['%s-%s-dimer' % (dbse, 22)] = """Dimer from ethene_ethyne_C2v """
TAGL['%s-%s-monoA-CP' % (dbse, 22)] = """Monomer A from ethene_ethyne_C2v """
TAGL['%s-%s-monoB-CP' % (dbse, 22)] = """Monomer B from ethene_ethyne_C2v """
TAGL['%s-%s-monoA-unCP' % (dbse, 22)] = """Monomer A from ethene_ethyne_C2v """
TAGL['%s-%s-monoB-unCP' % (dbse, 22)] = """Monomer B from ethene_ethyne_C2v """
TAGL['%s-%s' % (dbse, 23)] = """ ethene_ethene_D2h """
TAGL['%s-%s-dimer' % (dbse, 23)] = """Dimer from ethene_ethene_D2h """
TAGL['%s-%s-monoA-CP' % (dbse, 23)] = """Monomer A from ethene_ethene_D2h """
TAGL['%s-%s-monoB-CP' % (dbse, 23)] = """Monomer B from ethene_ethene_D2h """
TAGL['%s-%s-monoA-unCP' % (dbse, 23)] = """Monomer A from ethene_ethene_D2h """
TAGL['%s-%s-monoB-unCP' % (dbse, 23)] = """Monomer B from ethene_ethene_D2h """
TAGL['%s-%s' % (dbse, 24)] = """ ethyne_ethyne_D2h """
TAGL['%s-%s-dimer' % (dbse, 24)] = """Dimer from ethyne_ethyne_D2h """
TAGL['%s-%s-monoA-CP' % (dbse, 24)] = """Monomer A from ethyne_ethyne_D2h """
TAGL['%s-%s-monoB-CP' % (dbse, 24)] = """Monomer B from ethyne_ethyne_D2h """
TAGL['%s-%s-monoA-unCP' % (dbse, 24)] = """Monomer A from ethyne_ethyne_D2h """
TAGL['%s-%s-monoB-unCP' % (dbse, 24)] = """Monomer B from ethyne_ethyne_D2h """
# <<< Geometry Specification Strings >>>
GEOS = {}
GEOS['%s-%s-dimer' % (dbse, '1')] = qcdb.Molecule("""
0 1
O 0.00000000 -0.05786571 -1.47979303
H 0.00000000 0.82293384 -1.85541474
H 0.00000000 0.07949567 -0.51934253
--
0 1
N 0.00000000 0.01436394 1.46454628
H 0.00000000 -0.98104857 1.65344779
H -0.81348351 0.39876776 1.92934049
H 0.81348351 0.39876776 1.92934049
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '2')] = qcdb.Molecule("""
0 1
O -0.06699914 0.00000000 1.49435474
H 0.81573427 0.00000000 1.86586639
H 0.06885510 0.00000000 0.53914277
--
0 1
O 0.06254775 0.00000000 -1.42263208
H -0.40696540 -0.76017841 -1.77174450
H -0.40696540 0.76017841 -1.77174450
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '3')] = qcdb.Molecule("""
0 1
H 0.00000000 0.00000000 3.85521306
C 0.00000000 0.00000000 2.78649976
N 0.00000000 0.00000000 1.63150791
--
0 1
H 0.00000000 0.00000000 -0.59377492
C 0.00000000 0.00000000 -1.66809824
N 0.00000000 0.00000000 -2.82525056
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '4')] = qcdb.Molecule("""
0 1
H 0.00000000 0.80267982 1.69529329
F 0.00000000 -0.04596666 1.34034818
--
0 1
H 0.00000000 -0.12040787 -0.49082840
F 0.00000000 0.00976945 -1.40424978
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '5')] = qcdb.Molecule("""
0 1
N -0.04998129 -1.58709323 0.00000000
H 0.12296265 -2.16846018 0.81105976
H 0.12296265 -2.16846018 -0.81105976
H 0.65988580 -0.86235298 0.00000000
--
0 1
N 0.04998129 1.58709323 0.00000000
H -0.12296265 2.16846018 0.81105976
H -0.65988580 0.86235298 0.00000000
H -0.12296265 2.16846018 -0.81105976
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '6')] = qcdb.Molecule("""
0 1
C 0.00000000 -0.00000000 1.77071609
H 0.51593378 -0.89362352 1.42025061
H -0.00000000 0.00000000 2.85805859
H 0.51593378 0.89362352 1.42025061
H -1.03186756 0.00000000 1.42025061
--
0 1
H -0.00000000 0.00000000 -0.54877328
F -0.00000000 0.00000000 -1.46803256
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '7')] = qcdb.Molecule("""
0 1
N -0.00000000 0.00000000 1.84833659
H 0.93730979 -0.00000000 2.23206741
H -0.46865489 -0.81173409 2.23206741
H -0.46865489 0.81173409 2.23206741
--
0 1
H 0.00000000 -0.00000000 -0.94497174
C 0.00000000 -0.00000000 -2.03363752
H 0.51251439 0.88770096 -2.40095125
H 0.51251439 -0.88770096 -2.40095125
H -1.02502878 0.00000000 -2.40095125
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '8')] = qcdb.Molecule("""
0 1
C 0.00069016 0.00000000 -1.99985520
H -0.50741740 0.88759452 -2.37290605
H 1.03052749 0.00000000 -2.35282982
H -0.01314396 0.00000000 -0.91190852
H -0.50741740 -0.88759452 -2.37290605
--
0 1
O -0.00472553 0.00000000 1.71597466
H 0.03211863 0.75755459 2.30172044
H 0.03211863 -0.75755459 2.30172044
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '9')] = qcdb.Molecule("""
0 1
C 0.00000000 0.60123980 -1.35383976
O 0.00000000 -0.59301814 -1.55209021
H 0.93542250 1.17427624 -1.26515132
H -0.93542250 1.17427624 -1.26515132
--
0 1
C 0.00000000 -0.60200476 1.55228866
O 0.00000000 0.59238638 1.35511328
H 0.00000000 -1.00937982 2.57524635
H 0.00000000 -1.32002906 0.71694997
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '10')] = qcdb.Molecule("""
0 1
C 0.01058825 -0.66806246 1.29820809
C 0.01058825 0.66806246 1.29820809
H 0.86863216 1.23267933 0.95426815
H -0.84608285 1.23258495 1.64525385
H -0.84608285 -1.23258495 1.64525385
H 0.86863216 -1.23267933 0.95426815
--
0 1
H -0.79685627 0.00000000 -2.50911038
O 0.04347445 0.00000000 -2.04834054
H -0.19067546 0.00000000 -1.11576944
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '11')] = qcdb.Molecule("""
0 1
C 0.00000000 -0.59797089 1.47742864
C 0.00000000 0.42131196 2.33957848
H 0.92113351 -1.02957102 1.10653516
H -0.92113351 -1.02957102 1.10653516<|fim▁hole|>O 0.00000000 -0.51877334 -1.82845679
C 0.00000000 0.68616220 -1.73709412
H 0.00000000 1.33077474 -2.63186355
H 0.00000000 1.18902807 -0.75645498
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '12')] = qcdb.Molecule("""
0 1
C 0.00000000 0.60356400 -2.18173438
H 0.00000000 1.66847581 -2.18429610
C 0.00000000 -0.60356400 -2.18173438
H 0.00000000 -1.66847581 -2.18429610
--
0 1
C -0.00000000 0.00000000 1.57829513
H -0.00000000 0.00000000 0.51136193
C -0.00000000 0.00000000 2.78576543
H -0.00000000 0.00000000 3.85017859
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '13')] = qcdb.Molecule("""
0 1
C 0.00000000 -0.59662248 1.58722206
C 0.00000000 0.68258238 1.20494642
H 0.92312147 1.22423658 1.04062463
H -0.92312147 1.22423658 1.04062463
H -0.92388993 -1.13738548 1.75121281
H 0.92388993 -1.13738548 1.75121281
--
0 1
N 0.00000000 -0.00401379 -2.31096701
H -0.81122549 -0.45983060 -2.71043881
H 0.00000000 -0.22249432 -1.32128161
H 0.81122549 -0.45983060 -2.71043881
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '14')] = qcdb.Molecule("""
0 1
H 0.92444510 -1.23172221 -1.90619313
H -0.92444510 -1.23172221 -1.90619313
H -0.92444510 1.23172221 -1.90619313
H 0.92444510 1.23172221 -1.90619313
C 0.00000000 0.66728778 -1.90556520
C 0.00000000 -0.66728778 -1.90556520
--
0 1
H -0.00000000 1.23344948 2.82931792
H 0.00000000 1.22547148 0.97776199
H -0.00000000 -1.22547148 0.97776199
H -0.00000000 -1.23344948 2.82931792
C -0.00000000 -0.66711698 1.90601042
C -0.00000000 0.66711698 1.90601042
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '15')] = qcdb.Molecule("""
0 1
C 0.00000000 0.64634385 -1.60849815
C 0.00000000 -0.67914355 -1.45381675
H -0.92399961 -1.24016223 -1.38784883
H 0.92399961 -1.24016223 -1.38784883
H 0.92403607 1.20737602 -1.67357285
H -0.92403607 1.20737602 -1.67357285
--
0 1
H 0.00000000 0.08295411 1.59016711
C 0.00000000 0.02871509 2.67711785
H 0.88825459 0.52261990 3.06664029
H -0.88825459 0.52261990 3.06664029
H 0.00000000 -1.01394800 2.98955227
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '16')] = qcdb.Molecule("""
0 1
C 0.00346000 0.00000000 1.38045208
H 0.84849635 0.00000000 0.68958651
H 0.39513333 0.00000000 2.39584935
H -0.60268447 -0.88994299 1.22482674
H -0.60268447 0.88994299 1.22482674
--
0 1
B -0.00555317 0.00000000 -1.59887976
H 0.58455128 -1.03051800 -1.67949525
H 0.58455128 1.03051800 -1.67949525
H -1.18903148 0.00000000 -1.47677217
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '17')] = qcdb.Molecule("""
0 1
C 0.00000000 -0.06374421 2.42054090
H 0.00000000 1.02169396 2.34238038
H 0.88828307 -0.46131911 1.93307194
H -0.88828307 -0.46131911 1.93307194
H 0.00000000 -0.35363606 3.46945195
--
0 1
C 0.00000000 0.78133572 -1.13543912
H 0.00000000 1.37465349 -2.05114442
H -0.88043002 1.06310554 -0.55580918
C 0.00000000 -0.71332890 -1.44723686
H 0.88043002 1.06310554 -0.55580918
H 0.00000000 -1.30641812 -0.53140693
H -0.88100343 -0.99533072 -2.02587154
H 0.88100343 -0.99533072 -2.02587154
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '18')] = qcdb.Molecule("""
0 1
C -0.00000000 0.00000000 -2.85810471
H 0.39304720 -0.94712229 -2.49369739
H 0.62370837 0.81395000 -2.49369739
H -1.01675556 0.13317229 -2.49369739
H 0.00000000 -0.00000000 -3.94634214
--
0 1
C 0.00000000 -0.00000000 0.76143405
C -0.00000000 -0.00000000 2.28821715
H -0.61711193 -0.80824397 0.36571527
H -0.39140385 0.93855659 0.36571527
H 1.00851577 -0.13031262 0.36571527
H -1.00891703 0.13031295 2.68258296
H 0.39160418 -0.93890425 2.68258296
H 0.61731284 0.80859130 2.68258296
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '19')] = qcdb.Molecule("""
0 1
C -0.00000000 0.00000000 1.81901457
H 0.51274115 0.88809373 1.45476743
H 0.51274115 -0.88809373 1.45476743
H -1.02548230 0.00000000 1.45476743
H 0.00000000 -0.00000000 2.90722072
--
0 1
C 0.00000000 -0.00000000 -1.81901457
H -0.00000000 0.00000000 -2.90722072
H -0.51274115 0.88809373 -1.45476743
H -0.51274115 -0.88809373 -1.45476743
H 1.02548230 -0.00000000 -1.45476743
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '20')] = qcdb.Molecule("""
0 1
C -0.00000000 0.00000000 -2.62458428
H 0.51286762 0.88831278 -2.26110195
H 0.51286762 -0.88831278 -2.26110195
H -0.00000000 0.00000000 -3.71273928
H -1.02573525 0.00000000 -2.26110195
--
0 1
AR -0.00000000 0.00000000 1.05395172
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '21')] = qcdb.Molecule("""
0 1
C 0.00000000 0.66718073 -2.29024825
C 0.00000000 -0.66718073 -2.29024825
H -0.92400768 1.23202333 -2.28975239
H 0.92400768 1.23202333 -2.28975239
H -0.92400768 -1.23202333 -2.28975239
H 0.92400768 -1.23202333 -2.28975239
--
0 1
AR -0.00000000 0.00000000 1.60829261
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '22')] = qcdb.Molecule("""
0 1
H -0.92396100 1.23195600 -1.68478123
H 0.92396100 1.23195600 -1.68478123
H 0.92396100 -1.23195600 -1.68478123
H -0.92396100 -1.23195600 -1.68478123
C 0.00000000 0.66717600 -1.68478123
C 0.00000000 -0.66717600 -1.68478123
--
0 1
H -0.00000000 -1.66786500 1.81521877
H -0.00000000 1.66786500 1.81521877
C -0.00000000 -0.60339700 1.81521877
C -0.00000000 0.60339700 1.81521877
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '23')] = qcdb.Molecule("""
0 1
H -0.92396100 1.23195600 -1.75000000
H 0.92396100 1.23195600 -1.75000000
H 0.92396100 -1.23195600 -1.75000000
H -0.92396100 -1.23195600 -1.75000000
C 0.00000000 0.66717600 -1.75000000
C -0.00000000 -0.66717600 -1.75000000
--
0 1
H -0.92396100 1.23195600 1.75000000
H 0.92396100 1.23195600 1.75000000
H 0.92396100 -1.23195600 1.75000000
H -0.92396100 -1.23195600 1.75000000
C 0.00000000 0.66717600 1.75000000
C -0.00000000 -0.66717600 1.75000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, '24')] = qcdb.Molecule("""
0 1
H -0.00000000 -1.66786500 -1.75000000
H 0.00000000 1.66786500 -1.75000000
C -0.00000000 -0.60339700 -1.75000000
C 0.00000000 0.60339700 -1.75000000
--
0 1
H -0.00000000 -1.66786500 1.75000000
H 0.00000000 1.66786500 1.75000000
C -0.00000000 -0.60339700 1.75000000
C 0.00000000 0.60339700 1.75000000
units angstrom
""")
# <<< Derived Geometry Strings >>>
for rxn in HRXN:
GEOS['%s-%s-monoA-unCP' % (dbse, rxn)] = GEOS['%s-%s-dimer' % (dbse, rxn)].extract_fragments(1)
GEOS['%s-%s-monoB-unCP' % (dbse, rxn)] = GEOS['%s-%s-dimer' % (dbse, rxn)].extract_fragments(2)
GEOS['%s-%s-monoA-CP' % (dbse, rxn)] = GEOS['%s-%s-dimer' % (dbse, rxn)].extract_fragments(1, 2)
GEOS['%s-%s-monoB-CP' % (dbse, rxn)] = GEOS['%s-%s-dimer' % (dbse, rxn)].extract_fragments(2, 1)<|fim▁end|> | H -0.92393815 0.85124826 2.70694633
H 0.92393815 0.85124826 2.70694633
--
0 1 |
<|file_name|>export-declarations.selector.ts<|end_file_name|><|fim▁begin|>import { Injectable } from "@angular/core";
import { InjectionFactory } from "../../L0/L0.injection-factory/injection-factory";
import { createSelector } from "../../L4/L4.ngrx/create-selector";
import { StatementsSelector } from "./statements.selector";<|fim▁hole|>export class ExportDeclarationsSelector implements InjectionFactory {
constructor(private readonly statementsSelector: StatementsSelector) {
return this.factory() as any;
}
factory() {
return createSelector(
this.statementsSelector,
statements => statements
.filter(({kind}) => kind === SyntaxKind.ExportDeclaration)
.map(item => item as ExportDeclaration)
);
}
}<|fim▁end|> | import { ExportDeclaration, SyntaxKind } from "typescript";
@Injectable() |
<|file_name|>package_commands.py<|end_file_name|><|fim▁begin|>import glob
import os
from qgis.core import QgsApplication
from command import command, complete_with
from PyQt4.QtCore import QUrl
from PyQt4.QtGui import QDesktopServices
folder = os.path.join(QgsApplication.qgisSettingsDirPath(), "python",
"commandbar")
def packages(argname, data):
return [os.path.basename(f) for f in glob.glob(folder + "/*.py")]
@command("Package name")
@complete_with(packagename=packages)
def edit_package(packagename):
"""
Edit a package file
"""
packagepath = os.path.join(folder, packagename)
if not packagename.endswith(".py"):
packagepath += ".py"
open_file(packagepath)
def open_file(path):
import subprocess
try:
subprocess.Popen([os.environ['EDITOR'], path])
except KeyError:
QDesktopServices.openUrl(QUrl.fromLocalFile(path))
@command("Package name")
def define_package(packagename):
"""
Define new command bar package file
"""
packagename = packagename.replace(" ", "_")
packagepath = os.path.join(folder, packagename) + ".py"
with open(packagepath, 'w') as f:
f.write("""# Package file for QGIS command bar plugin
from qgis.core import QgsMapLayerRegistry, QgsVectorLayer, QgsApplication
from qgis.utils import iface
from qgiscommand.command import command, complete_with, check
from qgiscommand.qgis_commands import layer_by_name, layers
<|fim▁hole|>@command("Prompt")
def {0}_package_function(arg1):
pass
""".format(packagename))
open_file(packagepath)<|fim▁end|> | |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 Corey Farwell
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Library for serializing the RSS web content syndication format
//!
//! # Examples
//!
//! ## Writing
//!
//! ```
//! use rss::{Channel, Item, Rss};
//!
//! let item = Item {
//! title: Some(String::from("Ford hires Elon Musk as CEO")),
//! pub_date: Some(String::from("01 Apr 2019 07:30:00 GMT")),
//! description: Some(String::from("In an unprecedented move, Ford hires Elon Musk.")),
//! ..Default::default()
//! };
//!
//! let channel = Channel {
//! title: String::from("TechCrunch"),
//! link: String::from("http://techcrunch.com"),
//! description: String::from("The latest technology news and information on startups"),
//! items: vec![item],
//! ..Default::default()
//! };
//!
//! let rss = Rss(channel);
//!
//! let rss_string = rss.to_string();
//! ```
//!
//! ## Reading
//!
//! ```
//! use rss::Rss;
//!
//! let rss_str = r#"
//! <?xml version="1.0" encoding="UTF-8"?>
//! <rss version="2.0">
//! <channel>
//! <title>TechCrunch</title>
//! <link>http://techcrunch.com</link>
//! <description>The latest technology news and information on startups</description>
//! <item>
//! <title>Ford hires Elon Musk as CEO</title>
//! <pubDate>01 Apr 2019 07:30:00 GMT</pubDate>
//! <description>In an unprecedented move, Ford hires Elon Musk.</description>
//! </item>
//! </channel>
//! </rss>
//! "#;
//!
//! let rss = rss_str.parse::<Rss>().unwrap();
//! ```
mod category;
mod channel;
mod item;
mod text_input;
extern crate xml;
use std::ascii::AsciiExt;
use std::str::FromStr;
use xml::{Element, ElementBuilder, Parser, Xml};
pub use ::category::Category;
pub use ::channel::Channel;
pub use ::item::Item;
pub use ::text_input::TextInput;
trait ElementUtils {
fn tag_with_text(&mut self, child_name: &'static str, child_body: &str);
fn tag_with_optional_text(&mut self, child_name: &'static str, child_body: &Option<String>);
}
impl ElementUtils for Element {
fn tag_with_text(&mut self, child_name: &'static str, child_body: &str) {
self.tag(elem_with_text(child_name, child_body));
}
fn tag_with_optional_text(&mut self, child_name: &'static str, child_body: &Option<String>) {
if let Some(ref c) = *child_body {
self.tag_with_text(child_name, &c);
}
}
}
fn elem_with_text(tag_name: &'static str, chars: &str) -> Element {
let mut elem = Element::new(tag_name.to_string(), None, vec![]);
elem.text(chars.to_string());
elem
}
trait ViaXml {
fn to_xml(&self) -> Element;
fn from_xml(elem: Element) -> Result<Self, &'static str>;
}
/// [RSS 2.0 Specification § What is RSS]
/// (http://cyber.law.harvard.edu/rss/rss.html#whatIsRss)
#[derive(Default, Debug, Clone)]
pub struct Rss(pub Channel);
impl ViaXml for Rss {
fn to_xml(&self) -> Element {
let mut rss = Element::new("rss".to_string(), None, vec![("version".to_string(), None, "2.0".to_string())]);
let &Rss(ref channel) = self;
rss.tag(channel.to_xml());
rss
}
fn from_xml(rss_elem: Element) -> Result<Self, &'static str> {
if rss_elem.name.to_ascii_lowercase() != "rss" {<|fim▁hole|> Some(elem) => elem,
None => return Err("No <channel> element found in <rss>"),
};
let channel = try!(ViaXml::from_xml(channel_elem.clone()));
Ok(Rss(channel))
}
}
impl FromStr for Rss {
type Err = &'static str;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut parser = Parser::new();
parser.feed_str(&s);
let mut builder = ElementBuilder::new();
for event in parser {
if let Some(Ok(elem)) = builder.handle_event(event) {
return ViaXml::from_xml(elem);
}
}
Err("RSS read error")
}
}
impl ToString for Rss {
fn to_string(&self) -> String {
let mut ret = format!("{}", Xml::PINode("xml version='1.0' encoding='UTF-8'".to_string()));
ret.push_str(&format!("{}", self.to_xml()));
ret
}
}
#[cfg(test)]
mod test {
use std::default::Default;
use std::fs::File;
use std::io::Read;
use std::str::FromStr;
use super::{Rss, Item, Channel};
#[test]
fn test_basic_to_string() {
let item = Item {
title: Some("My first post!".to_string()),
link: Some("http://myblog.com/post1".to_string()),
description: Some("This is my first post".to_string()),
..Default::default()
};
let channel = Channel {
title: "My Blog".to_string(),
link: "http://myblog.com".to_string(),
description: "Where I write stuff".to_string(),
items: vec![item],
..Default::default()
};
let rss = Rss(channel);
assert_eq!(rss.to_string(), "<?xml version=\'1.0\' encoding=\'UTF-8\'?><rss version=\'2.0\'><channel><title>My Blog</title><link>http://myblog.com</link><description>Where I write stuff</description><item><title>My first post!</title><link>http://myblog.com/post1</link><description>This is my first post</description></item></channel></rss>");
}
#[test]
fn test_from_file() {
let mut file = File::open("test-data/pinboard.xml").unwrap();
let mut rss_string = String::new();
file.read_to_string(&mut rss_string).unwrap();
let rss = Rss::from_str(&rss_string).unwrap();
assert!(rss.to_string().len() > 0);
}
#[test]
fn test_read_no_channels() {
let rss_str = "<rss></rss>";
assert!(Rss::from_str(rss_str).is_err());
}
#[test]
fn test_read_one_channel_no_properties() {
let rss_str = "\
<rss>\
<channel>\
</channel>\
</rss>";
assert!(Rss::from_str(rss_str).is_err());
}
#[test]
fn test_read_one_channel() {
let rss_str = "\
<rss>\
<channel>\
<title>Hello world!</title>\
<description></description>\
<link></link>\
</channel>\
</rss>";
let Rss(channel) = Rss::from_str(rss_str).unwrap();
assert_eq!("Hello world!", channel.title);
}
#[test]
fn test_read_text_input() {
let rss_str = "\
<rss>\
<channel>\
<title></title>\
<description></description>\
<link></link>\
<textInput>\
<title>Foobar</title>\
<description></description>\
<name></name>\
<link></link>\
</textInput>\
</channel>\
</rss>";
let Rss(channel) = Rss::from_str(rss_str).unwrap();
assert_eq!("Foobar", channel.text_input.unwrap().title);
}
// Ensure reader ignores the PI XML node and continues to parse the RSS
#[test]
fn test_read_with_pinode() {
let rss_str = "\
<?xml version=\'1.0\' encoding=\'UTF-8\'?>\
<rss>\
<channel>\
<title>Title</title>\
<link></link>\
<description></description>\
</channel>\
</rss>";
let Rss(channel) = Rss::from_str(rss_str).unwrap();
assert_eq!("Title", channel.title);
}
}<|fim▁end|> | return Err("Top element is not <rss>, most likely not an RSS feed");
}
let channel_elem = match rss_elem.get_child("channel", None) { |
<|file_name|>win_reboot.py<|end_file_name|><|fim▁begin|># (c) 2016, Matt Davis <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import time
from datetime import datetime, timedelta
from ansible.errors import AnsibleError
from ansible.plugins.action import ActionBase
from ansible.module_utils._text import to_native
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class TimedOutException(Exception):
pass
class ActionModule(ActionBase):
TRANSFERS_FILES = False
DEFAULT_REBOOT_TIMEOUT = 600
DEFAULT_CONNECT_TIMEOUT = 5
DEFAULT_PRE_REBOOT_DELAY = 2
DEFAULT_POST_REBOOT_DELAY = 0
DEFAULT_TEST_COMMAND = 'whoami'
DEFAULT_REBOOT_MESSAGE = 'Reboot initiated by Ansible.'
def get_system_uptime(self):
uptime_command = "(Get-WmiObject -ClassName Win32_OperatingSystem).LastBootUpTime"
(rc, stdout, stderr) = self._connection.exec_command(uptime_command)
if rc != 0:
raise Exception("win_reboot: failed to get host uptime info, rc: %d, stdout: %s, stderr: %s"
% (rc, stdout, stderr))
return stdout
def do_until_success_or_timeout(self, what, timeout, what_desc, fail_sleep=1):
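# Retries `what` until it succeeds or `timeout` seconds elapse, sleeping
# `fail_sleep` seconds between attempts; raises TimedOutException carrying
# the last error text if the deadline passes.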
max_end_time = datetime.utcnow() + timedelta(seconds=timeout)
exc = ""
while datetime.utcnow() < max_end_time:
try:
what()
if what_desc:
display.debug("win_reboot: %s success" % what_desc)
return
except Exception as e:
exc = e
if what_desc:
display.debug("win_reboot: %s fail (expected), retrying in %d seconds..." % (what_desc, fail_sleep))
time.sleep(fail_sleep)
raise TimedOutException("timed out waiting for %s: %s" % (what_desc, exc))
def run(self, tmp=None, task_vars=None):
self._supports_check_mode = True
self._supports_async = True
if self._play_context.check_mode:
return dict(changed=True, elapsed=0, rebooted=True)
if task_vars is None:
task_vars = dict()
result = super(ActionModule, self).run(tmp, task_vars)
if result.get('skipped', False) or result.get('failed', False):
return result
# Handle timeout parameters and its alias
deprecated_args = {
'shutdown_timeout': '2.5',
'shutdown_timeout_sec': '2.5',
}
for arg, version in deprecated_args.items():
if self._task.args.get(arg) is not None:
display.warning("Since Ansible %s, %s is no longer used with win_reboot" % (arg, version))
if self._task.args.get('connect_timeout') is not None:
connect_timeout = int(self._task.args.get('connect_timeout', self.DEFAULT_CONNECT_TIMEOUT))
else:
connect_timeout = int(self._task.args.get('connect_timeout_sec', self.DEFAULT_CONNECT_TIMEOUT))
if self._task.args.get('reboot_timeout') is not None:
reboot_timeout = int(self._task.args.get('reboot_timeout', self.DEFAULT_REBOOT_TIMEOUT))
else:
reboot_timeout = int(self._task.args.get('reboot_timeout_sec', self.DEFAULT_REBOOT_TIMEOUT))
if self._task.args.get('pre_reboot_delay') is not None:
pre_reboot_delay = int(self._task.args.get('pre_reboot_delay', self.DEFAULT_PRE_REBOOT_DELAY))
else:
pre_reboot_delay = int(self._task.args.get('pre_reboot_delay_sec', self.DEFAULT_PRE_REBOOT_DELAY))
if self._task.args.get('post_reboot_delay') is not None:
post_reboot_delay = int(self._task.args.get('post_reboot_delay', self.DEFAULT_POST_REBOOT_DELAY))<|fim▁hole|> else:
post_reboot_delay = int(self._task.args.get('post_reboot_delay_sec', self.DEFAULT_POST_REBOOT_DELAY))
test_command = str(self._task.args.get('test_command', self.DEFAULT_TEST_COMMAND))
msg = str(self._task.args.get('msg', self.DEFAULT_REBOOT_MESSAGE))
# Get current uptime
try:
before_uptime = self.get_system_uptime()
except Exception as e:
result['failed'] = True
result['reboot'] = False
result['msg'] = to_native(e)
return result
# Initiate reboot
display.vvv("rebooting server")
(rc, stdout, stderr) = self._connection.exec_command('shutdown /r /t %d /c "%s"' % (pre_reboot_delay, msg))
# Test for "A system shutdown has already been scheduled. (1190)" and handle it gracefully
if rc == 1190:
display.warning('A scheduled reboot was pre-empted by Ansible.')
# Try to abort (this may fail if it was already aborted)
(rc, stdout1, stderr1) = self._connection.exec_command('shutdown /a')
# Initiate reboot again
(rc, stdout2, stderr2) = self._connection.exec_command('shutdown /r /t %d' % pre_reboot_delay)
stdout += stdout1 + stdout2
stderr += stderr1 + stderr2
if rc != 0:
result['failed'] = True
result['rebooted'] = False
result['msg'] = "Shutdown command failed, error text was %s" % stderr
return result
start = datetime.now()
# Get the original connection_timeout option var so it can be reset after
connection_timeout_orig = None
try:
connection_timeout_orig = self._connection.get_option('connection_timeout')
except AnsibleError:
display.debug("win_reboot: connection_timeout connection option has not been set")
try:
# keep on checking system uptime with short connection responses
def check_uptime():
display.vvv("attempting to get system uptime")
# override connection timeout from defaults to custom value
try:
self._connection.set_options(direct={"connection_timeout": connect_timeout})
self._connection._reset()
except AttributeError:
display.warning("Connection plugin does not allow the connection timeout to be overridden")
# try and get uptime
try:
current_uptime = self.get_system_uptime()
except Exception as e:
raise e
if current_uptime == before_uptime:
raise Exception("uptime has not changed")
self.do_until_success_or_timeout(check_uptime, reboot_timeout, what_desc="reboot uptime check success")
# reset the connection to clear the custom connection timeout
try:
self._connection.set_options(direct={"connection_timeout": connection_timeout_orig})
self._connection._reset()
except (AnsibleError, AttributeError):
display.debug("Failed to reset connection_timeout back to default")
# finally run test command to ensure everything is working
def run_test_command():
display.vvv("attempting post-reboot test command '%s'" % test_command)
(rc, stdout, stderr) = self._connection.exec_command(test_command)
if rc != 0:
raise Exception('test command failed')
# FUTURE: add a stability check (system must remain up for N seconds) to deal with self-multi-reboot updates
self.do_until_success_or_timeout(run_test_command, reboot_timeout, what_desc="post-reboot test command success")
result['rebooted'] = True
result['changed'] = True
except TimedOutException as toex:
result['failed'] = True
result['rebooted'] = True
result['msg'] = to_native(toex)
if post_reboot_delay != 0:
display.vvv("win_reboot: waiting an additional %d seconds" % post_reboot_delay)
time.sleep(post_reboot_delay)
elapsed = datetime.now() - start
result['elapsed'] = elapsed.seconds
return result<|fim▁end|> | |
<|file_name|>module-routing.module.ts<|end_file_name|><|fim▁begin|>import {{ NgModule }} from '@angular/core';
import {{ Routes, RouterModule }} from '@angular/router';
import {{ {Resource}EditComponent }} from './{resources}-edit.component';
const routes: Routes = [
{{
path: '', component: {Resource}EditComponent
}}
];
@NgModule({{
imports: [RouterModule.forChild(routes)],
exports: [RouterModule]
}})
export class {Resource}EditRoutingModule {{<|fim▁hole|><|fim▁end|> | }} |
<|file_name|>gc_util.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#------------------------------------------------------------------------------
# Copyright (C) 2013 Albert Simenon
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#------------------------------------------------------------------------------
""" Gecoaching.com python utility """
__filename__ = "gc_util.py"
__version__ = "0.0.3"
__author__ = "Albert Simenon"
__email__ = "[email protected]"
__purpose__ = "Utility to download pocket queries from www.geocaching.com"
__date__ = "20/12/2013"
import argparse
import os
import progressbar
import urllib2
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
CHROMEDRIVER = "driver/chromedriver"
MAX_CACHES_PER_POCKET_QUERY = 950
MAX_CACHES_LAST_POCKET_QUERY = 500
BROWSERS = ["phantomjs","chrome","firefox","iexplorer"]
class GCSite:
""" Geocaching.com web browser class """
BASE_URL = "http://www.geocaching.com"
LOGIN_URL = "%s/login" % (BASE_URL)
POCKET_QUERY_URL = "%s/pocket" % (BASE_URL)
CHUNK_SIZE = 1024
XPATH_DOWNLOADPQ = "//a[contains(@href,'downloadpq')]"
def __init__(self, driver, args):
self.driver = driver
self.args = args
def login(self):
""" Login on Geocaching.com """
self.driver.get(self.LOGIN_URL)
element = self.driver.find_element_by_id("ctl00_ContentBody_tbUsername")
element.send_keys(self.args.user)
element = self.driver.find_element_by_id("ctl00_ContentBody_tbPassword")
element.send_keys(self.args.password)
element = self.driver.find_element_by_id("ctl00_ContentBody_btnSignIn")
element.click()
def download_pq_by_element(self, element):
""" Download pocket query with selenium webelement """
url = element.get_attribute("href")
filename = "%s.zip" % (element.get_attribute("text").strip())
opener = urllib2.build_opener()
cookies = self.driver.get_cookies()
if cookies:
cookiestring = ''
for cookie in cookies:
cookiestring += "%s=%s;" % (cookie["name"], cookie["value"])
opener.addheaders.append(
('Cookie', cookiestring))
fhandle = opener.open(url)
total_size = int(fhandle.info().getheader('Content-Length').strip())
pbar = progressbar.ProgressBar(maxval=total_size).start()
print filename
with open(self.args.output + filename, 'wb') as foutput:
while True:
data = fhandle.read(self.CHUNK_SIZE)
if not data:
break
foutput.write(data)
pbar.update(foutput.tell())
pbar.finish()
def download_pocket_queries(self):
""" Download all pocket queries on geocaching.com """
self.driver.get(self.POCKET_QUERY_URL)
elements = self.driver.find_elements_by_xpath(self.XPATH_DOWNLOADPQ)
if elements:
for element in elements:
self.download_pq_by_element(element)
else:
print "No pocket queries available to download !"
def arg_parser():<|fim▁hole|> parser = argparse.ArgumentParser()
parser.formatter_class = argparse.RawDescriptionHelpFormatter
parser.description = "%s, version %s by %s (%s)\n\n%s" \
% (__filename__,__version__,__author__,__email__,__purpose__)
parser.add_argument(
"--browser","-b",
choices=BROWSERS,
default=BROWSERS[0],
help="browser used for visiting geocaching.com")
parser.add_argument(
"--download",
action="store_true",
help="download pocket queries")
parser.add_argument(
"--user","-u",
required=True,
help="Geocaching.com username")
parser.add_argument(
"--password","-p",
required=True,
help="Geocaching.com password")
parser.add_argument(
"--output","-o",
default="",
help="output directory")
args = parser.parse_args()
return args
def main():
""" Obviously the main routine """
args = arg_parser()
if args.browser == BROWSERS[0]:
user_agent = (
"Mozilla/5.0 (X11; Linux x86_64) " +
"AppleWebKit/537.36 (KHTML, like Gecko) " +
"Chrome/31.0.1650.63 Safari/537.36")
dcap = dict(DesiredCapabilities.PHANTOMJS)
dcap["phantomjs.page.settings.userAgent"] = user_agent
driver = webdriver.PhantomJS(desired_capabilities=dcap)
elif args.browser == BROWSERS[1]:
driver = webdriver.Chrome()
driver.set_window_size(800, 400)
elif args.browser == BROWSERS[2]:
driver = webdriver.Firefox()
elif args.browser == BROWSERS[3]:
driver = webdriver.Ie()
if args.download:
site = GCSite(driver, args)
site.login()
site.download_pocket_queries()
driver.quit()
if __name__ == "__main__":
main()<|fim▁end|> | """ Argument parser """ |
<|file_name|>test_rowdatacollection.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import unittest
from ship.datastructures import rowdatacollection as rdc
from ship.datastructures import dataobject as do
from ship.fmp.datunits import ROW_DATA_TYPES as rdt
class RowDataCollectionTests(unittest.TestCase):
def setUp(self):
# Create some objects to use and add a couple of rows
# create chainage in position 1
self.obj1 = do.FloatData(rdt.CHAINAGE, format_str='{:>10}', default=None, no_of_dps=3)
self.obj1.data_collection.append(0.00)
self.obj1.data_collection.append(3.65)
# Create elevation in position 2
self.obj2 = do.FloatData(rdt.ELEVATION, format_str='{:>10}', default=None, no_of_dps=3)
self.obj2.data_collection.append(32.345)
self.obj2.data_collection.append(33.45)
# Create roughness in position 3
self.obj3 = do.FloatData(rdt.ROUGHNESS, format_str='{:>10}', default=None, no_of_dps=3)
self.obj3.data_collection.append(0.035)
self.obj3.data_collection.append(0.035)
self.testcol = rdc.RowDataCollection()
self.testcol._collection.append(self.obj1)
self.testcol._collection.append(self.obj2)
self.testcol._collection.append(self.obj3)
def test_initCollection(self):
'''
'''
# Create a dummy collection
obj1 = do.FloatData(rdt.CHAINAGE, format_str='{:>10}', default=None, no_of_dps=3)<|fim▁hole|> localcol._collection.append(obj1)
localcol._collection.append(obj2)
localcol._collection.append(obj3)
# Initialise a real collection
col = rdc.RowDataCollection()
col.addToCollection(do.FloatData(rdt.CHAINAGE, format_str='{:>10}', default=None, no_of_dps=3))
col.addToCollection(do.FloatData(rdt.ELEVATION, format_str='{:>10}', default=None, no_of_dps=3))
col.addToCollection(do.FloatData(rdt.ROUGHNESS, format_str='{:>10}', default=0.0, no_of_dps=3))
# Check that they're the same
col_eq, msg = self.checkCollectionEqual(localcol, col)
self.assertTrue(col_eq, 'rdc.RowDataCollection initialisation fail - ' + msg)
def test_bulkInitCollection(self):
objs = [
do.FloatData(rdt.CHAINAGE, format_str='{:>10}', default=None, no_of_dps=3),
do.FloatData(rdt.ELEVATION, format_str='{:>10}', default=None, no_of_dps=3),
do.FloatData(rdt.ROUGHNESS, format_str='{:>10}', default=0.0, no_of_dps=3),
]
col = rdc.RowDataCollection.bulkInitCollection(objs)
localcol = rdc.RowDataCollection()
localcol._collection.append(objs[0])
localcol._collection.append(objs[1])
localcol._collection.append(objs[2])
# Check they're the same
col_eq, msg = self.checkCollectionEqual(localcol, col)
self.assertTrue(col_eq, 'rdc.RowDataCollection initialisation fail - ' + msg)
def checkCollectionEqual(self, c1, c2):
'''Check the two given collections to make sure that they contain the same data.
@param c1: First rdc.RowDataCollection object
@param c2: Second rdc.RowDataCollection object
@return: True if they're equal False and reason if not.
'''
if not len(c1._collection) == len(c2._collection):
return False, 'Collections are different lengths'
for i in range(0, len(c1._collection)):
if not c1._collection[i].data_type == c2._collection[i].data_type:
return False, 'Collections have different data_types'
if not c1._collection[i].format_str == c2._collection[i].format_str:
return False, 'Collections have different format_str'
if not c1._collection[i].default == c2._collection[i].default:
return False, 'Collections have different default'
for j in range(0, len(c1._collection[i].data_collection)):
if not c1._collection[i].data_collection[j] == c2._collection[i].data_collection[j]:
return False, 'Collections have different data'
return True, ''
def test_indexOfDataObject(self):
"""Should return the corrent index of a particular ADataObject in colleciton."""
index1 = self.testcol.indexOfDataObject(rdt.CHAINAGE)
index2 = self.testcol.indexOfDataObject(rdt.ELEVATION)
index3 = self.testcol.indexOfDataObject(rdt.ROUGHNESS)
self.assertEquals(index1, 0)
self.assertEquals(index2, 1)
self.assertEquals(index3, 2)
def test_iterateRows(self):
"""Test generator for complete row as a list"""
testrows = [
[0.00, 32.345, 0.035],
[3.65, 33.45, 0.035],
]
i = 0
for row in self.testcol.iterateRows():
self.assertListEqual(row, testrows[i])
i += 1
def test_iterateRowsWithKey(self):
"""Test generator for a single DataObject"""
testrows = [
32.345,
33.45,
]
i = 0
for row in self.testcol.iterateRows(rdt.ELEVATION):
self.assertEqual(row, testrows[i])
i += 1
def test_rowAsDict(self):
"""Shoud return a row as a dict of single values."""
test_dict = {rdt.CHAINAGE: 0.00, rdt.ELEVATION: 32.345, rdt.ROUGHNESS: 0.035}
row = self.testcol.rowAsDict(0)
self.assertDictEqual(row, test_dict)
def test_rowAsList(self):
test_list = [0.00, 32.345, 0.035]
row = self.testcol.rowAsList(0)
self.assertListEqual(row, test_list)
def test_dataObject(self):
"""Should return the correct ADataObject."""
test_vals = [0.00, 3.65]
obj = self.testcol.dataObject(rdt.CHAINAGE)
self.assertEqual(obj.data_type, rdt.CHAINAGE)
for i, o in enumerate(obj):
self.assertEqual(o, test_vals[i])
def test_dataObjectAsList(self):
"""Should return the contents of a DataObject as a list."""
test_list = [0.00, 3.65]
obj_list = self.testcol.dataObjectAsList(rdt.CHAINAGE)
self.assertListEqual(obj_list, test_list)
def test_toList(self):
test_list = [
[0.00, 3.65],
[32.345, 33.45],
[0.035, 0.035]
]
row_list = self.testcol.toList()
self.assertListEqual(row_list, test_list)
def test_toDict(self):
test_dict = {
rdt.CHAINAGE: [0.00, 3.65],
rdt.ELEVATION: [32.345, 33.45],
rdt.ROUGHNESS: [0.035, 0.035],
}
row_dict = self.testcol.toDict()
self.assertDictEqual(row_dict, test_dict)
def test_addValue(self):
# Initialise a real collection
col = rdc.RowDataCollection()
col.addToCollection(do.FloatData(rdt.CHAINAGE, format_str='{:>10}', default=None, no_of_dps=3))
col.addToCollection(do.FloatData(rdt.ELEVATION, format_str='{:>10}', default=None, no_of_dps=3))
col.addToCollection(do.FloatData(rdt.ROUGHNESS, format_str='{:>10}', default=0.0, no_of_dps=3))
col._addValue(rdt.CHAINAGE, 2.5)
self.assertEqual(col._collection[0][0], 2.5)
def test_setValue(self):
# Initialise a real collection
col = rdc.RowDataCollection()
col.addToCollection(do.FloatData(rdt.CHAINAGE, format_str='{:>10}', default=None, no_of_dps=3))
col.addToCollection(do.FloatData(rdt.ELEVATION, format_str='{:>10}', default=None, no_of_dps=3))
col.addToCollection(do.FloatData(rdt.ROUGHNESS, format_str='{:>10}', default=0.0, no_of_dps=3))
col._collection[0].addValue(2.5)
self.assertEqual(col._collection[0][0], 2.5)
col._setValue(rdt.CHAINAGE, 3.5, 0)
self.assertEqual(col._collection[0][0], 3.5)
def test_getPrintableRow(self):
test_row = ' 0.000 32.345 0.035'
row = self.testcol.getPrintableRow(0)
self.assertEqual(row, test_row)
def test_updateRow(self):
new_row = {rdt.CHAINAGE: 0.1, rdt.ELEVATION: 40, rdt.ROUGHNESS: 0.06}
self.testcol.updateRow(new_row, 0)
row = self.testcol.rowAsDict(0)
self.assertDictEqual(row, new_row)
with self.assertRaises(IndexError):
self.testcol.updateRow(new_row, 3)
fake_row = {'fakekey': 4.3, 'andagain': 3454}
with self.assertRaises(KeyError):
self.testcol.updateRow(fake_row, 0)
def test_addRow(self):
# Initialise a real collection
col = rdc.RowDataCollection()
col.addToCollection(do.FloatData(rdt.CHAINAGE, format_str='{:>10}', default=None, no_of_dps=3))
col.addToCollection(do.FloatData(rdt.ELEVATION, format_str='{:>10}', default=None, no_of_dps=3))
col.addToCollection(do.FloatData(rdt.ROUGHNESS, format_str='{:>10}', default=0.0, no_of_dps=3))
new_row = {rdt.CHAINAGE: 3.0, rdt.ELEVATION: 41, rdt.ROUGHNESS: 0.06}
new_row2 = {rdt.CHAINAGE: 6.0, rdt.ELEVATION: 42, rdt.ROUGHNESS: 0.07}
new_row3 = {rdt.CHAINAGE: 10.0, rdt.ELEVATION: 43, rdt.ROUGHNESS: 0.08}
new_row4 = {rdt.CHAINAGE: 20.0, rdt.ELEVATION: 44, rdt.ROUGHNESS: 0.09}
# append and insert rows
col.addRow(new_row2)
col.addRow(new_row, 0)
# append and insert again
col.addRow(new_row4)
col.addRow(new_row3, 2)
row = col.rowAsDict(0)
row2 = col.rowAsDict(1)
row3 = col.rowAsDict(2)
row4 = col.rowAsDict(3)
self.assertDictEqual(row, new_row)
self.assertDictEqual(row2, new_row2)
fake_row = {59: 4.3}
with self.assertRaises(KeyError):
col.addRow(fake_row)
def test_numberOfRows(self):
self.assertEqual(self.testcol.numberOfRows(), 2)
def test_deleteRow(self):
test_list = [3.65, 33.45, 0.035]
self.testcol.deleteRow(0)
self.assertEqual(self.testcol.numberOfRows(), 1)
row = self.testcol.rowAsList(0)
self.assertListEqual(row, test_list)<|fim▁end|> | obj2 = do.FloatData(rdt.ELEVATION, format_str='{:>10}', default=None, no_of_dps=3)
obj3 = do.FloatData(rdt.ROUGHNESS, format_str='{:>10}', default=0.0, no_of_dps=3)
localcol = rdc.RowDataCollection() |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import logging
from flask_babel import lazy_gettext
import superdesk
from .broadcast import ArchiveBroadcastResource, ArchiveBroadcastService, ARCHIVE_BROADCAST_NAME
logger = logging.getLogger(__name__)
def init_app(app) -> None:
endpoint_name = ARCHIVE_BROADCAST_NAME
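# Wire the broadcast service and REST resource into the app, then declare the privilege that guards it.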
service = ArchiveBroadcastService(endpoint_name, backend=superdesk.get_backend())
ArchiveBroadcastResource(endpoint_name, app=app, service=service)
superdesk.privilege(
name=ARCHIVE_BROADCAST_NAME,
label=lazy_gettext("Broadcast"),
description=lazy_gettext("Allows user to create broadcast content."),
)<|fim▁end|> | # -*- coding: utf-8; -*-
#
# This file is part of Superdesk. |
<|file_name|>gene.py<|end_file_name|><|fim▁begin|>from snovault import upgrade_step
@upgrade_step('gene', '1', '2')
def gene_1_2(value, system):
# https://encodedcc.atlassian.net/browse/ENCD-5005
# go_annotations are replaced by a link on UI to GO
value.pop('go_annotations', None)
@upgrade_step('gene', '2', '3')
def gene_2_3(value, system):
# https://encodedcc.atlassian.net/browse/ENCD-6228
if value.get('locations') == []:<|fim▁hole|><|fim▁end|> | value.pop('locations', None) |
<|file_name|>bode.py<|end_file_name|><|fim▁begin|>import uncertainties
from uncertainties import ufloat
import math
import numpy
import pylab
from scipy.optimize import curve_fit
import scipy.stats
#The values are measured by hand with the multimeter and then entered in the data file; the phase shift can be left empty
def linear(x, a, b):
return a*x+b
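#Gain magnitude of an RC low-pass filter: |H(f)| = 1/sqrt(1 + (f/f_0)**2)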
def fitPassaBasso(x, f_0):
return 1/pylab.sqrt(1+(x/f_0)**2)
<|fim▁hole|>Vin = 5.0 #Misurata una volta per tutte l'ampiezza massima
dVin = 0.15
A_o = Vout_o/Vin
dA_o = A_o *pow(((dVout_o/Vout_o)**2 + (dVin/Vin)**2), 0.5)
B_o = 20 * pylab.log10(A_o)
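#Error propagation for B = 20*log10(A): dB = (20/ln10)*dA/A = 8.686*dA/A, rounded to 8.7 below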
dB_o = 8.7*dA_o/A_o
logf_o = pylab.log10(f_o)
dlogf_o = (1/pylab.log(10))*df_o/f_o
print(dlogf_o)
print(dB_o)
pylab.figure(1)
pylab.title('Bode diagram of low-pass RC filter')
pylab.xlabel('frequency [kHz]')
pylab.ylabel('gain [dB]')
pylab.ylim(-50, 2)
pylab.xlim(1, 7)
pylab.grid(color = "gray")
pylab.grid(color = "gray")
pylab.errorbar(logf_o, B_o, dB_o, dlogf_o, "o", color="black")
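#Fit a straight line: init is the initial (slope, intercept) guess and the last curve_fit argument is the per-point uncertainty used to weight the fit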
init = numpy.array([0.0, 0.0])
par_o, cov_o = curve_fit(linear, logf_o, B_o, init, pylab.sqrt(dB_o*dB_o+20.0*dlogf_o*dlogf_o))
print(par_o, cov_o)
chisq = (((B_o - linear(logf_o, par_o[0], par_o[1]))/(pylab.sqrt(dB_o*dB_o+20.0*dlogf_o*dlogf_o)))**2).sum()
ndof = len(logf_o) - 2 #Subtract the two parameters extracted from the fit
p=1.0-scipy.stats.chi2.cdf(chisq, ndof)
print("Chisquare/ndof = %f/%d" % (chisq, ndof))
print("p = ", p)
#Routine to draw the fitted straight line:
div = 1000
bucket = numpy.array([0.0 for i in range(div)])
retta = numpy.array([0.0 for i in range(div)])
inc = 6/div
for i in range(len(bucket)):
bucket[i]=float(i)*inc
retta[i] = linear(bucket[i], par_o[0], par_o[1])
pylab.plot(bucket, retta, color = "red")
pylab.show()<|fim▁end|> | Vout_o, dVout_o, f_o, df_o = pylab.loadtxt('/home/federico/Laboratorio3/relazione2/datiPassaBasso.txt', unpack=True)
#We neglect the output resistance of the function generator so that V_in is approximately constant.
<|file_name|>PersonOutlineRounded.js<|end_file_name|><|fim▁begin|>import React from 'react';
import createSvgIcon from './utils/createSvgIcon';
<|fim▁hole|><|fim▁end|> | export default createSvgIcon(
<React.Fragment><path fill="none" d="M0 0h24v24H0V0z" /><g><path d="M12 5.9c1.16 0 2.1.94 2.1 2.1s-.94 2.1-2.1 2.1S9.9 9.16 9.9 8s.94-2.1 2.1-2.1m0 9c2.97 0 6.1 1.46 6.1 2.1v1.1H5.9V17c0-.64 3.13-2.1 6.1-2.1M12 4C9.79 4 8 5.79 8 8s1.79 4 4 4 4-1.79 4-4-1.79-4-4-4zm0 9c-2.67 0-8 1.34-8 4v2c0 .55.45 1 1 1h14c.55 0 1-.45 1-1v-2c0-2.66-5.33-4-8-4z" /></g></React.Fragment>
, 'PersonOutlineRounded'); |
<|file_name|>pointcut.js<|end_file_name|><|fim▁begin|>module.exports = (name, node) => (<|fim▁hole|> node.callee.type === "Identifier");<|fim▁end|> | name === "apply" &&
node.type === "CallExpression" && |